Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/802788 )
Change subject: [IMPR] Simplify code
......................................................................
[IMPR] Simplify code
solve flake8-simplify issues
Change-Id: I9fc8379fa14a9ea88a4f70cf1083cac9e224f34a
---
M pywikibot/config.py
M pywikibot/date.py
M pywikibot/editor.py
M pywikibot/page/_pages.py
M pywikibot/page/_wikibase.py
M pywikibot/proofreadpage.py
M pywikibot/scripts/generate_family_file.py
M pywikibot/scripts/generate_user_files.py
M pywikibot/site/_apisite.py
M pywikibot/site/_datasite.py
M pywikibot/site/_generators.py
M pywikibot/textlib.py
M pywikibot/titletranslate.py
M scripts/archivebot.py
M scripts/category.py
M scripts/checkimages.py
M scripts/dataextend.py
M scripts/interwiki.py
M scripts/patrol.py
M scripts/solve_disambiguation.py
M scripts/welcome.py
M tests/api_tests.py
M tests/aspects.py
M tests/category_tests.py
M tests/dry_api_tests.py
M tests/edit_failure_tests.py
M tests/flow_thanks_tests.py
M tests/generate_user_files_tests.py
M tests/i18n_tests.py
M tests/interwikimap_tests.py
M tests/link_tests.py
M tests/pagegenerators_tests.py
M tests/redirect_bot_tests.py
M tests/site_tests.py
M tests/timestripper_tests.py
35 files changed, 202 insertions(+), 265 deletions(-)
Approvals:
DannyS712: Looks good to me, but someone else must approve
Xqt: Verified; Looks good to me, approved
diff --git a/pywikibot/config.py b/pywikibot/config.py
index efc0883..26ca728 100644
--- a/pywikibot/config.py
+++ b/pywikibot/config.py
@@ -1084,16 +1084,14 @@
if OSWIN32 and editor is None:
editor = _detect_win32_editor()
-if OSWIN32 and editor:
- # single character string literals from
- # https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
- # encode('unicode-escape') also changes Unicode characters
- if set(editor) & set('\a\b\f\n\r\t\v'):
- warning(
- 'The editor path contains probably invalid escaped '
- 'characters. Make sure to use a raw-string (r"..." or '
- "r'...'), forward slashes as a path delimiter or to escape the "
- 'normal path delimiter.')
+# single character string literals from
+# https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
+# encode('unicode-escape') also changes Unicode characters
+if OSWIN32 and editor and set(editor) & set('\a\b\f\n\r\t\v'):
+ warning(
+ 'The editor path contains probably invalid escaped characters. Make '
+ 'sure to use a raw-string (r"..." or r\'...\'), forward slashes as a '
+ 'path delimiter or to escape the normal path delimiter.')
if userinterface_lang is None:
userinterface_lang = os.getenv('PYWIKIBOT_USERINTERFACE_LANG') \
@@ -1139,7 +1137,7 @@
for _name in sorted(globals().keys()):
if _name[0] != '_' \
- and not type(globals()[_name]) in [types.FunctionType,
+ and type(globals()[_name]) not in [types.FunctionType,
types.ModuleType] \
and (_all or _name in _modified):
_value = globals()[_name]
diff --git a/pywikibot/date.py b/pywikibot/date.py
index 291a911..bd37377 100644
--- a/pywikibot/date.py
+++ b/pywikibot/date.py
@@ -704,6 +704,7 @@
formats = {
+ 'MonthName': MonthNames(),
'Number': {
'ar': lambda v: dh_number(v, '%d (عدد)'),
'ary': lambda v: dh_number(v, '%d'),
@@ -1676,7 +1677,6 @@
},
} # type: Dict[Union[str, int], Mapping[str, Callable[[int], str]]]
-formats['MonthName'] = MonthNames()
#
# Add auto-generated empty dictionaries for DayOfMonth and MonthOfYear articles
#
diff --git a/pywikibot/editor.py b/pywikibot/editor.py
index ab1f447..7a16571 100644
--- a/pywikibot/editor.py
+++ b/pywikibot/editor.py
@@ -45,9 +45,7 @@
assert config.editor is not None
if config.editor.startswith('kate'):
command = ['-l', str(line + 1), '-c', str(column + 1)]
- elif config.editor.startswith('gedit'):
- command = ['+{}'.format(line + 1)] # columns seem unsupported
- elif config.editor.startswith('emacs'):
+ elif config.editor.startswith(('gedit', 'emacs')):
command = ['+{}'.format(line + 1)] # columns seem unsupported
elif config.editor.startswith('jedit'):
command = ['+line:{}'.format(line + 1)] # columns seem unsupported
diff --git a/pywikibot/page/_pages.py b/pywikibot/page/_pages.py
index a5c5bff..96b0309 100644
--- a/pywikibot/page/_pages.py
+++ b/pywikibot/page/_pages.py
@@ -1458,13 +1458,12 @@
# only yield links that are to a different site and that
# are not language links
try:
- if link.site != self.site:
- if linktitle.lstrip().startswith(':'):
- # initial ":" indicates not a language link
- yield link
- elif link.site.family != self.site.family:
- # link to a different family is not a language link
- yield link
+ # initial ":" indicates not a language link
+ # link to a different family is not a language link
+ if link.site != self.site \
+ and (linktitle.lstrip().startswith(':')
+ or link.site.family != self.site.family):
+ yield link
except Error:
# ignore any links with invalid contents
continue
diff --git a/pywikibot/page/_wikibase.py b/pywikibot/page/_wikibase.py
index 70246a4..4193764 100644
--- a/pywikibot/page/_wikibase.py
+++ b/pywikibot/page/_wikibase.py
@@ -1401,10 +1401,7 @@
for val in my_values:
if val not in other_values:
return False
- for val in other_values:
- if val not in my_values:
- return False
- return True
+ return all(val in my_values for val in other_values)
def same_as(
self,
@@ -1507,10 +1504,7 @@
# Before #84516 Wikibase did not implement snaks-order.
# https://gerrit.wikimedia.org/r/c/84516/
- if 'snaks-order' in data:
- prop_list = data['snaks-order']
- else:
- prop_list = data['snaks'].keys()
+ prop_list = data.get('snaks-order', data['snaks'].keys())
for prop in prop_list:
for claimsnak in data['snaks'][prop]:
@@ -1833,11 +1827,8 @@
if self.isQualifier or self.isReference:
raise ValueError('Qualifiers and references cannot have '
'qualifiers.')
-
- for qualifier in self.qualifiers.get(qualifier_id, []):
- if qualifier.target_equals(target):
- return True
- return False
+ return any(qualifier.target_equals(target)
+ for qualifier in self.qualifiers.get(qualifier_id, []))
def _formatValue(self) -> dict:
"""
diff --git a/pywikibot/proofreadpage.py b/pywikibot/proofreadpage.py
index 566d4aa..3e82ae9 100644
--- a/pywikibot/proofreadpage.py
+++ b/pywikibot/proofreadpage.py
@@ -751,14 +751,13 @@
"ocr_tool must be in {}, not '{}'."
.format(self._OCR_METHODS, ocr_tool))
- if ocr_tool == self._PHETOOLS:
- # if _multi_page, try _do_hocr() first and fall back to _do_ocr()
- if self._multi_page:
- error, text = self._do_hocr()
- if not error and isinstance(text, str):
- return text
- pywikibot.warning('{}: phetools hocr failed, '
- 'falling back to ocr.'.format(self))
+ # if _multi_page, try _do_hocr() first and fall back to _do_ocr()
+ if ocr_tool == self._PHETOOLS and self._multi_page:
+ error, text = self._do_hocr()
+ if not error and isinstance(text, str):
+ return text
+ pywikibot.warning('{}: phetools hocr failed, falling back to ocr.'
+ .format(self))
error, text = self._do_ocr(ocr_tool=ocr_tool)
diff --git a/pywikibot/scripts/generate_family_file.py b/pywikibot/scripts/generate_family_file.py
index 934552a..47c4b8b 100755
--- a/pywikibot/scripts/generate_family_file.py
+++ b/pywikibot/scripts/generate_family_file.py
@@ -215,15 +215,12 @@
"""Write the family file."""
fn = os.path.join(self.base_dir, 'families',
'{}_family.py'.format(self.name))
- print('Writing %s... ' % fn)
- try:
- open(fn)
- if input('{} already exists. Overwrite? (y/n)'
- .format(fn)).lower() == 'n':
- print('Terminating.')
- sys.exit(1)
- except OSError: # file not found
- pass
+ print('Writing {}... '.format(fn))
+
+ if os.path.exists(fn) and input('{} already exists. Overwrite? (y/n)'
+ .format(fn)).lower() == 'n':
+ print('Terminating.')
+ sys.exit(1)
code_hostname_pairs = '\n '.join(
"'{code}': '{hostname}',".format(
diff --git a/pywikibot/scripts/generate_user_files.py b/pywikibot/scripts/generate_user_files.py
index 304b766..018440b 100755
--- a/pywikibot/scripts/generate_user_files.py
+++ b/pywikibot/scripts/generate_user_files.py
@@ -129,7 +129,7 @@
if hasattr(fam, 'langs'):
if hasattr(fam, 'languages_by_size'):
by_size = [code for code in fam.languages_by_size
- if code in fam.langs.keys()]
+ if code in fam.langs]
else:
by_size = []
known_langs = by_size + sorted(
diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py
index a78dbf6..2cc5da7 100644
--- a/pywikibot/site/_apisite.py
+++ b/pywikibot/site/_apisite.py
@@ -1055,11 +1055,8 @@
:param name: The extension to check for, case sensitive
:return: If the extension is loaded
"""
- extensions = self.siteinfo['extensions']
- for ext in extensions:
- if 'name' in ext and ext['name'] == name:
- return True
- return False
+ return any('name' in ext and ext['name'] == name
+ for ext in self.siteinfo['extensions'])
@property
def siteinfo(self) -> Siteinfo:
@@ -1533,9 +1530,7 @@
req._warning_handler = warn_handler
data = req.submit()
-
- if 'query' in data:
- data = data['query']
+ data = data.get('query', data)
if 'tokens' in data and data['tokens']:
user_tokens = {key[:-5]: val
diff --git a/pywikibot/site/_datasite.py b/pywikibot/site/_datasite.py
index 17b3e48..a3e2215 100644
--- a/pywikibot/site/_datasite.py
+++ b/pywikibot/site/_datasite.py
@@ -420,14 +420,12 @@
snak = {}
for sourceclaim in sources:
datavalue = sourceclaim._formatDataValue()
- valuesnaks = []
- if sourceclaim.getID() in snak:
- valuesnaks = snak[sourceclaim.getID()]
- valuesnaks.append({'snaktype': 'value',
- 'property': sourceclaim.getID(),
- 'datavalue': datavalue,
- },
- )
+ valuesnaks = snak.get(sourceclaim.getID(), [])
+ valuesnaks.append({
+ 'snaktype': 'value',
+ 'property': sourceclaim.getID(),
+ 'datavalue': datavalue,
+ })
snak[sourceclaim.getID()] = valuesnaks
# set the hash if the source should be changed.
@@ -768,8 +766,8 @@
assert keys < {'language', 'add', 'remove', 'set'}
assert 'language' in keys
assert ({'add', 'remove', 'set'} & keys)
- assert not ({'add', 'set'} < keys)
- assert not ({'remove', 'set'} < keys)
+ assert not ({'add', 'set'} < keys)
+ assert not ({'remove', 'set'} < keys)
elif action in ('wbsetlabel', 'wbsetdescription'):
res = data
keys = set(res)
diff --git a/pywikibot/site/_generators.py b/pywikibot/site/_generators.py
index 3edbd7a..641fd7f 100644
--- a/pywikibot/site/_generators.py
+++ b/pywikibot/site/_generators.py
@@ -694,8 +694,10 @@
raise ValueError(
'loadrevisions: endid > startid with rvdir=False')
- rvargs = {'type_arg': 'info|revisions'}
- rvargs['rvprop'] = self._rvprops(content=content)
+ rvargs = {
+ 'type_arg': 'info|revisions',
+ 'rvprop': self._rvprops(content=content),
+ }
if content and section is not None:
rvargs['rvsection'] = str(section)
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 0318b3d..dbe0997 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -819,7 +819,7 @@
new_title = new_link.canonical_title()
# Make correct langlink if needed
- if not new_link.site == site:
+ if new_link.site != site:
new_title = ':' + new_link.site.code + ':' + new_title
if is_link:
@@ -1055,8 +1055,7 @@
lang = lang.lower()
# Check if it really is in fact an interwiki link to a known
# language, or if it's e.g. a category tag or an internal link
- if lang in fam.obsolete:
- lang = fam.obsolete[lang]
+ lang = fam.obsolete.get(lang, lang)
if lang in fam.langs:
if '|' in pagetitle:
# ignore text after the pipe
@@ -1948,8 +1947,8 @@
"""
m = None
cnt = 0
- for m in pat.finditer(txt):
- cnt += 1
+ for cnt, m in enumerate(pat.finditer(txt), start=1):
+ pass
def marker(m):
"""
diff --git a/pywikibot/titletranslate.py b/pywikibot/titletranslate.py
index c1f92ba..645c3c4 100644
--- a/pywikibot/titletranslate.py
+++ b/pywikibot/titletranslate.py
@@ -47,10 +47,8 @@
codes = site.family.languages_by_size[:int(codes)]
elif codes == 'all':
codes = site.family.languages_by_size
- elif codes in site.family.language_groups:
- codes = site.family.language_groups[codes]
else:
- codes = codes.split(',')
+ codes = site.family.language_groups.get(codes, codes.split(','))
for newcode in codes:
if newcode in site.languages():
diff --git a/scripts/archivebot.py b/scripts/archivebot.py
index 41609eb..95febaf 100755
--- a/scripts/archivebot.py
+++ b/scripts/archivebot.py
@@ -464,14 +464,10 @@
def is_full(self, max_archive_size: Size) -> bool:
"""Check whether archive size exceeded."""
size, unit = max_archive_size
- if self.size() > self.archiver.maxsize:
+ if (self.size() > self.archiver.maxsize
+ or unit == 'B' and self.size() >= size
+ or unit == 'T' and len(self.threads) >= size):
self.full = True # xxx: this is one-way flag
- elif unit == 'B':
- if self.size() >= size:
- self.full = True
- elif unit == 'T':
- if len(self.threads) >= size:
- self.full = True
return self.full
def feed_thread(self, thread: DiscussionThread,
@@ -564,12 +560,11 @@
value = value.replace('_', ' ')
elif attr == 'maxarchivesize':
size, unit = str2size(value)
- if unit == 'B':
- if size > self.maxsize:
- value = '{} K'.format(self.maxsize // 1024)
- warn('Siteinfo "maxarticlesize" exceeded. Decreasing '
- '"maxarchivesize" to ' + value,
- ResourceWarning, stacklevel=2)
+ if unit == 'B' and size > self.maxsize:
+ value = '{} K'.format(self.maxsize // 1024)
+ warn('Siteinfo "maxarticlesize" exceeded. Decreasing '
+ '"maxarchivesize" to ' + value,
+ ResourceWarning, stacklevel=2)
self.attributes[attr] = [value, out]
def saveables(self) -> List[str]:
@@ -893,8 +888,9 @@
follow_redirects=False,
namespaces=ns))
if filename:
- for pg in open(filename).readlines():
- pagelist.append(pywikibot.Page(site, pg, ns=10))
+ with open(filename) as f:
+ for pg in f.readlines():
+ pagelist.append(pywikibot.Page(site, pg, ns=10))
if pagename:
pagelist.append(pywikibot.Page(site, pagename, ns=3))
pagelist.sort()
diff --git a/scripts/category.py b/scripts/category.py
index 360c779..288b89b 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -412,9 +412,9 @@
'superclass_db': self.superclass_db
}
# store dump to disk in binary format
- with open_archive(filename, 'wb') as f:
- with suppress(pickle.PicklingError):
- pickle.dump(databases, f, protocol=config.pickle_protocol)
+ with open_archive(filename, 'wb') as f, \
+ suppress(pickle.PicklingError):
+ pickle.dump(databases, f, protocol=config.pickle_protocol)
else:
with suppress(EnvironmentError):
os.remove(filename)
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 9f6e3f0..b7cf1f3 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -1098,9 +1098,9 @@
self.settings_data = []
try:
testo = page.get()
- number = 1
- for m in SETTINGS_REGEX.finditer(testo):
+ for number, m in enumerate(SETTINGS_REGEX.finditer(testo),
+ start=1):
name = str(m.group(1))
find_tipe = str(m.group(2))
find = str(m.group(3))
@@ -1112,7 +1112,6 @@
tupla = [number, name, find_tipe, find, imagechanges,
summary, head, text, mexcatched]
self.settings_data += [tupla]
- number += 1
if not self.settings_data:
pywikibot.output(
diff --git a/scripts/dataextend.py b/scripts/dataextend.py
index 8249a33..a2c42c4 100644
--- a/scripts/dataextend.py
+++ b/scripts/dataextend.py
@@ -1112,8 +1112,8 @@
self.noname.add(name)
elif result[0].upper() != 'N':
returnvalue = [{}, {}]
- for language in realnewnames.keys():
- if language in existinglabels.keys():
+ for language in realnewnames:
+ if language in existinglabels:
returnvalue[1][language] = existingaliases.get(
language, []) + realnewnames[language]
else:
@@ -1531,13 +1531,10 @@
(self.findeyecolor, 'P1340'),
]:
result = function(self.html)
- if result:
- if prop == 'P856' and 'wikipedia.org' in result:
- pass
- elif prop in ['P2013', 'P4003'] and result == 'pages':
- pass
- else:
- newclaims.append((prop, result.strip(), self))
+ if result and not (
+ prop == 'P856' and 'wikipedia.org' in result
+ or prop in ['P2013', 'P4003'] and result == 'pages'):
+ newclaims.append((prop, result.strip(), self))
for (function, prop) in [
(self.findbirthdate, 'P569'),
@@ -2419,7 +2416,7 @@
and not (r[0] == 'P3258' and r[1].lower() in ['users',
'comunity',
'www'])
- and not r[1].lower() == 'search'
+ and r[1].lower() != 'search'
and not (r[0] == 'P3365' and ('(Dizionario_Biografico)' in r[1] or '(Enciclopedia-Italiana)' in r[1] or '(Enciclopedia-dei-Papi)' in r[1]))
and not (r[0] == 'P2013' and '.php' in r[1])]
@@ -5601,9 +5598,7 @@
if status == 'active':
result.append(self.findbyre(r'(?s)(.*)', part.strip().rstrip('.'), dtype, alt=alt))
status = 'waiting'
- elif field in part:
- status = 'active'
- elif status == 'waiting' and not part.strip():
+ elif field in part or status == 'waiting' and not part.strip():
status = 'active'
else:
status = 'inactive'
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 2b5d814cc..bdfb08b 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -702,11 +702,12 @@
for page in tree.filter(site):
# -hintsonly: before we have an origin page, any namespace will
# do.
- if self.origin and page.namespace() == self.origin.namespace():
- if page.exists() \
- and not page.isRedirectPage() \
- and not page.isCategoryRedirect():
- return page
+ if self.origin \
+ and page.namespace() == self.origin.namespace() \
+ and page.exists() \
+ and not page.isRedirectPage() \
+ and not page.isCategoryRedirect():
+ return page
return None
def translate(self, hints=None, keephintedsites: bool = False) -> None:
@@ -1899,10 +1900,9 @@
'Skipping: {} is an auto entry {}({})'
.format(page, dictName, year))
continue
- if self.conf.parenthesesonly:
- # Only yield pages that have ( ) in titles
- if '(' not in page.title():
- continue
+ # Only yield pages that have ( ) in titles
+ if self.conf.parenthesesonly and '(' not in page.title():
+ continue
if page.isTalkPage():
pywikibot.output('Skipping: {} is a talk page'
.format(page))
diff --git a/scripts/patrol.py b/scripts/patrol.py
index 48bc66f..004f823 100755
--- a/scripts/patrol.py
+++ b/scripts/patrol.py
@@ -297,13 +297,12 @@
pywikibot.output('User {} has created or modified page {}'
.format(username, title))
- if (self.opt.autopatroluserns
- and page['ns'] in (2, 3)):
- # simple rule to whitelist any user editing their own userspace
- if title.partition(':')[2].split('/')[0].startswith(username):
- verbose_output('{} is whitelisted to modify {}'
- .format(username, title))
- choice = True
+ # simple rule to whitelist any user editing their own userspace
+ if self.opt.autopatroluserns and page['ns'] in (2, 3) \
+ and title.partition(':')[2].split('/')[0].startswith(username):
+ verbose_output('{} is whitelisted to modify {}'
+ .format(username, title))
+ choice = True
if not choice and username in self.whitelist \
and self.in_list(self.whitelist[username], title):
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index 5e3d8a4..4c818f6 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -468,16 +468,16 @@
"""
filename = os.path.join(
folder, self.disamb_page.title(as_filename=True) + '.txt')
- with suppress(IOError):
- # The file is stored in the disambiguation/ subdir.
- # Create if necessary.
- with codecs.open(filename, 'r', 'utf-8') as f:
- for line in f:
- # remove trailing newlines and carriage returns
- line = line.rstrip('\r\n')
- # skip empty lines
- if line:
- self.ignorelist.add(line)
+
+ # The file is stored in the disambiguation/ subdir.
+ # Create if necessary.
+ with suppress(IOError), codecs.open(filename, 'r', 'utf-8') as f:
+ for line in f:
+ # remove trailing newlines and carriage returns
+ line = line.rstrip('\r\n')
+ # skip empty lines
+ if line:
+ self.ignorelist.add(line)
def isIgnored(self, ref_page) -> bool: # noqa: N802
"""Return if ref_page is to be ignored.
@@ -500,10 +500,10 @@
filename = config.datafilepath(
'disambiguations',
self.disamb_page.title(as_url=True) + '.txt')
- with suppress(IOError):
- # Open file for appending. If none exists, create a new one.
- with codecs.open(filename, 'a', 'utf-8') as f:
- f.write('\n'.join(page_titles) + '\n')
+
+ # Open file for appending. If none exists, create a new one.
+ with suppress(IOError), codecs.open(filename, 'a', 'utf-8') as f:
+ f.write('\n'.join(page_titles) + '\n')
class AddAlternativeOption(OutputProxyOption):
@@ -1330,9 +1330,7 @@
alternatives.append(value)
else:
page = pywikibot.Page(pywikibot.Link(value, site))
- if page.exists():
- alternatives.append(page.title())
- elif pywikibot.input_yn(
+ if page.exists() or pywikibot.input_yn(
'Possibility {} does not actually exist. Use it anyway?'
.format(page.title()), default=False,
automatic_quit=False):
diff --git a/scripts/welcome.py b/scripts/welcome.py
index 5086e92..14b1573 100755
--- a/scripts/welcome.py
+++ b/scripts/welcome.py
@@ -788,7 +788,7 @@
.format(user.username))
elif user.editCount() < globalvar.attach_edit_count:
- if not user.editCount() == 0:
+ if user.editCount() != 0:
self.show_status(Msg.IGNORE)
pywikibot.output('{} has only {} contributions.'
.format(user.username, user.editCount()))
diff --git a/tests/api_tests.py b/tests/api_tests.py
index 25f1f7f..5e37368 100755
--- a/tests/api_tests.py
+++ b/tests/api_tests.py
@@ -241,9 +241,9 @@
self.assertEqual(mod[6:], pi[mod]['name'])
self.assertEqual(mod, pi[mod]['path'])
- with patch.object(pywikibot, 'warning') as w:
- with self.assertRaises(KeyError):
- pi.__getitem__('query+foobar')
+ with patch.object(pywikibot, 'warning') as w, \
+ self.assertRaises(KeyError):
+ pi.__getitem__('query+foobar')
# The warning message may be different with older MW versions.
self.assertIn('API warning (paraminfo): ', w.call_args[0][0])
@@ -536,11 +536,10 @@
parameters={'titles': '|'.join(titles)})
count = 0
- for pagedata in gen:
+ for count, pagedata in enumerate(gen, start=1):
self.assertIsInstance(pagedata, dict)
self.assertIn('pageid', pagedata)
self.assertIn('lastrevid', pagedata)
- count += 1
self.assertLength(links, count)
def test_one_continuation(self):
@@ -554,12 +553,11 @@
gen.set_maximum_items(-1) # suppress use of "rvlimit" parameter
count = 0
- for pagedata in gen:
+ for count, pagedata in enumerate(gen, start=1):
self.assertIsInstance(pagedata, dict)
self.assertIn('pageid', pagedata)
self.assertIn('revisions', pagedata)
self.assertIn('revid', pagedata['revisions'][0])
- count += 1
self.assertLength(links, count)
def test_two_continuations(self):
@@ -573,12 +571,11 @@
gen.set_maximum_items(-1) # suppress use of "rvlimit" parameter
count = 0
- for pagedata in gen:
+ for count, pagedata in enumerate(gen, start=1):
self.assertIsInstance(pagedata, dict)
self.assertIn('pageid', pagedata)
self.assertIn('revisions', pagedata)
self.assertIn('revid', pagedata['revisions'][0])
- count += 1
self.assertLength(links, count)
def test_many_continuations_limited(self):
@@ -602,13 +599,12 @@
gen.set_query_increment(5)
count = 0
- for pagedata in gen:
+ for count, pagedata in enumerate(gen, start=1):
self.assertIsInstance(pagedata, dict)
if 'missing' in pagedata:
self.assertNotIn('pageid', pagedata)
else:
self.assertIn('pageid', pagedata)
- count += 1
self.assertLength(links, count)
def test_two_continuations_limited(self):
@@ -623,13 +619,12 @@
gen.set_query_increment(5)
count = 0
- for pagedata in gen:
+ for count, pagedata in enumerate(gen, start=1):
self.assertIsInstance(pagedata, dict)
if 'missing' in pagedata:
self.assertNotIn('pageid', pagedata)
else:
self.assertIn('pageid', pagedata)
- count += 1
self.assertLength(links, count)
diff --git a/tests/aspects.py b/tests/aspects.py
index 8bfbb66..084f649 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -620,11 +620,10 @@
if not site:
site = self.get_site()
- if hasattr(self, '_userpage'):
- # For multi-site test classes, or site is specified as a param,
- # the cached userpage object may not be the desired site.
- if self._userpage.site == site:
- return self._userpage
+ # For multi-site test classes, or site is specified as a param,
+ # the cached userpage object may not be the desired site.
+ if hasattr(self, '_userpage') and self._userpage.site == site:
+ return self._userpage
userpage = pywikibot.User(site, site.username())
self._userpage = userpage
@@ -713,10 +712,8 @@
if 'hostname' in dct:
hostnames = [dct['hostname']]
del dct['hostname']
- elif 'hostnames' in dct:
- hostnames = dct['hostnames']
else:
- hostnames = []
+ hostnames = dct.get('hostnames', [])
if dct.get('net') is False:
dct['site'] = False
@@ -983,11 +980,11 @@
if not site:
site = self.get_site()
- if hasattr(self, '_mainpage') and not force:
- # For multi-site test classes, or site is specified as a param,
- # the cached mainpage object may not be the desired site.
- if self._mainpage.site == site:
- return self._mainpage
+ # For multi-site test classes, or site is specified as a param,
+ # the cached mainpage object may not be the desired site.
+ if hasattr(self, '_mainpage') and not force \
+ and self._mainpage.site == site:
+ return self._mainpage
maintitle = site.siteinfo['mainpage']
maintitle = removeprefix(maintitle, 'Special:MyLanguage/') # T278702
diff --git a/tests/category_tests.py b/tests/category_tests.py
index 920f7d0..2e947c2 100755
--- a/tests/category_tests.py
+++ b/tests/category_tests.py
@@ -248,11 +248,10 @@
cat = pywikibot.Category(self.get_site(), 'Catégorie:Yukon Quest 2015')
last = pywikibot.Timestamp.max
count = 0
- for page in cat.newest_pages():
+ for count, page in enumerate(cat.newest_pages(), start=1):
creation_stamp = page.oldest_revision.timestamp
self.assertLessEqual(creation_stamp, last)
last = creation_stamp
- count += 1
self.assertEqual(count, cat.categoryinfo['size'])
diff --git a/tests/dry_api_tests.py b/tests/dry_api_tests.py
index ddf9c84..fdf7a12 100755
--- a/tests/dry_api_tests.py
+++ b/tests/dry_api_tests.py
@@ -247,18 +247,15 @@
def test_no_user(self):
"""Test Request object when not a user."""
self.site._userinfo = {}
- with self.subTest(userinfo=self.site._userinfo):
- with self.assertRaisesRegex(
- Error,
- 'API write action attempted without user'):
- Request(site=self.site, parameters={'action': 'edit'})
+ with self.subTest(userinfo=self.site._userinfo), \
+ self.assertRaisesRegex(Error,
+ 'API write action attempted without user'):
+ Request(site=self.site, parameters={'action': 'edit'})
self.site._userinfo = {'name': '1.2.3.4', 'groups': [], 'anon': ''}
- with self.subTest(userinfo=self.site._userinfo):
- with self.assertRaisesRegex(
- Error,
- " as IP '1.2.3.4'"):
- Request(site=self.site, parameters={'action': 'edit'})
+ with self.subTest(userinfo=self.site._userinfo), \
+ self.assertRaisesRegex(Error, " as IP '1.2.3.4'"):
+ Request(site=self.site, parameters={'action': 'edit'})
def test_unexpected_user(self):
"""Test Request object when username is not correct."""
diff --git a/tests/edit_failure_tests.py b/tests/edit_failure_tests.py
index 331bc0c..80e4fc5 100755
--- a/tests/edit_failure_tests.py
+++ b/tests/edit_failure_tests.py
@@ -72,11 +72,9 @@
def test_nobots(self):
"""Test that {{nobots}} raise the appropriate exception."""
page = pywikibot.Page(self.site, 'User:John Vandenberg/nobots')
- with patch.object(config, 'ignore_bot_templates', False):
- with self.assertRaisesRegex(
- OtherPageSaveError,
- 'nobots'):
- page.save()
+ with patch.object(config, 'ignore_bot_templates', False), \
+ self.assertRaisesRegex(OtherPageSaveError, 'nobots'):
+ page.save()
def test_touch(self):
"""Test that Page.touch() does not do a real edit."""
diff --git a/tests/flow_thanks_tests.py b/tests/flow_thanks_tests.py
index d26d123..b4a83f5 100755
--- a/tests/flow_thanks_tests.py
+++ b/tests/flow_thanks_tests.py
@@ -6,8 +6,10 @@
# Distributed under the terms of the MIT license.
#
import unittest
+from contextlib import suppress
from pywikibot.flow import Topic
+
from tests.aspects import TestCase
@@ -62,7 +64,5 @@
if __name__ == '__main__': # pragma: no cover
- try:
+ with suppress(SystemExit):
unittest.main()
- except SystemExit:
- pass
diff --git a/tests/generate_user_files_tests.py b/tests/generate_user_files_tests.py
index eed07cc..f5e20bd 100755
--- a/tests/generate_user_files_tests.py
+++ b/tests/generate_user_files_tests.py
@@ -8,7 +8,10 @@
import re
import unittest
+from contextlib import suppress
+
from pywikibot.scripts import generate_user_files as guf
+
from tests.aspects import TestCase
@@ -102,7 +105,5 @@
if __name__ == '__main__': # pragma: no cover
- try:
+ with suppress(SystemExit):
unittest.main()
- except SystemExit:
- pass
diff --git a/tests/i18n_tests.py b/tests/i18n_tests.py
index 4a17748..7d8fa6d 100755
--- a/tests/i18n_tests.py
+++ b/tests/i18n_tests.py
@@ -191,9 +191,8 @@
def testNoEnglish(self):
"""Test translate with missing English text."""
for code in ('en', 'fy', 'nl'):
- with self.subTest(code=code):
- with self.assertRaises(KeyError):
- i18n.translate(code, self.msg_no_english, fallback=True)
+ with self.subTest(code=code), self.assertRaises(KeyError):
+ i18n.translate(code, self.msg_no_english, fallback=True)
class UserInterfaceLangTestCase(TestCase):
diff --git a/tests/interwikimap_tests.py b/tests/interwikimap_tests.py
index 971f9fd..fec9d2b 100755
--- a/tests/interwikimap_tests.py
+++ b/tests/interwikimap_tests.py
@@ -7,8 +7,11 @@
#
import unittest
+from contextlib import suppress
+
import pywikibot
from pywikibot.tools import suppress_warnings
+
from tests import WARN_SITE_CODE
from tests.aspects import TestCase
@@ -152,7 +155,5 @@
if __name__ == '__main__': # pragma: no cover
- try:
+ with suppress(SystemExit):
unittest.main()
- except SystemExit:
- pass
diff --git a/tests/link_tests.py b/tests/link_tests.py
index cbb0cd0..91d7076 100755
--- a/tests/link_tests.py
+++ b/tests/link_tests.py
@@ -410,7 +410,7 @@
def test_fully_qualified_NS0(self):
"""Test that fully qualified link is in namespace 0."""
- family, code = 'wikipedia:en'.split(':')
+ family, code = 'wikipedia', 'en'
for colon in ('', ':'): # with or without preleading colon
# switch code:family sequence en:wikipedia or wikipedia:en
for first, second in [(family, code), (code, family)]:
@@ -428,7 +428,7 @@
def test_fully_qualified_NS1(self):
"""Test that fully qualified link is in namespace 1."""
- family, code = 'wikipedia:en'.split(':')
+ family, code = 'wikipedia', 'en'
for colon in ('', ':'): # with or without preleading colon
# switch code:family sequence en:wikipedia or wikipedia:en
for first, second in [(family, code), (code, family)]:
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index 81f833f..e61a140 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -569,13 +569,13 @@
links = [page for page in self.site.pagelinks(mainpage, total=20)
if page.exists()]
count = 0
- for page in PreloadingGenerator(links, groupsize=20):
+ for count, page in enumerate(
+ PreloadingGenerator(links, groupsize=20), start=1):
self.assertIsInstance(page, pywikibot.Page)
self.assertIsInstance(page.exists(), bool)
self.assertLength(page._revisions, 1)
self.assertIsNotNone(page._revisions[page._revid].text)
self.assertFalse(hasattr(page, '_pageprops'))
- count += 1
self.assertLength(links, count)
def test_low_step(self):
@@ -584,13 +584,13 @@
links = [page for page in self.site.pagelinks(mainpage, total=20)
if page.exists()]
count = 0
- for page in PreloadingGenerator(links, groupsize=10):
+ for count, page in enumerate(
+ PreloadingGenerator(links, groupsize=10), start=1):
self.assertIsInstance(page, pywikibot.Page)
self.assertIsInstance(page.exists(), bool)
self.assertLength(page._revisions, 1)
self.assertIsNotNone(page._revisions[page._revid].text)
self.assertFalse(hasattr(page, '_pageprops'))
- count += 1
self.assertLength(links, count)
def test_order(self):
@@ -599,14 +599,14 @@
links = [page for page in self.site.pagelinks(mainpage, total=20)
if page.exists()]
count = 0
- for page in PreloadingGenerator(links, groupsize=10):
+ for count, page in enumerate(
+ PreloadingGenerator(links, groupsize=10), start=1):
self.assertIsInstance(page, pywikibot.Page)
self.assertIsInstance(page.exists(), bool)
self.assertLength(page._revisions, 1)
self.assertIsNotNone(page._revisions[page._revid].text)
self.assertFalse(hasattr(page, '_pageprops'))
self.assertEqual(page, links[count - 1])
- count += 1
self.assertLength(links, count)
diff --git a/tests/redirect_bot_tests.py b/tests/redirect_bot_tests.py
index e41fdc9..05eca2a 100755
--- a/tests/redirect_bot_tests.py
+++ b/tests/redirect_bot_tests.py
@@ -60,9 +60,9 @@
"""Test with delete and non-existing sdtemplate."""
options = {'delete': True, 'sdtemplate': 'txt {{n|a}} txt'}
bot = RedirectTestRobot('broken', **options)
- with patch.object(Page, 'exists', new=Mock(return_value=False)):
- with patch.object(pywikibot, 'warning') as w:
- self.assertIsNone(bot.sdtemplate, None)
+ with patch.object(Page, 'exists', new=Mock(return_value=False)), \
+ patch.object(pywikibot, 'warning') as w:
+ self.assertIsNone(bot.sdtemplate, None)
w.assert_called_with('No speedy deletion template "n" available.')
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 4b43f71..ec46674 100755
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -226,9 +226,8 @@
self.assertEqual(list(msg), months)
# mediawiki_messages must be given a list; using a string will split it
- with self.subTest(messages='about'):
- with self.assertRaises(KeyError):
- self.site.mediawiki_messages('about')
+ with self.subTest(messages='about'), self.assertRaises(KeyError):
+ self.site.mediawiki_messages('about')
msg = ('nosuchmessage1', 'about', 'aboutpage', 'nosuchmessage')
with self.subTest(messages=msg):
@@ -809,19 +808,14 @@
self.assertIn(prop, block)
# starttime earlier than endtime
- with self.subTest(starttime=low, endtime=high, reverse=False):
- with self.assertRaises(AssertionError):
- mysite.blocks(total=5,
- starttime=low,
- endtime=high)
+ with self.subTest(starttime=low, endtime=high, reverse=False), \
+ self.assertRaises(AssertionError):
+ mysite.blocks(total=5, starttime=low, endtime=high)
# reverse: endtime earlier than starttime
- with self.subTest(starttime=high, endtime=low, reverse=True):
- with self.assertRaises(AssertionError):
- mysite.blocks(total=5,
- starttime=high,
- endtime=low,
- reverse=True)
+ with self.subTest(starttime=high, endtime=low, reverse=True), \
+ self.assertRaises(AssertionError):
+ mysite.blocks(total=5, starttime=high, endtime=low, reverse=True)
def test_exturl_usage(self):
"""Test the site.exturlusage() method."""
@@ -1985,23 +1979,21 @@
# start earlier than end
with self.subTest(start='2008-09-03T00:00:01Z',
end='2008-09-03T23:59:59Z',
- reverse=False):
- with self.assertRaises(AssertionError):
- gen = mysite.deletedrevs(titles=mainpage,
- start='2008-09-03T00:00:01Z',
- end='2008-09-03T23:59:59Z', total=5)
- next(gen)
+ reverse=False), self.assertRaises(AssertionError):
+ gen = mysite.deletedrevs(titles=mainpage,
+ start='2008-09-03T00:00:01Z',
+ end='2008-09-03T23:59:59Z', total=5)
+ next(gen)
# reverse: end earlier than start
with self.subTest(start='2008-09-03T23:59:59Z',
end='2008-09-03T00:00:01Z',
- reverse=True):
- with self.assertRaises(AssertionError):
- gen = mysite.deletedrevs(titles=mainpage,
- start='2008-09-03T23:59:59Z',
- end='2008-09-03T00:00:01Z', total=5,
- reverse=True)
- next(gen)
+ reverse=True), self.assertRaises(AssertionError):
+ gen = mysite.deletedrevs(titles=mainpage,
+ start='2008-09-03T23:59:59Z',
+ end='2008-09-03T00:00:01Z', total=5,
+ reverse=True)
+ next(gen)
def test_alldeletedrevisions(self):
"""Test the site.alldeletedrevisions() method."""
@@ -2048,22 +2040,22 @@
# start earlier than end
with self.subTest(start='2008-09-03T00:00:01Z',
end='2008-09-03T23:59:59Z',
- reverse=False, prop=prop):
- with self.assertRaises(AssertionError):
- gen = mysite.alldeletedrevisions(start='2008-09-03T00:00:01Z',
- end='2008-09-03T23:59:59Z',
- total=5)
- next(gen)
+ reverse=False,
+ prop=prop), self.assertRaises(AssertionError):
+ gen = mysite.alldeletedrevisions(start='2008-09-03T00:00:01Z',
+ end='2008-09-03T23:59:59Z',
+ total=5)
+ next(gen)
# reverse: end earlier than start
with self.subTest(start='2008-09-03T23:59:59Z',
end='2008-09-03T00:00:01Z',
- reverse=True, prop=prop):
- with self.assertRaises(AssertionError):
- gen = mysite.alldeletedrevisions(start='2008-09-03T23:59:59Z',
- end='2008-09-03T00:00:01Z',
- total=5, reverse=True)
- next(gen)
+ reverse=True,
+ prop=prop), self.assertRaises(AssertionError):
+ gen = mysite.alldeletedrevisions(start='2008-09-03T23:59:59Z',
+ end='2008-09-03T00:00:01Z',
+ total=5, reverse=True)
+ next(gen)
class TestSiteSysopWrite(TestCase):
@@ -3008,7 +3000,6 @@
"""Test preloading continuation with groupsize greater than total."""
mysite = self.get_site()
mainpage = self.get_mainpage()
- count = 0
# Determine if there are enough links on the main page,
# for the test to be useful.
@@ -3019,21 +3010,21 @@
# get a fresh generator; we now know how many results it will have,
# if it is less than 10.
links = mysite.pagelinks(mainpage, total=10)
- for page in mysite.preloadpages(links, groupsize=50):
+ count = 0
+ for count, page in enumerate(
+ mysite.preloadpages(links, groupsize=50), start=1):
self.assertIsInstance(page, pywikibot.Page)
self.assertIsInstance(page.exists(), bool)
if page.exists():
self.assertLength(page._revisions, 1)
self.assertIsNotNone(page._revisions[page._revid].text)
self.assertFalse(hasattr(page, '_pageprops'))
- count += 1
self.assertEqual(count, link_count)
def test_preload_low_groupsize(self):
"""Test preloading continuation with groupsize less than total."""
mysite = self.get_site()
mainpage = self.get_mainpage()
- count = 0
# Determine if there are enough links on the main page,
# for the test to be useful.
@@ -3044,14 +3035,15 @@
# get a fresh generator; we now know how many results it will have,
# if it is less than 10.
links = mysite.pagelinks(mainpage, total=10)
- for page in mysite.preloadpages(links, groupsize=5):
+ count = 0
+ for count, page in enumerate(
+ mysite.preloadpages(links, groupsize=5), start=1):
self.assertIsInstance(page, pywikibot.Page)
self.assertIsInstance(page.exists(), bool)
if page.exists():
self.assertLength(page._revisions, 1)
self.assertIsNotNone(page._revisions[page._revid].text)
self.assertFalse(hasattr(page, '_pageprops'))
- count += 1
self.assertEqual(count, link_count)
def test_preload_unexpected_titles_using_pageids(self):
diff --git a/tests/timestripper_tests.py b/tests/timestripper_tests.py
index 45c4eca..d2ef6d9 100755
--- a/tests/timestripper_tests.py
+++ b/tests/timestripper_tests.py
@@ -254,11 +254,8 @@
def test_timestripper_nomatch(self, key):
"""Test that correct date is not matched."""
self.ts = TimeStripper(self.get_site(key))
-
- if 'nomatch' in self.sites[key]:
- txt_no_match = self.sites[key]['nomatch']
- else:
- txt_no_match = '3 March 2011 19:48 (UTC) 7 March 2010 19:48 (UTC)'
+ txt_no_match = self.sites[key].get(
+ 'nomatch', '3 March 2011 19:48 (UTC) 7 March 2010 19:48 (UTC)')
self.assertIsNone(self.ts.timestripper(txt_no_match))
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/802788
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I9fc8379fa14a9ea88a4f70cf1083cac9e224f34a
Gerrit-Change-Number: 802788
Gerrit-PatchSet: 9
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: DannyS712 <dannys712.wiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/802911 )
Change subject: [sphinx] Use Wikimedia color for header/footer lines
......................................................................
[sphinx] Use Wikimedia color for header/footer lines
Also move icons to _static folder
Change-Id: I6f15defef1a080a2c448827defcde92dca899ef5
---
R docs/_static/Pywikibot.ico
R docs/_static/Pywikibot_MW_gear_icon.svg
A docs/_static/css/pywikibot.css
M docs/conf.py
4 files changed, 36 insertions(+), 2 deletions(-)
Approvals:
jenkins-bot: Verified
Xqt: Looks good to me, approved
diff --git a/docs/Pywikibot.ico b/docs/_static/Pywikibot.ico
similarity index 100%
rename from docs/Pywikibot.ico
rename to docs/_static/Pywikibot.ico
Binary files differ
diff --git a/docs/Pywikibot_MW_gear_icon.svg b/docs/_static/Pywikibot_MW_gear_icon.svg
similarity index 100%
rename from docs/Pywikibot_MW_gear_icon.svg
rename to docs/_static/Pywikibot_MW_gear_icon.svg
diff --git a/docs/_static/css/pywikibot.css b/docs/_static/css/pywikibot.css
new file mode 100644
index 0000000..cce4d5b
--- /dev/null
+++ b/docs/_static/css/pywikibot.css
@@ -0,0 +1,30 @@
+/** css/pywikibot.css **/
+
+/* This line is theme specific - it includes the base theme CSS */
+@import '../nature.css'; /* for Nature */
+
+
+div.related {
+ background-color: #396;
+ line-height: 32px;
+ color: #fff;
+ text-shadow: 0px 1px 0 #444;
+ font-size: 0.9em;
+}
+
+a:hover {
+ color: #900;
+ text-decoration: underline;
+}
+
+a.headerlink {
+ color: #069;
+ font-size: 0.8em;
+ padding: 0 4px 0 4px;
+ text-decoration: none;
+}
+
+a.headerlink:hover {
+ background-color: #069;
+ color: white;
+}
diff --git a/docs/conf.py b/docs/conf.py
index f36bc62..441a7dd 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -173,13 +173,13 @@
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
-html_logo = 'Pywikibot_MW_gear_icon.svg'
+html_logo = '_static/Pywikibot_MW_gear_icon.svg'
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
-html_favicon = 'Pywikibot.ico'
+html_favicon = '_static/Pywikibot.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@@ -356,6 +356,10 @@
# Other settings
autodoc_typehints = 'description'
+# Pywikibot theme style
+html_static_path = ['_static']
+html_style = 'css/pywikibot.css'
+
extlinks = {
# MediaWiki API
'api': ('https://www.mediawiki.org/wiki/API:%s', 'API:%s'),
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/802911
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I6f15defef1a080a2c448827defcde92dca899ef5
Gerrit-Change-Number: 802911
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/802786 )
Change subject: [doc] require sphinx >= 4.5, !=5.0
......................................................................
[doc] require sphinx >= 4.5, !=5.0
Change-Id: Ia4456bca2619a7758333d9f1af1a1ab35286dfb2
---
M docs/requirements-py3.txt
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
jenkins-bot: Verified
Xqt: Looks good to me, approved
diff --git a/docs/requirements-py3.txt b/docs/requirements-py3.txt
index afbaa42..37333c5 100644
--- a/docs/requirements-py3.txt
+++ b/docs/requirements-py3.txt
@@ -1,4 +1,4 @@
# This is a PIP requirements file for building Sphinx documentation of pywikibot
# requirements.txt is also needed
-sphinx >= 4.1.0,!=5.0.0
\ No newline at end of file
+sphinx >= 4.5.0,!=5.0.0,!=5.0.1
\ No newline at end of file
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/802786
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ia4456bca2619a7758333d9f1af1a1ab35286dfb2
Gerrit-Change-Number: 802786
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged