jenkins-bot has submitted this change and it was merged.
Change subject: Add unicodecsv as test dep
......................................................................
Add unicodecsv as test dep
Change-Id: I259a4ca3534101a89d3bc65b2ca098cb1bd22542
---
M setup.py
1 file changed, 8 insertions(+), 4 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
XZise: Looks good to me, but someone else must approve
jenkins-bot: Verified
diff --git a/setup.py b/setup.py
index 649d0b8..99e27ca 100644
--- a/setup.py
+++ b/setup.py
@@ -29,8 +29,6 @@
'rcstream': ['socketIO-client'],
}
-test_deps.extend(extra_deps['rcstream'])
-
if sys.version_info[0] == 2:
# csv is used by wikistats and script data_ingestion
extra_deps['csv'] = ['unicodecsv']
@@ -86,11 +84,17 @@
test_deps += ['pywin32>=218', 'pywinauto>=0.4.0']
extra_deps.update(script_deps)
-# Add script dependencies as test dependencies,
-# so scripts can be compiled in test suite.
+
+# Add all script dependencies as test dependencies,
+# so all scripts can be compiled for script_tests, etc.
if 'PYSETUP_TEST_EXTRAS' in os.environ:
test_deps += list(itertools.chain(*(script_deps.values())))
+# These extra dependencies enable some tests to run on all builds
+if sys.version_info[0] == 2:
+ test_deps += extra_deps['csv']
+test_deps += extra_deps['rcstream']
+
# late import of setuptools due to monkey-patching above
from ez_setup import use_setuptools
use_setuptools()
--
To view, visit https://gerrit.wikimedia.org/r/186173
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I259a4ca3534101a89d3bc65b2ca098cb1bd22542
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: [IMPROV] Cap redirs: Use generator in superclass
......................................................................
[IMPROV] Cap redirs: Use generator in superclass
The Bot superclass supports handling the generator directly, so the
generator is added to the constructor arguments of the Bot class call.
It is not passed inside the kwargs, because the generator could have been
provided positionally.
Change-Id: I50cc4dbe19c7409376e727e6e74560dff7d8c1f4
---
M scripts/capitalize_redirects.py
1 file changed, 1 insertion(+), 2 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/capitalize_redirects.py b/scripts/capitalize_redirects.py
index f0a27d7..05a388e 100644
--- a/scripts/capitalize_redirects.py
+++ b/scripts/capitalize_redirects.py
@@ -58,8 +58,7 @@
'titlecase': False,
})
- super(CapitalizeBot, self).__init__(**kwargs)
- self.generator = generator
+ super(CapitalizeBot, self).__init__(generator=generator, **kwargs)
def treat_page(self):
"""Capitalize redirects of the current page."""
--
To view, visit https://gerrit.wikimedia.org/r/186155
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I50cc4dbe19c7409376e727e6e74560dff7d8c1f4
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Implement wbsearchentities
......................................................................
Implement wbsearchentities
The 'wbsearchentities' API request was implemented as the search_entities
method in the DataSite class. A WikibaseSearchItemPageGenerator was created
which yields the pages from the newly created method for the specified
language code.
Bug: T68949
Change-Id: Ib7459a4b7c6bafe04d56dcd09ee0f8386711b4cf
---
M pywikibot/data/api.py
M pywikibot/pagegenerators.py
M pywikibot/site.py
M tests/pagegenerators_tests.py
M tests/site_tests.py
5 files changed, 230 insertions(+), 0 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index f979b81..d5100ad 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -1464,6 +1464,110 @@
return self._data
+class APIGenerator(object):
+
+ """Iterator that handle API responses containing lists.
+
+ The iterator will iterate each item in the query response and use the
+ continue request parameter to retrieve the next portion of items
+ automatically. If the limit attribute is set, the iterator will stop
+ after iterating that many values.
+ """
+
+ def __init__(self, action, continue_name='continue', limit_name='limit',
+ data_name='data', **kwargs):
+ """
+ Construct an APIGenerator object.
+
+ kwargs are used to create a Request object; see that object's
+ documentation for values.
+
+ @param action: API action name.
+ @type action: str
+ @param continue_name: Name of the continue API parameter.
+ @type continue_name: str
+ @param limit_name: Name of the limit API parameter.
+ @type limit_name: str
+ @param data_name: Name of the data in API response.
+ @type data_name: str
+ """
+ kwargs['action'] = action
+ try:
+ self.site = kwargs['site']
+ except KeyError:
+ self.site = pywikibot.Site()
+ kwargs['site'] = self.site
+
+ self.continue_name = continue_name
+ self.limit_name = limit_name
+ self.data_name = data_name
+
+ self.limit = None
+ self.starting_offset = kwargs.pop(self.continue_name, 0)
+ self.request = Request(**kwargs)
+ self.request[self.limit_name] = 50
+
+ def set_query_increment(self, value):
+ """
+ Set the maximum number of items to be retrieved per API query.
+
+ If not called, the default is 50.
+
+ @param value: The value of maximum number of items to be retrieved
+ per API request to set.
+ @type value: int
+ """
+ self.request[self.limit_name] = int(value)
+ pywikibot.debug(u"%s: Set query_limit to %i."
+ % (self.__class__.__name__, int(value)), _logger)
+
+ def set_maximum_items(self, value):
+ """
+ Set the maximum number of items to be retrieved from the wiki.
+
+ If not called, most queries will continue as long as there is
+ more data to be retrieved from the API.
+
+ @param value: The value of maximum number of items to be retrieved
+ in total to set.
+ @type value: int
+ """
+ self.limit = int(value)
+ if self.limit < self.request[self.limit_name]:
+ self.request[self.limit_name] = self.limit
+
+ def __iter__(self):
+ """Submit request and iterate the response.
+
+ Continues response as needed until limit (if defined) is reached.
+ """
+ offset = self.starting_offset
+ n = 0
+ while True:
+ self.request[self.continue_name] = offset
+ pywikibot.debug(u"%s: Request: %s" % (self.__class__.__name__,
+ self.request), _logger)
+ data = self.request.submit()
+
+ n_items = len(data[self.data_name])
+ pywikibot.debug(u"%s: Retrieved %d items" % (
+ self.__class__.__name__, n_items), _logger)
+ if n_items > 0:
+ for item in data[self.data_name]:
+ yield item
+ n += 1
+ if self.limit is not None and n >= self.limit:
+ pywikibot.debug(u"%s: Stopped iterating due to "
+ u"exceeding item limit." %
+ self.__class__.__name__, _logger)
+ return
+ offset += n_items
+ else:
+ pywikibot.debug(u"%s: Stopped iterating due to empty list in "
+ u"response." % self.__class__.__name__, _logger)
+ break
+
+
class QueryGenerator(object):
"""Base class for iterators that handle responses to API action=query.
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 6f01605..792e092 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -199,6 +199,12 @@
-wikidataquery Takes a WikidataQuery query string like claim[31:12280]
and works on the resulting pages.
+-searchitem Takes a search string and works on Wikibase pages that
+ contain it.
+ Argument can be given as "-searchitem:text", where text
+ is the string to look for, or "-searchitem:lang:text", where
+ lang is the langauge to search items in.
+
-random Work on random pages returned by [[Special:Random]].
Can also be given as "-random:n" where n is the number
of pages to be returned, otherwise the default is 10 pages.
@@ -668,6 +674,15 @@
imagelinksPage = pywikibot.Page(pywikibot.Link(imagelinkstitle,
self.site))
gen = ImagesPageGenerator(imagelinksPage)
+ elif arg.startswith('-searchitem'):
+ text = arg[len('-searchitem:'):]
+ if not text:
+ text = pywikibot.input(u'Text to look for:')
+ params = text.split(':')
+ text = params[-1]
+ lang = params[0] if len(params) == 2 else None
+ gen = WikibaseSearchItemPageGenerator(text, language=lang,
+ site=self.site)
elif arg.startswith('-search'):
mediawikiQuery = arg[8:]
if not mediawikiQuery:
@@ -2258,6 +2273,33 @@
yield pywikibot.Page(pywikibot.Link(link, site))
+def WikibaseSearchItemPageGenerator(text, language=None, total=None, site=None):
+ """
+ Generate pages that contain the provided text.
+
+ @param text: Text to look for.
+ @type text: str
+ @param language: Code of the language to search in. If not specified,
+ value from pywikibot.config.data_lang is used.
+ @type language: str
+ @param total: Maximum number of pages to retrieve in total, or None in
+ case of no limit.
+ @type total: int or None
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
+ if site is None:
+ site = pywikibot.Site()
+ if language is None:
+ language = site.lang
+ repo = site.data_repository()
+
+ data = repo.search_entities(text, language, limit=total, site=site)
+ pywikibot.output(u'retrieved %d items' % len(list(data)))
+ for item in data:
+ yield pywikibot.ItemPage(repo, item['id'])
+
+
if __name__ == "__main__":
pywikibot.output(u'Pagegenerators cannot be run as script - are you '
u'looking for listpages.py?')
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 5454267..0e2009e 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -5676,6 +5676,31 @@
result = self.editEntity({}, data, bot=bot, **kwargs)
return pywikibot.ItemPage(self, result['entity']['id'])
+ def search_entities(self, search, language, limit=None, **kwargs):
+ """
+ Search for pages or properties that contain the given text.
+
+ @param search: Text to find.
+ @type search: str
+ @param language: Language to search in.
+ @type language: str
+ @param limit: Maximum number of pages to retrieve in total, or None in
+ case of no limit.
+ @type limit: int or None
+ @return: 'search' list from API output.
+ """
+ lang_codes = [lang['code'] for lang in self._siteinfo.get('languages')]
+ if language not in lang_codes:
+ raise ValueError(u'Data site used does not support provided '
+ u'language.')
+
+ gen = api.APIGenerator('wbsearchentities', data_name='search',
+ search=search, language=language, **kwargs)
+ gen.set_query_increment(50)
+ if limit is not None:
+ gen.set_maximum_items(limit)
+ return gen
+
# deprecated BaseSite methods
def fam(self):
raise NotImplementedError
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index ae7e2d8..2dec8dd 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -657,6 +657,25 @@
gen = gf.getCombinedGenerator()
self.assertEqual(len(set(gen)), 1)
+ def test_searchitem(self):
+ """Test -searchitem."""
+ gf = pagegenerators.GeneratorFactory(site=self.site)
+ gf.handleArg('-searchitem:abc')
+ gen = gf.getCombinedGenerator()
+ self.assertGreater(len(set(gen)), 0)
+
+ def test_searchitem_language(self):
+ """Test -searchitem with custom language specified."""
+ gf = pagegenerators.GeneratorFactory(site=self.site)
+ gf.handleArg('-searchitem:pl:abc')
+ gen = gf.getCombinedGenerator()
+ pages = set(gen)
+ gf = pagegenerators.GeneratorFactory(site=self.site)
+ gf.handleArg('-searchitem:en:abc')
+ gen = gf.getCombinedGenerator()
+ pages2 = set(gen)
+ self.assertNotEqual(pages, pages2)
+
class TestLogeventsFactoryGenerator(DefaultSiteTestCase):
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 55e3d3f..760c965 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -2103,6 +2103,46 @@
self.assertEqual(item.id, 'Q5296')
+class TestDataSiteSearchEntities(WikidataTestCase):
+
+ """Test DataSite.search_entities."""
+
+ def test_general(self):
+ """Test basic search_entities functionality."""
+ datasite = self.get_repo()
+ pages = datasite.search_entities('abc', 'en', limit=50,
+ site=self.get_site())
+ self.assertGreater(len(list(pages)), 0)
+ self.assertLessEqual(len(list(pages)), 50)
+ pages = datasite.search_entities('alphabet', 'en', type='property',
+ limit=50, site=self.get_site())
+ self.assertGreater(len(list(pages)), 0)
+ self.assertLessEqual(len(list(pages)), 50)
+
+ def test_continue(self):
+ """Test that continue parameter in search_entities works."""
+ datasite = self.get_repo()
+ kwargs = {'limit': 50, 'site': self.get_site()}
+ pages = datasite.search_entities('Rembrandt', 'en', **kwargs)
+ kwargs['continue'] = 1
+ pages_continue = datasite.search_entities('Rembrandt', 'en', **kwargs)
+ self.assertNotEqual(list(pages), list(pages_continue))
+
+ def test_language_lists(self):
+ """Test that languages returned by paraminfo and MW are the same."""
+ site = self.get_site()
+ lang_codes = site._paraminfo.parameter('wbsearchentities',
+ 'language')['type']
+ lang_codes2 = [lang['code'] for lang in site._siteinfo.get('languages')]
+ self.assertEqual(lang_codes, lang_codes2)
+
+ def test_invalid_language(self):
+ """Test behavior of search_entities with invalid language provided."""
+ datasite = self.get_repo()
+ self.assertRaises(ValueError, datasite.search_entities, 'abc',
+ 'invalidlanguage')
+
+
class TestSametitleSite(TestCase):
"""Test APISite.sametitle on sites with known behaviour."""
--
To view, visit https://gerrit.wikimedia.org/r/179586
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ib7459a4b7c6bafe04d56dcd09ee0f8386711b4cf
Gerrit-PatchSet: 9
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: M4tx <m4tx(a)m4tx.pl>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: M4tx <m4tx(a)m4tx.pl>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: [i18n, backport] Use twn for i18n messages
......................................................................
[i18n, backport] Use twn for i18n messages
- use messages from i18n file
- update ignoreTemplates from core
- update documentation from core
- backport some trivial code parts from core
e.g. use site property instead of site() method,
site.code property instead of site language() method.
(note site.code is equal to site.lang on compat)
- show help doc when no generator is given
Change-Id: Ibf46165beb4a7e7109dfd1e220ca43d226226f2e
---
M commonscat.py
1 file changed, 81 insertions(+), 80 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/commonscat.py b/commonscat.py
index 6d5e4ea..97b5379 100644
--- a/commonscat.py
+++ b/commonscat.py
@@ -2,8 +2,9 @@
# -*- coding: utf-8 -*-
"""
With this tool you can add the template {{commonscat}} to categories.
+
The tool works by following the interwiki links. If the template is present on
-another langauge page, the bot will use it.
+another language page, the bot will use it.
You could probably use it at articles as well, but this isn't tested.
@@ -45,8 +46,8 @@
#
# (C) Multichill, 2008-2009
-# (C) Xqt, 2009-2013
-# (C) Pywikipedia bot team, 2008-2012
+# (C) Xqt, 2009-2015
+# (C) Pywikipedia bot team, 2008-2015
#
# Distributed under the terms of the MIT license.
#
@@ -55,9 +56,10 @@
import re
+import add_text
import wikipedia as pywikibot
import pagegenerators
-import add_text
+from pywikibot import i18n
docuReplacements = {
'¶ms;': pagegenerators.parameterHelp
@@ -193,7 +195,11 @@
'nl': [u'Commons', u'Commonsklein', u'Commonscatklein', u'Catbeg',
u'Catsjab', u'Catwiki'],
'om': [u'Commons'],
- 'pt': [u'Correlatos'],
+ 'pt': [u'Correlatos',
+ u'Commons',
+ u'Commons cat multi',
+ u'Commons1',
+ u'Commons2'],
'simple': [u'Sisterlinks'],
'ru': [u'Навигация', u'Навигация для категорий', u'КПР', u'КБР',
u'Годы в России', u'commonscat-inline'],
@@ -205,26 +211,10 @@
u'分类重定向', u'追蹤分類', u'共享資源', u'追蹤分類'],
}
-msg_change = {
- 'be-x-old': u'Робат: зьмяніў шаблён [[:Commons:Category:%(oldcat)s|%(oldcat)s]] на [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'cs': u'Robot změnil šablonu Commonscat z [[:Commons:Category:%(oldcat)s|%(oldcat)s]] na [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'da': u'Robot: Ændrer commonscat link fra [[:Commons:Category:%(oldcat)s|%(oldcat)s]] til [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'de': u'Bot: Ändere commonscat link von [[:Commons:Category:%(oldcat)s|%(oldcat)s]] zu [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'en': u'Bot: Changing commonscat link from [[:Commons:Category:%(oldcat)s|%(oldcat)s]] to [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'fa': u'ربات: تغییر پیوند به انبار از [[:Commons:Category:%(oldcat)s|%(oldcat)s]] به [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'fr': u'Robot: Changé commonscat link de [[:Commons:Category:%(oldcat)s|%(oldcat)s]] à [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'frr': u'Bot: Feranere commonscat link faan [[:Commons:Category:%(oldcat)s|%(oldcat)s]] tu [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'is': u'Vélmenni: Breyti Commonscat tengli frá [[:Commons:Category:%(oldcat)s|%(oldcat)s]] í [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'pdc': u'Waddefresser: commonscat Gleecher vun [[:Commons:Category:%(oldcat)s|%(oldcat)s]] nooch [[:Commons:Category:%(newcat)s|%(newcat)s]] geennert',
- 'ru': u'Бот: Изменение commonscat-ссылки с [[:Commons:Category:%(oldcat)s|%(oldcat)s]] на [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'sk': u'Robot zmenil šablónu Commonscat z [[:Commons:Category:%(oldcat)s|%(oldcat)s]] na [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'uk': u'Бот: Зміна commonscat-посилання з [[:Commons:Category:%(oldcat)s|%(oldcat)s]] на [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'th': u'บอต: เปลี่ยนลิงก์หมวดหมู่คอมมอนส์จาก [[:Commons:Category:%(oldcat)s|%(oldcat)s]] เป็น [[:Commons:Category:%(newcat)s|%(newcat)s]]',
- 'zh': u'機器人:更改 commonscat 連結,從 %(oldcat)s 至 %(newcat)s',
-}
-
class CommonscatBot:
+
+ """Commons categorisation bot."""
def __init__(self, generator, always, summary=None):
self.generator = generator
@@ -237,7 +227,7 @@
self.treat(page)
def treat(self, page):
- """ Loads the given page, does some changes, and saves it. """
+ """ Load the given page, do some changes, and save it. """
if not page.exists():
pywikibot.output(u'Page %s does not exist. Skipping.'
% page.title(asLink=True))
@@ -295,25 +285,24 @@
return False
@classmethod
- def getCommonscatTemplate(self, lang=None):
- """Get the template name in a language. Expects the language code.
- Return as tuple containing the primary template and it's alternatives
+ def getCommonscatTemplate(cls, code=None):
+ """Get the template name of a site. Expects the site code.
+
+ Return as tuple containing the primary template and its alternatives.
"""
- if lang in commonscatTemplates:
- return commonscatTemplates[lang]
+ if code in commonscatTemplates:
+ return commonscatTemplates[code]
else:
return commonscatTemplates[u'_default']
def skipPage(self, page):
- '''
- Do we want to skip this page?
- '''
- if page.site().language() in ignoreTemplates:
+ """Determine if the page should be skipped."""
+ if page.site.code in ignoreTemplates:
templatesInThePage = page.templates()
templatesWithParams = page.templatesWithParams()
- for template in ignoreTemplates[page.site().language()]:
- if type(template) != tuple:
+ for template in ignoreTemplates[page.site.code]:
+ if not isinstance(template, tuple):
if template in templatesInThePage:
return True
else:
@@ -354,7 +343,10 @@
commonsPage.put(newtext=newtext, comment=comment)
def addCommonscat(self, page):
- """Take a page. Go to all the interwiki page looking for a commonscat
+ """
+ Add CommonsCat template to page.
+
+ Take a page. Go to all the interwiki page looking for a commonscat
template. When all the interwiki's links are checked and a proper
category is found add it to the page.
@@ -362,7 +354,7 @@
pywikibot.output(u'Working on ' + page.title())
# Get the right templates for this page
primaryCommonscat, commonscatAlternatives = self.getCommonscatTemplate(
- page.site().language())
+ page.site.code)
commonscatLink = self.getCommonscatLink(page)
if commonscatLink:
pywikibot.output(u'Commonscat template is already on %s'
@@ -384,14 +376,13 @@
checkedCommonscatTarget, LinkText, Note)
return (True, self.always)
else:
- #Commonscat link is wrong
+ # Commonscat link is wrong
commonscatLink = self.findCommonscatLink(page)
if (commonscatLink != u''):
self.changeCommonscat(page, currentCommonscatTemplate,
currentCommonscatTarget,
primaryCommonscat, commonscatLink)
- #else
- #Should i remove the commonscat link?
+ # TODO: if the commonsLink == u'', should it be removed?
elif self.skipPage(page):
pywikibot.output("Found a template in the skip list. Skipping %s"
@@ -417,7 +408,7 @@
description=u''):
""" Change the current commonscat template and target. """
if oldcat == '3=S' or linktitle == '3=S':
- return # additional param on de-wiki, TODO: to be handled
+ return # TODO: handle additional param on de-wiki
if not linktitle and (page.title().lower() in oldcat.lower() or
oldcat.lower() in page.title().lower()):
linktitle = oldcat
@@ -441,45 +432,47 @@
if self.summary:
comment = self.summary
else:
- comment = pywikibot.translate(page.site(),
- msg_change) % {'oldcat': oldcat,
- 'newcat': newcat}
+ comment = i18n.twtranslate(page.site.code,
+ 'commonscat-msg_change',
+ {'oldcat': oldcat, 'newcat': newcat})
self.save(newtext, page, comment)
def findCommonscatLink(self, page=None):
+ """Find CommonsCat template on interwiki pages."""
for ipage in page.interwiki():
try:
- if(ipage.exists() and not ipage.isRedirectPage()
- and not ipage.isDisambig()):
- commonscatLink = self.getCommonscatLink(ipage)
- if commonscatLink:
- (currentTemplate,
- possibleCommonscat, linkText, Note) = commonscatLink
- checkedCommonscat = self.checkCommonscatLink(
- possibleCommonscat)
- if (checkedCommonscat != u''):
- pywikibot.output(
- u"Found link for %s at [[%s:%s]] to %s."
- % (page.title(), ipage.site().language(),
- ipage.title(), checkedCommonscat))
- return checkedCommonscat
+ if(not ipage.exists() or ipage.isRedirectPage()
+ or ipage.isDisambig()):
+ continue
+ commonscatLink = self.getCommonscatLink(ipage)
+ if not commonscatLink:
+ continue
+ (currentTemplate,
+ possibleCommonscat, linkText, Note) = commonscatLink
+ checkedCommonscat = self.checkCommonscatLink(possibleCommonscat)
+ if (checkedCommonscat != u''):
+ pywikibot.output(
+ u"Found link for %s at [[%s:%s]] to %s."
+ % (page.title(), ipage.site.code,
+ ipage.title(), checkedCommonscat))
+ return checkedCommonscat
except pywikibot.BadTitle:
- #The interwiki was incorrect
+ # The interwiki was incorrect
return u''
return u''
def getCommonscatLink(self, wikipediaPage=None):
- '''
- Go through the page and return a tuple of (<templatename>, <target>)
- '''
+ """Find CommonsCat template on page.
+
+ @rtype: tuple of (<templatename>, <target>, <linktext>, <note>)
+ """
primaryCommonscat, commonscatAlternatives = self.getCommonscatTemplate(
- wikipediaPage.site().language())
+ wikipediaPage.site.code)
commonscatTemplate = u''
commonscatTarget = u''
commonscatLinktext = u''
commonscatNote = u''
# See if commonscat is present
-
for template in wikipediaPage.templatesWithParams():
if template[0] == primaryCommonscat \
or template[0] in commonscatAlternatives:
@@ -497,16 +490,17 @@
return None
def checkCommonscatLink(self, name=""):
- """ This function will return the name of a valid commons category
+ """ Return the name of a valid commons category.
+
If the page is a redirect this function tries to follow it.
- If the page doesnt exists the function will return an empty string
+ If the page doesn't exists the function will return an empty string
"""
if pywikibot.verbose:
pywikibot.output("getCommonscat: " + name)
try:
commonsSite = self.site.image_repository()
- #This can throw a pywikibot.BadTitle
+ # This can throw a pywikibot.BadTitle
commonsPage = pywikibot.Page(commonsSite, "Category:" + name)
if not commonsPage.exists():
@@ -525,9 +519,9 @@
return self.checkCommonscatLink(m.group('newcat2'))
else:
pywikibot.output(
- u'getCommonscat: Deleted by %s. Couldn\'t find '
+ u'getCommonscat: %s deleted by %s. Couldn\'t find '
u'move target in "%s"'
- % (loguser, logcomment))
+ % (commonsPage, loguser, logcomment))
return u''
except StopIteration:
if pywikibot.verbose:
@@ -563,20 +557,26 @@
return u''
-def main():
- """ Parse the command line arguments and get a pagegenerator to work on.
- Iterate through all the pages.
+def main(*args):
"""
+ Process command line arguments and invoke bot.
+ If args is an empty list, sys.argv is used.
+
+ @param args: command line arguments
+ @type args: list of unicode
+ """
summary = None
generator = None
always = False
ns = []
ns.append(14)
- # Load a lot of default generators
+
+ # Process global args and prepare generator args parser
+
genFactory = pagegenerators.GeneratorFactory()
- for arg in pywikibot.handleArgs():
+ for arg in pywikibot.handleArgs(*args):
if arg.startswith('-summary'):
if len(arg) == 8:
summary = pywikibot.input(u'What summary do you want to use?')
@@ -599,13 +599,14 @@
if not generator:
generator = genFactory.getCombinedGenerator()
- if not generator:
- raise add_text.NoEnoughData(u'You have to specify the generator you '
- u'want to use for the script!')
- pregenerator = pagegenerators.PreloadingGenerator(generator)
- bot = CommonscatBot(pregenerator, always, summary)
- bot.run()
+ if generator:
+ pregenerator = pagegenerators.PreloadingGenerator(generator)
+ bot = CommonscatBot(pregenerator, always, summary)
+ bot.run()
+ else:
+ pywikibot.showHelp()
+
if __name__ == "__main__":
try:
--
To view, visit https://gerrit.wikimedia.org/r/186150
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ibf46165beb4a7e7109dfd1e220ca43d226226f2e
Gerrit-PatchSet: 3
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>