Revision: 8238
Author: xqt
Date: 2010-06-02 13:50:48 +0000 (Wed, 02 Jun 2010)
Log Message:
-----------
Adding @property decorator to Page.site() (bug #3010404) patch by stanlekub. Thanks.
Modified Paths:
--------------
branches/rewrite/pywikibot/data/api.py
branches/rewrite/pywikibot/page.py
branches/rewrite/pywikibot/pagegenerators.py
branches/rewrite/scripts/category.py
branches/rewrite/scripts/cosmetic_changes.py
branches/rewrite/scripts/interwiki.py
branches/rewrite/scripts/interwiki_graph.py
branches/rewrite/scripts/redirect.py
branches/rewrite/scripts/replace.py
branches/rewrite/scripts/solve_disambiguation.py
branches/rewrite/scripts/titletranslate.py
branches/rewrite/tests/api_tests.py
branches/rewrite/tests/page_tests.py
Modified: branches/rewrite/pywikibot/data/api.py
===================================================================
--- branches/rewrite/pywikibot/data/api.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/pywikibot/data/api.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -820,7 +820,7 @@
page._catinfo = pagedict["categoryinfo"]
if "templates" in pagedict:
- page._templates = [ pywikibot.Page(page.site(), tl['title'])
+ page._templates = [ pywikibot.Page(page.site, tl['title'])
for tl in pagedict['templates'] ]
if "langlinks" in pagedict:
@@ -828,7 +828,7 @@
for ll in pagedict["langlinks"]:
link = pywikibot.Link.langlinkUnsafe(ll['lang'],
ll['*'],
- source=page.site())
+ source=page.site)
links.append(link)
page._langlinks = links
Modified: branches/rewrite/pywikibot/page.py
===================================================================
--- branches/rewrite/pywikibot/page.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/pywikibot/page.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -102,7 +102,7 @@
self.__dict__ = source.__dict__
if title:
# overwrite title
- self._link = Link(title, source=source.site(), defaultNamespace=ns)
+ self._link = Link(title, source=source.site, defaultNamespace=ns)
## if ":" in title:
## prefix = title[ :title.index(":")]
## self._ns = self._site.ns_index(prefix)
@@ -111,7 +111,7 @@
## else:
## title = title[title.index(":")+1 : ].strip("
_")
## self._title = "%s:%s" % (
-## self.site().namespace(self._ns),
+## self.site.namespace(self._ns),
## self._title)
## else:
## self._ns = 0
@@ -127,7 +127,7 @@
## self._title = source.title
## # reassemble the canonical title from components
## if self._ns:
-## self._title = "%s:%s" % (self.site().namespace(self._ns),
+## self._title = "%s:%s" % (self.site.namespace(self._ns),
## self._title)
else:
raise pywikibot.Error(
@@ -140,6 +140,7 @@
## # Always capitalize the first letter
## self._title = self._title[:1].upper() + self._title[1:]
+ @property
def site(self):
"""Return the Site object for the wiki on which this Page
resides."""
return self._link.site
@@ -176,17 +177,17 @@
title = title + "#" + self._link.section
if asLink:
if forceInterwiki or (allowInterwiki and
- (self.site().family.name != config.family
- or self.site().code != config.mylang)):
- if self.site().family.name != config.family \
- and self.site().family.name != self.site().code:
- return u'[[%s:%s:%s]]' % (self.site().family.name,
- self.site().code,
+ (self.site.family.name != config.family
+ or self.site.code != config.mylang)):
+ if self.site.family.name != config.family \
+ and self.site.family.name != self.site.code:
+ return u'[[%s:%s:%s]]' % (self.site.family.name,
+ self.site.code,
title)
else:
# use this form for sites like commons, where the
# code is the same as the family name
- return u'[[%s:%s]]' % (self.site().code,
+ return u'[[%s:%s]]' % (self.site.code,
title)
elif textlink and (self.isImage() or self.isCategory()):
return u'[[:%s]]' % title
@@ -199,7 +200,7 @@
if underscore or asUrl:
title = title.replace(u' ', u'_')
if asUrl:
- encodedTitle = title.encode(self.site().encoding())
+ encodedTitle = title.encode(self.site.encoding())
title = urllib.quote(encodedTitle)
if as_filename:
# Replace characters that are not possible in file names on some
@@ -246,8 +247,8 @@
if not isinstance(other, Page):
# especially, return -1 if other is None
return -1
- if self.site() != other.site():
- return cmp(self.site(), other.site())
+ if self.site != other.site:
+ return cmp(self.site, other.site)
if self.namespace() != other.namespace():
return cmp(self.namespace(), other.namespace())
return cmp(self._link.title, other._link.title)
@@ -271,7 +272,7 @@
if not hasattr(self, '_autoFormat'):
from pywikibot import date
self._autoFormat = date.getAutoFormat(
- self.site().code,
+ self.site.code,
self.title(withNamespace=False)
)
return self._autoFormat
@@ -335,7 +336,7 @@
or not self._revid in self._revisions \
or self._revisions[self._revid].text is None:
try:
- self.site().loadrevisions(self, getText=True, sysop=sysop)
+ self.site.loadrevisions(self, getText=True, sysop=sysop)
except (pywikibot.NoPage, pywikibot.SectionError), e:
self._getexception = e
raise
@@ -357,7 +358,7 @@
"""
if force or not oldid in self._revisions \
or self._revisions[oldid].text is None:
- self.site().loadrevisions(self, getText=True, revids=oldid,
+ self.site.loadrevisions(self, getText=True, revids=oldid,
sysop=sysop)
# TODO: what about redirects, errors?
return self._revisions[oldid].text
@@ -365,16 +366,16 @@
def permalink(self):
"""Return the permalink URL for current revision of this
page."""
return "%s://%s/%sindex.php?title=%s&oldid=%s" \
- % (self.site().protocol(),
- self.site().hostname(),
- self.site().scriptpath(),
+ % (self.site.protocol(),
+ self.site.hostname(),
+ self.site.scriptpath(),
self.title(asUrl=True),
self.latestRevision())
def latestRevision(self):
"""Return the current revision id for this
page."""
if not hasattr(self, '_revid'):
- self.site().loadrevisions(self)
+ self.site.loadrevisions(self)
return self._revid
def _textgetter(self):
@@ -404,7 +405,7 @@
req = pywikibot.data.api.Request(action="expandtemplates",
text=self.text,
title=self.title(withSection=False),
- site=self.site())
+ site=self.site)
result = req.submit()
return result["expandtemplates"]["*"]
@@ -433,11 +434,11 @@
found.
"""
- return self.site().page_exists(self)
+ return self.site.page_exists(self)
def isRedirectPage(self):
"""Return True if this is a redirect, False if not or not
existing."""
- return self.site().page_isredirect(self)
+ return self.site.page_isredirect(self)
def isCategoryRedirect(self):
"""Return True if this is a category redirect page, False
otherwise."""
@@ -445,12 +446,12 @@
if not self.isCategory():
return False
if not hasattr(self, "_catredirect"):
- catredirs = self.site().category_redirects()
+ catredirs = self.site.category_redirects()
for (template, args) in self.templatesWithParams():
if template.title(withNamespace=False) in catredirs:
# Get target (first template argument)
try:
- self._catredirect = self.site().namespace(14) \
+ self._catredirect = self.site.namespace(14) \
+ ":" + args[0].strip()
break
except IndexError:
@@ -466,7 +467,7 @@
def getCategoryRedirectTarget(self):
"""If this is a category redirect, return the target category
title."""
if self.isCategoryRedirect():
- return Category(Link(self._catredirect, self.site()))
+ return Category(Link(self._catredirect, self.site))
raise pywikibot.IsNotRedirectPage(self.title())
def isEmpty(self):
@@ -477,8 +478,8 @@
"""
txt = self.get()
- txt = pywikibot.removeLanguageLinks(txt, site = self.site())
- txt = pywikibot.removeCategoryLinks(txt, site = self.site())
+ txt = pywikibot.removeLanguageLinks(txt, site = self.site)
+ txt = pywikibot.removeCategoryLinks(txt, site = self.site)
if len(txt) < 4:
return True
else:
@@ -504,14 +505,14 @@
return None
if self.isTalkPage():
if self.namespace() == 1:
- return Page(self.site(), self.title(withNamespace=False))
+ return Page(self.site, self.title(withNamespace=False))
else:
- return Page(self.site(),
- self.site().namespace(ns - 1) + ':'
+ return Page(self.site,
+ self.site.namespace(ns - 1) + ':'
+ self.title(withNamespace=False))
else:
- return Page(self.site(),
- self.site().namespace(ns + 1) + ':'
+ return Page(self.site,
+ self.site.namespace(ns + 1) + ':'
+ self.title(withNamespace=False))
def isCategory(self):
@@ -535,22 +536,22 @@
"""
if not hasattr(self, "_isDisambig"):
- if not hasattr(self.site(), "_disambigtemplates"):
- self.site()._disambigtemplates = \
- self.site().family.disambig(self.site().code)
- if self.site()._disambigtemplates is None:
+ if not hasattr(self.site, "_disambigtemplates"):
+ self.site._disambigtemplates = \
+ self.site.family.disambig(self.site.code)
+ if self.site._disambigtemplates is None:
try:
- disambigpages = Page(self.site(),
+ disambigpages = Page(self.site,
"MediaWiki:Disambiguationspage")
- self.site()._disambigtemplates = [
+ self.site._disambigtemplates = [
link.title(withNamespace=False)
for link in disambigpages.linkedPages()
if link.namespace() == 10
]
except pywikibot.NoPage:
- self.site()._disambigtemplates = ['Disambig']
+ self.site._disambigtemplates = ['Disambig']
for t in self.templates():
- if t.title(withNamespace=False) in self.site()._disambigtemplates:
+ if t.title(withNamespace=False) in self.site._disambigtemplates:
self._isDisambig = True
break
else:
@@ -582,7 +583,7 @@
# to implement those methods in the site interface and then combine
# the results for this method, or to implement this method and then
# split up the results for the others.
- return self.site().pagereferences(
+ return self.site.pagereferences(
self,
followRedirects=follow_redirects,
filterRedirects=redirectsOnly,
@@ -604,7 +605,7 @@
@param total: iterate no more than this number of pages in total
"""
- return self.site().pagebacklinks(self,
+ return self.site.pagebacklinks(self,
followRedirects=followRedirects,
filterRedirects=filterRedirects,
namespaces=namespaces, step=step,
@@ -621,7 +622,7 @@
@param total: iterate no more than this number of pages in total
"""
- return self.site().page_embeddedin(self,
+ return self.site.page_embeddedin(self,
filterRedirects=filter_redirects,
namespaces=namespaces,
step=step, total=total)
@@ -634,7 +635,7 @@
- page is protected, and bot has a sysop account for this site.
"""
- return self.site().page_can_be_edited(self)
+ return self.site.page_can_be_edited(self)
def botMayEdit(self):
"""Return True if this page allows bots to edit it.
@@ -652,7 +653,7 @@
""" # TODO: move this to Site object?
if config.ignore_bot_templates: #Check the "master ignore switch"
return True
- username = self.site().user()
+ username = self.site.user()
try:
templates = self.templatesWithParams();
except (pywikibot.NoPage,
@@ -729,7 +730,7 @@
err = None
link = self.title(asLink=True)
try:
- done = self.site().editpage(self, summary=comment, minor=minor,
+ done = self.site.editpage(self, summary=comment, minor=minor,
watch=watchval)
if not done:
pywikibot.warning(u"Page %s not saved" % link)
@@ -787,7 +788,7 @@
@return: True if successful, False otherwise.
"""
- return self.site().watchpage(self, unwatch)
+ return self.site.watchpage(self, unwatch)
def linkedPages(self, namespaces=None, step=None, total=None):
"""Iterate Pages that this Page links to.
@@ -803,7 +804,7 @@
@return: a generator that yields Page objects.
"""
- return self.site().pagelinks(self, namespaces=namespaces, step=step,
+ return self.site.pagelinks(self, namespaces=namespaces, step=step,
total=total)
def interwiki(self, expand=True):
@@ -824,15 +825,15 @@
for linkmatch in pywikibot.link_regex.finditer(
pywikibot.removeDisabledParts(text)):
linktitle = linkmatch.group("title")
- link = Link(linktitle, self.site())
+ link = Link(linktitle, self.site)
# only yield links that are to a different site and that
# are not language links
try:
- if link.site != self.site():
+ if link.site != self.site:
if linktitle.lstrip().startswith(":"):
# initial ":" indicates not a language link
yield link
- elif link.site.family != self.site().family:
+ elif link.site.family != self.site.family:
# link to a different family is not a language link
yield link
except pywikibot.Error:
@@ -863,7 +864,7 @@
# method is called. If we do this, we'll have to think
# about what will happen if the generator is not completely
# iterated upon.
- return self.site().pagelanglinks(self, step=step, total=total)
+ return self.site.pagelanglinks(self, step=step, total=total)
def templates(self):
"""Return a list of Page objects for templates used on this Page.
@@ -892,7 +893,7 @@
"""
if hasattr(self, '_templates'):
return iter(self._templates)
- return self.site().pagetemplates(self, step=step, total=total)
+ return self.site.pagetemplates(self, step=step, total=total)
@deprecate_arg("followRedirects", None)
@deprecate_arg("loose", None)
@@ -904,7 +905,7 @@
@return: a generator that yields ImagePage objects.
"""
- return self.site().pageimages(self, step=step, total=total)
+ return self.site.pageimages(self, step=step, total=total)
def templatesWithParams(self):
"""Iterate templates used on this Page.
@@ -922,7 +923,7 @@
# element into a list in the format used by old scripts
result = []
for template in templates:
- link = pywikibot.Link(template[0], self.site(),
+ link = pywikibot.Link(template[0], self.site,
defaultNamespace=10)
try:
if link.canonical_title() not in titles:
@@ -942,7 +943,7 @@
positional.append(args[key])
for name in named:
positional.append("%s=%s" % (name, named[name]))
- result.append((pywikibot.Page(link, self.site()), positional))
+ result.append((pywikibot.Page(link, self.site), positional))
return result
@deprecate_arg("nofollow_redirects", None)
@@ -956,7 +957,7 @@
@return: a generator that yields Category objects.
"""
- return self.site().pagecategories(self, withSortKey=withSortKey,
+ return self.site.pagecategories(self, withSortKey=withSortKey,
step=step, total=total)
def extlinks(self, step=None, total=None):
@@ -967,7 +968,7 @@
@return: a generator that yields unicode objects containing URLs.
"""
- return self.site().page_extlinks(self, step=step, total=total)
+ return self.site.page_extlinks(self, step=step, total=total)
def getRedirectTarget(self):
"""Return a Page object for the target this Page redirects to.
@@ -976,7 +977,7 @@
exception. This method also can raise a NoPage exception.
"""
- return self.site().getredirtarget(self)
+ return self.site.getredirtarget(self)
# BREAKING CHANGE: in old framework, default value for getVersionHistory
# returned no more than 500 revisions; now, it iterates
@@ -998,7 +999,7 @@
@param total: iterate no more than this number of revisions in total
"""
- self.site().loadrevisions(self, getText=False, rvdir=reverseOrder,
+ self.site.loadrevisions(self, getText=False, rvdir=reverseOrder,
step=step, total=total)
return [ ( self._revisions[rev].revid,
self._revisions[rev].timestamp,
@@ -1034,7 +1035,7 @@
edit date/time, user name and content
"""
- return self.site().loadrevisions(self, getText=True,
+ return self.site.loadrevisions(self, getText=True,
rvdir=reverseOrder,
step=step, total=total)
@@ -1070,7 +1071,7 @@
reason = pywikibot.input(u'Please enter a reason for the move:')
# TODO: implement "safe" parameter (Is this necessary ?)
# TODO: implement "sysop" parameter
- return self.site().movepage(self, newtitle, reason,
+ return self.site.movepage(self, newtitle, reason,
movetalk=movetalkpage,
noredirect=deleteAndMove)
@@ -1089,7 +1090,7 @@
pywikibot.output(u'Deleting %s.' % (self.title(asLink=True)))
reason = pywikibot.input(u'Please enter a reason for the deletion:')
answer = u'y'
- if prompt and not hasattr(self.site(), '_noDeletePrompt'):
+ if prompt and not hasattr(self.site, '_noDeletePrompt'):
answer = pywikibot.inputChoice(u'Do you want to delete %s?'
% self.title(asLink = True, forceInterwiki = True),
['Yes', 'No', 'All'],
@@ -1097,10 +1098,10 @@
'N')
if answer in ['a', 'A']:
answer = 'y'
- self.site()._noDeletePrompt = True
+ self.site._noDeletePrompt = True
if answer in ['y', 'Y']:
try:
- return self.site().deletepage(self, reason)
+ return self.site.deletepage(self, reason)
except pywikibot.NoUsername, e:
if mark:
raise NotImplementedError(
@@ -1121,7 +1122,7 @@
"""
if not hasattr(self, "_deletedRevs"):
self._deletedRevs = {}
- for item in self.site().deletedrevs(self, step=step, total=total):
+ for item in self.site.deletedrevs(self, step=step, total=total):
for rev in item.get("revisions", []):
self._deletedRevs[rev['timestamp']] = rev
yield rev['timestamp']
@@ -1140,7 +1141,7 @@
(not retrieveText)
or "content" in self._deletedRevs["timestamp"]):
return self._deletedRevs["timestamp"]
- for item in self.site().deletedrevs(self, start=timestamp,
+ for item in self.site.deletedrevs(self, start=timestamp,
get_text=retrieveText, total=1):
# should only be one item with one revision
if item['title'] == self.title:
@@ -1187,7 +1188,7 @@
% (self.title(asLink=True)))
comment = pywikibot.input(
u'Please enter a reason for the undeletion:')
- return self.site().undelete(self, comment)
+ return self.site.undelete(self, comment)
@deprecate_arg("throttle", None)
def protect(self, edit='sysop', move='sysop', unprotect=False,
@@ -1216,16 +1217,16 @@
if unprotect:
edit = move = ""
answer = 'y'
- if prompt and not hasattr(self.site(), '_noProtectPrompt'):
+ if prompt and not hasattr(self.site, '_noProtectPrompt'):
answer = pywikibot.inputChoice(
u'Do you want to change the protection level of %s?'
% self.title(asLink=True, forceInterwiki = True),
['Yes', 'No', 'All'], ['Y',
'N', 'A'], 'N')
if answer in ['a', 'A']:
answer = 'y'
- self.site()._noProtectPrompt = True
+ self.site._noProtectPrompt = True
if answer in ['y', 'Y']:
- return self.site().protect(self, edit, move, reason)
+ return self.site.protect(self, edit, move, reason)
def change_category(self, oldCat, newCat, comment=None, sortKey=None,
inPlace=True):
@@ -1241,7 +1242,7 @@
"""
#TODO: is inPlace necessary?
- site = self.site()
+ site = self.site
changesMade = False
if not self.canBeEdited():
@@ -1337,7 +1338,7 @@
if not self.isCategory():
return None # should this raise an exception??
try:
- return self.site().categoryinfo(self)
+ return self.site.categoryinfo(self)
except NotImplementedError:
return None
@@ -1346,7 +1347,7 @@
@deprecated("Site.encoding()")
def encoding(self):
"""DEPRECATED: use Site.encoding() instead"""
- return self.site().encoding()
+ return self.site.encoding()
@deprecated("Page.title(withNamespace=False)")
def titleWithoutNamespace(self, underscore=False):
@@ -1427,15 +1428,15 @@
if not hasattr(self, '_imagePageHtml'):
from pywikibot.comms import http
path = "%s/index.php?title=%s" \
- % (self.site().scriptpath(), self.title(asUrl=True))
- self._imagePageHtml = http.request(self.site(), path)
+ % (self.site.scriptpath(), self.title(asUrl=True))
+ self._imagePageHtml = http.request(self.site, path)
return self._imagePageHtml
def fileUrl(self):
"""Return the URL for the image described on this
page."""
# TODO add scaling option?
if not hasattr(self, '_imageinfo'):
- self._imageinfo = self.site().getimageinfo(self) #FIXME
+ self._imageinfo = self.site.getimageinfo(self) #FIXME
return self._imageinfo['url']
def fileIsOnCommons(self):
@@ -1446,7 +1447,7 @@
def fileIsShared(self):
"""Return True if image is stored on any known shared
repository."""
# as of now, the only known repositories are commons and wikitravel
- if 'wikitravel_shared' in self.site().shared_image_repository():
+ if 'wikitravel_shared' in self.site.shared_image_repository():
return self.fileUrl().startswith(
u'http://wikitravel.org/upload/shared/')
return self.fileIsOnCommons()
@@ -1466,7 +1467,7 @@
def getFileSHA1Sum(self):
"""Return image file's SHA1 checksum."""
if not hasattr(self, '_imageinfo'):
- self._imageinfo = self.site().getimageinfo(self) #FIXME
+ self._imageinfo = self.site.getimageinfo(self) #FIXME
return self._imageinfo['sha1']
def getFileVersionHistory(self):
@@ -1477,7 +1478,7 @@
"""
#TODO; return value may need to change
- return self.site().getimageinfo(self, history=True) #FIXME
+ return self.site.getimageinfo(self, history=True) #FIXME
def getFileVersionHistoryTable(self):
"""Return the version history in the form of a wiki
table."""
@@ -1496,7 +1497,7 @@
@param total: iterate no more than this number of pages in total
"""
- return self.site().imageusage(self, step=step, total=total)
+ return self.site.imageusage(self, step=step, total=total)
class Category(Page):
@@ -1553,9 +1554,9 @@
recurse = recurse - 1
if not hasattr(self, "_subcats"):
self._subcats = []
- for member in self.site().categorymembers(self, namespaces=[14],
+ for member in self.site.categorymembers(self, namespaces=[14],
step=step, total=total):
- subcat = Category(self.site(), member.title())
+ subcat = Category(self.site, member.title())
self._subcats.append(subcat)
yield subcat
if total is not None:
@@ -1601,9 +1602,9 @@
total (at all levels)
"""
- namespaces = [x for x in self.site().namespaces()
+ namespaces = [x for x in self.site.namespaces()
if x>=0 and x!=14]
- for member in self.site().categorymembers(self,
+ for member in self.site.categorymembers(self,
namespaces=namespaces,
step=step, total=total):
yield member
@@ -1625,7 +1626,7 @@
def members(self, recurse=False, namespaces=None, step=None, total=None):
"""Yield all category contents (subcats, pages, and
files)."""
- for member in self.site().categorymembers(self, namespaces,
+ for member in self.site.categorymembers(self, namespaces,
step=step, total=total):
yield member
if total is not None:
@@ -1646,7 +1647,7 @@
def isEmptyCategory(self):
"""Return True if category has no members (including
subcategories)."""
- for member in self.site().categorymembers(self, total=1):
+ for member in self.site.categorymembers(self, total=1):
return False
return True
@@ -1668,8 +1669,8 @@
# move to category.py? (Although it doesn't seem to be used there,
# either)
if not isinstance(cat, Category):
- cat = self.site().category_namespace() + ':' + cat
- targetCat = Category(self.site(), cat)
+ cat = self.site.category_namespace() + ':' + cat
+ targetCat = Category(self.site, cat)
else:
targetCat = cat
if targetCat.exists():
@@ -1709,8 +1710,8 @@
"""
# I don't see why we need this as part of the framework either
# move to scripts/category.py?
- catname = self.site().category_namespace() + ':' + catname
- targetCat = Category(self.site(), catname)
+ catname = self.site.category_namespace() + ':' + catname
+ targetCat = Category(self.site, catname)
if targetCat.exists():
pywikibot.warning(u'Target page %s already exists!'
% targetCat.title())
@@ -1801,7 +1802,7 @@
del self._userprops
if not hasattr(self, '_userprops'):
usrequest = pywikibot.data.api.Request(
- site=self.site(),
+ site=self.site,
action='query',
list='users',
usprop='blockinfo|groups|editcount|registration|emailable',
@@ -1883,7 +1884,7 @@
raise AutoblockUser("This is an autoblock ID, you can only use to unblock it.")
if subpage:
subpage = u'/' + subpage
- return Page(Link(self.title() + subpage, self.site()))
+ return Page(Link(self.title() + subpage, self.site))
def getUserTalkPage(self, subpage=u''):
""" Return a pywikibot.Page object corresponding to this
user's main
@@ -1900,7 +1901,7 @@
if subpage:
subpage = u'/' + subpage
return Page(Link(self.title(withNamespace=False) + subpage,
- self.site(), defaultNamespace=3))
+ self.site, defaultNamespace=3))
def sendMail(self, subject, text, ccme = False):
""" Send an email to this user via mediawiki's email
interface.
@@ -1919,13 +1920,13 @@
if not self.isEmailable():
raise UserActionRefuse('This user is not mailable')
- if not self.site().has_right('sendemail'):
+ if not self.site.has_right('sendemail'):
raise UserActionRefuse('You don\'t have permission to send mail')
params = {
'action': 'emailuser',
'target': self.username,
- 'token': self.site().token(self, 'email'),
+ 'token': self.site.token(self, 'email'),
'subject': subject,
'text': text,
}
@@ -1972,11 +1973,11 @@
@param namespaces: only iterate links in these namespaces
@type namespaces: list
"""
- for contrib in self.site().usercontribs(user=self.username,
+ for contrib in self.site.usercontribs(user=self.username,
namespaces=namespaces, total=total):
ts = pywikibot.Timestamp.fromISOformat(contrib['timestamp'])
ts = int(ts.strftime("%Y%m%d%H%M%S"))
- yield Page(Link(contrib['title'], self.site(),
+ yield Page(Link(contrib['title'], self.site,
defaultNamespace=contrib['ns'])), \
contrib['revid'], ts, contrib['comment']
@@ -1990,7 +1991,7 @@
@param total: limit result to this number of pages
@type total: int
"""
- for item in self.site().logevents(logtype='upload', user=self.username,
+ for item in self.site.logevents(logtype='upload', user=self.username,
total=total):
yield item.title(), str(item.timestamp()), item.comment(), False
@@ -2361,7 +2362,7 @@
"""
link = Link.__new__(Link)
- link._site = page.site()
+ link._site = page.site
link._section = page.section()
link._namespace = page.namespace()
link._title = page.title(withNamespace=False,
Modified: branches/rewrite/pywikibot/pagegenerators.py
===================================================================
--- branches/rewrite/pywikibot/pagegenerators.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/pywikibot/pagegenerators.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -708,8 +708,8 @@
seenPages = {}
for page in generator:
if page not in seenPages:
- _page = u"%s:%s:%s" % (page.site().family.name,
- page.site().code,
+ _page = u"%s:%s:%s" % (page.site.family.name,
+ page.site.code,
page.title())
seenPages[_page] = True
yield page
@@ -760,7 +760,7 @@
sites = {}
# build a list of pages for each site found in the iterator
for page in generator:
- site = page.site()
+ site = page.site
sites.setdefault(site, []).append(page)
if len(sites[site]) >= step:
group = sites[site]
@@ -789,7 +789,7 @@
site = pywikibot.Site()
for page in site.unusedfiles(number=number, repeat=repeat,
extension=extension):
- yield pywikibot.ImagePage(page.site(), page.title())
+ yield pywikibot.ImagePage(page.site, page.title())
def WithoutInterwikiPageGenerator(number=100, repeat=False, site=None):
if site is None:
@@ -1007,7 +1007,7 @@
title = url[len(base):]
page = pywikibot.Page(pywikibot.Link(title, self.site))
# Google contains links in the format
http://de.wikipedia.org/wiki/en:Foobar
- if page.site() == self.site:
+ if page.site == self.site:
yield page
def MySQLPageGenerator(query, site = None):
Modified: branches/rewrite/scripts/category.py
===================================================================
--- branches/rewrite/scripts/category.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/scripts/category.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -343,7 +343,7 @@
'''
page_name = pagelink.title()
- site = pagelink.site()
+ site = pagelink.site
# regular expression that matches a name followed by a space and
# disambiguation brackets. Group 1 is the name without the rest.
bracketsR = re.compile('(.*) \(.+?\)')
Modified: branches/rewrite/scripts/cosmetic_changes.py
===================================================================
--- branches/rewrite/scripts/cosmetic_changes.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/scripts/cosmetic_changes.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -469,7 +469,7 @@
# Show the title of the page we're working on.
# Highlight the title in purple.
pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
- ccToolkit = CosmeticChangesToolkit(page.site(), debug = True, namespace =
page.namespace())
+ ccToolkit = CosmeticChangesToolkit(page.site, debug = True, namespace =
page.namespace())
changedText = ccToolkit.change(page.get())
if changedText != page.get():
if not self.acceptall:
Modified: branches/rewrite/scripts/interwiki.py
===================================================================
--- branches/rewrite/scripts/interwiki.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/scripts/interwiki.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -843,7 +843,7 @@
return self.size
def add(self, page):
- site = page.site()
+ site = page.site
if not site in self.tree:
self.tree[site] = []
self.tree[site].append(page)
@@ -851,7 +851,7 @@
def remove(self, page):
try:
- self.tree[page.site()].remove(page)
+ self.tree[page.site].remove(page)
self.size -= 1
except ValueError:
pass
@@ -916,7 +916,7 @@
Site:
Code becomes:
- todo <- {originPage.site():[originPage]}
+ todo <- {originPage.site:[originPage]}
done <- []
while todo != {}:
site <- electSite()
@@ -1101,7 +1101,7 @@
page = StoredPage(page)
self.foundIn[page] = [linkingPage]
self.todo.add(page)
- counter.plus(page.site())
+ counter.plus(page.site)
return True
def skipPage(self, page, target, counter):
@@ -1122,9 +1122,9 @@
return False
elif self.originPage.namespace() != linkedPage.namespace():
# Allow for a mapping between different namespaces
- crossFrom =
self.originPage.site().family.crossnamespace.get(self.originPage.namespace(), {})
- crossTo = crossFrom.get(self.originPage.site().language(),
crossFrom.get('_default', {}))
- nsmatch = crossTo.get(linkedPage.site().language(),
crossTo.get('_default', []))
+ crossFrom =
self.originPage.site.family.crossnamespace.get(self.originPage.namespace(), {})
+ crossTo = crossFrom.get(self.originPage.site.language(),
crossFrom.get('_default', {}))
+ nsmatch = crossTo.get(linkedPage.site.language(),
crossTo.get('_default', []))
if linkedPage.namespace() in nsmatch:
return False
if globalvar.autonomous:
@@ -1133,7 +1133,7 @@
self.foundIn[linkedPage] = [linkingPage]
return True
else:
- preferredPage = self.getFoundInCorrectNamespace(linkedPage.site())
+ preferredPage = self.getFoundInCorrectNamespace(linkedPage.site)
if preferredPage:
pywikibot.output(u"NOTE: Ignoring link from page %s in namespace
%i to page %s in namespace %i because page %s in the correct namespace has already been
found." % (self.originPage, self.originPage.namespace(), linkedPage,
linkedPage.namespace(), preferredPage))
return True
@@ -1145,9 +1145,9 @@
if choice == 'g':
self.makeForcedStop(counter)
elif choice == 'a':
- newHint = pywikibot.input(u'Give the alternative for
language %s, not using a language code:' % linkedPage.site().language())
+ newHint = pywikibot.input(u'Give the alternative for
language %s, not using a language code:' % linkedPage.site.language())
if newHint:
- alternativePage = pywikibot.Page(linkedPage.site(),
newHint)
+ alternativePage = pywikibot.Page(linkedPage.site,
newHint)
if alternativePage:
# add the page that was entered by the user
self.addIfNew(alternativePage, counter, None)
@@ -1163,7 +1163,7 @@
if page.title().lower() != self.originPage.title().lower():
pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode"
% (page, self.originPage))
return True
- elif page.title() != self.originPage.title() and
self.originPage.site().nocapitalize and page.site().nocapitalize:
+ elif page.title() != self.originPage.title() and
self.originPage.site.nocapitalize and page.site.nocapitalize:
pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode
because both languages are uncapitalized." % (page, self.originPage))
return True
return False
@@ -1192,14 +1192,14 @@
else:
choice = 'y'
if self.originPage.isDisambig() and not page.isDisambig():
- disambig = self.getFoundDisambig(page.site())
+ disambig = self.getFoundDisambig(page.site)
if disambig:
pywikibot.output(u"NOTE: Ignoring non-disambiguation page %s for
%s because disambiguation page %s has already been found." % (page, self.originPage,
disambig))
return (True, None)
else:
choice = pywikibot.inputChoice(u'WARNING: %s is a disambiguation
page, but %s doesn\'t seem to be one. Follow it anyway?' % (self.originPage,
page), ['Yes', 'No', 'Add an alternative', 'Give up'],
['y', 'n', 'a', 'g'])
elif not self.originPage.isDisambig() and page.isDisambig():
- nondisambig = self.getFoundNonDisambig(page.site())
+ nondisambig = self.getFoundNonDisambig(page.site)
if nondisambig:
pywikibot.output(u"NOTE: Ignoring disambiguation page %s for %s
because non-disambiguation page %s has already been found." % (page, self.originPage,
nondisambig))
return (True, None)
@@ -1208,8 +1208,8 @@
if choice == 'n':
return (True, None)
elif choice == 'a':
- newHint = pywikibot.input(u'Give the alternative for language %s, not
using a language code:' % page.site().language())
- alternativePage = pywikibot.Page(page.site(), newHint)
+ newHint = pywikibot.input(u'Give the alternative for language %s, not
using a language code:' % page.site.language())
+ alternativePage = pywikibot.Page(page.site, newHint)
return (True, alternativePage)
elif choice == 'g':
self.makeForcedStop(counter)
@@ -1218,7 +1218,7 @@
return (False, None)
def isIgnored(self, page):
- if page.site().language() in globalvar.neverlink:
+ if page.site.language() in globalvar.neverlink:
pywikibot.output(u"Skipping link %s to an ignored language" %
page)
return True
if page in globalvar.ignore:
@@ -1286,14 +1286,14 @@
if globalvar.skipauto:
dictName, year = page.autoFormat()
if dictName is not None:
- pywikibot.output(u'WARNING: %s:%s relates to %s:%s, which is an
auto entry %s(%s)' % (self.originPage.site().language(), self.originPage,
page.site().language(),page,dictName,year))
+ pywikibot.output(u'WARNING: %s:%s relates to %s:%s, which is an
auto entry %s(%s)' % (self.originPage.site.language(), self.originPage,
page.site.language(),page,dictName,year))
# Abort processing if the bot is running in autonomous mode.
if globalvar.autonomous:
self.makeForcedStop(counter)
# Register this fact at the todo-counter.
- counter.minus(page.site())
+ counter.minus(page.site)
# Now check whether any interwiki links should be added to the
# todo list.
@@ -1351,7 +1351,7 @@
elif not globalvar.followredirect:
if not globalvar.quiet:
pywikibot.output(u"NOTE: not following %sredirects." %
redir)
- elif page.site().family == redirectTargetPage.site().family \
+ elif page.site.family == redirectTargetPage.site.family \
and not self.skipPage(page, redirectTargetPage, counter):
if self.addIfNew(redirectTargetPage, counter, page):
if config.interwiki_shownew:
@@ -1381,7 +1381,7 @@
iw = page.langlinks()
except pywikibot.NoSuchSite:
if not globalvar.quiet:
- pywikibot.output(u"NOTE: site %s does not exist" %
page.site())
+ pywikibot.output(u"NOTE: site %s does not exist" %
page.site)
continue
(skip, alternativePage) = self.disambigMismatch(page, counter)
@@ -1394,7 +1394,7 @@
self.addIfNew(alternativePage, counter, None)
duplicate = None
- for p in self.done.filter(page.site()):
+ for p in self.done.filter(page.site):
if p != page and p.exists() and not p.isRedirectPage() and not
p.isCategoryRedirect():
duplicate = p
break
@@ -1405,7 +1405,7 @@
# Ignore the interwiki links.
iw = ()
if globalvar.lacklanguage:
- if globalvar.lacklanguage in [link.site().language() for link in
iw]:
+ if globalvar.lacklanguage in [link.site.language() for link in iw]:
iw = ()
self.workonme = False
if len(iw) < globalvar.minlinks:
@@ -1422,7 +1422,7 @@
f = codecs.open(
pywikibot.config.datafilepath('autonomous_problems.dat'),
'a', 'utf-8')
- f.write(u"* %s {Found more than one link for %s}" %
(self.originPage, page.site()))
+ f.write(u"* %s {Found more than one link for %s}" %
(self.originPage, page.site))
if config.interwiki_graph and config.interwiki_graph_url:
filename = interwiki_graph.getFilename(self.originPage, extension
= config.interwiki_graph_formats[0])
f.write(u" [%s%s graph]" % (config.interwiki_graph_url,
filename))
@@ -1453,9 +1453,9 @@
if self.addIfNew(linkedPage, counter, page):
# It is new. Also verify whether it is the second on the
# same site
- lpsite=linkedPage.site()
+ lpsite=linkedPage.site
for prevPage in self.foundIn:
- if prevPage != linkedPage and prevPage.site() == lpsite:
+ if prevPage != linkedPage and prevPage.site == lpsite:
# Still, this could be "no problem" as
either may be a
# redirect to the other. No way to find out quickly!
pywikibot.output(u"NOTE: %s: %s gives duplicate
interwiki on same site %s" % (self.originPage, page, linkedPage))
@@ -1499,8 +1499,8 @@
new = {}
for page in self.done:
if page.exists() and not page.isRedirectPage() and not
page.isCategoryRedirect():
- site = page.site()
- if site == self.originPage.site():
+ site = page.site
+ if site == self.originPage.site:
if page != self.originPage:
self.problem(u"Found link to %s" % page )
self.whereReport(page)
@@ -1624,8 +1624,8 @@
# Make sure new contains every page link, including the page we are processing
# replaceLinks will skip the site it's working on.
- if self.originPage.site() not in new:
- new[self.originPage.site()] = self.originPage
+ if self.originPage.site not in new:
+ new[self.originPage.site] = self.originPage
#self.replaceLinks(self.originPage, new, True, bot)
@@ -1634,7 +1634,7 @@
# Process all languages here
globalvar.always = False
if globalvar.limittwo:
- lclSite = self.originPage.site()
+ lclSite = self.originPage.site
lclSiteDone = False
frgnSiteDone = False
@@ -1658,7 +1658,7 @@
try:
for link in new[site].iterlanglinks():
page = pywikibot.Page(link)
- old[page.site()] = page
+ old[page.site] = page
except pywikibot.NoPage:
pywikibot.output(u"BUG>>> %s no longer
exists?" % new[site])
continue
@@ -1747,23 +1747,23 @@
# remove interwiki links to ignore
for iw in re.finditer('<!-- *\[\[(.*?:.*?)\]\] *-->', pagetext):
try:
- ignorepage = pywikibot.Page(page.site(), iw.groups()[0])
+ ignorepage = pywikibot.Page(page.site, iw.groups()[0])
except (pywikibot.NoSuchSite, pywikibot.InvalidTitle):
continue
try:
- if (new[ignorepage.site()] == ignorepage) and (ignorepage.site() !=
page.site()):
+ if (new[ignorepage.site] == ignorepage) and (ignorepage.site !=
page.site):
if (ignorepage not in interwikis):
pywikibot.output(u"Ignoring link to %(to)s for
%(from)s" % {'to': ignorepage, 'from': page})
- new.pop(ignorepage.site())
+ new.pop(ignorepage.site)
else:
pywikibot.output(u"NOTE: Not removing interwiki from
%(from)s to %(to)s (exists both commented and non-commented)" % {'to':
ignorepage, 'from': page})
except KeyError:
pass
# sanity check - the page we are fixing must be the only one for that site.
- pltmp = new[page.site()]
+ pltmp = new[page.site]
if pltmp != page:
s = u"None"
if pltmp is not None: s = pltmp
@@ -1771,25 +1771,25 @@
raise SaveError(u'BUG: sanity check failed')
# Avoid adding an iw link back to itself
- del new[page.site()]
+ del new[page.site]
# Put interwiki links into a map
old={}
for page2 in interwikis:
- old[page2.site()] = page2
+ old[page2.site] = page2
# Check what needs to get done
- mods, mcomment, adding, removing, modifying = compareLanguages(old, new, insite =
page.site())
+ mods, mcomment, adding, removing, modifying = compareLanguages(old, new, insite =
page.site)
# When running in autonomous mode without -force switch, make sure we don't
remove any items, but allow addition of the new ones
if globalvar.autonomous and not globalvar.force and len(removing) > 0:
for rmsite in removing:
- if rmsite != page.site(): # Sometimes sites have an erroneous link to
itself as an interwiki
+ if rmsite != page.site: # Sometimes sites have an erroneous link to
itself as an interwiki
rmPage = old[rmsite]
new[rmsite] = old[rmsite] #put it to new means don't delete it
pywikibot.output(u"WARNING: %s is either deleted or has a
mismatching disambiguation state." % rmPage)
# Re-Check what needs to get done
- mods, mcomment, adding, removing, modifying = compareLanguages(old, new,
insite = page.site())
+ mods, mcomment, adding, removing, modifying = compareLanguages(old, new,
insite = page.site)
if not mods:
pywikibot.output(u'No changes needed' )
@@ -1799,7 +1799,7 @@
oldtext = page.get()
template = (page.namespace() == 10)
newtext = pywikibot.replaceLanguageLinks(oldtext, new,
- site = page.site(),
+ site = page.site,
template = template)
# This is for now. Later there should be different funktions for each kind
if not botMayEdit(page):
@@ -1815,7 +1815,7 @@
# pywikibot.output(u"NOTE: Replace %s" % page)
# Determine whether we need permission to submit
ask = False
- if removing and removing != [page.site()]: # Allow for special case of a
self-pointing interwiki link
+ if removing and removing != [page.site]: # Allow for special case of a
self-pointing interwiki link
self.problem(u'Found incorrect link to %s in %s'%
(",".join([x.lang for x in removing]), page), createneed = False)
ask = True
if globalvar.force:
@@ -1833,8 +1833,8 @@
['y', 'n', 'b', 'g',
'a'])
if answer == 'b':
webbrowser.open("http://%s%s" % (
- page.site().hostname(),
- page.site().nice_get_address(page.title())
+ page.site.hostname(),
+ page.site.nice_get_address(page.title())
))
pywikibot.input(u"Press Enter when finished in browser.")
return True
@@ -1931,21 +1931,21 @@
# This assumes that there is only one interwiki link per language.
linkedPagesDict = {}
for linkedPage in linkedPages:
- linkedPagesDict[linkedPage.site()] = linkedPage
+ linkedPagesDict[linkedPage.site] = linkedPage
for expectedPage in expectedPages - linkedPages:
if expectedPage != page:
try:
- linkedPage = linkedPagesDict[expectedPage.site()]
- pywikibot.output(u"WARNING: %s: %s does not link to
%s but to %s" % (page.site().family.name, page, expectedPage, linkedPage))
+ linkedPage = linkedPagesDict[expectedPage.site]
+ pywikibot.output(u"WARNING: %s: %s does not link to
%s but to %s" % (page.site.family.name, page, expectedPage, linkedPage))
except KeyError:
- pywikibot.output(u"WARNING: %s: %s does not link to
%s" % (page.site().family.name, page, expectedPage))
+ pywikibot.output(u"WARNING: %s: %s does not link to
%s" % (page.site.family.name, page, expectedPage))
# Check for superfluous links
for linkedPage in linkedPages:
if linkedPage not in expectedPages:
# Check whether there is an alternative page on that
language.
# In this case, it was already reported above.
- if linkedPage.site() not in expectedSites:
- pywikibot.output(u"WARNING: %s: %s links to
incorrect %s" % (page.site().family.name, page, linkedPage))
+ if linkedPage.site not in expectedSites:
+ pywikibot.output(u"WARNING: %s: %s links to
incorrect %s" % (page.site.family.name, page, linkedPage))
except (socket.error, IOError):
pywikibot.output(u'ERROR: could not report backlinks')
@@ -2033,7 +2033,7 @@
if page.namespace() == 10:
loc = None
try:
- tmpl, loc = moved_links[page.site().lang]
+ tmpl, loc = moved_links[page.site.lang]
del tmpl
except KeyError:
pass
@@ -2044,7 +2044,7 @@
if self.generateUntil:
until = self.generateUntil
- if page.site().lang not in page.site().family.nocapitalize:
+ if page.site.lang not in page.site.family.nocapitalize:
until = until[0].upper()+until[1:]
if page.titleWithoutNamespace() > until:
raise StopIteration
@@ -2230,13 +2230,13 @@
def botMayEdit (page):
tmpl = []
try:
- tmpl, loc = moved_links[page.site().lang]
+ tmpl, loc = moved_links[page.site.lang]
except KeyError:
pass
if type(tmpl) != list:
tmpl = [tmpl]
try:
- tmpl += ignoreTemplates[page.site().lang]
+ tmpl += ignoreTemplates[page.site.lang]
except KeyError:
pass
tmpl += ignoreTemplates['_default']
@@ -2255,7 +2255,7 @@
for page, pagelist in hints.iteritems():
# The WarnfileReader gives us a list of pagelinks, but titletranslate.py expects
a list of strings, so we convert it back.
# TODO: This is a quite ugly hack, in the future we should maybe make
titletranslate expect a list of pagelinks.
- hintStrings = ['%s:%s' % (hintedPage.site().language(),
hintedPage.title()) for hintedPage in pagelist]
+ hintStrings = ['%s:%s' % (hintedPage.site.language(), hintedPage.title())
for hintedPage in pagelist]
bot.add(page, hints = hintStrings)
def main():
Modified: branches/rewrite/scripts/interwiki_graph.py
===================================================================
--- branches/rewrite/scripts/interwiki_graph.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/scripts/interwiki_graph.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -53,14 +53,14 @@
self.subject = subject
def getLabel(self, page):
- return (u'"\"%s:%s\""' % (page.site().language(),
+ return (u'"\"%s:%s\""' % (page.site.language(),
page.title())).encode('utf-8')
def addNode(self, page):
node = pydot.Node(self.getLabel(page), shape = 'rectangle')
node.set_URL("\"http://%s%s\""
- % (page.site().hostname(),
- page.site().get_address(page.urlname())))
+ % (page.site.hostname(),
+ page.site.get_address(page.urlname())))
node.set_style('filled')
node.set_fillcolor('white')
node.set_fontsize('11')
@@ -74,7 +74,7 @@
node.set_color('green')
node.set_style('filled,bold')
# if we found more than one valid page for this language:
- if len(filter(lambda p: p.site() == page.site() and p.exists() \
+ if len(filter(lambda p: p.site == page.site and p.exists() \
and not p.isRedirectPage(),
self.subject.foundIn.keys())) > 1:
# mark conflict by octagonal node
@@ -102,7 +102,7 @@
# opposite edge.
else:
# add edge
- if refPage.site() == page.site():
+ if refPage.site == page.site:
edge.set_color('blue')
elif not page.exists():
# mark dead links
@@ -231,8 +231,8 @@
def getFilename(page, extension = None):
- filename = '%s-%s-%s' % (page.site().family.name,
- page.site().language(),
+ filename = '%s-%s-%s' % (page.site.family.name,
+ page.site.language(),
page.titleForFilename())
if extension:
filename += '.%s' % extension
Modified: branches/rewrite/scripts/redirect.py
===================================================================
--- branches/rewrite/scripts/redirect.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/scripts/redirect.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -515,16 +515,16 @@
try:
redir_page.delete(reason, prompt = False)
except pywikibot.NoUsername:
- if targetPage.site().lang in sd_template \
- and targetPage.site().lang in reason_broken:
+ if targetPage.site.lang in sd_template \
+ and targetPage.site.lang in reason_broken:
pywikibot.output(
u"No sysop in user-config.py, put page to speedy deletion.")
content = redir_page.get(get_redirect=True)
content = pywikibot.translate(
- targetPage.site().lang,
+ targetPage.site.lang,
sd_template)+"\n"+content
summary = pywikibot.translate(
- targetPage.site().lang,
+ targetPage.site.lang,
reason_broken)
redir_page.put(content, summary)
@@ -555,7 +555,7 @@
newRedir = redir
redirList = [] # bookkeeping to detect loops
while True:
- redirList.append(u'%s:%s' % (newRedir.site().lang,
+ redirList.append(u'%s:%s' % (newRedir.site.lang,
newRedir.title(withSection=False)))
try:
targetPage = newRedir.getRedirectTarget()
@@ -604,11 +604,11 @@
pywikibot.output(
u' Links to: %s.'
% targetPage.title(asLink=True))
- if targetPage.site().sitename() == 'wikipedia:en' \
+ if targetPage.site.sitename() == 'wikipedia:en' \
and targetPage.title() == 'Target page name':
pywikibot.output(u"Skipping: Redirect source is
vandalized.")
break
- if targetPage.site() != self.site:
+ if targetPage.site != self.site:
pywikibot.output(
u'Warning: redirect target (%s) is on a different
site.'
% (targetPage.title(asLink=True)))
@@ -616,7 +616,7 @@
break # skip if automatic
# watch out for redirect loops
if redirList.count(u'%s:%s'
- % (targetPage.site().lang,
+ % (targetPage.site.lang,
targetPage.title(withSection=False))
) > 0:
pywikibot.output(
@@ -627,17 +627,17 @@
content = targetPage.get(get_redirect=True)
except pywikibot.SectionError:
content = pywikibot.Page(
- targetPage.site(),
+ targetPage.site,
targetPage.title(withSection=False)
).get(get_redirect=True)
- if targetPage.site().lang in sd_template \
- and targetPage.site().lang in sd_tagging_sum:
+ if targetPage.site.lang in sd_template \
+ and targetPage.site.lang in sd_tagging_sum:
pywikibot.output(u"Tagging redirect for deletion")
# Delete the two redirects
content = pywikibot.translate(
- targetPage.site().lang,
+ targetPage.site.lang,
sd_template)+"\n"+content
- summ = pywikibot.translate(targetPage.site().lang,
+ summ = pywikibot.translate(targetPage.site.lang,
sd_tagging_sum)
targetPage.put(content, summ)
redir.put(content, summ)
Modified: branches/rewrite/scripts/replace.py
===================================================================
--- branches/rewrite/scripts/replace.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/scripts/replace.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -416,8 +416,8 @@
continue
if choice == 'b':
webbrowser.open("http://%s%s" % (
- page.site().hostname(),
- page.site().nice_get_address(page.title())
+ page.site.hostname(),
+ page.site.nice_get_address(page.title())
))
pywikibot.input("Press Enter when finished in browser.")
original_text = page.get(get_redirect=True, force=True)
Modified: branches/rewrite/scripts/solve_disambiguation.py
===================================================================
--- branches/rewrite/scripts/solve_disambiguation.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/scripts/solve_disambiguation.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -445,10 +445,11 @@
return string[0].upper()+string[1:]
def correctcap(link, text):
- # If text links to a page with title link uncapitalized, uncapitalize link, otherwise
capitalize it
+ # If text links to a page with title link uncapitalized, uncapitalize link,
+ # otherwise capitalize it
linkupper = link.title()
linklower = linkupper[0].lower() + linkupper[1:]
- if text.find("[[%s]]"%linklower) > -1 or
text.find("[[%s|"%linklower) > -1:
+ if "[[%s]]"%linklower in text or "[[%s|"%linklower in text:
return linklower
else:
return linkupper
@@ -468,11 +469,11 @@
withTemplateInclusion=False)]
pywikibot.output(u"Found %d references." % len(refs))
# Remove ignorables
- if self.disambPage.site().family.name in ignore_title \
- and self.disambPage.site().lang \
- in ignore_title[self.disambPage.site().family.name]:
- for ig in ignore_title[self.disambPage.site().family.name
- ][self.disambPage.site().lang]:
+ if self.disambPage.site.family.name in ignore_title \
+ and self.disambPage.site.lang \
+ in ignore_title[self.disambPage.site.family.name]:
+ for ig in ignore_title[self.disambPage.site.family.name
+ ][self.disambPage.site.lang]:
for i in range(len(refs)-1, -1, -1):
if re.match(ig, refs[i].title()):
pywikibot.log(u'Ignoring page %s'
@@ -711,11 +712,11 @@
curpos = m.start() + 1
try:
foundlink = pywikibot.Link(m.group('title'),
- disambPage.site())
+ disambPage.site)
except pywikibot.Error:
continue
# ignore interwiki links
- if foundlink.site != disambPage.site():
+ if foundlink.site != disambPage.site:
continue
# check whether the link found is to disambPage
try:
@@ -884,7 +885,7 @@
continue
new_page_title = self.alternatives[choice]
repPl = pywikibot.Page(pywikibot.Link(new_page_title,
- disambPage.site()))
+ disambPage.site))
if (new_page_title[0].isupper()
or link_text[0].isupper()):
new_page_title = repPl.title()
@@ -937,14 +938,14 @@
def findAlternatives(self, disambPage):
if disambPage.isRedirectPage() and not self.primary:
- if (disambPage.site().lang in self.primary_redir_template
- and self.primary_redir_template[disambPage.site().lang]
+ if (disambPage.site.lang in self.primary_redir_template
+ and self.primary_redir_template[disambPage.site.lang]
in disambPage.templates(get_redirect = True)):
baseTerm = disambPage.title()
for template in disambPage.templatesWithParams(
get_redirect=True):
if template[0] == self.primary_redir_template[
- disambPage.site().lang] \
+ disambPage.site.lang] \
and len(template[1]) > 0:
baseTerm = template[1][1]
disambTitle = primary_topic_format[self.mylang] % baseTerm
Modified: branches/rewrite/scripts/titletranslate.py
===================================================================
--- branches/rewrite/scripts/titletranslate.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/scripts/titletranslate.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -19,7 +19,7 @@
Does some magic stuff. Returns a list of Links.
"""
result = []
- site = page.site()
+ site = page.site
if hints:
for h in hints:
if h.find(':') == -1:
@@ -64,12 +64,12 @@
# Autotranslate dates into all other languages, the rest will come from existing
interwiki links.
if auto:
# search inside all dictionaries for this link
- dictName, value = date.getAutoFormat( page.site().code, page.title() )
+ dictName, value = date.getAutoFormat( page.site.code, page.title() )
if dictName:
- if not (dictName == 'yearsBC' and
date.maxyearBC.has_key(page.site().code) and value > date.maxyearBC[page.site().code])
or (dictName == 'yearsAD' and date.maxyearAD.has_key(page.site().code) and value
> date.maxyearAD[page.site().code]):
+ if not (dictName == 'yearsBC' and
date.maxyearBC.has_key(page.site.code) and value > date.maxyearBC[page.site.code]) or
(dictName == 'yearsAD' and date.maxyearAD.has_key(page.site.code) and value >
date.maxyearAD[page.site.code]):
pywikibot.output(u'TitleTranslate: %s was recognized as %s with value
%d' % (page.title(),dictName,value))
for entryLang, entry in date.formats[dictName].iteritems():
- if entryLang != page.site().code:
+ if entryLang != page.site.code:
if dictName == 'yearsBC' and
date.maxyearBC.has_key(entryLang) and value > date.maxyearBC[entryLang]:
pass
elif dictName == 'yearsAD' and
date.maxyearAD.has_key(entryLang) and value > date.maxyearAD[entryLang]:
@@ -94,7 +94,7 @@
pywikibot.output( u'getting poisoned links for %s' % pl.title() )
- dictName, value = date.getAutoFormat( pl.site().code, pl.title() )
+ dictName, value = date.getAutoFormat( pl.site.code, pl.title() )
if dictName is not None:
pywikibot.output( u'date found in %s' % dictName )
Modified: branches/rewrite/tests/api_tests.py
===================================================================
--- branches/rewrite/tests/api_tests.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/tests/api_tests.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -68,7 +68,7 @@
self.assertEqual(len(results), 4)
for page in results:
self.assertEqual(type(page), pywikibot.Page)
- self.assertEqual(page.site(), mysite)
+ self.assertEqual(page.site, mysite)
self.assert_(page.title() in titles)
Modified: branches/rewrite/tests/page_tests.py
===================================================================
--- branches/rewrite/tests/page_tests.py 2010-06-02 13:00:08 UTC (rev 8237)
+++ branches/rewrite/tests/page_tests.py 2010-06-02 13:50:48 UTC (rev 8238)
@@ -114,7 +114,7 @@
def testSite(self):
"""Test site() method"""
- self.assertEqual(mainpage.site(), site)
+ self.assertEqual(mainpage.site, site)
self.assertEqual(mainpage.encoding(), site.encoding())
def testNamespace(self):