jenkins-bot has submitted this change and it was merged.
Change subject: replace all occurrences of .getSite() with .Site() ......................................................................
replace all occurrences of .getSite() with .Site()
as it is now an alias kept for backwards-compatibility only
also fixed some minor typos
Change-Id: I558c5970906bd900d80c7922e718fc9ecdfda995 --- M pywikibot/bot.py M pywikibot/pagegenerators.py M pywikibot/site.py M pywikibot/textlib.py M pywikibot/titletranslate.py M scripts/add_text.py M scripts/archivebot.py M scripts/basic.py M scripts/blockpageschecker.py M scripts/casechecker.py M scripts/catall.py M scripts/category.py M scripts/category_redirect.py M scripts/commons_link.py M scripts/commonscat.py M scripts/cosmetic_changes.py M scripts/create_categories.py M scripts/data_ingestion.py M scripts/delete.py M scripts/disambredir.py M scripts/editarticle.py M scripts/fixing_redirects.py M scripts/flickrripper.py M scripts/image.py M scripts/imagerecat.py M scripts/imagetransfer.py M scripts/imageuncat.py M scripts/interwiki.py M scripts/isbn.py M scripts/login.py M scripts/lonelypages.py M scripts/makecat.py M scripts/noreferences.py M scripts/nowcommons.py M scripts/pagefromfile.py M scripts/protect.py M scripts/redirect.py M scripts/reflinks.py M scripts/replace.py M scripts/replicate_wiki.py M scripts/revertbot.py M scripts/script_wui.py M scripts/solve_disambiguation.py M scripts/spamremove.py M scripts/template.py M scripts/transferbot.py M scripts/unusedfiles.py M scripts/weblinkchecker.py M scripts/welcome.py M tests/wikibase_tests.py 50 files changed, 168 insertions(+), 168 deletions(-)
Approvals: Xqt: Looks good to me, approved jenkins-bot: Verified
diff --git a/pywikibot/bot.py b/pywikibot/bot.py index 2d478c5..3aa8189 100644 --- a/pywikibot/bot.py +++ b/pywikibot/bot.py @@ -243,7 +243,7 @@ """ # if site not available it's too early to print a header (work-a-round) try: - site = pywikibot.getSite() + site = pywikibot.Site() except AttributeError: return
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py index 385e613..764fdc9 100644 --- a/pywikibot/pagegenerators.py +++ b/pywikibot/pagegenerators.py @@ -522,7 +522,7 @@
""" if site is None: - site = pywikibot.getSite() + site = pywikibot.Site() if includeredirects: if includeredirects == 'only': filterredir = True @@ -1102,7 +1102,7 @@ 'works!' % link) else: for result in results: - yield pywikibot.Page(pywikibot.getSite(), result) + yield pywikibot.Page(pywikibot.Site(), result)
# following classes just ported from version 1 without revision; not tested diff --git a/pywikibot/site.py b/pywikibot/site.py index 141f0e9..56492c5 100644 --- a/pywikibot/site.py +++ b/pywikibot/site.py @@ -396,7 +396,7 @@ re.IGNORECASE | re.UNICODE | re.DOTALL)
def sametitle(self, title1, title2): - """Return True iff title1 and title2 identify the same wiki page.""" + """Return True if title1 and title2 identify the same wiki page.""" # title1 and title2 may be unequal but still identify the same page, # if they use different aliases for the same namespace
@@ -3725,7 +3725,7 @@
def getPropertyType(self, prop): """ - This is used sepecifically because we can cache + This is used specifically because we can cache the value for a much longer time (near infinite). """ params = dict( diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py index 4e347c2..4535b4d 100644 --- a/pywikibot/textlib.py +++ b/pywikibot/textlib.py @@ -70,7 +70,7 @@
""" if site is None: - site = pywikibot.getSite() + site = pywikibot.Site()
exceptionRegexes = { 'comment': re.compile(r'(?s)<!--.*?-->'), @@ -407,7 +407,7 @@
""" if insite is None: - insite = pywikibot.getSite() + insite = pywikibot.Site() fam = insite.family # when interwiki links forward to another family, retrieve pages & other # infos there @@ -441,7 +441,7 @@ # ignore text after the pipe pagetitle = pagetitle[:pagetitle.index('|')] # we want the actual page objects rather than the titles - site = pywikibot.getSite(code=lang, fam=fam) + site = pywikibot.Site(code=lang, fam=fam) try: result[site] = pywikibot.Page(site, pagetitle, insite=insite) except pywikibot.InvalidTitle: @@ -462,7 +462,7 @@
""" if site is None: - site = pywikibot.getSite() + site = pywikibot.Site() if not site.validLanguageLinks(): return text # This regular expression will find every interwiki link, plus trailing @@ -480,7 +480,7 @@ def removeLanguageLinksAndSeparator(text, site=None, marker='', separator=''): """ Return text with all interlanguage links, plus any preceeding whitespace - and separateor occurrences removed. + and separator occurrences removed.
If a link to an unknown language is encountered, a warning is printed. If a marker is defined, that string is placed at the location of the @@ -509,7 +509,7 @@ # Find a marker that is not already in the text. marker = findmarker(oldtext) if site is None: - site = pywikibot.getSite() + site = pywikibot.Site() separator = site.family.interwiki_text_separator cseparator = site.family.category_text_separator separatorstripped = separator.strip() @@ -595,7 +595,7 @@
""" if insite is None: - insite = pywikibot.getSite() + insite = pywikibot.Site() if not links: return ''
@@ -620,7 +620,7 @@ if not sites: return [] if insite is None: - insite = pywikibot.getSite() + insite = pywikibot.Site()
sites.sort() putfirst = insite.interwiki_putfirst() @@ -653,7 +653,7 @@ """ result = [] if site is None: - site = pywikibot.getSite() + site = pywikibot.Site() # Ignore category links within nowiki tags, pre tags, includeonly tags, # and HTML comments text = removeDisabledParts(text) @@ -683,7 +683,7 @@ # NOTE: This assumes that language codes only consist of non-capital # ASCII letters and hyphens. if site is None: - site = pywikibot.getSite() + site = pywikibot.Site() catNamespace = '|'.join(site.category_namespaces()) categoryR = re.compile(r'[[\s*(%s)\s*:.*?]]\s*' % catNamespace, re.I) text = replaceExcept(text, categoryR, '', @@ -699,14 +699,14 @@ def removeCategoryLinksAndSeparator(text, site=None, marker='', separator=''): """ Return text with all category links, plus any preceeding whitespace - and separateor occurrences removed. + and separator occurrences removed.
Put the string marker after the last replacement (at the end of the text if there is no replacement).
""" if site is None: - site = pywikibot.getSite() + site = pywikibot.Site() if separator: mymarker = findmarker(text, u'@C@') newtext = removeCategoryLinks(text, site, mymarker) @@ -722,7 +722,7 @@
""" if site is None: - site = pywikibot.getSite() + site = pywikibot.Site()
catNamespace = '|'.join(site.category_namespaces()) title = oldcat.title(withNamespace=False) @@ -773,7 +773,7 @@ # Find a marker that is not already in the text. marker = findmarker(oldtext) if site is None: - site = pywikibot.getSite() + site = pywikibot.Site() if site.sitename() == 'wikipedia:de' and "{{Personendaten" in oldtext: raise pywikibot.Error("""\ The Pywikibot is no longer allowed to touch categories on the German @@ -836,7 +836,7 @@ if not categories: return '' if insite is None: - insite = pywikibot.getSite() + insite = pywikibot.Site()
if isinstance(categories[0], basestring): if categories[0][0] == '[': @@ -1166,7 +1166,7 @@
def __init__(self, site=None): if site is None: - self.site = pywikibot.getSite() + self.site = pywikibot.Site() else: self.site = site
diff --git a/pywikibot/titletranslate.py b/pywikibot/titletranslate.py index 656a5ea..6eb4c99 100644 --- a/pywikibot/titletranslate.py +++ b/pywikibot/titletranslate.py @@ -108,8 +108,8 @@ newname = entry(value) x = pywikibot.Link( newname, - pywikibot.getSite(code=entryLang, - fam=site.family)) + pywikibot.Site(code=entryLang, + fam=site.family)) if x not in result: result.append(x) # add new page return result diff --git a/scripts/add_text.py b/scripts/add_text.py index 773c789..90c0e22 100644 --- a/scripts/add_text.py +++ b/scripts/add_text.py @@ -127,7 +127,7 @@ if not addText: raise NoEnoughData('You have to specify what text you want to add!') if not summary: - summary = i18n.twtranslate(pywikibot.getSite(), 'add_text-adding', + summary = i18n.twtranslate(pywikibot.Site(), 'add_text-adding', {'adding': addText[:200]})
# When a page is tagged as "really well written" it has a star in the @@ -135,7 +135,7 @@ # format) to make the stars appear.
errorCount = 0 - site = pywikibot.getSite() + site = pywikibot.Site() pathWiki = site.family.nicepath(site.lang)
if putText: @@ -316,10 +316,10 @@ elif arg.startswith('-page'): if len(arg) == 5: generator = [pywikibot.Page( - pywikibot.getSite(), + pywikibot.Site(), pywikibot.input(u'What page do you want to use?'))] else: - generator = [pywikibot.Page(pywikibot.getSite(), arg[6:])] + generator = [pywikibot.Page(pywikibot.Site(), arg[6:])] elif arg.startswith('-excepturl'): if len(arg) == 10: regexSkipUrl = pywikibot.input(u'What text should I skip?') @@ -351,7 +351,7 @@ 'You have to specify the generator you want to use for the script!') if talkPage: generator = pagegenerators.PageWithTalkPageGenerator(generator) - site = pywikibot.getSite() + site = pywikibot.Site() for namespace in site.namespaces(): index = site.getNamespaceIndex(namespace) if index % 2 == 1 and index > 0: diff --git a/scripts/archivebot.py b/scripts/archivebot.py index 23a5a30..a3884b5 100644 --- a/scripts/archivebot.py +++ b/scripts/archivebot.py @@ -85,7 +85,7 @@
ZERO = datetime.timedelta(0)
-Site = pywikibot.getSite() +Site = pywikibot.Site()
try: # Get a constructor for the MD5 hash object import hashlib @@ -513,7 +513,7 @@ if not salt: salt = ''
- Site = pywikibot.getSite() + Site = pywikibot.Site() language = Site.language()
if not args or len(args) <= 1: diff --git a/scripts/basic.py b/scripts/basic.py index daee1a3..380d19e 100755 --- a/scripts/basic.py +++ b/scripts/basic.py @@ -49,7 +49,7 @@ what would have been changed. @type dry: boolean. """ - site = pywikibot.getSite() + site = pywikibot.Site() self.generator = generator self.dry = dry # Set the edit summary message diff --git a/scripts/blockpageschecker.py b/scripts/blockpageschecker.py index dd9bb9c..76bf750 100755 --- a/scripts/blockpageschecker.py +++ b/scripts/blockpageschecker.py @@ -200,7 +200,7 @@ ['with browser', 'with gui', 'no'], ['b', 'g', 'n'], 'n') pathWiki = site.family.nicepath(site.lang) - url = 'http://%s%s%s?&redirect=no' % (pywikibot.getSite().hostname(), + url = 'http://%s%s%s?&redirect=no' % (pywikibot.Site().hostname(), pathWiki, page.urlname()) if quest == 'b': webbrowser.open(url) @@ -250,7 +250,7 @@ pywikibot.output(u"Your project is not supported by this script.\n" u"You have to edit the script and add it!") return - site = pywikibot.getSite() + site = pywikibot.Site() site.login() if protectedpages: generator = site.protectedpages(namespace=namespace, type=protectType) diff --git a/scripts/casechecker.py b/scripts/casechecker.py index 5e53ea6..27bce4c 100644 --- a/scripts/casechecker.py +++ b/scripts/casechecker.py @@ -198,7 +198,7 @@
self.queryParams['prop'] = propParam
- self.site = pywikibot.getSite() + self.site = pywikibot.Site()
if len(self.localSuspects) != len(self.latinSuspects): raise ValueError(u'Suspects must be the same size') diff --git a/scripts/catall.py b/scripts/catall.py index 421cd76..bda6131 100755 --- a/scripts/catall.py +++ b/scripts/catall.py @@ -59,7 +59,7 @@
def make_categories(page, list, site=None): if site is None: - site = pywikibot.getSite() + site = pywikibot.Site() pllist = [] for p in list: cattitle = "%s:%s" % (site.category_namespace(), p) @@ -83,7 +83,7 @@ else: start = ' '.join(start)
- mysite = pywikibot.getSite() + mysite = pywikibot.Site()
for p in mysite.allpages(start=start): try: diff --git a/scripts/category.py b/scripts/category.py index 8a3ab0d..15ddb78 100755 --- a/scripts/category.py +++ b/scripts/category.py @@ -569,7 +569,7 @@ self.editSummary = editSummary self.overwrite = overwrite self.showImages = showImages - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.cat = pywikibot.Category(self.site, catTitle) self.list = pywikibot.Page(self.site, listTitle) self.subCats = subCats @@ -623,7 +623,7 @@ useSummaryForDeletion=True, titleRegex=None, inPlace=False, pagesonly=False): self.editSummary = editSummary - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.cat = pywikibot.Category(self.site, catTitle) # get edit summary message self.useSummaryForDeletion = useSummaryForDeletion @@ -705,7 +705,7 @@ def __init__(self, catTitle, catDB): self.catTitle = catTitle self.catDB = catDB - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.editSummary = i18n.twtranslate(self.site, 'category-changing', {'oldcat': self.catTitle, 'newcat': u''}) @@ -861,7 +861,7 @@ self.filename = filename # TODO: make maxDepth changeable with a parameter or config file entry self.maxDepth = maxDepth - self.site = pywikibot.getSite() + self.site = pywikibot.Site()
def treeview(self, cat, currentDepth=0, parent=None): ''' diff --git a/scripts/category_redirect.py b/scripts/category_redirect.py index 48bcc93..7aa1bc6 100755 --- a/scripts/category_redirect.py +++ b/scripts/category_redirect.py @@ -35,7 +35,7 @@
def __init__(self): self.cooldown = 7 # days - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.site.login() self.catprefix = self.site.namespace(14) + ":" self.log_text = [] diff --git a/scripts/commons_link.py b/scripts/commons_link.py index 797d3ec..3c7d749 100644 --- a/scripts/commons_link.py +++ b/scripts/commons_link.py @@ -52,7 +52,7 @@ for page in self.generator: try: pywikibot.output(u'\n>>>> %s <<<<' % page.title()) - commons = pywikibot.getSite().image_repository() + commons = pywikibot.Site().image_repository() commonspage = pywikibot.Page(commons, page.title()) try: getcommons = commonspage.get(get_redirect=True) @@ -83,7 +83,7 @@ if self.acceptall or choice == 'y': try: msg = i18n.twtranslate( - pywikibot.getSite(), 'commons_link-template-added') + pywikibot.Site(), 'commons_link-template-added') page.put(text, msg) except pywikibot.EditConflict: pywikibot.output( @@ -106,7 +106,7 @@ for page in self.generator: try: pywikibot.output(u'\n>>>> %s <<<<' % page.title()) - commons = pywikibot.getSite().image_repository() + commons = pywikibot.Site().image_repository() commonsCategory = pywikibot.Category(commons, 'Category:%s' % page.title()) try: @@ -140,7 +140,7 @@ if self.acceptall or choice == 'y': try: msg = i18n.twtranslate( - pywikibot.getSite(), 'commons_link-cat-template-added') + pywikibot.Site(), 'commons_link-cat-template-added') page.put(text, msg) except pywikibot.EditConflict: pywikibot.output( @@ -171,23 +171,23 @@ elif arg == ('categories'): action = 'categories' elif arg.startswith('-start:'): - start = pywikibot.Page(pywikibot.getSite(), arg[7:]) + start = pywikibot.Page(pywikibot.Site(), arg[7:]) gen = pagegenerators.AllpagesPageGenerator( start.title(withNamespace=False), namespace=start.namespace(), includeredirects=False) elif arg.startswith('-cat:'): - cat = pywikibot.Category(pywikibot.getSite(), + cat = pywikibot.Category(pywikibot.Site(), 'Category:%s' % arg[5:]) gen = 
pagegenerators.CategorizedPageGenerator(cat) elif arg.startswith('-ref:'): - ref = pywikibot.Page(pywikibot.getSite(), arg[5:]) + ref = pywikibot.Page(pywikibot.Site(), arg[5:]) gen = pagegenerators.ReferringPageGenerator(ref) elif arg.startswith('-link:'): - link = pywikibot.Page(pywikibot.getSite(), arg[6:]) + link = pywikibot.Page(pywikibot.Site(), arg[6:]) gen = pagegenerators.LinkedPageGenerator(link) elif arg.startswith('-page:'): - singlepage = pywikibot.Page(pywikibot.getSite(), arg[6:]) + singlepage = pywikibot.Page(pywikibot.Site(), arg[6:]) gen = iter([singlepage]) #else: #bug diff --git a/scripts/commonscat.py b/scripts/commonscat.py index 6112a06..a049a06 100755 --- a/scripts/commonscat.py +++ b/scripts/commonscat.py @@ -239,7 +239,7 @@ self.generator = generator self.always = always self.summary = summary - self.site = pywikibot.getSite() + self.site = pywikibot.Site()
def run(self): for page in self.generator: @@ -572,10 +572,10 @@ checkcurrent = True primaryCommonscat, commonscatAlternatives = \ CommonscatBot.getCommonscatTemplate( - pywikibot.getSite().language()) + pywikibot.Site().language()) generator = pagegenerators.NamespaceFilterPageGenerator( pagegenerators.ReferringPageGenerator( - pywikibot.Page(pywikibot.getSite(), + pywikibot.Page(pywikibot.Site(), u'Template:' + primaryCommonscat), onlyTemplateInclusion=True), ns)
diff --git a/scripts/cosmetic_changes.py b/scripts/cosmetic_changes.py index f1036b3..e7836d2 100755 --- a/scripts/cosmetic_changes.py +++ b/scripts/cosmetic_changes.py @@ -924,9 +924,9 @@
if editSummary == '': # Load default summary message. - editSummary = i18n.twtranslate(pywikibot.getSite(), + editSummary = i18n.twtranslate(pywikibot.Site(), 'cosmetic_changes-standalone') - site = pywikibot.getSite() + site = pywikibot.Site() site.login() if pageTitle: gen = iter([pywikibot.Page(pywikibot.Link(t, site)) for t in pageTitle]) diff --git a/scripts/create_categories.py b/scripts/create_categories.py index c29edbf..dcd78d4 100755 --- a/scripts/create_categories.py +++ b/scripts/create_categories.py @@ -40,7 +40,7 @@ def createCategory(page, parent, basename): title = page.title(withNamespace=False)
- newpage = pywikibot.Page(pywikibot.getSite(u'commons', u'commons'), + newpage = pywikibot.Page(pywikibot.Site(u'commons', u'commons'), u'Category:' + basename + u' ' + title) newtext = u'' newtext += u'[[Category:' + parent + u'|' + title + u']]\n' diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py index c6e2636..b30361d 100755 --- a/scripts/data_ingestion.py +++ b/scripts/data_ingestion.py @@ -56,7 +56,7 @@ return self.contents
def findDuplicateImages(self, - site=pywikibot.getSite(u'commons', u'commons')): + site=pywikibot.Site(u'commons', u'commons')): """ Takes the photo, calculates the SHA1 hash and asks the mediawiki api for a list of duplicates. @@ -106,7 +106,7 @@
class DataIngestionBot: def __init__(self, reader, titlefmt, pagefmt, - site=pywikibot.getSite(u'commons', u'commons')): + site=pywikibot.Site(u'commons', u'commons')): self.reader = reader self.titlefmt = titlefmt self.pagefmt = pagefmt @@ -145,7 +145,7 @@ bot = DataIngestionBot( reader, "%(name)s - %(set)s.%(_ext)s", ":user:valhallasw/test_template", - pywikibot.getSite('test', 'test')) + pywikibot.Site('test', 'test')) bot.run()
''' @@ -169,7 +169,7 @@
templates = configurationPage.templatesWithParams() for (template, params) in templates: - if template==u'Data ingestion': + if template == u'Data ingestion': for param in params: (field, sep, value) = param.partition(u'=')
@@ -181,16 +181,16 @@ return configuration
- def downloadPhoto(self, photoUrl = ''): + def downloadPhoto(self, photoUrl=''): """ Download the photo and store it in a StrinIO.StringIO object.
TODO: Add exception handling """ - imageFile=urllib.urlopen(photoUrl).read() + imageFile = urllib.urlopen(photoUrl).read() return StringIO.StringIO(imageFile)
- def findDuplicateImages(self, photo = None, site = pywikibot.getSite(u'commons', u'commons')): + def findDuplicateImages(self, photo=None, site=pywikibot.Site(u'commons', u'commons')): """ Takes the photo, calculates the SHA1 hash and asks the mediawiki api for a list of duplicates.
@@ -213,7 +213,7 @@ description = metadata.get(u'dc:title') identifier = metadata.get(u'dc:identifier')
- if len(description)>120: + if len(description) > 120: description = description[0 : 120]
title = u'%s - %s.jpg' % (description, identifier) @@ -225,7 +225,7 @@ A function to do date clean up. """ # Empty, make it really empty - if field==u'-': + if field == u'-': return u'' # TODO: Circa # TODO: Period @@ -279,7 +279,7 @@ pywikibot.output(u'The field "sourceFormat" is not set') return False
- if self.configuration.get('sourceFormat')==u'csv': + if self.configuration.get('sourceFormat') == u'csv': self.processCSV() else: pywikibot.output(u'%s is not a supported source format') diff --git a/scripts/delete.py b/scripts/delete.py index 90d437c..f380b43 100644 --- a/scripts/delete.py +++ b/scripts/delete.py @@ -81,7 +81,7 @@
# read command line parameters localargs = pywikibot.handleArgs() - mysite = pywikibot.getSite() + mysite = pywikibot.Site()
for arg in localargs: if arg == '-always': diff --git a/scripts/disambredir.py b/scripts/disambredir.py index 1983d6a..28c1e7f 100644 --- a/scripts/disambredir.py +++ b/scripts/disambredir.py @@ -152,7 +152,7 @@ start = " ".join(start) else: start = "!" - mysite = pywikibot.getSite() + mysite = pywikibot.Site() linktrail = mysite.linktrail() try: generator = pagegenerators.CategorizedPageGenerator( diff --git a/scripts/editarticle.py b/scripts/editarticle.py index 21d16ac..af1cff3 100755 --- a/scripts/editarticle.py +++ b/scripts/editarticle.py @@ -35,7 +35,7 @@ def __init__(self, *args): self.set_options(*args) self.setpage() - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.site.login()
def set_options(self, *args): @@ -60,7 +60,7 @@
def setpage(self): """Sets page and page title""" - site = pywikibot.getSite() + site = pywikibot.Site() pageTitle = self.options.page or pywikibot.input(u"Page to edit:") self.page = pywikibot.Page(pywikibot.Link(pageTitle, site)) if not self.options.edit_redirect and self.page.isRedirectPage(): @@ -85,7 +85,7 @@ if new and old != new: pywikibot.showDiff(old, new) changes = pywikibot.input(u"What did you change?") - comment = i18n.twtranslate(pywikibot.getSite(), 'editarticle-edit', + comment = i18n.twtranslate(pywikibot.Site(), 'editarticle-edit', {'description': changes}) try: self.page.put(new, comment=comment, minorEdit=False, diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py index ff105bc..54ef42a 100644 --- a/scripts/fixing_redirects.py +++ b/scripts/fixing_redirects.py @@ -65,7 +65,7 @@ """ Based on the method of the same name in solve_disambiguation.py """ - mysite = pywikibot.getSite() + mysite = pywikibot.Site() linktrail = mysite.linktrail()
# make a backup of the original text so we can show the changes later @@ -147,7 +147,7 @@
def workon(page): - mysite = pywikibot.getSite() + mysite = pywikibot.Site() try: text = page.get() except pywikibot.IsRedirectPage: @@ -204,7 +204,7 @@ else: genFactory.handleArg(arg)
- mysite = pywikibot.getSite() + mysite = pywikibot.Site() if mysite.sitename() == 'wikipedia:nl': pywikibot.output( u'\03{lightred}There is consensus on the Dutch Wikipedia that bots should not be used to fix redirects.\03{default}') @@ -212,7 +212,7 @@
if featured: featuredList = i18n.translate(mysite, featured_articles) - ref = pywikibot.Page(pywikibot.getSite(), featuredList) + ref = pywikibot.Page(pywikibot.Site(), featuredList) gen = pagegenerators.ReferringPageGenerator(ref) gen = pagegenerators.NamespaceFilterPageGenerator(gen, [0]) if not gen: diff --git a/scripts/flickrripper.py b/scripts/flickrripper.py index d36275d..d66902b 100644 --- a/scripts/flickrripper.py +++ b/scripts/flickrripper.py @@ -118,7 +118,7 @@
def findDuplicateImages(photo=None, - site=pywikibot.getSite(u'commons', u'commons')): + site=pywikibot.Site(u'commons', u'commons')): """ Takes the photo, calculates the SHA1 hash and asks the mediawiki api for a list of duplicates.
@@ -285,7 +285,7 @@ newFilename = filename skip = False #pywikibot.output(newPhotoDescription) - #if (pywikibot.Page(title=u'File:'+ filename, site=pywikibot.getSite()).exists()): + #if (pywikibot.Page(title=u'File:'+ filename, site=pywikibot.Site()).exists()): # I should probably check if the hash is the same and if not upload it under a different name #pywikibot.output(u'File:' + filename + u' already exists!') #else: @@ -504,7 +504,7 @@
def main(): - site = pywikibot.getSite(u'commons', u'commons') + site = pywikibot.Site(u'commons', u'commons') #imagerecat.initLists()
#Get the api key diff --git a/scripts/image.py b/scripts/image.py index fe50f48..da7b0ca 100644 --- a/scripts/image.py +++ b/scripts/image.py @@ -114,7 +114,7 @@ self.loose = loose
# get edit summary message - mysite = pywikibot.getSite() + mysite = pywikibot.Site() if summary: self.editSummary = summary elif self.newImage: @@ -135,7 +135,7 @@ # empty string if there are none.
replacements = [] - site = pywikibot.getSite() + site = pywikibot.Site()
if not site.nocapitalize: case = re.escape(self.oldImage[0].upper() + \ @@ -190,7 +190,7 @@ if not oldImage: pywikibot.showHelp('image') else: - mysite = pywikibot.getSite() + mysite = pywikibot.Site() ns = mysite.image_namespace() oldImagePage = pywikibot.ImagePage(mysite, ns + ':' + oldImage) gen = pagegenerators.FileLinksGenerator(oldImagePage) diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py index 7016d18..114c88d 100644 --- a/scripts/imagerecat.py +++ b/scripts/imagerecat.py @@ -321,7 +321,7 @@ """ If a category is a redirect, replace the category with the target. """ result = [] for cat in categories: - categoryPage = pywikibot.Page(pywikibot.getSite(u'commons', u'commons'), + categoryPage = pywikibot.Page(pywikibot.Site(u'commons', u'commons'), cat, ns=14) if categoryPage.isCategoryRedirect(): result.append( @@ -453,7 +453,7 @@ global search_wikis global hint_wiki
- site = pywikibot.getSite(u'commons', u'commons') + site = pywikibot.Site(u'commons', u'commons') for arg in pywikibot.handleArgs(): if arg == '-onlyfilter': onlyFilter = True diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py index b2dd90d..42b6203 100644 --- a/scripts/imagetransfer.py +++ b/scripts/imagetransfer.py @@ -289,7 +289,7 @@ imagelist = [imagePage] else: imagePage = (page.imagelinks(followRedirects=True)).result( - {'title': page.title(), 'ns': pywikibot.getSite().image_namespace()}) + {'title': page.title(), 'ns': pywikibot.Site().image_namespace()}) imagelist = [imagePage]
while len(imagelist) > 0: @@ -350,23 +350,23 @@ # connect the title's parts with spaces if pageTitle != []: pageTitle = ' '.join(pageTitle) - page = pywikibot.Page(pywikibot.getSite(), pageTitle) + page = pywikibot.Page(pywikibot.Site(), pageTitle) # if no page title was given as an argument, and none was # read from a file, query the user if not page: pageTitle = pywikibot.input(u'Which page to check:') - page = pywikibot.Page(pywikibot.getSite(), pageTitle) + page = pywikibot.Page(pywikibot.Site(), pageTitle) # generator which will yield only a single Page gen = iter([page])
if not targetLang and not targetFamily: - targetSite = pywikibot.getSite('commons', 'commons') + targetSite = pywikibot.Site('commons', 'commons') else: if not targetLang: - targetLang = pywikibot.getSite().language + targetLang = pywikibot.Site().language if not targetFamily: - targetFamily = pywikibot.getSite().family - targetSite = pywikibot.getSite(targetLang, targetFamily) + targetFamily = pywikibot.Site().family + targetSite = pywikibot.Site(targetLang, targetFamily) bot = ImageTransferBot(gen, interwiki=interwiki, targetSite=targetSite, keep_name=keep_name) bot.run() diff --git a/scripts/imageuncat.py b/scripts/imageuncat.py index 2396261..decbdca 100755 --- a/scripts/imageuncat.py +++ b/scripts/imageuncat.py @@ -1342,7 +1342,7 @@ genFactory = pagegenerators.GeneratorFactory()
# use the default imagerepository normally commons - site = pywikibot.getSite().image_repository() + site = pywikibot.Site().image_repository() site.login() for arg in pywikibot.handleArgs(*args): if arg.startswith('-yesterday'): diff --git a/scripts/interwiki.py b/scripts/interwiki.py index a1eb9f8..7881a72 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -931,7 +931,7 @@ hints=hints, auto=globalvar.auto, removebrackets=globalvar.hintnobracket, - site=pywikibot.getSite()) + site=pywikibot.Site()) for page in pages: if globalvar.contentsondisk: page = StoredPage(page) @@ -1653,7 +1653,7 @@ frgnSiteDone = False
for siteCode in lclSite.family.languages_by_size: - site = pywikibot.getSite(siteCode) + site = pywikibot.Site(siteCode) if (not lclSiteDone and site == lclSite) or \ (not frgnSiteDone and site != lclSite and site in new): if site == lclSite: @@ -2117,7 +2117,7 @@ self.generateUntil = until
def dump(self, append=True): - site = pywikibot.getSite() + site = pywikibot.Site() dumpfn = pywikibot.config.datafilepath( 'data', 'interwiki-dumps', @@ -2218,8 +2218,8 @@ # because we have to wait before submitting another modification to # go live. Select any language from counts. oc = self.counts - if pywikibot.getSite() in oc: - return pywikibot.getSite() + if pywikibot.Site() in oc: + return pywikibot.Site() for lang in oc: count = self.counts[lang] if count > max: @@ -2230,7 +2230,7 @@ def selectQuerySite(self): """Select the site the next query should go out for.""" # How many home-language queries we still have? - mycount = self.counts.get(pywikibot.getSite(), 0) + mycount = self.counts.get(pywikibot.Site(), 0) # Do we still have enough subjects to work on for which the # home language has been retrieved? This is rough, because # some subjects may need to retrieve a second home-language page! @@ -2253,8 +2253,8 @@ # If we have a few, getting the home language is a good thing. if not globalvar.restoreAll: try: - if self.counts[pywikibot.getSite()] > 4: - return pywikibot.getSite() + if self.counts[pywikibot.Site()] > 4: + return pywikibot.Site() except KeyError: pass # If getting the home language doesn't make sense, see how many @@ -2500,7 +2500,7 @@ elif globalvar.summary: globalvar.summary += u'; '
- site = pywikibot.getSite() + site = pywikibot.Site() site.login() # ensure that we don't try to change main page try: @@ -2569,7 +2569,7 @@ if not singlePageTitle and not opthintsonly: singlePageTitle = pywikibot.input(u'Which page to check:') if singlePageTitle: - singlePage = pywikibot.Page(pywikibot.getSite(), singlePageTitle) + singlePage = pywikibot.Page(pywikibot.Site(), singlePageTitle) else: singlePage = None bot.add(singlePage, hints=globalvar.hints) diff --git a/scripts/isbn.py b/scripts/isbn.py index 3e6a722..ee3d136 100755 --- a/scripts/isbn.py +++ b/scripts/isbn.py @@ -1403,7 +1403,7 @@ self.format = format self.always = always self.isbnR = re.compile(r'(?<=ISBN )(?P<code>[\d-]+[Xx]?)') - self.comment = i18n.twtranslate(pywikibot.getSite(), 'isbn-formatting') + self.comment = i18n.twtranslate(pywikibot.Site(), 'isbn-formatting')
def treat(self, page): try: @@ -1503,7 +1503,7 @@ if not genFactory.handleArg(arg): pageTitle.append(arg)
- site = pywikibot.getSite() + site = pywikibot.Site() site.login() if pageTitle: gen = iter([pywikibot.Page(pywikibot.Link(t, site)) diff --git a/scripts/login.py b/scripts/login.py index 6de961e..5b4f25f 100755 --- a/scripts/login.py +++ b/scripts/login.py @@ -91,12 +91,12 @@ else: namedict = config.usernames else: - site = pywikibot.getSite() + site = pywikibot.Site() namedict = {site.family.name: {site.code: None}} for familyName in namedict: for lang in namedict[familyName]: try: - site = pywikibot.getSite(code=lang, fam=familyName) + site = pywikibot.Site(code=lang, fam=familyName) if logout: site.logout() else: diff --git a/scripts/lonelypages.py b/scripts/lonelypages.py index 579199e..0ca1a18 100644 --- a/scripts/lonelypages.py +++ b/scripts/lonelypages.py @@ -116,14 +116,14 @@ nwlimit = 50 # Default: 50 pages else: nwlimit = int(arg[10:]) - generator = pywikibot.getSite().newpages(number=nwlimit) + generator = pywikibot.Site().newpages(number=nwlimit) nwpages = True elif arg == '-always': always = True else: genFactory.handleArg(arg) # Retrive the site - wikiSite = pywikibot.getSite() + wikiSite = pywikibot.Site()
if not generator: generator = genFactory.getCombinedGenerator() diff --git a/scripts/makecat.py b/scripts/makecat.py index eddde36..73db257 100644 --- a/scripts/makecat.py +++ b/scripts/makecat.py @@ -52,7 +52,7 @@
def isdate(s): """returns true if s is a date or year """ - dict, val = date.getAutoFormat(pywikibot.getSite().language(), s) + dict, val = date.getAutoFormat(pywikibot.Site().language(), s) return dict is not None
@@ -159,7 +159,7 @@ pywikibot.output(u"l: Give a list of the pages to check") elif answer == 'a': pagetitle = raw_input("Specify page to add:") - page = pywikibot.Page(pywikibot.getSite(), pagetitle) + page = pywikibot.Page(pywikibot.Site(), pagetitle) if not page in checked.keys(): include(page) elif answer == 'x': @@ -219,7 +219,7 @@ workingcatname = raw_input("Which page to start with? ") else: workingcatname = ' '.join(workingcatname) - mysite = pywikibot.getSite() + mysite = pywikibot.Site() workingcatname = unicode(workingcatname, 'utf-8') pywikibot.setAction(i18n.twtranslate(mysite, 'makecat-create', {'cat': workingcatname})) workingcat = pywikibot.Category(mysite, diff --git a/scripts/noreferences.py b/scripts/noreferences.py index 2bc7200..9c91521 100755 --- a/scripts/noreferences.py +++ b/scripts/noreferences.py @@ -445,7 +445,7 @@ for entry in dump.parse(): text = pywikibot.removeDisabledParts(entry.text) if self.refR.search(text) and not self.referencesR.search(text): - yield pywikibot.Page(pywikibot.getSite(), entry.title) + yield pywikibot.Page(pywikibot.Site(), entry.title)
class NoReferencesBot: @@ -453,7 +453,7 @@ def __init__(self, generator, always=False): self.generator = generator self.always = always - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.comment = i18n.twtranslate(self.site, 'noreferences-add-tag')
self.refR = re.compile('</ref>', re.IGNORECASE) @@ -462,12 +462,12 @@ re.IGNORECASE | re.DOTALL) try: self.referencesTemplates = referencesTemplates[ - pywikibot.getSite().family.name][pywikibot.getSite().lang] + pywikibot.Site().family.name][pywikibot.Site().lang] except KeyError: self.referencesTemplates = [] try: self.referencesText = referencesSubstitute[ - pywikibot.getSite().family.name][pywikibot.getSite().lang] + pywikibot.Site().family.name][pywikibot.Site().lang] except KeyError: self.referencesText = u'<references />'
@@ -659,7 +659,7 @@ pywikibot.output(u"Page %s is a disambig; skipping." % page.title(asLink=True)) continue - if pywikibot.getSite().sitename() == 'wikipedia:en' and \ + if pywikibot.Site().sitename() == 'wikipedia:en' and \ page.isIpEdit(): pywikibot.output( u"Page %s is edited by IP. Possible vandalized" @@ -705,12 +705,12 @@ pageTitle.append(arg)
if pageTitle: - page = pywikibot.Page(pywikibot.getSite(), ' '.join(pageTitle)) + page = pywikibot.Page(pywikibot.Site(), ' '.join(pageTitle)) gen = iter([page]) if not gen: gen = genFactory.getCombinedGenerator() if not gen: - site = pywikibot.getSite() + site = pywikibot.Site() try: cat = maintenance_category[site.family.name][site.lang] except: diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py index e92d84a..bce2a16 100644 --- a/scripts/nowcommons.py +++ b/scripts/nowcommons.py @@ -185,7 +185,7 @@
class NowCommonsDeleteBot: def __init__(self): - self.site = pywikibot.getSite() + self.site = pywikibot.Site() if repr(self.site) == 'commons:commons': sys.exit('Do not run this bot on Commons!')
@@ -308,7 +308,7 @@ return urllib.quote(encodedTitle)
def run(self): - commons = pywikibot.getSite('commons', 'commons') + commons = pywikibot.Site('commons', 'commons') comment = i18n.translate(self.site, nowCommonsMessage)
for page in self.getPageGenerator(): diff --git a/scripts/pagefromfile.py b/scripts/pagefromfile.py index cd0c83d..0ed52db 100644 --- a/scripts/pagefromfile.py +++ b/scripts/pagefromfile.py @@ -161,7 +161,7 @@ self.put(title, contents)
def put(self, title, contents): - mysite = pywikibot.getSite() + mysite = pywikibot.Site()
page = pywikibot.Page(mysite, title) # Show the title of the page we're working on. diff --git a/scripts/protect.py b/scripts/protect.py index 7642c62..70035c0 100644 --- a/scripts/protect.py +++ b/scripts/protect.py @@ -125,7 +125,7 @@
# read command line parameters localargs = pywikibot.handleArgs() - mysite = pywikibot.getSite() + mysite = pywikibot.Site()
for arg in pywikibot.handleArgs(*args): if arg == '-always': diff --git a/scripts/redirect.py b/scripts/redirect.py index 1d85134..7d451c4 100755 --- a/scripts/redirect.py +++ b/scripts/redirect.py @@ -80,7 +80,7 @@ def __init__(self, xmlFilename=None, namespaces=[], offset=-1, use_move_log=False, use_api=False, start=None, until=None, number=None, step=None): - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.site.login() self.xmlFilename = xmlFilename self.namespaces = namespaces @@ -146,7 +146,7 @@ # remove leading and trailing whitespace target = target.strip('_') # capitalize the first letter - if not pywikibot.getSite().nocapitalize: + if not pywikibot.Site().nocapitalize: source = source[:1].upper() + source[1:] target = target[:1].upper() + target[1:] if '#' in target: @@ -374,7 +374,7 @@ class RedirectRobot: def __init__(self, action, generator, always=False, number=None, delete=False): - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.action = action self.generator = generator self.always = always diff --git a/scripts/reflinks.py b/scripts/reflinks.py index 430fb34..ee7037c 100644 --- a/scripts/reflinks.py +++ b/scripts/reflinks.py @@ -179,7 +179,7 @@ self.xmlStart = xmlStart self.namespaces = namespaces self.skipping = bool(xmlStart) - self.site = pywikibot.getSite() + self.site = pywikibot.Site()
import xmlreader dump = xmlreader.XmlDump(xmlFilename) @@ -212,7 +212,7 @@ def __init__(self, link, name): self.refname = name self.link = link - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.linkComment = i18n.twtranslate(self.site, 'reflinks-comment') self.url = re.sub(u'#.*', '', self.link) self.title = None @@ -290,7 +290,7 @@ u'(?i).*name\s*=\s*(?P<quote>"?)\s*(?P<name>.+)\s*(?P=quote).*') self.GROUPS = re.compile( u'(?i).*group\s*=\s*(?P<quote>"?)\s*(?P<group>.+)\s*(?P=quote).*') - self.autogen = i18n.twtranslate(pywikibot.getSite(), 'reflinks-autogen') + self.autogen = i18n.twtranslate(pywikibot.Site(), 'reflinks-autogen')
def process(self, text): # keys are ref groups @@ -401,7 +401,7 @@ self.acceptall = acceptall self.limit = limit self.ignorepdf = ignorepdf - self.site = pywikibot.getSite() + self.site = pywikibot.Site() # Check manual = 'mw:Manual:Pywikibot/refLinks' if self.site.family.name == 'wikipedia': diff --git a/scripts/replace.py b/scripts/replace.py index 4d8714a..ad9c11b 100755 --- a/scripts/replace.py +++ b/scripts/replace.py @@ -268,7 +268,7 @@ self.recursive = recursive self.site = site if self.site is None: - self.site = pywikibot.getSite() + self.site = pywikibot.Site() if addedCat: cat_ns = site.category_namespaces()[0] self.addedCat = pywikibot.Page(self.site, @@ -553,7 +553,7 @@ commandline_replacements[1])) if not summary_commandline: edit_summary = i18n.twtranslate( - pywikibot.getSite(), 'replace-replacing', + pywikibot.Site(), 'replace-replacing', {'description': ' (-%s +%s)' % (commandline_replacements[0], commandline_replacements[1])} ) @@ -568,7 +568,7 @@ for i in range(0, len(commandline_replacements), 2)] replacementsDescription = '(%s)' % ', '.join( [('-' + pair[0] + ' +' + pair[1]) for pair in pairs]) - edit_summary = i18n.twtranslate(pywikibot.getSite(), + edit_summary = i18n.twtranslate(pywikibot.Site(), 'replace-replacing', {'description': replacementsDescription}) @@ -591,7 +591,7 @@ change += ' & -' + old + ' +' + new replacements.append((old, new)) if not summary_commandline: - default_summary_message = i18n.twtranslate(pywikibot.getSite(), + default_summary_message = i18n.twtranslate(pywikibot.Site(), 'replace-replacing', {'description': change}) pywikibot.output(u'The summary message will default to: %s' @@ -615,10 +615,10 @@ regex = fix['regex'] if "msg" in fix: if isinstance(fix['msg'], basestring): - edit_summary = i18n.twtranslate(pywikibot.getSite(), + edit_summary = i18n.twtranslate(pywikibot.Site(), str(fix['msg'])) else: - edit_summary = pywikibot.translate(pywikibot.getSite(), + edit_summary = pywikibot.translate(pywikibot.Site(), 
fix['msg']) if "exceptions" in fix: exceptions = fix['exceptions'] @@ -678,7 +678,7 @@ LIMIT 200""" % (whereClause, exceptClause) gen = pagegenerators.MySQLPageGenerator(query) elif PageTitles: - pages = [pywikibot.Page(pywikibot.getSite(), PageTitle) + pages = [pywikibot.Page(pywikibot.Site(), PageTitle) for PageTitle in PageTitles] gen = iter(pages)
diff --git a/scripts/replicate_wiki.py b/scripts/replicate_wiki.py index 67d073d..082b4ef 100644 --- a/scripts/replicate_wiki.py +++ b/scripts/replicate_wiki.py @@ -64,7 +64,7 @@
sites = options.destination_wiki
- self.original = getSite(original_wiki, family) + self.original = Site(original_wiki, family)
if options.namespace and 'help' in options.namespace: nsd = namespaces(self.original) @@ -72,7 +72,7 @@ pywikibot.output('%s %s' % (k, nsd[k])) sys.exit()
- self.sites = map(lambda s: getSite(s, family), sites) + self.sites = map(lambda s: Site(s, family), sites)
self.differences = {} self.user_diff = {} diff --git a/scripts/revertbot.py b/scripts/revertbot.py index 3fc19f4..c8900fd 100644 --- a/scripts/revertbot.py +++ b/scripts/revertbot.py @@ -38,7 +38,7 @@ item = iterator.next() except StopIteration: self.log(u'Fetching new batch of contributions') - data = list(pywikibot.getSite().usercontribs(user=self.site.username(), namespaces=ns, total=max)) + data = list(pywikibot.Site().usercontribs(user=self.site.username(), namespaces=ns, total=max)) never_continue = True iterator = iter(data) else: @@ -69,12 +69,12 @@ return 'top' in item
def revert(self, item): - if len(pywikibot.Page(pywikibot.getSite(), item['title']).fullVersionHistory()) > 1: - rev = pywikibot.Page(pywikibot.getSite(), item['title']).fullVersionHistory()[1] + if len(pywikibot.Page(pywikibot.Site(), item['title']).fullVersionHistory()) > 1: + rev = pywikibot.Page(pywikibot.Site(), item['title']).fullVersionHistory()[1] else: return False
- comment = i18n.twtranslate(pywikibot.getSite(), 'revertbot-revert', {'revid': rev[0], 'author': rev[2], 'timestamp': rev[1]}) + comment = i18n.twtranslate(pywikibot.Site(), 'revertbot-revert', {'revid': rev[0], 'author': rev[2], 'timestamp': rev[1]})
if self.comment: comment += ': ' + self.comment @@ -107,7 +107,7 @@ def main(): for arg in pywikibot.handleArgs(): continue - bot = myRevertBot(site=pywikibot.getSite()) + bot = myRevertBot(site=pywikibot.Site()) bot.revert_contribs()
if __name__ == "__main__": diff --git a/scripts/script_wui.py b/scripts/script_wui.py index 122c77c..3a27753 100755 --- a/scripts/script_wui.py +++ b/scripts/script_wui.py @@ -261,7 +261,7 @@ rev = page.latestRevision() link = page.permalink(oldid=rev) # append to page - outpage = pywikibot.Page(pywikibot.getSite(), bot_config['ConfCSSoutput']) + outpage = pywikibot.Page(pywikibot.Site(), bot_config['ConfCSSoutput']) text = outpage.get() outpage.put(text + u"\n== Simulation vom %s mit [%s code:%s] ==\n<pre>\n%s</pre>\n\n" % (pywikibot.Timestamp.now().isoformat(' '), link, rev, buffer)) # comment = pywikibot.translate(self.site.lang, bot_config['msg'])) @@ -277,7 +277,7 @@ __simulate = pywikibot.config.simulate __sys_argv = sys.argv
- site = pywikibot.getSite() + site = pywikibot.Site() site.login() chan = '#' + site.language() + '.' + site.family.name bot = ScriptWUIBot(site, chan, site.user() + "_WUI", "irc.wikimedia.org") diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py index 9a65328..be176cc 100644 --- a/scripts/solve_disambiguation.py +++ b/scripts/solve_disambiguation.py @@ -471,7 +471,7 @@ self.main_only = main_only self.minimum = minimum
- self.mysite = pywikibot.getSite() + self.mysite = pywikibot.Site() self.mylang = self.mysite.language() self.comment = None
@@ -1040,7 +1040,7 @@ generator = pagegenerators.TextfilePageGenerator(filename=arg[6:]) elif arg.startswith('-pos:'): if arg[5] != ':': - mysite = pywikibot.getSite() + mysite = pywikibot.Site() page = pywikibot.Page(pywikibot.Link(arg[5:], mysite)) if page.exists(): alternatives.append(page.title()) @@ -1064,10 +1064,10 @@ try: if len(arg) <= len('-start:'): generator = pagegenerators.CategorizedPageGenerator( - pywikibot.getSite().disambcategory()) + pywikibot.Site().disambcategory()) else: generator = pagegenerators.CategorizedPageGenerator( - pywikibot.getSite().disambcategory(), + pywikibot.Site().disambcategory(), start=arg[7:]) generator = pagegenerators.NamespaceFilterPageGenerator( generator, [0]) diff --git a/scripts/spamremove.py b/scripts/spamremove.py index 2c6c50b..b024b61 100755 --- a/scripts/spamremove.py +++ b/scripts/spamremove.py @@ -57,7 +57,7 @@ pywikibot.output(u"No spam site specified.") return
- mysite = pywikibot.getSite() + mysite = pywikibot.Site() pages = mysite.exturlusage(spamSite) if namespaces: pages = pagegenerators.NamespaceFilterPageGenerator(pages, namespaces) diff --git a/scripts/template.py b/scripts/template.py index 7e6978c..c3f6063 100755 --- a/scripts/template.py +++ b/scripts/template.py @@ -163,7 +163,7 @@
def __iter__(self): """Yield page objects until the entire XML dump has been read.""" - mysite = pywikibot.getSite() + mysite = pywikibot.Site() dump = xmlreader.XmlDump(self.xmlfilename) # regular expression to find the original template. # {{vfd}} does the same thing as {{Vfd}}, so both will be found. @@ -172,7 +172,7 @@ templatePatterns = [] for template in self.templates: templatePattern = template.title(withNamespace=False) - if not pywikibot.getSite().nocapitalize: + if not pywikibot.Site().nocapitalize: templatePattern = '[%s%s]%s' % (templatePattern[0].upper(), templatePattern[0].lower(), templatePattern[1:]) @@ -245,7 +245,7 @@
replacements = [] exceptions = {} - site = pywikibot.getSite() + site = pywikibot.Site() for old, new in self.templates.items(): namespaces = list(site.namespace(10, all=True)) if not site.nocapitalize: diff --git a/scripts/transferbot.py b/scripts/transferbot.py index 15a9910..50372da 100644 --- a/scripts/transferbot.py +++ b/scripts/transferbot.py @@ -51,7 +51,7 @@ def main(): tohandle = pywikibot.handleArgs()
- fromsite = pywikibot.getSite() + fromsite = pywikibot.Site() tolang = fromsite.code tofamily = fromsite.family.name prefix = '' diff --git a/scripts/unusedfiles.py b/scripts/unusedfiles.py index 690205d..9529e70 100644 --- a/scripts/unusedfiles.py +++ b/scripts/unusedfiles.py @@ -84,7 +84,7 @@ if choice == 'a': always = True if always or choice == 'y': - page.put(text, pywikibot.translate(pywikibot.getSite(), comment)) + page.put(text, pywikibot.translate(pywikibot.Site(), comment))
def main(): @@ -97,13 +97,13 @@ if arg == '-start': start = True
- mysite = pywikibot.getSite() + mysite = pywikibot.Site() # If anything needs to be prepared, you can do it here - template_image = pywikibot.translate(pywikibot.getSite(), + template_image = pywikibot.translate(pywikibot.Site(), template_to_the_image) - template_user = pywikibot.translate(pywikibot.getSite(), + template_user = pywikibot.translate(pywikibot.Site(), template_to_the_user).encode("utf-8") - except_text_translated = pywikibot.translate(pywikibot.getSite(), + except_text_translated = pywikibot.translate(pywikibot.Site(), except_text).encode("utf-8") basicgenerator = pagegenerators.UnusedFilesGenerator() generator = pagegenerators.PreloadingGenerator(basicgenerator) diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py index de5b86d..4ca5627 100644 --- a/scripts/weblinkchecker.py +++ b/scripts/weblinkchecker.py @@ -187,7 +187,7 @@ self.xmlStart = xmlStart self.namespaces = namespaces self.skipping = bool(xmlStart) - self.site = pywikibot.getSite() + self.site = pywikibot.Site()
dump = xmlreader.XmlDump(xmlFilename) self.parser = dump.parse() @@ -527,7 +527,7 @@
def __init__(self, reportThread): self.reportThread = reportThread - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.semaphore = threading.Semaphore() self.datfilename = pywikibot.config.datafilepath( 'deadlinks', 'deadlinks-%s-%s.dat' % (self.site.family.name, self.site.code)) @@ -682,7 +682,7 @@
if archiveURL: archiveMsg = u'\n' + \ - i18n.twtranslate(pywikibot.getSite(), + i18n.twtranslate(pywikibot.Site(), 'weblinkchecker-archive_msg', {'URL': archiveURL}) else: @@ -690,7 +690,7 @@ # The caption will default to "Dead link". But if there is # already such a caption, we'll use "Dead link 2", # "Dead link 3", etc. - caption = i18n.twtranslate(pywikibot.getSite(), + caption = i18n.twtranslate(pywikibot.Site(), 'weblinkchecker-caption') i = 1 count = u'' @@ -702,13 +702,13 @@ caption += count content += '\n\n== %s ==\n\n%s\n\n%s%s--~~~~' % \ (caption, - i18n.twtranslate(pywikibot.getSite(), + i18n.twtranslate(pywikibot.Site(), 'weblinkchecker-report'), errorReport, archiveMsg) comment = u'[[%s#%s|→]] %s' % \ (talkPage.title(), caption, - i18n.twtranslate(pywikibot.getSite(), + i18n.twtranslate(pywikibot.Site(), 'weblinkchecker-summary')) try: talkPage.put(content, comment) @@ -778,7 +778,7 @@ pageTitles = list(pageTitles) pageTitles.sort() for pageTitle in pageTitles: - page = pywikibot.Page(pywikibot.getSite(), pageTitle) + page = pywikibot.Page(pywikibot.Site(), pageTitle) yield page
@@ -843,7 +843,7 @@
if singlePageTitle: singlePageTitle = ' '.join(singlePageTitle) - page = pywikibot.Page(pywikibot.getSite(), singlePageTitle) + page = pywikibot.Page(pywikibot.Site(), singlePageTitle) gen = iter([page])
if xmlFilename: diff --git a/scripts/welcome.py b/scripts/welcome.py index b81b474..77ab8ef 100644 --- a/scripts/welcome.py +++ b/scripts/welcome.py @@ -433,7 +433,7 @@
def __init__(self): #Initial - self.site = pywikibot.getSite() + self.site = pywikibot.Site() self.bname = dict()
self._totallyCount = 0 diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py index 14a5020..9773da5 100644 --- a/tests/wikibase_tests.py +++ b/tests/wikibase_tests.py @@ -70,7 +70,7 @@
def test_iterlinks_page_object(self): page = [pg for pg in self.wdp.iterlinks() if pg.site.language() == 'af'][0] - self.assertEquals(page, pywikibot.Page(pywikibot.getSite('af', 'wikipedia'), u'New York Stad')) + self.assertEquals(page, pywikibot.Page(pywikibot.Site('af', 'wikipedia'), u'New York Stad'))
def test_iterlinks_filtering(self): wikilinks = list(self.wdp.iterlinks('wikipedia'))
pywikibot-commits@lists.wikimedia.org