jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/445356 )
Change subject: pywikibot/page.py: Fix N803 and N806 naming errors ......................................................................
pywikibot/page.py: Fix N803 and N806 naming errors
Change-Id: I4bbf0b903d7d2f034fdda81777b2bf7dd3c0ce55 --- M pywikibot/bot.py M pywikibot/cosmetic_changes.py M pywikibot/data/api.py M pywikibot/exceptions.py M pywikibot/family.py M pywikibot/interwiki_graph.py M pywikibot/login.py M pywikibot/page.py M pywikibot/pagegenerators.py M pywikibot/proofreadpage.py M pywikibot/site.py M pywikibot/textlib.py M pywikibot/titletranslate.py M scripts/archive/featured.py M scripts/archivebot.py M scripts/blockpageschecker.py M scripts/capitalize_redirects.py M scripts/casechecker.py M scripts/category.py M scripts/category_redirect.py M scripts/checkimages.py M scripts/clean_sandbox.py M scripts/commons_link.py M scripts/commonscat.py M scripts/create_categories.py M scripts/data_ingestion.py M scripts/djvutext.py M scripts/fixing_redirects.py M scripts/harvest_template.py M scripts/illustrate_wikidata.py M scripts/imagecopy.py M scripts/imagecopy_self.py M scripts/imagerecat.py M scripts/imagetransfer.py M scripts/interwiki.py M scripts/interwikidata.py M scripts/isbn.py M scripts/listpages.py M scripts/makecat.py M scripts/misspelling.py M scripts/movepages.py M scripts/newitem.py M scripts/noreferences.py M scripts/nowcommons.py M scripts/protect.py M scripts/redirect.py M scripts/reflinks.py M scripts/replace.py M scripts/replicate_wiki.py M scripts/revertbot.py M scripts/script_wui.py M scripts/selflink.py M scripts/solve_disambiguation.py M scripts/surnames_redirects.py M scripts/template.py M scripts/templatecount.py M scripts/touch.py M scripts/transferbot.py M scripts/unusedfiles.py M scripts/weblinkchecker.py M scripts/wikisourcetext.py M tests/api_tests.py M tests/category_tests.py M tests/deletionbot_tests.py M tests/page_tests.py M tests/proofreadpage_tests.py M tests/site_tests.py M tests/textlib_tests.py M tests/user_tests.py M tests/wikibase_edit_tests.py M tox.ini 71 files changed, 704 insertions(+), 665 deletions(-)
Approvals: Xqt: Looks good to me, approved jenkins-bot: Verified
diff --git a/pywikibot/bot.py b/pywikibot/bot.py index dcfd908..dbb2193 100644 --- a/pywikibot/bot.py +++ b/pywikibot/bot.py @@ -1292,7 +1292,7 @@ """ if oldtext.rstrip() == newtext.rstrip(): pywikibot.output(u'No changes were needed on %s' - % page.title(asLink=True)) + % page.title(as_link=True)) return
self.current_page = page @@ -2121,7 +2121,7 @@ if not summary: # FIXME: i18n summary = ('Bot: New item with sitelink from %s' - % page.title(asLink=True, insite=self.repo)) + % page.title(as_link=True, insite=self.repo))
if data is None: data = {} diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py index c33cdd1..fba3ef4 100755 --- a/pywikibot/cosmetic_changes.py +++ b/pywikibot/cosmetic_changes.py @@ -370,7 +370,7 @@ # categories.sort()? (T100265) # TODO: Get main categories from Wikidata? main = pywikibot.Category(self.site, 'Category:' + self.title, - sortKey=' ') + sort_key=' ') if main in categories: categories.pop(categories.index(main)) categories.insert(0, main) diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py index be03877..dfc7d39 100644 --- a/pywikibot/data/api.py +++ b/pywikibot/data/api.py @@ -1563,7 +1563,7 @@ return value.strftime(pywikibot.Timestamp.ISO8601Format) elif isinstance(value, pywikibot.page.BasePage): assert(value.site == self.site) - return value.title(withSection=False) + return value.title(with_section=False) else: return unicode(value)
diff --git a/pywikibot/exceptions.py b/pywikibot/exceptions.py index f8cb2fd..6441d3e 100644 --- a/pywikibot/exceptions.py +++ b/pywikibot/exceptions.py @@ -160,7 +160,7 @@ raise Error("PageRelatedError is abstract. Can't instantiate it!")
self.page = page - self.title = page.title(asLink=True) + self.title = page.title(as_link=True) self.site = page.site
if '%(' in self.message and ')s' in self.message: diff --git a/pywikibot/family.py b/pywikibot/family.py index a334fe5..5ef26c5 100644 --- a/pywikibot/family.py +++ b/pywikibot/family.py @@ -1070,9 +1070,9 @@ cr_page = pywikibot.Page(site, cr_template, ns=tpl_ns) # retrieve all redirects to primary template from API, # add any that are not already on the list - for t in cr_page.backlinks(filterRedirects=True, + for t in cr_page.backlinks(filter_redirects=True, namespaces=tpl_ns): - newtitle = t.title(withNamespace=False) + newtitle = t.title(with_ns=False) if newtitle not in cr_template_tuple: cr_set.add(newtitle) self._catredirtemplates[code] = list(cr_template_tuple) + list(cr_set) diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py index dcff1cd..0e472f8 100644 --- a/pywikibot/interwiki_graph.py +++ b/pywikibot/interwiki_graph.py @@ -166,7 +166,7 @@ node = pydot.Node(self.getLabel(page), shape='rectangle') node.set_URL("\"http://%s%s\"" % (page.site.hostname(), - page.site.get_address(page.title(asUrl=True)))) + page.site.get_address(page.title(as_url=True)))) node.set_style('filled') node.set_fillcolor('white') node.set_fontsize('11') @@ -204,7 +204,7 @@ elif self.graph.get_edge(sourceLabel, targetLabel): pywikibot.output( u'BUG: Tried to create duplicate edge from %s to %s' - % (refPage.title(asLink=True), page.title(asLink=True))) + % (refPage.title(as_link=True), page.title(as_link=True))) # duplicate edges would be bad because then get_edge() would # give a list of edges, not a single edge when we handle the # opposite edge. 
diff --git a/pywikibot/login.py b/pywikibot/login.py index 3558dcc..56fad1a 100644 --- a/pywikibot/login.py +++ b/pywikibot/login.py @@ -175,7 +175,7 @@ return True else: for linked_page in botlist_page.linkedPages(): - if linked_page.title(withNamespace=False) == self.username: + if linked_page.title(with_ns=False) == self.username: return True return False else: diff --git a/pywikibot/page.py b/pywikibot/page.py index ef3c2c9..2cd1fb6 100644 --- a/pywikibot/page.py +++ b/pywikibot/page.py @@ -110,7 +110,7 @@ # TODO: other "expected" error types to catch? except pywikibot.Error as edit_err: err = edit_err # edit_err will be deleted in the end of the scope - link = self.title(asLink=True) + link = self.title(as_link=True) pywikibot.log('Error saving page %s (%s)\n' % (link, err), exc_info=True) if not callback and not do_async: @@ -189,7 +189,7 @@ raise ValueError(u'Title cannot be None.')
if isinstance(source, pywikibot.site.BaseSite): - self._link = Link(title, source=source, defaultNamespace=ns) + self._link = Link(title, source=source, default_namespace=ns) self._revisions = {} elif isinstance(source, Page): # copy all of source's attributes to this object @@ -200,7 +200,7 @@ if title: # overwrite title self._link = Link(title, source=source.site, - defaultNamespace=ns) + default_namespace=ns) elif isinstance(source, Link): self._link = source self._revisions = {} @@ -283,26 +283,29 @@ self.site.loadpageinfo(self) return self._pageid
- @deprecated_args(decode=None, savetitle="asUrl") - def title(self, underscore=False, withNamespace=True, - withSection=True, asUrl=False, asLink=False, - allowInterwiki=True, forceInterwiki=False, textlink=False, + @deprecated_args( + decode=None, savetitle='as_url', withNamespace='with_ns', + withSection='with_section', forceInterwiki='force_interwiki', + asUrl='as_url', asLink='as_link', allowInterwiki='allow_interwiki') + def title(self, underscore=False, with_ns=True, + with_section=True, as_url=False, as_link=False, + allow_interwiki=True, force_interwiki=False, textlink=False, as_filename=False, insite=None): """ Return the title of this Page, as a Unicode string.
@param underscore: (not used with asLink) if true, replace all ' ' characters with '_' - @param withNamespace: if false, omit the namespace prefix. If this + @param with_ns: if false, omit the namespace prefix. If this option is false and used together with asLink return a labeled link like [[link|label]] - @param withSection: if false, omit the section - @param asUrl: (not used with asLink) if true, quote title as if in an + @param with_section: if false, omit the section + @param as_url: (not used with asLink) if true, quote title as if in an URL - @param asLink: if true, return the title in the form of a wikilink - @param allowInterwiki: (only used if asLink is true) if true, format + @param as_link: if true, return the title in the form of a wikilink + @param allow_interwiki: (only used if asLink is true) if true, format the link as an interwiki link if necessary - @param forceInterwiki: (only used if asLink is true) if true, always + @param force_interwiki: (only used if asLink is true) if true, always format the link as an interwiki link @param textlink: (only used if asLink is true) if true, place a ':' before Category: and Image: links @@ -315,19 +318,19 @@ """ title = self._link.canonical_title() label = self._link.title - if withSection and self._link.section: + if with_section and self._link.section: section = u"#" + self._link.section else: section = u'' - if asLink: + if as_link: if insite: target_code = insite.code target_family = insite.family.name else: target_code = config.mylang target_family = config.family - if forceInterwiki or \ - (allowInterwiki and + if force_interwiki or \ + (allow_interwiki and (self.site.family.name != target_family or self.site.code != target_code)): if self.site.family.name != target_family \ @@ -342,20 +345,20 @@ elif textlink and (self.is_filepage() or self.is_categorypage()): title = u':%s' % title elif self.namespace() == 0 and not section: - withNamespace = True - if withNamespace: + with_ns = True + if with_ns: 
return u'[[%s%s]]' % (title, section) else: return u'[[%s%s|%s]]' % (title, section, label) - if not withNamespace and self.namespace() != 0: + if not with_ns and self.namespace() != 0: title = label + section else: title += section - if underscore or asUrl: + if underscore or as_url: title = title.replace(u' ', u'_') - if asUrl: - encodedTitle = title.encode(self.site.encoding()) - title = quote_from_bytes(encodedTitle, safe='') + if as_url: + encoded_title = title.encode(self.site.encoding()) + title = quote_from_bytes(encoded_title, safe='') if as_filename: # Replace characters that are not possible in file names on some # systems. @@ -378,7 +381,7 @@
def __unicode__(self): """Return a unicode string representation.""" - return self.title(asLink=True, forceInterwiki=True) + return self.title(as_link=True, force_interwiki=True)
def __repr__(self): """Return a more complete string representation.""" @@ -415,7 +418,7 @@ def full_url(self): """Return the full URL.""" return self.site.base_url(self.site.article_path + - self.title(asUrl=True)) + self.title(as_url=True))
def autoFormat(self): """ @@ -431,7 +434,7 @@ from pywikibot import date self._autoFormat = date.getAutoFormat( self.site.lang, - self.title(withNamespace=False) + self.title(with_ns=False) ) return self._autoFormat
@@ -537,9 +540,9 @@ @rtype: unicode """ if percent_encoded: - title = self.title(asUrl=True) + title = self.title(as_url=True) else: - title = self.title(asUrl=False).replace(' ', '_') + title = self.title(as_url=False).replace(' ', '_') return '{0}//{1}{2}/index.php?title={3}&oldid={4}'.format( self.site.protocol() + ':' if with_protocol else '', self.site.hostname(), @@ -703,7 +706,7 @@
self._expanded_text = self.site.expand_text( self.text, - title=self.title(withSection=False), + title=self.title(with_section=False), includecomments=includecomments) return self._expanded_text
@@ -819,10 +822,10 @@ """ found = False if self.isRedirectPage(): - staticKeys = self.site.getmagicwords('staticredirect') + static_keys = self.site.getmagicwords('staticredirect') text = self.get(get_redirect=True, force=force) - if staticKeys: - for key in staticKeys: + if static_keys: + for key in static_keys: if key in text: found = True break @@ -840,7 +843,7 @@ self._catredirect = False catredirs = self.site.category_redirects() for template, args in self.templatesWithParams(): - if template.title(withNamespace=False) in catredirs: + if template.title(with_ns=False) in catredirs: if args: # Get target (first template argument) p = pywikibot.Page( @@ -850,12 +853,12 @@ else: pywikibot.warning( 'Category redirect target {0} on {1} is not a ' - 'category'.format(p.title(asLink=True), - self.title(asLink=True))) + 'category'.format(p.title(as_link=True), + self.title(as_link=True))) else: pywikibot.warning( 'No target found for category redirect on ' - + self.title(asLink=True)) + + self.title(as_link=True)) break return bool(self._catredirect)
@@ -905,15 +908,15 @@ return if self.isTalkPage(): if self.namespace() == 1: - return Page(self.site, self.title(withNamespace=False)) + return Page(self.site, self.title(with_ns=False)) else: return Page(self.site, "%s:%s" % (self.site.namespace(ns - 1), - self.title(withNamespace=False))) + self.title(with_ns=False))) else: return Page(self.site, "%s:%s" % (self.site.namespace(ns + 1), - self.title(withNamespace=False))) + self.title(with_ns=False)))
def is_categorypage(self): """Return True if the page is a Category, False otherwise.""" @@ -970,7 +973,7 @@ disambigpages = Page(self.site, "MediaWiki:Disambiguationspage") if disambigpages.exists(): - disambigs = {link.title(withNamespace=False) + disambigs = {link.title(with_ns=False) for link in disambigpages.linkedPages() if link.namespace() == 10} elif self.site.has_mediawiki_message('disambiguationspage'): @@ -985,18 +988,23 @@ else: # Normalize template capitalization self.site._disambigtemplates = {first_upper(t) for t in distl} - templates = {tl.title(withNamespace=False) for tl in self.templates()} + templates = {tl.title(with_ns=False) for tl in self.templates()} disambigs = set() # always use cached disambig templates disambigs.update(self.site._disambigtemplates) # see if any template on this page is in the set of disambigs - disambigInPage = disambigs.intersection(templates) - return self.namespace() != 10 and len(disambigInPage) > 0 + disambig_in_page = disambigs.intersection(templates) + return self.namespace() != 10 and len(disambig_in_page) > 0
- @deprecated_args(step=None) - def getReferences(self, follow_redirects=True, withTemplateInclusion=True, - onlyTemplateInclusion=False, redirectsOnly=False, - namespaces=None, total=None, content=False): + @deprecated_args( + step=None, withTemplateInclusion='with_template_inclusion', + onlyTemplateInclusion='only_template_inclusion', + redirectsOnly='filter_redirects') + def getReferences( + self, follow_redirects=True, with_template_inclusion=True, + only_template_inclusion=False, filter_redirects=False, + namespaces=None, total=None, content=False + ): """ Return an iterator all pages that refer to or embed the page.
@@ -1005,11 +1013,11 @@
@param follow_redirects: if True, also iterate pages that link to a redirect pointing to the page. - @param withTemplateInclusion: if True, also iterate pages where self + @param with_template_inclusion: if True, also iterate pages where self is used as a template. - @param onlyTemplateInclusion: if True, only iterate pages where self + @param only_template_inclusion: if True, only iterate pages where self is used as a template. - @param redirectsOnly: if True, only iterate redirects to self. + @param filter_redirects: if True, only iterate redirects to self. @param namespaces: only iterate pages in these namespaces @param total: iterate no more than this number of pages in total @param content: if True, retrieve the content of the current version @@ -1023,23 +1031,24 @@ return self.site.pagereferences( self, follow_redirects=follow_redirects, - filter_redirects=redirectsOnly, - with_template_inclusion=withTemplateInclusion, - only_template_inclusion=onlyTemplateInclusion, + filter_redirects=filter_redirects, + with_template_inclusion=with_template_inclusion, + only_template_inclusion=only_template_inclusion, namespaces=namespaces, total=total, content=content )
- @deprecated_args(step=None) - def backlinks(self, followRedirects=True, filterRedirects=None, + @deprecated_args(step=None, followRedirects='follow_redirects', + filterRedirects='filter_redirects') + def backlinks(self, follow_redirects=True, filter_redirects=None, namespaces=None, total=None, content=False): """ Return an iterator for pages that link to this page.
- @param followRedirects: if True, also iterate pages that link to a + @param follow_redirects: if True, also iterate pages that link to a redirect pointing to the page. - @param filterRedirects: if True, only iterate redirects; if False, + @param filter_redirects: if True, only iterate redirects; if False, omit redirects; if None, do not filter @param namespaces: only iterate pages in these namespaces @param total: iterate no more than this number of pages in total @@ -1048,8 +1057,8 @@ """ return self.site.pagebacklinks( self, - follow_redirects=followRedirects, - filter_redirects=filterRedirects, + follow_redirects=follow_redirects, + filter_redirects=filter_redirects, namespaces=namespaces, total=total, content=content @@ -1171,7 +1180,7 @@ restrictions = archived
for template, params in templates: - title = template.title(withNamespace=False) + title = template.title(with_ns=False) if restrictions: if title in restrictions: return False @@ -1267,7 +1276,7 @@ def _save(self, summary=None, watch=None, minor=True, botflag=None, cc=None, quiet=False, **kwargs): """Helper function for save().""" - link = self.title(asLink=True) + link = self.title(as_link=True) if cc or cc is None and config.cosmetic_changes: summary = self._cosmetic_changes_hook(summary)
@@ -1312,11 +1321,10 @@ # cc depends on page directly and via several other imports from pywikibot.cosmetic_changes import ( CANCEL_MATCH, CosmeticChangesToolkit) - ccToolkit = CosmeticChangesToolkit(self.site, - namespace=self.namespace(), - pageTitle=self.title(), - ignore=CANCEL_MATCH) - self.text = ccToolkit.change(old) + cc_toolkit = CosmeticChangesToolkit( + self.site, namespace=self.namespace(), pageTitle=self.title(), + ignore=CANCEL_MATCH) + self.text = cc_toolkit.change(old) if summary and old.strip().replace( '\r\n', '\n') != self.text.strip().replace('\r\n', '\n'): from pywikibot import i18n @@ -1582,21 +1590,22 @@ """ return self.site.pageimages(self, total=total, content=content)
- @deprecated_args(nofollow_redirects=None, get_redirect=None, step=None) - def categories(self, withSortKey=False, total=None, content=False): + @deprecated_args(nofollow_redirects=None, get_redirect=None, step=None, + withSortKey='with_sort_key') + def categories(self, with_sort_key=False, total=None, content=False): """ Iterate categories that the article is in.
- @param withSortKey: if True, include the sort key in each Category. + @param with_sort_key: if True, include the sort key in each Category. @param total: iterate no more than this number of pages in total @param content: if True, retrieve the content of the current version of each category description page (default False) @return: a generator that yields Category objects. @rtype: generator """ - # FIXME: bug T75561: withSortKey is ignored by Site.pagecategories - if withSortKey: - raise NotImplementedError('withSortKey is not implemented') + # FIXME: bug T75561: with_sort_key is ignored by Site.pagecategories + if with_sort_key: + raise NotImplementedError('with_sort_key is not implemented')
return self.site.pagecategories(self, total=total, content=content)
@@ -1836,30 +1845,31 @@ """ self.site.merge_history(self, dest, timestamp, reason)
- @deprecate_arg("throttle", None) - def move(self, newtitle, reason=None, movetalkpage=True, sysop=False, - deleteAndMove=False, safe=True): + @deprecated_args( + throttle=None, deleteAndMove='noredirect', movetalkpage='movetalk') + def move(self, newtitle, reason=None, movetalk=True, sysop=False, + noredirect=False, safe=True): """ Move this page to a new title.
@param newtitle: The new page title. @param reason: The edit summary for the move. - @param movetalkpage: If true, move this page's talk page (if it exists) + @param movetalk: If true, move this page's talk page (if it exists) @param sysop: Try to move using sysop account, if available - @param deleteAndMove: if move succeeds, delete the old page + @param noredirect: if move succeeds, delete the old page (usually requires sysop privileges, depending on wiki settings) @param safe: If false, attempt to delete existing page at newtitle (if there is one) and then move this page to that title """ if reason is None: pywikibot.output(u'Moving %s to [[%s]].' - % (self.title(asLink=True), newtitle)) + % (self.title(as_link=True), newtitle)) reason = pywikibot.input(u'Please enter a reason for the move:') # TODO: implement "safe" parameter (Is this necessary ?) # TODO: implement "sysop" parameter return self.site.movepage(self, newtitle, reason, - movetalk=movetalkpage, - noredirect=deleteAndMove) + movetalk=movetalk, + noredirect=noredirect)
@deprecate_arg("throttle", None) def delete(self, reason=None, prompt=True, mark=False, quit=False): @@ -1875,7 +1885,7 @@ @param quit: show also the quit option, when asking for confirmation. """ if reason is None: - pywikibot.output(u'Deleting %s.' % (self.title(asLink=True))) + pywikibot.output('Deleting %s.' % (self.title(as_link=True))) reason = pywikibot.input(u'Please enter a reason for the deletion:')
# If user is a sysop, delete the page @@ -1884,7 +1894,7 @@ if prompt and not hasattr(self.site, '_noDeletePrompt'): answer = pywikibot.input_choice( u'Do you want to delete %s?' % self.title( - asLink=True, forceInterwiki=True), + as_link=True, force_interwiki=True), [('Yes', 'y'), ('No', 'n'), ('All', 'a')], 'n', automatic_quit=quit) if answer == 'a': @@ -1898,8 +1908,8 @@ else: answer = pywikibot.input_choice( u"Can't delete %s; do you want to mark it " - "for deletion instead?" % self.title(asLink=True, - forceInterwiki=True), + 'for deletion instead?' % self.title(as_link=True, + force_interwiki=True), [('Yes', 'y'), ('No', 'n'), ('All', 'a')], 'n', automatic_quit=False) if answer == 'a': @@ -1936,23 +1946,24 @@ self._deletedRevs[rev['timestamp']] = rev yield rev['timestamp']
- def getDeletedRevision(self, timestamp, retrieveText=False): + @deprecated_args(retrieveText='content') + def getDeletedRevision(self, timestamp, content=False): """ Return a particular deleted revision by timestamp.
@return: a list of [date, editor, comment, text, restoration - marker]. text will be None, unless retrieveText is True (or has + marker]. text will be None, unless content is True (or has been retrieved earlier). If timestamp is not found, returns None. @rtype: list """ if hasattr(self, "_deletedRevs"): if timestamp in self._deletedRevs and ( - (not retrieveText) + not content or 'content' in self._deletedRevs[timestamp]): return self._deletedRevs[timestamp] for item in self.site.deletedrevs(self, start=timestamp, - get_text=retrieveText, total=1): + content=content, total=1): # should only be one item with one revision if item['title'] == self.title: if "revisions" in item: @@ -2003,7 +2014,7 @@ if reason is None: warn('Not passing a reason for undelete() is deprecated.', DeprecationWarning) - pywikibot.output(u'Undeleting %s.' % (self.title(asLink=True))) + pywikibot.output('Undeleting %s.' % (self.title(as_link=True))) reason = pywikibot.input(u'Please enter a reason for the undeletion:') self.site.undelete_page(self, reason, undelete_revs)
@@ -2058,7 +2069,7 @@
if reason is None: pywikibot.output(u'Preparing to protection change of %s.' - % (self.title(asLink=True))) + % (self.title(as_link=True))) reason = pywikibot.input(u'Please enter a reason for the action:') if unprotect: warn(u'"unprotect" argument of protect() is deprecated', @@ -2074,7 +2085,7 @@ if prompt and not hasattr(self.site, '_noProtectPrompt'): answer = pywikibot.input_choice( u'Do you want to change the protection level of %s?' - % self.title(asLink=True, forceInterwiki=True), + % self.title(as_link=True, force_interwiki=True), [('Yes', 'y'), ('No', 'n'), ('All', 'a')], 'n', automatic_quit=False) if answer == 'a': @@ -2083,24 +2094,28 @@ if answer == 'y': return self.site.protect(self, protections, reason, **kwargs)
- @deprecated_args(comment='summary') - def change_category(self, oldCat, newCat, summary=None, sortKey=None, - inPlace=True, include=[]): + @deprecated_args( + comment='summary', oldCat='old_cat', newCat='new_cat', + sortKey='sort_key', inPlace='in_place') + def change_category( + self, old_cat, new_cat, summary=None, sort_key=None, in_place=True, + include=[] + ): """ Remove page from oldCat and add it to newCat.
- @param oldCat: category to be removed - @type oldCat: Category - @param newCat: category to be added, if any - @type newCat: Category or None + @param old_cat: category to be removed + @type old_cat: Category + @param new_cat: category to be added, if any + @type new_cat: Category or None
@param summary: string to use as an edit summary
- @param sortKey: sortKey to use for the added category. + @param sort_key: sortKey to use for the added category. Unused if newCat is None, or if inPlace=True If sortKey=True, the sortKey used for oldCat will be used.
- @param inPlace: if True, change categories in place rather than + @param in_place: if True, change categories in place rather than rearranging them.
@param include: list of tags not to be disabled by default in relevant @@ -2120,34 +2135,34 @@
if not self.canBeEdited(): pywikibot.output(u"Can't edit %s, skipping it..." - % self.title(asLink=True)) + % self.title(as_link=True)) return False
- if oldCat not in cats: + if old_cat not in cats: if self.namespace() != 10: pywikibot.error('%s is not in category %s!' - % (self.title(asLink=True), oldCat.title())) + % (self.title(as_link=True), old_cat.title())) else: pywikibot.output('%s is not in category %s, skipping...' - % (self.title(asLink=True), oldCat.title())) + % (self.title(as_link=True), old_cat.title())) return False
- # This prevents the bot from adding newCat if it is already present. - if newCat in cats: - newCat = None + # This prevents the bot from adding new_cat if it is already present. + if new_cat in cats: + new_cat = None
oldtext = self.text - if inPlace or self.namespace() == 10: - newtext = textlib.replaceCategoryInPlace(oldtext, oldCat, newCat, + if in_place or self.namespace() == 10: + newtext = textlib.replaceCategoryInPlace(oldtext, old_cat, new_cat, site=self.site) else: - old_cat_pos = cats.index(oldCat) - if newCat: - if sortKey is True: - # Fetch sortKey from oldCat in current page. - sortKey = cats[old_cat_pos].sortKey - cats[old_cat_pos] = Category(self.site, newCat.title(), - sortKey=sortKey) + old_cat_pos = cats.index(old_cat) + if new_cat: + if sort_key is True: + # Fetch sort_key from old_cat in current page. + sort_key = cats[old_cat_pos].sortKey + cats[old_cat_pos] = Category(self.site, new_cat.title(), + sort_key=sort_key) else: cats.pop(old_cat_pos)
@@ -2166,11 +2181,11 @@ return True except pywikibot.PageSaveRelatedError as error: pywikibot.output(u'Page %s not saved: %s' - % (self.title(asLink=True), + % (self.title(as_link=True), error)) except pywikibot.NoUsername: pywikibot.output(u'Page %s not saved; sysop privileges ' - u'required.' % self.title(asLink=True)) + 'required.' % self.title(as_link=True)) return False
@deprecated('Page.is_flow_page()') @@ -2196,8 +2211,8 @@ @deprecated("Page.title(withNamespace=False)") def titleWithoutNamespace(self, underscore=False): """DEPRECATED: use self.title(withNamespace=False) instead.""" - return self.title(underscore=underscore, withNamespace=False, - withSection=False) + return self.title(underscore=underscore, with_ns=False, + with_section=False)
@deprecated("Page.title(as_filename=True)") def titleForFilename(self): @@ -2207,13 +2222,17 @@ @deprecated("Page.title(withSection=False)") def sectionFreeTitle(self, underscore=False): """DEPRECATED: use self.title(withSection=False) instead.""" - return self.title(underscore=underscore, withSection=False) + return self.title(underscore=underscore, with_section=False)
@deprecated("Page.title(asLink=True)") - def aslink(self, forceInterwiki=False, textlink=False, noInterwiki=False): + @deprecated_args( + forceInterwiki='force_interwiki', noInterwiki='no_interwiki') + def aslink( + self, force_interwiki=False, textlink=False, no_interwiki=False + ): """DEPRECATED: use self.title(asLink=True) instead.""" - return self.title(asLink=True, forceInterwiki=forceInterwiki, - allowInterwiki=not noInterwiki, textlink=textlink) + return self.title(as_link=True, force_interwiki=force_interwiki, + allow_interwiki=not no_interwiki, textlink=textlink)
@deprecated("Page.title(asUrl=True)") def urlname(self): @@ -2221,7 +2240,7 @@
DEPRECATED: use self.title(asUrl=True) instead. """ - return self.title(asUrl=True) + return self.title(as_url=True)
@deprecated('Page.protection()') def getRestrictions(self): @@ -2298,7 +2317,7 @@ for template in templates: try: link = pywikibot.Link(template[0], self.site, - defaultNamespace=10) + default_namespace=10) if link.canonical_title() not in titles: continue except pywikibot.Error: @@ -2380,8 +2399,8 @@ prefix = '' suffix = ''
- target_link = target_page.title(asLink=True, textlink=True, - allowInterwiki=False) + target_link = target_page.title(as_link=True, textlink=True, + allow_interwiki=False) target_link = u'#{0} {1}'.format(self.site.redirect(), target_link) self.text = prefix + target_link + suffix if save: @@ -2461,7 +2480,7 @@ """ if not hasattr(self, '_imagePageHtml'): path = "%s/index.php?title=%s" \ - % (self.site.scriptpath(), self.title(asUrl=True)) + % (self.site.scriptpath(), self.title(as_url=True)) self._imagePageHtml = http.request(self.site, path) return self._imagePageHtml
@@ -2536,8 +2555,7 @@ req = http.fetch(self.fileUrl()) h = hashlib.md5() h.update(req.raw) - md5Checksum = h.hexdigest() - return md5Checksum + return h.hexdigest()
@deprecated("FilePage.latest_file_info.sha1") def getFileSHA1Sum(self): @@ -2682,7 +2700,7 @@ @raise: IOError if filename cannot be written for any reason. """ if filename is None: - filename = self.title(as_filename=True, withNamespace=False) + filename = self.title(as_filename=True, with_ns=False)
filename = os.path.expanduser(filename)
@@ -2724,38 +2742,39 @@
"""A page in the Category: namespace."""
- @deprecate_arg("insite", None) - def __init__(self, source, title=u"", sortKey=None): + @deprecated_args(insite=None, sortKey='sort_key') + def __init__(self, source, title='', sort_key=None): """ Initializer.
All parameters are the same as for Page() Initializer. """ - self.sortKey = sortKey + self.sortKey = sort_key Page.__init__(self, source, title, ns=14) if self.namespace() != 14: raise ValueError(u"'%s' is not in the category namespace!" % title)
- @deprecated_args(forceInterwiki=None, textlink=None, noInterwiki=None) - def aslink(self, sortKey=None): + @deprecated_args( + forceInterwiki=None, textlink=None, noInterwiki=None, + sortKey='sort_key') + def aslink(self, sort_key=None): """ Return a link to place a page in this Category.
Use this only to generate a "true" category link, not for interwikis or text links to category pages.
- @param sortKey: The sort key for the article to be placed in this + @param sort_key: The sort key for the article to be placed in this Category; if omitted, default sort key is used. - @type sortKey: (optional) unicode + @type sort_key: (optional) unicode """ - key = sortKey or self.sortKey + key = sort_key or self.sortKey if key is not None: - titleWithSortKey = '%s|%s' % (self.title(withSection=False), - key) + title_with_sort_key = self.title(with_section=False) + '|' + key else: - titleWithSortKey = self.title(withSection=False) - return '[[%s]]' % titleWithSortKey + title_with_sort_key = self.title(with_section=False) + return '[[%s]]' % title_with_sort_key
@deprecated_args(startFrom=None, cacheResults=None, step=None) def subcategories(self, recurse=False, total=None, content=False): @@ -2971,26 +2990,26 @@ # either) if not isinstance(cat, Category): cat = self.site.namespaces.CATEGORY + ':' + cat - targetCat = Category(self.site, cat) + target_cat = Category(self.site, cat) else: - targetCat = cat - if targetCat.exists(): + target_cat = cat + if target_cat.exists(): pywikibot.output(u'Target page %s already exists!' - % targetCat.title(), + % target_cat.title(), level=pywikibot.logging.WARNING) return False else: pywikibot.output('Moving text from %s to %s.' - % (self.title(), targetCat.title())) + % (self.title(), target_cat.title())) authors = ', '.join(self.contributingUsers()) try: - creationSummary = message % (self.title(), authors) + creation_summary = message % (self.title(), authors) except TypeError: - creationSummary = message - targetCat.put(self.get(), creationSummary) + creation_summary = message + target_cat.put(self.get(), creation_summary) return True
- def copyAndKeep(self, catname, cfdTemplates, message): + def copyAndKeep(self, catname, cfd_templates, message): """ Copy partial category page text (not contents) to a new title.
@@ -3004,7 +3023,7 @@ existed.
@param catname: New category title (without namespace) - @param cfdTemplates: A list (or iterator) of templates to be removed + @param cfd_templates: A list (or iterator) of templates to be removed from the page text @return: True if copying was successful, False if target page already existed. @@ -3013,19 +3032,19 @@ # I don't see why we need this as part of the framework either # move to scripts/category.py? catname = self.site.namespaces.CATEGORY + ':' + catname - targetCat = Category(self.site, catname) - if targetCat.exists(): + target_cat = Category(self.site, catname) + if target_cat.exists(): pywikibot.warning(u'Target page %s already exists!' - % targetCat.title()) + % target_cat.title()) return False else: pywikibot.output( 'Moving text from %s to %s.' - % (self.title(), targetCat.title())) + % (self.title(), target_cat.title())) authors = ', '.join(self.contributingUsers()) - creationSummary = message % (self.title(), authors) + creation_summary = message % (self.title(), authors) newtext = self.get() - for regexName in cfdTemplates: + for regexName in cfd_templates: matchcfd = re.compile(r"{{%s.*?}}" % regexName, re.IGNORECASE) newtext = matchcfd.sub('', newtext) matchcomment = re.compile( @@ -3036,7 +3055,7 @@ while (newtext[pos:pos + 1] == "\n"): pos = pos + 1 newtext = newtext[pos:] - targetCat.put(newtext, creationSummary) + target_cat.put(newtext, creation_summary) return True
@property @@ -3200,9 +3219,9 @@ @rtype: unicode """ if self._isAutoblock: - return u'#' + self.title(withNamespace=False) + return '#' + self.title(with_ns=False) else: - return self.title(withNamespace=False) + return self.title(with_ns=False)
def isRegistered(self, force=False): """ @@ -3389,7 +3408,7 @@ if subpage: subpage = u'/' + subpage return Page(Link(self.username + subpage, - self.site, defaultNamespace=3)) + self.site, default_namespace=3))
def send_email(self, subject, text, ccme=False): """ @@ -4319,7 +4338,7 @@ # clear id, and temporarily store data needed to lazy loading the item del i.id i._site = page.site - i._title = page.title(withSection=False) + i._title = page.title(with_section=False) if not lazy_load and not i.exists(): raise pywikibot.NoPage(i) page._item = i @@ -4500,12 +4519,12 @@ data = {} for obj in sitelinks: if isinstance(obj, Page): - dbName = self.getdbName(obj.site) - data[dbName] = {'site': dbName, 'title': obj.title()} + db_name = self.getdbName(obj.site) + data[db_name] = {'site': db_name, 'title': obj.title()} else: # TODO: Do some verification here - dbName = obj['site'] - data[dbName] = obj + db_name = obj['site'] + data[db_name] = obj data = {'sitelinks': data} self.editEntity(data, **kwargs)
@@ -4787,8 +4806,9 @@
SNAK_TYPES = ('value', 'somevalue', 'novalue')
- def __init__(self, site, pid, snak=None, hash=None, isReference=False, - isQualifier=False, **kwargs): + @deprecated_args(isReference='is_reference', isQualifier='is_qualifier') + def __init__(self, site, pid, snak=None, hash=None, is_reference=False, + is_qualifier=False, **kwargs): """ Initializer.
@@ -4799,14 +4819,14 @@ @param pid: property id, with "P" prefix @param snak: snak identifier for claim @param hash: hash identifier for references - @param isReference: whether specified claim is a reference - @param isQualifier: whether specified claim is a qualifier + @param is_reference: whether specified claim is a reference + @param is_qualifier: whether specified claim is a qualifier """ Property.__init__(self, site, pid, **kwargs) self.snak = snak self.hash = hash - self.isReference = isReference - self.isQualifier = isQualifier + self.isReference = is_reference + self.isQualifier = is_qualifier if self.isQualifier and self.isReference: raise ValueError(u'Claim cannot be both a qualifier and reference.') self.sources = [] @@ -5212,7 +5232,7 @@ elif self.type in ('string', 'url', 'math', 'external-id'): value = self.getTarget() elif self.type == 'commonsMedia': - value = self.getTarget().title(withNamespace=False) + value = self.getTarget().title(with_ns=False) elif self.type in ('globe-coordinate', 'time', 'quantity', 'monolingualtext', 'geo-shape', 'tabular-data'): @@ -5433,7 +5453,8 @@ u'|&#x[0-9A-Fa-f]+;' )
- def __init__(self, text, source=None, defaultNamespace=0): + @deprecated_args(defaultNamespace='default_namespace') + def __init__(self, text, source=None, default_namespace=0): """ Initializer.
@@ -5443,9 +5464,9 @@ @param source: the Site on which the link was found (not necessarily the site to which the link refers) @type source: Site or BasePage - @param defaultNamespace: a namespace to use if the link does not + @param default_namespace: a namespace to use if the link does not contain one (defaults to 0) - @type defaultNamespace: int + @type default_namespace: int
@raises UnicodeError: text could not be converted to unicode. """ @@ -5460,11 +5481,11 @@ "source parameter should be either a Site or Page object"
self._text = text - # See bug T104864, defaultNamespace might have been deleted. + # See bug T104864, default_namespace might have been deleted. try: - self._defaultns = self._source.namespaces[defaultNamespace] + self._defaultns = self._source.namespaces[default_namespace] except KeyError: - self._defaultns = defaultNamespace + self._defaultns = default_namespace
# preprocess text (these changes aren't site-dependent) # First remove anchor, which is stored unchanged, if there is one @@ -5503,7 +5524,7 @@ self._text = t
if source_is_page: - self._text = source.title(withSection=False) + self._text + self._text = source.title(with_section=False) + self._text
def __repr__(self): """Return a more complete string representation.""" @@ -5842,9 +5863,9 @@ link._site = page.site link._section = page.section() link._namespace = page.namespace() - link._title = page.title(withNamespace=False, - allowInterwiki=False, - withSection=False) + link._title = page.title(with_ns=False, + allow_interwiki=False, + with_section=False) link._anchor = None link._source = source or pywikibot.Site()
@@ -5926,6 +5947,42 @@
# Utility functions for parsing page titles
+# This regular expression will match any decimal and hexadecimal entity and +# also entities that might be named entities. +_ENTITY_SUB = re.compile( + r'&(#(?P<decimal>\d+)|#x(?P<hex>[0-9a-fA-F]+)|(?P<name>[A-Za-z]+));').sub +# These characters are Html-illegal, but sadly you *can* find some of +# these and converting them to chr(decimal) is unsuitable +_ILLEGAL_HTML_ENTITIES_MAPPING = { + 128: 8364, # € + 130: 8218, # ‚ + 131: 402, # ƒ + 132: 8222, # „ + 133: 8230, # … + 134: 8224, # † + 135: 8225, # ‡ + 136: 710, # ˆ + 137: 8240, # ‰ + 138: 352, # Š + 139: 8249, # ‹ + 140: 338, # Œ + 142: 381, # Ž + 145: 8216, # ‘ + 146: 8217, # ’ + 147: 8220, # “ + 148: 8221, # ” + 149: 8226, # • + 150: 8211, # – + 151: 8212, # — + 152: 732, # ˜ + 153: 8482, # ™ + 154: 353, # š + 155: 8250, # › + 156: 339, # œ + 158: 382, # ž + 159: 376 # Ÿ +} +
def html2unicode(text, ignore=None, exceptions=None): """ @@ -5938,44 +5995,9 @@ """ if ignore is None: ignore = [] - # This regular expression will match any decimal and hexadecimal entity and - # also entities that might be named entities. - entityR = re.compile( - r'&(#(?P<decimal>\d+)|#x(?P<hex>[0-9a-fA-F]+)|(?P<name>[A-Za-z]+));') - # These characters are Html-illegal, but sadly you *can* find some of - # these and converting them to chr(decimal) is unsuitable - convertIllegalHtmlEntities = { - 128: 8364, # € - 130: 8218, # ‚ - 131: 402, # ƒ - 132: 8222, # „ - 133: 8230, # … - 134: 8224, # † - 135: 8225, # ‡ - 136: 710, # ˆ - 137: 8240, # ‰ - 138: 352, # Š - 139: 8249, # ‹ - 140: 338, # Œ - 142: 381, # Ž - 145: 8216, # ‘ - 146: 8217, # ’ - 147: 8220, # “ - 148: 8221, # ” - 149: 8226, # • - 150: 8211, # – - 151: 8212, # — - 152: 732, # ˜ - 153: 8482, # ™ - 154: 353, # š - 155: 8250, # › - 156: 339, # œ - 158: 382, # ž - 159: 376 # Ÿ - } # ensuring that illegal   and , which have no known # values, don't get converted to chr(129), chr(141) or chr(157) - ignore = (set(map(lambda x: convertIllegalHtmlEntities.get(x, x), + ignore = (set(map(lambda x: _ILLEGAL_HTML_ENTITIES_MAPPING.get(x, x), ignore)) | {129, 141, 157})
def handle_entity(match): @@ -5984,30 +6006,30 @@ # to pass it to handle_entity, ♥ Python return match.group(0) if match.group('decimal'): - unicodeCodepoint = int(match.group('decimal')) + unicode_codepoint = int(match.group('decimal')) elif match.group('hex'): - unicodeCodepoint = int(match.group('hex'), 16) + unicode_codepoint = int(match.group('hex'), 16) elif match.group('name'): name = match.group('name') if name in htmlentitydefs.name2codepoint: # We found a known HTML entity. - unicodeCodepoint = htmlentitydefs.name2codepoint[name] + unicode_codepoint = htmlentitydefs.name2codepoint[name] else: - unicodeCodepoint = False + unicode_codepoint = False
- if unicodeCodepoint in convertIllegalHtmlEntities: - unicodeCodepoint = convertIllegalHtmlEntities[unicodeCodepoint] + unicode_codepoint = _ILLEGAL_HTML_ENTITIES_MAPPING.get( + unicode_codepoint, unicode_codepoint)
- if unicode_codepoint and unicode_codepoint not in ignore: - if unicode_codepoint > sys.maxunicode: # solve narrow Python 2 build exception (UTF-16) - return eval("'\\U{0:08x}'".format(unicode_codepoint)) + return eval("'\\U{0:08x}'".format(unicode_codepoint)) else: - return chr(unicode_codepoint) + return chr(unicode_codepoint) else: # Leave the entity unchanged return match.group(0) - return entityR.sub(handle_entity, text) + return _ENTITY_SUB(handle_entity, text)
def UnicodeToAsciiHtml(s): @@ -6066,14 +6088,14 @@ site = encodings encodings = [site.encoding()] + list(site.encodings())
- firstException = None + first_exception = None for enc in encodings: try: t = title.encode(enc) t = unquote_to_bytes(t) return t.decode(enc) except UnicodeError as ex: - if not firstException: - firstException = ex + if not first_exception: + first_exception = ex # Couldn't convert, raise the original exception - raise firstException + raise first_exception diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py index 48c0803..d8b3d5f 100644 --- a/pywikibot/pagegenerators.py +++ b/pywikibot/pagegenerators.py @@ -604,7 +604,7 @@ categoryname) cat = pywikibot.Category(pywikibot.Link(categoryname, source=self.site, - defaultNamespace=14)) + default_namespace=14)) return cat, startfrom
@deprecated_args(arg='category') @@ -947,7 +947,7 @@ value = pywikibot.input( 'Pages that transclude which page should be processed?') page = pywikibot.Page(pywikibot.Link(value, - defaultNamespace=10, + default_namespace=10, source=self.site)) return ReferringPageGenerator(page, onlyTemplateInclusion=True)
@@ -1448,8 +1448,8 @@ """Yield all pages referring to a specific page.""" return referredPage.getReferences( follow_redirects=followRedirects, - withTemplateInclusion=withTemplateInclusion, - onlyTemplateInclusion=onlyTemplateInclusion, + with_template_inclusion=withTemplateInclusion, + only_template_inclusion=onlyTemplateInclusion, total=total, content=content)
@@ -1511,7 +1511,7 @@ # TODO: page generator could be modified to use cmstartsortkey ... for s in category.subcategories(recurse=recurse, total=total, content=content): - if start is None or s.title(withNamespace=False) >= start: + if start is None or s.title(with_ns=False) >= start: yield s
@@ -1750,7 +1750,7 @@ cls = pywikibot.PropertyPage else: cls = pywikibot.ItemPage - page = cls(page.site, page.title(withNamespace=False)) + page = cls(page.site, page.title(with_ns=False)) else: try: page = pywikibot.ItemPage.fromPage(page) @@ -1883,7 +1883,7 @@ quantifier = 'none' reg = cls.__precompile(regex, re.I) for page in generator: - title = page.title(withNamespace=not ignore_namespace) + title = page.title(with_ns=not ignore_namespace) if cls.__filter_match(reg, title, quantifier): yield page
@@ -2045,7 +2045,7 @@ if bool(contribs[username]) is not bool(skip): # xor operation yield page elif show_filtered: - pywikibot.output(u'Skipping %s' % page.title(asLink=True)) + pywikibot.output('Skipping %s' % page.title(as_link=True))
@deprecated('itertools.chain(*iterables)') @@ -2908,7 +2908,7 @@ site = pywikibot.Site() fd = date.FormatDate(site) firstPage = pywikibot.Page(site, fd(start_month, 1)) - pywikibot.output(u"Starting with %s" % firstPage.title(asLink=True)) + pywikibot.output('Starting with %s' % firstPage.title(as_link=True)) for month in range(start_month, end_month + 1): for day in range(1, calendar.monthrange(year, month)[1] + 1): yield pywikibot.Page(pywikibot.Link(fd(month, day), site)) diff --git a/pywikibot/proofreadpage.py b/pywikibot/proofreadpage.py index a198290..c0dc287 100644 --- a/pywikibot/proofreadpage.py +++ b/pywikibot/proofreadpage.py @@ -173,7 +173,7 @@ @return: (base, ext, num). @rtype: tuple """ - left, sep, right = self.title(withNamespace=False).rpartition('/') + left, sep, right = self.title(with_ns=False).rpartition('/') if sep: base = left num = int(right) @@ -217,7 +217,7 @@ # Try to infer names from page titles. if self._num is not None: for page in what_links_here: - if page.title(withNamespace=False) == self._base: + if page.title(with_ns=False) == self._base: what_links_here.remove(page) self._index = (page, what_links_here) break @@ -319,7 +319,7 @@ return self.site.proofread_levels[self.ql] except KeyError: pywikibot.warning('Not valid status set for %s: quality level = %s' - % (self.title(asLink=True), self.ql)) + % (self.title(as_link=True), self.ql)) return None
def without_text(self): @@ -450,7 +450,7 @@ len_cq = len(close_queue) if (len_oq != len_cq) or (len_oq < 2 or len_cq < 2): raise pywikibot.Error('ProofreadPage %s: invalid format' - % self.title(asLink=True)) + % self.title(as_link=True))
f_open, f_close = open_queue[0], close_queue[0] self._full_header = FullHeader(self._text[f_open.end():f_close.start()]) @@ -519,7 +519,7 @@ url = self.full_url() else: path = 'w/index.php?title={0}&action=edit&redlink=1' - url = self.site.base_url(path.format(self.title(asUrl=True))) + url = self.site.base_url(path.format(self.title(as_url=True)))
try: response = http.fetch(url, charset='utf-8') @@ -593,7 +593,7 @@ res.append('\n') return ''.join(res)
- params = {'book': self.title(asUrl=True, withNamespace=False), + params = {'book': self.title(as_url=True, with_ns=False), 'lang': self.site.lang, 'user': self.site.user(), } diff --git a/pywikibot/site.py b/pywikibot/site.py index 9fcea40..a63f4d7 100644 --- a/pywikibot/site.py +++ b/pywikibot/site.py @@ -1066,11 +1066,11 @@ """ self._pagemutex.acquire() try: - while page.title(withSection=False) in self._locked_pages: + while page.title(with_section=False) in self._locked_pages: if not block: - raise PageInUse(page.title(withSection=False)) + raise PageInUse(page.title(with_section=False)) time.sleep(.25) - self._locked_pages.append(page.title(withSection=False)) + self._locked_pages.append(page.title(with_section=False)) finally: self._pagemutex.release()
@@ -1084,7 +1084,7 @@ """ self._pagemutex.acquire() try: - self._locked_pages.remove(page.title(withSection=False)) + self._locked_pages.remove(page.title(with_section=False)) finally: self._pagemutex.release()
@@ -2953,13 +2953,13 @@ def _update_page(self, page, query): for pageitem in query: if not self.sametitle(pageitem['title'], - page.title(withSection=False)): + page.title(with_section=False)): raise InconsistentTitleReceived(page, pageitem['title']) api.update_page(page, pageitem, query.props)
def loadpageinfo(self, page, preload=False): """Load page info from api and store in page attributes.""" - title = page.title(withSection=False) + title = page.title(with_section=False) inprop = 'protection' if preload: inprop += '|preload' @@ -2973,7 +2973,7 @@ @need_extension('GeoData') def loadcoordinfo(self, page): """Load [[mw:Extension:GeoData]] info.""" - title = page.title(withSection=False) + title = page.title(with_section=False) query = self._generator(api.PropertyGenerator, type_arg="coordinates", titles=title.encode(self.encoding()), @@ -2993,7 +2993,7 @@
@raises APIError: PageImages extension is not installed """ - title = page.title(withSection=False) + title = page.title(with_section=False) query = self._generator(api.PropertyGenerator, type_arg='pageimages', titles=title.encode(self.encoding()), @@ -3002,7 +3002,7 @@
def loadpageprops(self, page): """Load page props for the given page.""" - title = page.title(withSection=False) + title = page.title(with_section=False) query = self._generator(api.PropertyGenerator, type_arg="pageprops", titles=title.encode(self.encoding()), @@ -3023,7 +3023,7 @@ if not isinstance(page, pywikibot.FilePage): raise TypeError('Page %s must be a FilePage.' % page)
- title = page.title(withSection=False) + title = page.title(with_section=False) args = {'titles': title, 'gufilterlocal': False, } @@ -3035,7 +3035,7 @@
for pageitem in query: if not self.sametitle(pageitem['title'], - page.title(withSection=False)): + page.title(with_section=False)): raise InconsistentTitleReceived(page, pageitem['title'])
api.update_page(page, pageitem, query.props) @@ -3067,7 +3067,7 @@ @param url_param: see iiurlparam in [1]
""" - title = page.title(withSection=False) + title = page.title(with_section=False) args = {'titles': title, 'iiurlwidth': url_width, 'iiurlheight': url_height, @@ -3108,7 +3108,7 @@
@raises APIError: Flow extension is not installed """ - title = page.title(withSection=False) + title = page.title(with_section=False) query = self._generator(api.PropertyGenerator, type_arg="flowinfo", titles=title.encode(self.encoding()), @@ -3173,7 +3173,7 @@ if hasattr(page, '_redirtarget'): return page._redirtarget
- title = page.title(withSection=False) + title = page.title(with_section=False) query = self._simple_request( action='query', prop='info', @@ -3343,7 +3343,7 @@ # In case of duplicates, return the first entry. for priority, page in enumerate(sublist): try: - cache.setdefault(page.title(withSection=False), + cache.setdefault(page.title(with_section=False), (priority, page)) except pywikibot.InvalidTitle: pywikibot.exception() @@ -3629,7 +3629,7 @@ @raises TypeError: a namespace identifier has an inappropriate type such as NoneType or bool """ - bltitle = page.title(withSection=False).encode(self.encoding()) + bltitle = page.title(with_section=False).encode(self.encoding()) blargs = {"gbltitle": bltitle} if filter_redirects is not None: blargs['gblfilterredir'] = ('redirects' if filter_redirects @@ -3687,7 +3687,7 @@ type such as NoneType or bool """ eiargs = {"geititle": - page.title(withSection=False).encode(self.encoding())} + page.title(with_section=False).encode(self.encoding())} if filter_redirects is not None: eiargs['geifilterredir'] = ('redirects' if filter_redirects else 'nonredirects') @@ -3755,7 +3755,7 @@ if hasattr(page, "_pageid"): plargs['pageids'] = str(page._pageid) else: - pltitle = page.title(withSection=False).encode(self.encoding()) + pltitle = page.title(with_section=False).encode(self.encoding()) plargs['titles'] = pltitle return self._generator(api.PageGenerator, type_arg='links', namespaces=namespaces, total=total, @@ -3776,7 +3776,7 @@ clargs['pageids'] = str(page._pageid) else: clargs['titles'] = page.title( - withSection=False).encode(self.encoding()) + with_section=False).encode(self.encoding()) return self._generator(api.PageGenerator, type_arg='categories', total=total, g_content=content, **clargs) @@ -3790,7 +3790,7 @@ description page, not the image itself
""" - imtitle = page.title(withSection=False).encode(self.encoding()) + imtitle = page.title(with_section=False).encode(self.encoding()) return self._generator(api.PageGenerator, type_arg='images', titles=imtitle, total=total, g_content=content) @@ -3810,7 +3810,7 @@ @raises TypeError: a namespace identifier has an inappropriate type such as NoneType or bool """ - tltitle = page.title(withSection=False).encode(self.encoding()) + tltitle = page.title(with_section=False).encode(self.encoding()) return self._generator(api.PageGenerator, type_arg='templates', titles=tltitle, namespaces=namespaces, total=total, g_content=content) @@ -3877,7 +3877,7 @@ raise Error( u"categorymembers: non-Category page '%s' specified" % category.title()) - cmtitle = category.title(withSection=False).encode(self.encoding()) + cmtitle = category.title(with_section=False).encode(self.encoding()) cmargs = {'type_arg': "categorymembers", 'gcmtitle': cmtitle, 'gcmprop': "ids|title|sortkey"} if sortby in ["sortkey", "timestamp"]: @@ -4077,7 +4077,7 @@ self.login(sysop=sysop) rvargs[u"rvtoken"] = "rollback" if revids is None: - rvtitle = page.title(withSection=False).encode(self.encoding()) + rvtitle = page.title(with_section=False).encode(self.encoding()) rvargs[u"titles"] = rvtitle else: if isinstance(revids, (int, basestring)): @@ -4114,7 +4114,7 @@
for pagedata in rvgen: if not self.sametitle(pagedata['title'], - page.title(withSection=False)): + page.title(with_section=False)): raise InconsistentTitleReceived(page, pagedata['title']) if "missing" in pagedata: raise NoPage(page) @@ -4137,7 +4137,7 @@ @param include_obsolete: if true, yield even Link objects whose site is obsolete """ - lltitle = page.title(withSection=False) + lltitle = page.title(with_section=False) llquery = self._generator(api.PropertyGenerator, type_arg="langlinks", titles=lltitle.encode(self.encoding()), @@ -4159,7 +4159,7 @@ @deprecated_args(step=None) def page_extlinks(self, page, total=None): """Iterate all external links on page, yielding URL strings.""" - eltitle = page.title(withSection=False) + eltitle = page.title(with_section=False) elquery = self._generator(api.PropertyGenerator, type_arg="extlinks", titles=eltitle.encode(self.encoding()), total=total) @@ -4173,7 +4173,7 @@
def getcategoryinfo(self, category): """Retrieve data on contents of category.""" - cititle = category.title(withSection=False) + cititle = category.title(with_section=False) ciquery = self._generator(api.PropertyGenerator, type_arg="categoryinfo", titles=cititle.encode(self.encoding())) @@ -4539,7 +4539,7 @@ @raises TypeError: a namespace identifier has an inappropriate type such as NoneType or bool """ - iuargs = {'giutitle': image.title(withSection=False)} + iuargs = {'giutitle': image.title(with_section=False)} if filterredir is not None: iuargs['giufilterredir'] = ('redirects' if filterredir else 'nonredirects') @@ -4725,7 +4725,7 @@ pywikibot.warning( u"recentchanges: pagelist option is disabled; ignoring.") else: - rcgen.request["rctitles"] = (p.title(withSection=False) + rcgen.request['rctitles'] = (p.title(with_section=False) for p in pagelist) if changetype: rcgen.request["rctype"] = changetype @@ -4911,7 +4911,7 @@ # TODO: T75370 @deprecated_args(step=None) def deletedrevs(self, page, start=None, end=None, reverse=None, - get_text=False, total=None): + content=False, total=None): """Iterate deleted revisions.
Each value returned by the iterator will be a dict containing the @@ -4924,7 +4924,7 @@ @param start: Iterate revisions starting at this Timestamp @param end: Iterate revisions ending at this Timestamp @param reverse: Iterate oldest revisions first (default: newest) - @param get_text: If True, retrieve the content of each revision and + @param content: If True, retrieve the content of each revision and an undelete token """ if start and end: @@ -4942,7 +4942,7 @@ "deletedrevs: " "User:%s not authorized to access deleted revisions." % self.user()) - if get_text: + if content: if "undelete" not in self.userinfo['rights']: try: self.login(True) @@ -4955,10 +4955,10 @@ % self.user())
drgen = self._generator(api.ListGenerator, type_arg="deletedrevs", - titles=page.title(withSection=False), + titles=page.title(with_section=False), drprop="revid|user|comment|minor", total=total) - if get_text: + if content: drgen.request['drprop'] = (drgen.request['drprop'] + ['content', 'token']) if start is not None: @@ -5203,7 +5203,7 @@ if isinstance(self._ep_errors[err.code], basestring): errdata = { 'site': self, - 'title': page.title(withSection=False), + 'title': page.title(with_section=False), 'user': self.user(), 'info': err.info } @@ -5431,7 +5431,7 @@ @return: Page object with the new title @rtype: pywikibot.Page """ - oldtitle = page.title(withSection=False) + oldtitle = page.title(with_section=False) newlink = pywikibot.Link(newtitle, self) newpage = pywikibot.Page(newlink) if newlink.namespace: @@ -5506,7 +5506,7 @@ if "talkmove-error-code" in result["move"]: pywikibot.warning( u"movepage: Talk page %s not moved" - % (page.toggleTalkPage().title(asLink=True))) + % (page.toggleTalkPage().title(as_link=True))) return pywikibot.Page(page, newtitle)
# catalog of rollback errors for use in error messages @@ -5533,7 +5533,7 @@ if len(page._revisions) < 2: raise Error( u"Rollback of %s aborted; load revision history first." - % page.title(asLink=True)) + % page.title(as_link=True)) last_rev = page.latest_revision last_user = last_rev.user for rev in sorted(page._revisions.values(), reverse=True, @@ -5544,7 +5544,7 @@ else: raise Error( u"Rollback of %s aborted; only one user in revision history." - % page.title(asLink=True)) + % page.title(as_link=True)) parameters = merge_unique_dicts(kwargs, action='rollback', title=page, token=self.tokens['rollback'], @@ -5556,7 +5556,7 @@ except api.APIError as err: errdata = { 'site': self, - 'title': page.title(withSection=False), + 'title': page.title(with_section=False), 'user': self.user(), } if err.code in self._rb_errors: @@ -5602,7 +5602,7 @@ except api.APIError as err: errdata = { 'site': self, - 'title': page.title(withSection=False), + 'title': page.title(with_section=False), 'user': self.user(), } if err.code in self._dl_errors: @@ -5643,7 +5643,7 @@ except api.APIError as err: errdata = { 'site': self, - 'title': page.title(withSection=False), + 'title': page.title(with_section=False), 'user': self.user(), } if err.code in self._dl_errors: @@ -6018,7 +6018,7 @@ 'The "hash_found" parameter in "getFilesFromAnHash" and ' '"getImagesFromAnHash" are not optional.') return - return [image.title(withNamespace=False) + return [image.title(with_ns=False) for image in self.allimages(sha1=hash_found)]
@deprecated('Site().allimages') @@ -6212,7 +6212,7 @@ text = comment token = self.tokens['edit'] result = None - file_page_title = filepage.title(withNamespace=False) + file_page_title = filepage.title(with_ns=False) file_size = None offset = _offset # make sure file actually exists @@ -7630,7 +7630,7 @@ else: if p.site == self and p.namespace() in ( self.item_namespace, self.property_namespace): - req['ids'].append(p.title(withNamespace=False)) + req['ids'].append(p.title(with_ns=False)) else: assert p.site.has_data_repository, \ 'Site must have a data repository' diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py index 050ee5a..9a83cd4 100644 --- a/pywikibot/textlib.py +++ b/pywikibot/textlib.py @@ -213,7 +213,7 @@ namespace = self.site.namespaces[10] if isinstance(template, pywikibot.Page): if template.namespace() == 10: - old = template.title(withNamespace=False) + old = template.title(with_ns=False) else: raise ValueError( '{0} is not a template Page object'.format(template)) @@ -1217,7 +1217,7 @@ if isinstance(links[site], pywikibot.Link): links[site] = pywikibot.Page(links[site]) if isinstance(links[site], pywikibot.Page): - title = links[site].title(asLink=True, forceInterwiki=True, + title = links[site].title(as_link=True, force_interwiki=True, insite=insite) link = title.replace('[[:', '[[') s.append(link) @@ -1290,7 +1290,7 @@ cat = pywikibot.Category(pywikibot.Link( '%s:%s' % (match.group('namespace'), title), site), - sortKey=sortKey) + sort_key=sortKey) except InvalidTitle: # Category title extracted contains invalid characters # Likely due to on-the-fly category name creation, see T154309 @@ -1363,7 +1363,7 @@ site = pywikibot.Site()
catNamespace = '|'.join(site.namespaces.CATEGORY) - title = oldcat.title(withNamespace=False) + title = oldcat.title(with_ns=False) if not title: return # title might contain regex special characters @@ -1390,14 +1390,15 @@ elif add_only: text = replaceExcept( oldtext, categoryR, - '{0}\n{1}'.format(oldcat.title(asLink=True, allowInterwiki=False), - newcat.title(asLink=True, allowInterwiki=False)), + '{0}\n{1}'.format( + oldcat.title(as_link=True, allow_interwiki=False), + newcat.title(as_link=True, allow_interwiki=False)), exceptions, site=site) else: text = replaceExcept(oldtext, categoryR, '[[{0}:{1}\2' .format(site.namespace(14), - newcat.title(withNamespace=False)), + newcat.title(with_ns=False)), exceptions, site=site) return text
@@ -1525,8 +1526,8 @@ category = u'{0}:{1}'.format(insite.namespace(14), category) category = pywikibot.Category(pywikibot.Link(category, insite, - defaultNamespace=14), - sortKey=sortKey) + default_namespace=14), + sort_key=sortKey) # Make sure a category is casted from Page to Category. elif not isinstance(category, pywikibot.Category): category = pywikibot.Category(category) diff --git a/pywikibot/titletranslate.py b/pywikibot/titletranslate.py index d5ca6df..99110bd 100644 --- a/pywikibot/titletranslate.py +++ b/pywikibot/titletranslate.py @@ -47,7 +47,7 @@ # we're currently working on ... if page is None: continue - newname = page.title(withNamespace=False) + newname = page.title(with_ns=False) # ... unless we do want brackets if removebrackets: newname = re.sub(re.compile(r'\W*?(.*?)\W*?', @@ -67,7 +67,7 @@ ns = page.namespace() if page else 0 x = pywikibot.Link(newname, site.getSite(code=newcode), - defaultNamespace=ns) + default_namespace=ns) result.add(x) elif config.verbose_output: pywikibot.output('Ignoring unknown language code {}' diff --git a/scripts/archive/featured.py b/scripts/archive/featured.py index e0e1745..4689f45 100755 --- a/scripts/archive/featured.py +++ b/scripts/archive/featured.py @@ -92,7 +92,7 @@ def BACK(site, name, hide): # pylint: disable=unused-argument p = pywikibot.Page(site, name, ns=10) return [page for page in p.getReferences(follow_redirects=False, - onlyTemplateInclusion=True)] + only_template_inclusion=True)]
def DATA(site, name, hide): @@ -384,7 +384,7 @@ # Article talk (like in English) elif p.namespace() == 1 and site.code != 'el': articles.append(pywikibot.Page(p.site, - p.title(withNamespace=False))) + p.title(with_ns=False))) pywikibot.output(color_format( '{lightred}** {0} has {1} {2} articles{default}', site, len(articles), task)) diff --git a/scripts/archivebot.py b/scripts/archivebot.py index a0e229a..4312a15 100755 --- a/scripts/archivebot.py +++ b/scripts/archivebot.py @@ -281,7 +281,7 @@ """ pywikibot.output(u'Fetching template transclusions...') transclusion_page = pywikibot.Page(site, template, ns=10) - return transclusion_page.getReferences(onlyTemplateInclusion=True, + return transclusion_page.getReferences(only_template_inclusion=True, follow_redirects=False, namespaces=namespaces)
@@ -298,7 +298,7 @@ """ ns = tpl_page.site.namespaces[tpl_page.namespace()] marker = '?' if ns.id == 10 else '' - title = tpl_page.title(withNamespace=False) + title = tpl_page.title(with_ns=False) if ns.case != 'case-sensitive': title = '[%s%s]%s' % (re.escape(title[0].upper()), re.escape(title[0].lower()), @@ -587,7 +587,7 @@ def attr2text(self): """Return a template with archiver saveable attributes.""" return '{{%s\n%s\n}}' \ - % (self.tpl.title(withNamespace=(self.tpl.namespace() != 10)), + % (self.tpl.title(with_ns=(self.tpl.namespace() != 10)), '\n'.join('|%s = %s' % (a, self.get_attr(a)) for a in self.saveables()))
@@ -721,7 +721,7 @@ self.comment_params['count'] = self.archived_threads comma = self.site.mediawiki_message('comma-separator') self.comment_params['archives'] \ - = comma.join(a.title(asLink=True) + = comma.join(a.title(as_link=True) for a in self.archives.values()) self.comment_params['why'] = comma.join(whys) comment = i18n.twtranslate(self.site.code, diff --git a/scripts/blockpageschecker.py b/scripts/blockpageschecker.py index 4b52b26..0c4d5b8 100755 --- a/scripts/blockpageschecker.py +++ b/scripts/blockpageschecker.py @@ -291,7 +291,7 @@ generator = pagegenerators.PreloadingGenerator(generator, groupsize=60) for page in generator: - pagename = page.title(asLink=True) + pagename = page.title(as_link=True) pywikibot.output('Loading %s...' % pagename) try: text = page.text diff --git a/scripts/capitalize_redirects.py b/scripts/capitalize_redirects.py index e967185..db3e653 100755 --- a/scripts/capitalize_redirects.py +++ b/scripts/capitalize_redirects.py @@ -73,10 +73,10 @@ page_cap = pywikibot.Page(site, page_t.capitalize()) if page_cap.exists(): pywikibot.output(u'%s already exists, skipping...\n' - % page_cap.title(asLink=True)) + % page_cap.title(as_link=True)) else: pywikibot.output(u"%s doesn't exist" - % page_cap.title(asLink=True)) + % page_cap.title(as_link=True)) if self.user_confirm('Do you want to create a redirect?'): comment = i18n.twtranslate( site, diff --git a/scripts/casechecker.py b/scripts/casechecker.py index 9e4cc1b..6c50bc3 100755 --- a/scripts/casechecker.py +++ b/scripts/casechecker.py @@ -330,7 +330,7 @@ if 'redirect' in page: src = self.Page(title) redir = src.getRedirectTarget() - redirTitle = redir.title(asLink=True, + redirTitle = redir.title(as_link=True, textlink=True)
if not dst.exists(): @@ -367,8 +367,8 @@ if page['ns'] == 14: dst = self.Page(newTitle) bot = CategoryMoveBot( - src.title(withNamespace=False), - dst.title(withNamespace=False), + src.title(with_ns=False), + dst.title(with_ns=False), self.autonomous, editSummary + u' ' + self.MakeMoveSummary(title, newTitle), @@ -706,7 +706,7 @@
def PutNewPage(self, pageObj, pageTxt, msg): """Save new page.""" - title = pageObj.title(asLink=True, textlink=True) + title = pageObj.title(as_link=True, textlink=True) coloredMsg = u', '.join([self.ColorCodeWord(m) for m in msg]) if pageObj.text == pageTxt: self.WikiLog(u"* Error: Text replacement failed in %s (%s)" diff --git a/scripts/category.py b/scripts/category.py index b6de801..6970137 100755 --- a/scripts/category.py +++ b/scripts/category.py @@ -203,28 +203,28 @@ redir_target.text = '' pywikibot.output('Redirect target %s does not exist ' 'yet; creating.' - % redir_target.title(asLink=True)) + % redir_target.title(as_link=True)) return redir_target if self.edit_redirects: return page pywikibot.warning('Redirect target %s can not ' 'be modified; skipping.' - % redir_target.title(asLink=True)) + % redir_target.title(as_link=True)) return None if self.edit_redirects: return page pywikibot.warning('Page %s is a redirect to %s; skipping.' - % (page.title(asLink=True), - redir_target.title(asLink=True))) + % (page.title(as_link=True), + redir_target.title(as_link=True))) return None return page if self.create: page.text = '' pywikibot.output('Page %s does not exist yet; creating.' - % page.title(asLink=True)) + % page.title(as_link=True)) return page pywikibot.warning('Page %s does not exist; skipping.' - % page.title(asLink=True)) + % page.title(as_link=True)) return None
def determine_template_target(self, page): @@ -254,7 +254,7 @@ if tmpl != []: templates = page.templatesWithParams() for template, params in templates: - if (template.title(withNamespace=False).lower() in tmpl + if (template.title(with_ns=False).lower() in tmpl and params): doc_page = pywikibot.Page(page.site, params[0]) if doc_page.exists(): @@ -469,14 +469,15 @@ else: if self.sort: catpl = self.sorted_by_last_name(catpl, self.current_page) - pywikibot.output(u'Adding %s' % catpl.title(asLink=True)) + pywikibot.output('Adding %s' % catpl.title(as_link=True)) if page.namespace() == page.site.namespaces.TEMPLATE: tagname = 'noinclude' if self.includeonly == ['includeonly']: tagname = 'includeonly' tagnameregexp = re.compile(r'(.*)(</{0}>)'.format(tagname), re.I | re.DOTALL) - categorytitle = catpl.title(asLink=True, allowInterwiki=False) + categorytitle = catpl.title( + as_link=True, allow_interwiki=False) if tagnameregexp.search(text): # add category into the <includeonly> tag in the # template document page or the <noinclude> tag @@ -500,13 +501,13 @@ comment = i18n.twtranslate(self.current_page.site, 'category-adding', {'newcat': catpl.title( - withNamespace=False)}) + with_ns=False)}) try: self.userPut(self.current_page, old_text, text, summary=comment) except pywikibot.PageSaveRelatedError as error: pywikibot.output(u'Page %s not saved: %s' - % (self.current_page.title(asLink=True), + % (self.current_page.title(as_link=True), error))
@@ -603,15 +604,15 @@ "The 'wikibase' option is turned on and {0} has no " 'registered username.'.format(repo))
- template_vars = {'oldcat': self.oldcat.title(withNamespace=False)} + template_vars = {'oldcat': self.oldcat.title(with_ns=False)} if self.newcat: template_vars.update({ 'newcat': self.newcat.title( - withNamespace=False, - asLink=True, + with_ns=False, + as_link=True, textlink=True ), - 'title': self.newcat.title(withNamespace=False)}) + 'title': self.newcat.title(with_ns=False)}) # Set edit summary for changed pages. if comment: self.comment = comment @@ -675,7 +676,7 @@ old_cat_text = self.oldcat.text self.newcat = self.oldcat.move(self.newcat.title(), reason=self.move_comment, - movetalkpage=can_move_talk) + movetalk=can_move_talk) # Copy over the article text so it can be stripped of # CFD templates and re-saved. This is faster than # reloading the article in place. @@ -733,8 +734,8 @@
page.change_category(self.oldcat, self.newcat, summary=self.comment, - inPlace=self.inplace, - sortKey=self.keep_sortkey) + in_place=self.inplace, + sort_key=self.keep_sortkey)
doc_page = self.determine_template_target(page) if doc_page != page and (not self.title_regex @@ -742,9 +743,9 @@ doc_page.title())): doc_page.change_category(self.oldcat, self.newcat, summary=self.comment, - inPlace=self.inplace, + in_place=self.inplace, include=self.includeonly, - sortKey=self.keep_sortkey) + sort_key=self.keep_sortkey)
@staticmethod def check_move(name, old_page, new_page): @@ -815,7 +816,7 @@
Do not use this function from outside the class. """ - cat_name_only = self.newcat.title(withNamespace=False) + cat_name_only = self.newcat.title(with_ns=False) comment = i18n.twtranslate(self.site, 'category-was-moved', {'newcat': cat_name_only, 'title': cat_name_only}) @@ -832,7 +833,7 @@ except pywikibot.NoPage: item = None if item and item.exists(): - cat_name_only = self.newcat.title(withNamespace=False) + cat_name_only = self.newcat.title(with_ns=False) comment = i18n.twtranslate(self.site, 'category-was-moved', {'newcat': cat_name_only, 'title': cat_name_only}) @@ -1030,11 +1031,11 @@ if len(self.text) > end: for cat in member.categories(): if cat != original_cat: - pywikibot.output(cat.title(asLink=True)) + pywikibot.output(cat.title(as_link=True)) else: pywikibot.output(color_format( '{lightpurple}{0}{default}', - current_cat.title(asLink=True))) + current_cat.title(as_link=True)))
class CatIntegerOption(IntegerOption): """An option allowing a range of integers.""" @@ -1134,7 +1135,8 @@ pywikibot.output('This category has no supercategories.') else: pywikibot.output('Move up to category:') - cat_list = [cat.title(withNamespace=False) for cat in supercatlist] + cat_list = [cat.title( + with_ns=False) for cat in supercatlist] supercat_option.list_categories(cat_list, 'u')
subcat_option = CatIntegerOption(0, len(subcatlist)) @@ -1142,7 +1144,7 @@ pywikibot.output('This category has no subcategories.') else: pywikibot.output('Move down to category:') - cat_list = [cat.title(withNamespace=False) for cat in subcatlist] + cat_list = [cat.title(with_ns=False) for cat in subcatlist] subcat_option.list_categories(cat_list)
# show possible options for the user @@ -1151,7 +1153,7 @@ subcat_option, StandardOption(color_format( 'save page to category {lightpurple}{0}{default}', - current_cat.title(withNamespace=False)), 'c'), + current_cat.title(with_ns=False)), 'c'), StandardOption('remove the category from page', 'r'), StandardOption('skip page', 's'), context_option, @@ -1168,7 +1170,8 @@ else: if not self.edit_summary: self.template_vars.update({ - 'newcat': current_cat.title(asLink=True, textlink=True) + 'newcat': current_cat.title( + as_link=True, textlink=True) }) self.edit_summary = i18n.twtranslate(self.site, 'category-replacing', @@ -1266,15 +1269,15 @@ result = u'#' * currentDepth if currentDepth > 0: result += u' ' - result += cat.title(asLink=True, textlink=True, withNamespace=False) + result += cat.title(as_link=True, textlink=True, with_ns=False) result += ' (%d)' % cat.categoryinfo['pages'] if currentDepth < self.maxDepth // 2: # noisy dots pywikibot.output('.', newline=False) # Create a list of other cats which are supercats of the current cat - supercat_names = [super_cat.title(asLink=True, + supercat_names = [super_cat.title(as_link=True, textlink=True, - withNamespace=False) + with_ns=False) for super_cat in self.catDB.getSupercats(cat) if super_cat != parent]
diff --git a/scripts/category_redirect.py b/scripts/category_redirect.py index dbe9da2..e879d07 100755 --- a/scripts/category_redirect.py +++ b/scripts/category_redirect.py @@ -234,7 +234,7 @@ except pywikibot.CircularRedirect: target = page self.problems.append(u"# %s is a self-linked redirect" - % page.title(asLink=True, textlink=True)) + % page.title(as_link=True, textlink=True)) except RuntimeError: # race condition: someone else removed the redirect while we # were checking for it @@ -242,24 +242,24 @@ if target.is_categorypage(): # this is a hard-redirect to a category page newtext = (u"{{%(template)s|%(cat)s}}" - % {'cat': target.title(withNamespace=False), + % {'cat': target.title(with_ns=False), 'template': self.template_list[0]}) try: page.text = newtext page.save(comment) self.log_text.append(u"* Added {{tl|%s}} to %s" % (self.template_list[0], - page.title(asLink=True, + page.title(as_link=True, textlink=True))) except pywikibot.Error: self.log_text.append(u"* Failed to add {{tl|%s}} to %s" % (self.template_list[0], - page.title(asLink=True, + page.title(as_link=True, textlink=True))) else: - self.problems.append(u"# %s is a hard redirect to %s" - % (page.title(asLink=True, textlink=True), - target.title(asLink=True, textlink=True))) + self.problems.append('# {} is a hard redirect to {}'.format( + page.title(as_link=True, textlink=True), + target.title(as_link=True, textlink=True)))
def run(self): """Run the bot.""" @@ -318,10 +318,10 @@ catpages = set() for cat in redircat.subcategories(): catpages.add(cat) - cat_title = cat.title(withNamespace=False) + cat_title = cat.title(with_ns=False) if "category redirect" in cat_title: self.log_text.append(u"* Ignoring %s" - % cat.title(asLink=True, textlink=True)) + % cat.title(as_link=True, textlink=True)) continue if hasattr(cat, "_catinfo"): # skip empty categories that don't return a "categoryinfo" key @@ -334,9 +334,9 @@ record[cat_title] = {today: None} try: newredirs.append("*# %s -> %s" - % (cat.title(asLink=True, textlink=True), + % (cat.title(as_link=True, textlink=True), cat.getCategoryRedirectTarget().title( - asLink=True, textlink=True))) + as_link=True, textlink=True))) except pywikibot.Error: pass # do a null edit on cat @@ -358,24 +358,24 @@ try: if not cat.isCategoryRedirect(): self.log_text.append(u"* False positive: %s" - % cat.title(asLink=True, + % cat.title(as_link=True, textlink=True)) continue except pywikibot.Error: self.log_text.append(u"* Could not load %s; ignoring" - % cat.title(asLink=True, textlink=True)) + % cat.title(as_link=True, textlink=True)) continue - cat_title = cat.title(withNamespace=False) + cat_title = cat.title(with_ns=False) if not self.readyToEdit(cat): counts[cat_title] = None self.log_text.append(u"* Skipping %s; in cooldown period." 
- % cat.title(asLink=True, textlink=True)) + % cat.title(as_link=True, textlink=True)) continue dest = cat.getCategoryRedirectTarget() if not dest.exists(): - self.problems.append("# %s redirects to %s" - % (cat.title(asLink=True, textlink=True), - dest.title(asLink=True, textlink=True))) + self.problems.append('# {} redirects to {}'.format( + cat.title(as_link=True, textlink=True), + dest.title(as_link=True, textlink=True))) # do a null edit on cat to update any special redirect # categories this wiki might maintain try: @@ -387,7 +387,7 @@ double = dest.getCategoryRedirectTarget() if double == dest or double == cat: self.log_text.append(u"* Redirect loop from %s" - % dest.title(asLink=True, + % dest.title(as_link=True, textlink=True)) # do a null edit on cat try: @@ -397,16 +397,16 @@ else: self.log_text.append( u"* Fixed double-redirect: %s -> %s -> %s" - % (cat.title(asLink=True, textlink=True), - dest.title(asLink=True, textlink=True), - double.title(asLink=True, textlink=True))) + % (cat.title(as_link=True, textlink=True), + dest.title(as_link=True, textlink=True), + double.title(as_link=True, textlink=True))) oldtext = cat.text # remove the old redirect from the old text, # leaving behind any non-redirect text oldtext = template_regex.sub("", oldtext) newtext = (u"{{%(redirtemp)s|%(ncat)s}}" % {'redirtemp': self.template_list[0], - 'ncat': double.title(withNamespace=False)}) + 'ncat': double.title(with_ns=False)}) newtext = newtext + oldtext.strip() try: cat.text = newtext @@ -417,7 +417,7 @@ continue
found, moved = self.move_contents(cat_title, - dest.title(withNamespace=False), + dest.title(with_ns=False), editSummary=comment) if found is None: self.log_text.append( diff --git a/scripts/checkimages.py b/scripts/checkimages.py index 1ff5df0..7477c97 100755 --- a/scripts/checkimages.py +++ b/scripts/checkimages.py @@ -590,7 +590,7 @@ """Set parameters.""" # ensure we have a FilePage self.image = pywikibot.FilePage(image) - self.imageName = image.title(withNamespace=False) + self.imageName = image.title(with_ns=False) self.timestamp = None self.uploader = None
@@ -680,7 +680,7 @@ return False upBots = i18n.translate(self.site, uploadBots) user = pywikibot.User(self.site, nick) - luser = user.title(asUrl=True) + luser = user.title(as_url=True)
if upBots: for upBot in upBots: @@ -870,7 +870,7 @@ % (self.imageName, commons_image_with_this_hash.title( withNamespace=False))) - if (self.image.title(asUrl=True) == + if (self.image.title(as_url=True) == commons_image_with_this_hash.title(asUrl=True)): repme += " (same name)" self.report_image(self.imageName, self.rep_page, self.com, repme, @@ -907,7 +907,8 @@ time_image_list = []
for dup_page in duplicates: - if (dup_page.title(asUrl=True) != self.image.title(asUrl=True) or + if (dup_page.title(as_url=True) != self.image.title( + as_url=True) or self.timestamp is None): try: self.timestamp = dup_page.latest_file_info.timestamp @@ -939,7 +940,7 @@ % dup_page) images_to_tag_list.append(dup_page.title()) string += '* {0}\n'.format( - dup_page.title(asLink=True, textlink=True)) + dup_page.title(as_link=True, textlink=True)) else: pywikibot.output( u"Already put the dupe-template in the files's page" @@ -955,11 +956,12 @@ '__images__', '\n{0}* {1}\n'.format( string, - Page_older_image.title(asLink=True, textlink=True))) + Page_older_image.title( + as_link=True, textlink=True))) else: text_for_the_report = dupText.replace( '__image__', - Page_older_image.title(asLink=True, textlink=True)) + Page_older_image.title(as_link=True, textlink=True))
# Two iteration: report the "problem" to the user only once # (the last) @@ -985,7 +987,7 @@ already_reported_in_past = fp.revision_count(self.bots) from_regex = (r'\n*[[:%s%s]]' % (self.image_namespace, - re.escape(self.image.title(asUrl=True)))) + re.escape(self.image.title(as_url=True)))) # Delete the image in the list where we're write on text_for_the_report = re.sub(from_regex, '', text_for_the_report) @@ -998,7 +1000,7 @@ self.report( text_for_the_report, images_to_tag_list[-1], dupTalkText - % (Page_older_image.title(withNamespace=True), + % (Page_older_image.title(with_ns=True), string), dupTalkHead, commTalk=dupComment_talk, commImage=dupComment_image, unver=True) @@ -1007,17 +1009,20 @@ if only_report: repme = ((self.list_entry + 'has the following duplicates ' "('''forced mode'''):") - % self.image.title(asUrl=True)) + % self.image.title(as_url=True)) else: repme = ((self.list_entry + 'has the following duplicates:') - % self.image.title(asUrl=True)) + % self.image.title(as_url=True))
for dup_page in duplicates: - if dup_page.title(asUrl=True) == self.image.title(asUrl=True): + if ( + dup_page.title(as_url=True) == + self.image.title(as_url=True) + ): # the image itself, not report also this as duplicate continue repme += '\n** [[:%s%s]]' % (self.image_namespace, - dup_page.title(asUrl=True)) + dup_page.title(as_url=True))
result = self.report_image(self.imageName, self.rep_page, self.com, repme, addings=False) @@ -1167,7 +1172,7 @@ # the list_licenses are loaded in the __init__ # (not to load them multimple times) if template in self.list_licenses: - self.license_selected = template.title(withNamespace=False) + self.license_selected = template.title(with_ns=False) self.seems_ok = True # let the last "fake" license normally detected self.license_found = self.license_selected @@ -1244,9 +1249,9 @@ for template_selected in templatesInTheImageRaw: tp = pywikibot.Page(self.site, template_selected) for templateReal in self.licenses_found: - if (tp.title(asUrl=True, withNamespace=False).lower() == - templateReal.title(asUrl=True, - withNamespace=False).lower()): + if (tp.title(as_url=True, with_ns=False).lower() == + templateReal.title(as_url=True, + with_ns=False).lower()): if templateReal not in self.allLicenses: self.allLicenses.append(templateReal) break @@ -1519,7 +1524,7 @@ # Modify summary text pywikibot.setAction(dels) canctext = di % extension - notification = din % {'file': self.image.title(asLink=True, + notification = din % {'file': self.image.title(as_link=True, textlink=True)} head = dih self.report(canctext, self.imageName, notification, head) diff --git a/scripts/clean_sandbox.py b/scripts/clean_sandbox.py index e469072..b5d527b 100755 --- a/scripts/clean_sandbox.py +++ b/scripts/clean_sandbox.py @@ -165,11 +165,11 @@ now = time.strftime("%d %b %Y %H:%M:%S (UTC)", time.gmtime()) for sandbox_page in self.generator: pywikibot.output(u'Preparing to process sandbox page %s' - % sandbox_page.title(asLink=True)) + % sandbox_page.title(as_link=True)) if sandbox_page.isRedirectPage(): pywikibot.warning( u'%s is a redirect page, cleaning it anyway' - % sandbox_page.title(asLink=True)) + % sandbox_page.title(as_link=True)) try: text = sandbox_page.text if not self.getOption('text'): diff --git a/scripts/commons_link.py b/scripts/commons_link.py index dcfe800..0cfd3fd 100755 --- 
a/scripts/commons_link.py +++ b/scripts/commons_link.py @@ -74,7 +74,7 @@ )(commons, page.title()) try: commonspage.get(get_redirect=True) - pagetitle = commonspage.title(withNamespace=not catmode) + pagetitle = commonspage.title(with_ns=not catmode) if page.title() == pagetitle: old_text = page.get() text = old_text diff --git a/scripts/commonscat.py b/scripts/commonscat.py index ac679a0..0ffb292 100755 --- a/scripts/commonscat.py +++ b/scripts/commonscat.py @@ -242,16 +242,16 @@ """Load the given page, do some changes, and save it.""" if not page.exists(): pywikibot.output(u'Page %s does not exist. Skipping.' - % page.title(asLink=True)) + % page.title(as_link=True)) elif page.isRedirectPage(): pywikibot.output(u'Page %s is a redirect. Skipping.' - % page.title(asLink=True)) + % page.title(as_link=True)) elif page.isCategoryRedirect(): pywikibot.output(u'Page %s is a category redirect. Skipping.' - % page.title(asLink=True)) + % page.title(as_link=True)) elif page.isDisambig(): pywikibot.output(u'Page %s is a disambiguation. Skipping.' - % page.title(asLink=True)) + % page.title(as_link=True)) else: self.addCommonscat(page)
@@ -275,11 +275,11 @@ for template in ignoreTemplates[page.site.code]: if not isinstance(template, tuple): for pageTemplate in templatesInThePage: - if pageTemplate.title(withNamespace=False) == template: + if pageTemplate.title(with_ns=False) == template: return True else: for (inPageTemplate, param) in templatesWithParams: - if inPageTemplate.title(withNamespace=False) == template[0] \ + if inPageTemplate.title(with_ns=False) == template[0] \ and template[1] in param[0].replace(' ', ''): return True return False @@ -353,12 +353,12 @@ if not linktitle and (page.title().lower() in oldcat.lower() or oldcat.lower() in page.title().lower()): linktitle = oldcat - if linktitle and newcat != page.title(withNamespace=False): + if linktitle and newcat != page.title(with_ns=False): newtext = re.sub(r'(?i){{%s|?[^{}]*(?:{{.*}})?}}' % oldtemplate, u'{{%s|%s|%s}}' % (newtemplate, newcat, linktitle), page.get()) - elif newcat == page.title(withNamespace=False): + elif newcat == page.title(with_ns=False): newtext = re.sub(r'(?i){{%s|?[^{}]*(?:{{.*}})?}}' % oldtemplate, u'{{%s}}' % newtemplate, @@ -425,7 +425,7 @@ commonscatNote = u'' # See if commonscat is present for template in wikipediaPage.templatesWithParams(): - templateTitle = template[0].title(withNamespace=False) + templateTitle = template[0].title(with_ns=False) if templateTitle == primaryCommonscat \ or templateTitle in commonscatAlternatives: commonscatTemplate = templateTitle @@ -436,7 +436,7 @@ if len(template[1]) > 2: commonscatNote = template[1][2] else: - commonscatTarget = wikipediaPage.title(withNamespace=False) + commonscatTarget = wikipediaPage.title(with_ns=False) return (commonscatTemplate, commonscatTarget, commonscatLinktext, commonscatNote) return None @@ -480,18 +480,20 @@ elif commonsPage.isRedirectPage(): pywikibot.log(u"getCommonscat: The category is a redirect") return self.checkCommonscatLink( - commonsPage.getRedirectTarget().title(withNamespace=False)) + 
commonsPage.getRedirectTarget().title(with_ns=False)) elif "Category redirect" in commonsPage.templates(): pywikibot.log(u"getCommonscat: The category is a category redirect") for template in commonsPage.templatesWithParams(): - if (template[0].title(withNamespace=False) == "Category redirect" and - len(template[1]) > 0): + if ( + template[0].title(with_ns=False) == 'Category redirect' + and len(template[1]) > 0 + ): return self.checkCommonscatLink(template[1][0]) elif commonsPage.isDisambig(): pywikibot.log(u"getCommonscat: The category is disambiguation") return u'' else: - return commonsPage.title(withNamespace=False) + return commonsPage.title(with_ns=False) except pywikibot.BadTitle: # Funky title so not correct return u'' @@ -533,7 +535,7 @@ site.code) template_page = pywikibot.Page(site, u'Template:' + primaryCommonscat) generator = template_page.getReferences(namespaces=14, - onlyTemplateInclusion=True) + only_template_inclusion=True) else: generator = genFactory.getCombinedGenerator()
diff --git a/scripts/create_categories.py b/scripts/create_categories.py index 70cf106..3ed977d 100755 --- a/scripts/create_categories.py +++ b/scripts/create_categories.py @@ -57,7 +57,7 @@
def treat(self, page): """Create category in commons for that page.""" - title = page.title(withNamespace=False) + title = page.title(with_ns=False)
newpage = pywikibot.Category(pywikibot.Site('commons', 'commons'), '%s %s' % (self.basename, title)) diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py index 4f22b36..9002385 100755 --- a/scripts/data_ingestion.py +++ b/scripts/data_ingestion.py @@ -97,7 +97,7 @@ """ hashObject = hashlib.sha1() hashObject.update(self.downloadPhoto().getvalue()) - return [page.title(withNamespace=False) for page in + return [page.title(with_ns=False) for page in self.site.allimages(sha1=base64.b16encode(hashObject.digest()))]
def getTitle(self, fmt): @@ -228,7 +228,7 @@
templates = configurationPage.templatesWithParams() for (template, params) in templates: - if template.title(withNamespace=False) == u'Data ingestion': + if template.title(with_ns=False) == 'Data ingestion': for param in params: (field, sep, value) = param.partition(u'=')
diff --git a/scripts/djvutext.py b/scripts/djvutext.py index b448cd3..bbe9ed6 100644 --- a/scripts/djvutext.py +++ b/scripts/djvutext.py @@ -71,7 +71,7 @@ super(DjVuTextBot, self).__init__(site=index.site, **kwargs) self._djvu = djvu self._index = index - self._prefix = self._index.title(withNamespace=False) + self._prefix = self._index.title(with_ns=False) self._page_ns = self.site._proofread_page_ns.custom_name
if not pages: @@ -198,7 +198,7 @@ raise pywikibot.NoPage(index)
pywikibot.output('uploading text from %s to %s' - % (djvu.file_djvu, index_page.title(asLink=True))) + % (djvu.file_djvu, index_page.title(as_link=True)))
bot = DjVuTextBot(djvu, index_page, pages, **options) bot.run() diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py index bfa8d1c..8c3ee81 100755 --- a/scripts/fixing_redirects.py +++ b/scripts/fixing_redirects.py @@ -152,8 +152,8 @@ section): pywikibot.warning( 'Section #{0} not found on page {1}'.format( - section, target.title(asLink=True, - withSection=False))) + section, target.title(as_link=True, + with_section=False))) continue else: continue diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py index 06f386f..bef2894 100755 --- a/scripts/harvest_template.py +++ b/scripts/harvest_template.py @@ -173,11 +173,11 @@ pywikibot.output('Finding redirects...') if temp.isRedirectPage(): temp = temp.getRedirectTarget() - titles = [page.title(withNamespace=False) - for page in temp.getReferences(redirectsOnly=True, + titles = [page.title(with_ns=False) + for page in temp.getReferences(filter_redirects=True, namespaces=[10], follow_redirects=False)] - titles.append(temp.title(withNamespace=False)) + titles.append(temp.title(with_ns=False)) return titles
def _template_link_target(self, item, link_text): @@ -236,7 +236,7 @@ # Clean up template try: template = pywikibot.Page(page.site, template, - ns=10).title(withNamespace=False) + ns=10).title(with_ns=False) except pywikibot.exceptions.InvalidTitle: pywikibot.error( "Failed parsing template; '%s' should be the template name." @@ -287,14 +287,14 @@ elif claim.type == 'commonsMedia': commonssite = pywikibot.Site('commons', 'commons') imagelink = pywikibot.Link( - value, source=commonssite, defaultNamespace=6) + value, source=commonssite, default_namespace=6) image = pywikibot.FilePage(imagelink) if image.isRedirectPage(): image = pywikibot.FilePage(image.getRedirectTarget()) if not image.exists(): pywikibot.output( "{0} doesn't exist. I can't link to it" - ''.format(image.title(asLink=True))) + ''.format(image.title(as_link=True))) continue claim.setTarget(image) else: diff --git a/scripts/illustrate_wikidata.py b/scripts/illustrate_wikidata.py index 484838a..94e79e4 100755 --- a/scripts/illustrate_wikidata.py +++ b/scripts/illustrate_wikidata.py @@ -68,14 +68,14 @@ newclaim = pywikibot.Claim(self.repo, self.wdproperty) commonssite = pywikibot.Site("commons", "commons") imagelink = pywikibot.Link(imagename, source=commonssite, - defaultNamespace=6) + default_namespace=6) image = pywikibot.FilePage(imagelink) if image.isRedirectPage(): image = pywikibot.FilePage(image.getRedirectTarget())
if not image.exists(): pywikibot.output("%s doesn't exist so I can't link to it" - % image.title(asLink=True)) + % image.title(as_link=True)) return
newclaim.setTarget(image) diff --git a/scripts/imagecopy.py b/scripts/imagecopy.py index 72c1e5a..8787eda 100644 --- a/scripts/imagecopy.py +++ b/scripts/imagecopy.py @@ -252,7 +252,7 @@ """Run the bot.""" tosend = {'language': self.imagePage.site.lang.encode('utf-8'), 'image': self.imagePage.title( - withNamespace=False).encode('utf-8'), + with_ns=False).encode('utf-8'), 'newname': self.newname.encode('utf-8'), 'project': self.imagePage.site.family.name.encode('utf-8'), 'username': '', @@ -315,7 +315,7 @@ commentText = i18n.twtranslate( self.imagePage.site, 'commons-file-now-available', - {'localfile': self.imagePage.title(withNamespace=False), + {'localfile': self.imagePage.title(with_ns=False), 'commonsfile': self.newname})
pywikibot.showDiff(self.imagePage.get(), imtxt + addTemplate) @@ -327,15 +327,15 @@ moveSummary = i18n.twtranslate( self.imagePage.site, 'commons-file-moved', - {'localfile': self.imagePage.title(withNamespace=False), + {'localfile': self.imagePage.title(with_ns=False), 'commonsfile': self.newname})
# If the image is uploaded under a different name, replace all # instances - if self.imagePage.title(withNamespace=False) != self.newname: + if self.imagePage.title(with_ns=False) != self.newname: imagebot = ImageRobot( generator=self.preloadingGen, - oldImage=self.imagePage.title(withNamespace=False), + oldImage=self.imagePage.title(with_ns=False), newImage=self.newname, summary=moveSummary, always=True, loose=True) imagebot.run() @@ -535,7 +535,7 @@ (datetime, username, resolution, size, comment) = imagepage.getFileVersionHistory().pop() if always: - newname = imagepage.title(withNamespace=False) + newname = imagepage.title(with_ns=False) CommonsPage = pywikibot.Page(pywikibot.Site('commons', 'commons'), u'File:%s' % newname) @@ -545,7 +545,7 @@ while True: # Do the TkdialogIC to accept/reject and change te name (newname, skip) = TkdialogIC( - imagepage.title(withNamespace=False), + imagepage.title(with_ns=False), imagepage.get(), username, imagepage.permalink(with_protocol=True), imagepage.templates()).getnewname() @@ -557,7 +557,7 @@ # Did we enter a new name? if len(newname) == 0: # Take the old name - newname = imagepage.title(withNamespace=False) + newname = imagepage.title(with_ns=False) else: newname = newname.decode('utf-8')
diff --git a/scripts/imagecopy_self.py b/scripts/imagecopy_self.py index 849e6a9..e630467 100644 --- a/scripts/imagecopy_self.py +++ b/scripts/imagecopy_self.py @@ -405,7 +405,7 @@ licensetemplate = self.getNewLicensetemplate(imagepage) categories = self.getNewCategories(imagepage) return {u'imagepage': imagepage, - u'filename': imagepage.title(withNamespace=False), + 'filename': imagepage.title(with_ns=False), u'description': description, u'date': date, u'source': source, @@ -1001,7 +1001,7 @@
commentText = i18n.twtranslate( imagepage.site(), 'commons-file-now-available', - {'localfile': imagepage.title(withNamespace=False), + {'localfile': imagepage.title(with_ns=False), 'commonsfile': filename})
pywikibot.showDiff(imagepage.get(), imtxt + addTemplate) @@ -1009,18 +1009,18 @@
def replaceUsage(self, imagepage, filename): """Replace all usage if image is uploaded under a different name.""" - if imagepage.title(withNamespace=False) != filename: + if imagepage.title(with_ns=False) != filename: gen = pagegenerators.FileLinksGenerator(imagepage) preloadingGen = pagegenerators.PreloadingGenerator(gen)
moveSummary = i18n.twtranslate( imagepage.site(), 'commons-file-moved', - {'localfile': imagepage.title(withNamespace=False), + {'localfile': imagepage.title(with_ns=False), 'commonsfile': filename})
imagebot = image.ImageRobot( generator=preloadingGen, - oldImage=imagepage.title(withNamespace=False), + oldImage=imagepage.title(with_ns=False), newImage=filename, summary=moveSummary, always=True, loose=True) imagebot.run() diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py index 5ba8b05..527a96f 100755 --- a/scripts/imagerecat.py +++ b/scripts/imagerecat.py @@ -65,12 +65,12 @@ blacklistPage = pywikibot.Page(pywikibot.Site(u'commons', u'commons'), u'User:Multichill/Category_blacklist') for cat in blacklistPage.linkedPages(): - category_blacklist.append(cat.title(withNamespace=False)) + category_blacklist.append(cat.title(with_ns=False))
countryPage = pywikibot.Page(pywikibot.Site(u'commons', u'commons'), u'User:Multichill/Countries') for country in countryPage.linkedPages(): - countries.append(country.title(withNamespace=False)) + countries.append(country.title(with_ns=False)) return
@@ -109,7 +109,7 @@ """Get the categories currently on the image.""" result = [] for cat in imagepage.categories(): - result.append(cat.title(withNamespace=False)) + result.append(cat.title(with_ns=False)) return list(set(result))
@@ -130,14 +130,14 @@ family = site.family.name if lang == u'commons' and family == u'commons': parameters = urlencode( - {'i': imagepage.title(withNamespace=False).encode('utf-8'), + {'i': imagepage.title(with_ns=False).encode('utf-8'), 'r': 'on', 'go-clean': 'Find+Categories', 'p': search_wikis, 'cl': hint_wiki}) elif family == u'wikipedia': parameters = urlencode( - {'i': imagepage.title(withNamespace=False).encode('utf-8'), + {'i': imagepage.title(with_ns=False).encode('utf-8'), 'r': 'on', 'go-move': 'Find+Categories', 'p': search_wikis, @@ -329,7 +329,7 @@ if categoryPage.isCategoryRedirect(): result.append( categoryPage.getCategoryRedirectTarget().title( - withNamespace=False)) + with_ns=False)) else: result.append(cat) return result @@ -363,8 +363,8 @@ pywikibot.Site(u'commons', u'commons'), u'Category:' + bc) for subcategory in category.subcategories(): for country in listCountries: - if subcategory.title(withNamespace=False).endswith(country): - result.append(subcategory.title(withNamespace=False)) + if subcategory.title(with_ns=False).endswith(country): + result.append(subcategory.title(with_ns=False)) return list(set(result))
diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py index 42e92fd..3ffd488 100755 --- a/scripts/imagetransfer.py +++ b/scripts/imagetransfer.py @@ -202,7 +202,7 @@ image = imagelist[i] pywikibot.output('-' * 60) pywikibot.output(u"%s. Found image: %s" - % (i, image.title(asLink=True))) + % (i, image.title(as_link=True))) try: # Show the image description page's contents pywikibot.output(image.get()) diff --git a/scripts/interwiki.py b/scripts/interwiki.py index f11ad05..6d0304c 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -2110,7 +2110,7 @@ page.site.namespaces[int(page.namespace())]) if page_namespace.case == 'first-letter': until = first_upper(until) - if page.title(withNamespace=False) > until: + if page.title(with_ns=False) > until: break
self.add(page, hints=self.conf.hints) @@ -2333,7 +2333,7 @@ if tmpl != []: templates = page.templatesWithParams() for template in templates: - if template[0].title(withNamespace=False).lower() in tmpl: + if template[0].title(with_ns=False).lower() in tmpl: return False return True
@@ -2508,7 +2508,7 @@ if optContinue: if pages: last = pages[-1] - nextPage = last.title(withNamespace=False) + '!' + nextPage = last.title(with_ns=False) + '!' namespace = last.namespace() else: pywikibot.output( diff --git a/scripts/interwikidata.py b/scripts/interwikidata.py index 5d4e14c..d386574 100644 --- a/scripts/interwikidata.py +++ b/scripts/interwikidata.py @@ -76,13 +76,13 @@ not self.getOption('ignore_ns')): output('{page} is not in allowed namespaces, skipping' .format(page=self.current_page.title( - asLink=True))) + as_link=True))) return False self.iwlangs = pywikibot.textlib.getLanguageLinks( self.current_page.text, insite=self.current_page.site) if not self.iwlangs: output('No interlanguagelinks on {page}'.format( - page=self.current_page.title(asLink=True))) + page=self.current_page.title(as_link=True))) return False try: item = pywikibot.ItemPage.fromPage(self.current_page) @@ -122,7 +122,7 @@ data['sitelinks'][dbname] = {'site': dbname, 'title': title} data['labels'][site.lang] = {'language': site.lang, 'value': title} summary = ('Bot: New item with sitelink(s) from %s' - % self.current_page.title(asLink=True, insite=self.repo)) + % self.current_page.title(as_link=True, insite=self.repo))
item = pywikibot.ItemPage(self.repo) item.editEntity(data, new='item', summary=summary) @@ -148,7 +148,7 @@ if set(dbnames) - set(self.current_item.sitelinks.keys()): if not self.handle_complicated(): warning('Interwiki conflict in %s, skipping...' % - self.current_page.title(asLink=True)) + self.current_page.title(as_link=True)) return False output('Cleaning up the page') new_text = pywikibot.textlib.removeLanguageLinks( @@ -161,13 +161,13 @@ for iw_page in self.iwlangs.values(): if not iw_page.exists(): warning('Interwiki %s does not exist, skipping...' % - iw_page.title(asLink=True)) + iw_page.title(as_link=True)) continue try: wd_data.add(pywikibot.ItemPage.fromPage(iw_page)) except pywikibot.NoPage: output('Interwiki %s does not have an item' % - iw_page.title(asLink=True)) + iw_page.title(as_link=True)) return wd_data
def try_to_add(self): @@ -178,7 +178,7 @@ return None if len(wd_data) > 1: warning('Interwiki conflict in %s, skipping...' % - self.current_page.title(asLink=True)) + self.current_page.title(as_link=True)) return False item = list(wd_data).pop() if self.current_page.site.dbName() in item.sitelinks: @@ -187,7 +187,7 @@ return False output('Adding link to %s' % item.title()) item.setSitelink(self.current_page, summary='Added %s' % ( - self.current_page.title(asLink=True, insite=item.site))) + self.current_page.title(as_link=True, insite=item.site))) return item
def try_to_merge(self, item): @@ -198,7 +198,7 @@ return None if len(wd_data) > 1: warning('Interwiki conflict in %s, skipping...' % - self.current_page.title(asLink=True)) + self.current_page.title(as_link=True)) return False target_item = list(wd_data).pop() try: diff --git a/scripts/isbn.py b/scripts/isbn.py index ef19b41..f0bec35 100755 --- a/scripts/isbn.py +++ b/scripts/isbn.py @@ -1526,10 +1526,10 @@ % page.title()) except pywikibot.NoPage: pywikibot.output(u"Page %s does not exist" - % page.title(asLink=True)) + % page.title(as_link=True)) except pywikibot.IsRedirectPage: pywikibot.output(u"Page %s is a redirect; skipping." - % page.title(asLink=True)) + % page.title(as_link=True))
def run(self): """Run the bot.""" diff --git a/scripts/listpages.py b/scripts/listpages.py index 0529ef1..b5d3ddb 100755 --- a/scripts/listpages.py +++ b/scripts/listpages.py @@ -245,7 +245,7 @@ additional_text='Page {0} already exists.\n' 'You can use the -overwrite argument to ' 'replace the content of this page.' - .format(page_target.title(asLink=True))) + .format(page_target.title(as_link=True))) return False if re.match('^[a-z_-]+$', summary): summary = i18n.twtranslate(site, summary) diff --git a/scripts/makecat.py b/scripts/makecat.py index 14f7997..743a67b 100755 --- a/scripts/makecat.py +++ b/scripts/makecat.py @@ -69,7 +69,7 @@ cl = checklinks if linkterm: actualworkingcat = pywikibot.Category(mysite, workingcat.title(), - sortKey=linkterm) + sort_key=linkterm) else: actualworkingcat = workingcat if realinclude: diff --git a/scripts/misspelling.py b/scripts/misspelling.py index 2ea30ba..7aca9f6 100755 --- a/scripts/misspelling.py +++ b/scripts/misspelling.py @@ -132,7 +132,7 @@ if isinstance(templates, basestring): templates = (templates, ) for template, params in disambPage.templatesWithParams(): - if template.title(withNamespace=False) in templates: + if template.title(with_ns=False) in templates: # The correct spelling is in the last parameter. 
correctSpelling = params[-1] # On de.wikipedia, there are some cases where the diff --git a/scripts/movepages.py b/scripts/movepages.py index 1bbeb60..9e82d93 100755 --- a/scripts/movepages.py +++ b/scripts/movepages.py @@ -84,10 +84,12 @@ if not msg: msg = i18n.twtranslate(page.site, 'movepages-moving') pywikibot.output(u'Moving page %s to [[%s]]' - % (page.title(asLink=True), + % (page.title(as_link=True), newPageTitle)) - page.move(newPageTitle, reason=msg, movetalkpage=self.getOption('movetalkpage'), - deleteAndMove=self.getOption('noredirect')) + page.move( + newPageTitle, reason=msg, + movetalk=self.getOption('movetalkpage'), + noredirect=self.getOption('noredirect')) except pywikibot.PageRelatedError as error: pywikibot.output(error)
@@ -97,7 +99,7 @@ if self.getOption('skipredirects') and page.isRedirectPage(): pywikibot.output(u'Page %s is a redirect; skipping.' % page.title()) return - pagetitle = page.title(withNamespace=False) + pagetitle = page.title(with_ns=False) namesp = page.site.namespace(page.namespace()) if self.appendAll: newPageTitle = (u'%s%s%s' @@ -148,7 +150,7 @@ self.replacePattern = pywikibot.input( u'Enter the replace pattern:') self.regex = re.compile(searchPattern) - if page.title() == page.title(withNamespace=False): + if page.title() == page.title(with_ns=False): newPageTitle = self.regex.sub(self.replacePattern, page.title()) else: @@ -156,7 +158,7 @@ 'namespace prefix "%s:"?' % namesp, automatic_quit=False): newPageTitle = self.regex.sub( - self.replacePattern, page.title(withNamespace=False)) + self.replacePattern, page.title(with_ns=False)) self.noNamespace = True else: newPageTitle = self.regex.sub(self.replacePattern, diff --git a/scripts/newitem.py b/scripts/newitem.py index 50a06f1..82df37c 100755 --- a/scripts/newitem.py +++ b/scripts/newitem.py @@ -70,13 +70,13 @@ page.touch() except NoPage: pywikibot.error('Page {0} does not exist.'.format( - page.title(asLink=True))) + page.title(as_link=True))) except LockedPage: pywikibot.error('Page {0} is locked.'.format( - page.title(asLink=True))) + page.title(as_link=True))) except PageNotSaved: pywikibot.error('Page {0} not saved.'.format( - page.title(asLink=True))) + page.title(as_link=True)))
def _callback(self, page, exc): if exc is None: diff --git a/scripts/noreferences.py b/scripts/noreferences.py index 7aaef7e..06dac87 100755 --- a/scripts/noreferences.py +++ b/scripts/noreferences.py @@ -689,24 +689,24 @@ text = page.text except pywikibot.NoPage: pywikibot.warning('Page %s does not exist?!' - % page.title(asLink=True)) + % page.title(as_link=True)) continue except pywikibot.IsRedirectPage: pywikibot.output(u"Page %s is a redirect; skipping." - % page.title(asLink=True)) + % page.title(as_link=True)) continue except pywikibot.LockedPage: pywikibot.warning('Page %s is locked?!' - % page.title(asLink=True)) + % page.title(as_link=True)) continue if page.isDisambig(): pywikibot.output(u"Page %s is a disambig; skipping." - % page.title(asLink=True)) + % page.title(as_link=True)) continue if self.site.sitename == 'wikipedia:en' and page.isIpEdit(): pywikibot.warning( u"Page %s is edited by IP. Possible vandalized" - % page.title(asLink=True)) + % page.title(as_link=True)) continue if self.lacksReferences(text): newText = self.addReferences(text) @@ -714,14 +714,14 @@ self.userPut(page, page.text, newText, summary=self.comment) except pywikibot.EditConflict: pywikibot.warning('Skipping %s because of edit conflict' - % page.title(asLink=True)) + % page.title(as_link=True)) except pywikibot.SpamfilterError as e: pywikibot.warning( u'Cannot change %s because of blacklist entry %s' - % (page.title(asLink=True), e.url)) + % (page.title(as_link=True), e.url)) except pywikibot.LockedPage: pywikibot.warning('Skipping %s (locked page)' % - page.title(asLink=True)) + page.title(as_link=True))
def main(*args): diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py index d770cb3..f003f97 100755 --- a/scripts/nowcommons.py +++ b/scripts/nowcommons.py @@ -218,7 +218,7 @@ def generator(self): """Generator method.""" gens = (t.getReferences(follow_redirects=True, namespaces=[6], - onlyTemplateInclusion=True) + only_template_inclusion=True) for t in self.nc_templates) gen = chain(*gens) gen = pg.DuplicateFilterPageGenerator(gen) @@ -231,7 +231,7 @@ for templateName, params in localImagePage.templatesWithParams(): if templateName in self.nc_templates: if params == []: - filenameOnCommons = localImagePage.title(withNamespace=False) + filenameOnCommons = localImagePage.title(with_ns=False) elif self.site.lang in namespaceInTemplate: skip = False filenameOnCommons = None @@ -245,7 +245,7 @@ break skip = True if not filenameOnCommons: - filenameOnCommons = localImagePage.title(withNamespace=False) + filenameOnCommons = localImagePage.title(with_ns=False) else: val = params[0].split('=') if len(val) == 1: @@ -273,24 +273,24 @@ continue commonsImagePage = pywikibot.FilePage(commons, 'Image:%s' % filenameOnCommons) - if (localImagePage.title(withNamespace=False) != - commonsImagePage.title(withNamespace=False)): + if (localImagePage.title(with_ns=False) != + commonsImagePage.title(with_ns=False)): usingPages = list(localImagePage.usingPages()) if usingPages and usingPages != [localImagePage]: pywikibot.output(color_format( '"{lightred}{0}{default}" is still used in {1} pages.', - localImagePage.title(withNamespace=False), + localImagePage.title(with_ns=False), len(usingPages))) if self.getOption('replace') is True: pywikibot.output(color_format( 'Replacing "{lightred}{0}{default}" by ' '"{lightgreen}{1}{default}".', - localImagePage.title(withNamespace=False), - commonsImagePage.title(withNamespace=False))) + localImagePage.title(with_ns=False), + commonsImagePage.title(with_ns=False))) bot = ImageBot( pg.FileLinksGenerator(localImagePage), - 
localImagePage.title(withNamespace=False), - commonsImagePage.title(withNamespace=False), + localImagePage.title(with_ns=False), + commonsImagePage.title(with_ns=False), '', self.getOption('replacealways'), self.getOption('replaceloose')) bot.run() @@ -303,9 +303,8 @@ pg.FileLinksGenerator( localImagePage), localImagePage.title( - withNamespace=False, asUrl=True), - commonsImagePage.title( - withNamespace=False), + with_ns=False, as_url=True), + commonsImagePage.title(with_ns=False), '', self.getOption('replacealways'), self.getOption('replaceloose')) bot.run() @@ -320,7 +319,7 @@ pywikibot.output(color_format( 'No page is using "{lightgreen}{0}{default}" ' 'anymore.', - localImagePage.title(withNamespace=False))) + localImagePage.title(with_ns=False))) commonsText = commonsImagePage.get() if self.getOption('replaceonly') is False: if sha1 == commonsImagePage.latest_file_info.sha1: diff --git a/scripts/protect.py b/scripts/protect.py index 6e78bad..bd1f24c 100755 --- a/scripts/protect.py +++ b/scripts/protect.py @@ -106,7 +106,7 @@ self.current_page = page if not self.user_confirm( 'Do you want to change the protection level of %s?' 
- % page.title(asLink=True, forceInterwiki=True)): + % page.title(as_link=True, force_interwiki=True)): return applicable = page.applicable_protections() protections = dict( diff --git a/scripts/redirect.py b/scripts/redirect.py index 59a00c9..647d76f 100755 --- a/scripts/redirect.py +++ b/scripts/redirect.py @@ -215,7 +215,7 @@ gen.set_maximum_items(self.api_number) for p in gen: done = (self.api_until and - p.title(withNamespace=False) >= self.api_until) + p.title(with_ns=False) >= self.api_until) if done: return yield p @@ -389,7 +389,7 @@ # to it need to be changed try: for page in moved_page.getReferences(follow_redirects=True, - redirectsOnly=True): + filter_redirects=True): yield page except pywikibot.NoPage: # original title must have been deleted after move @@ -480,7 +480,7 @@ content = page.get(get_redirect=True) except pywikibot.SectionError: content_page = pywikibot.Page(page.site, - page.title(withSection=False)) + page.title(with_section=False)) content = content_page.get(get_redirect=True) content = self.sdtemplate + '\n' + content try: @@ -554,8 +554,8 @@ if not done and self.user_confirm( u'Redirect target %s does not exist.\n' u'Do you want to delete %s?' - % (targetPage.title(asLink=True), - redir_page.title(asLink=True))): + % (targetPage.title(as_link=True), + redir_page.title(as_link=True))): self.delete_redirect(redir_page, 'redirect-remove-broken') elif not (self.getOption('delete') or movedTarget): pywikibot.output( @@ -563,7 +563,7 @@ except pywikibot.IsRedirectPage: pywikibot.output( "Redirect target {0} is also a redirect! {1}".format( - targetPage.title(asLink=True), + targetPage.title(as_link=True), "Won't delete anything." if self.getOption('delete') else "Skipping.")) else: @@ -571,7 +571,7 @@ # it exists and is not a redirect: no reason to touch it. pywikibot.output( "Redirect target {0} does exist! {1}".format( - targetPage.title(asLink=True), + targetPage.title(as_link=True), "Won't delete anything." 
if self.getOption('delete') else "Skipping."))
@@ -624,7 +624,7 @@ else: pywikibot.output( u' Links to: %s.' - % targetPage.title(asLink=True)) + % targetPage.title(as_link=True)) try: mw_msg = targetPage.site.mediawiki_message( 'wikieditor-toolbar-tool-redirect-example') @@ -639,10 +639,10 @@ # watch out for redirect loops if redirList.count(u'%s:%s' % (targetPage.site.lang, - targetPage.title(withSection=False))): + targetPage.title(with_section=False))): pywikibot.warning( u'Redirect target %s forms a redirect loop.' - % targetPage.title(asLink=True)) + % targetPage.title(as_link=True)) break # FIXME: doesn't work. edits twice! if self.getOption('delete'): # Delete the two redirects @@ -673,7 +673,7 @@ redir.set_redirect_target(targetPage, keep_section=True, save=False) summary = i18n.twtranslate(self.site, 'redirect-fix-double', - {'to': targetPage.title(asLink=True)} + {'to': targetPage.title(as_link=True)} ) pywikibot.showDiff(oldText, redir.text) if self.user_confirm(u'Do you want to accept the changes?'): diff --git a/scripts/reflinks.py b/scripts/reflinks.py index 7591f01..632bafe 100755 --- a/scripts/reflinks.py +++ b/scripts/reflinks.py @@ -438,7 +438,7 @@ self.stop_page_rev_id = self.stop_page.latest_revision_id else: pywikibot.warning('The stop page %s does not exist' - % self.stop_page.title(asLink=True)) + % self.stop_page.title(as_link=True))
# Regex to grasp content-type meta HTML tag in HTML source self.META_CONTENT = re.compile(br'(?i)<meta[^>]*content-type[^>]*>') @@ -513,15 +513,15 @@ new_text = page.get() if not page.canBeEdited(): pywikibot.output(u"You can't edit page %s" - % page.title(asLink=True)) + % page.title(as_link=True)) continue except pywikibot.NoPage: pywikibot.output('Page {} not found' - .format(page.title(asLink=True))) + .format(page.title(as_link=True))) continue except pywikibot.IsRedirectPage: pywikibot.output(u'Page %s is a redirect' - % page.title(asLink=True)) + % page.title(as_link=True)) continue
# for each link to change @@ -589,7 +589,7 @@ if f.status != requests.codes.ok: pywikibot.output(u'HTTP error (%s) for %s on %s' % (f.status, ref.url, - page.title(asLink=True)), + page.title(as_link=True)), toStdout=True) # 410 Gone, indicates that the resource has been # purposely removed @@ -607,7 +607,7 @@ # in [[fr:Cyanure]] pywikibot.output(color_format( '{lightred}Bad link{default} : {0} in {1}', - ref.url, page.title(asLink=True))) + ref.url, page.title(as_link=True))) continue except (URLError, socket.error, @@ -756,7 +756,7 @@ if actual_rev != self.stop_page_rev_id: pywikibot.output( '%s has been edited : Someone wants us to stop.' - % self.stop_page.title(asLink=True)) + % self.stop_page.title(as_link=True)) return
diff --git a/scripts/replace.py b/scripts/replace.py index 78cd405..9b77210 100755 --- a/scripts/replace.py +++ b/scripts/replace.py @@ -636,13 +636,13 @@ 'Skipping fix "{0}" on {1} because the title is on ' 'the exceptions list.'.format( replacement.container.name, - page.title(asLink=True))) + page.title(as_link=True))) skipped_containers.add(replacement.container.name) else: pywikibot.output( 'Skipping unnamed replacement ({0}) on {1} because ' 'the title is on the exceptions list.'.format( - replacement.description, page.title(asLink=True))) + replacement.description, page.title(as_link=True))) continue old_text = new_text new_text = textlib.replaceExcept( @@ -671,9 +671,10 @@ # This is an async put callback if not isinstance(err, Exception): self.changed_pages += 1 - self._pending_processed_titles.put((page.title(asLink=True), True)) + self._pending_processed_titles.put((page.title( + as_link=True), True)) else: # unsuccessful pages - self._pending_processed_titles.put((page.title(asLink=True), + self._pending_processed_titles.put((page.title(as_link=True), False))
def _replace_async_callback(self, page, err): @@ -721,17 +722,18 @@ if self.isTitleExcepted(page.title()): pywikibot.output( u'Skipping %s because the title is on the exceptions list.' - % page.title(asLink=True)) + % page.title(as_link=True)) continue try: # Load the page's text from the wiki original_text = page.get(get_redirect=True) if not page.canBeEdited(): pywikibot.output(u"You can't edit page %s" - % page.title(asLink=True)) + % page.title(as_link=True)) continue except pywikibot.NoPage: - pywikibot.output('Page %s not found' % page.title(asLink=True)) + pywikibot.output('Page %s not found' % page.title( + as_link=True)) continue applied = set() new_text = original_text @@ -741,7 +743,7 @@ if self.isTextExcepted(new_text): pywikibot.output(u'Skipping %s because it contains text ' u'that is on the exceptions list.' - % page.title(asLink=True)) + % page.title(as_link=True)) break while new_text != last_text: last_text = new_text @@ -751,7 +753,7 @@ break if new_text == original_text: pywikibot.output(u'No changes were necessary in %s' - % page.title(asLink=True)) + % page.title(as_link=True)) break if hasattr(self, 'addedCat'): # Fetch only categories in wikitext, otherwise the others diff --git a/scripts/replicate_wiki.py b/scripts/replicate_wiki.py index 7545031..74e1d8d 100755 --- a/scripts/replicate_wiki.py +++ b/scripts/replicate_wiki.py @@ -205,7 +205,7 @@
for site in self.sites: if dest_ns is not None: - page2 = Page(site, page1.title(withNamespace=False), dest_ns) + page2 = Page(site, page1.title(with_ns=False), dest_ns) pywikibot.output("\nCross namespace, new title: %s" % page2.title()) else: diff --git a/scripts/revertbot.py b/scripts/revertbot.py index 7e51704..0278d54 100755 --- a/scripts/revertbot.py +++ b/scripts/revertbot.py @@ -106,7 +106,7 @@ comment += ': ' + self.comment pywikibot.output(color_format( '\n\n>>> {lightpurple}{0}{default} <<<', - page.title(asLink=True, forceInterwiki=True, textlink=True))) + page.title(as_link=True, force_interwiki=True, textlink=True))) if not self.rollback: old = page.text page.text = page.getOldVersion(rev.revid) diff --git a/scripts/script_wui.py b/scripts/script_wui.py index 020acd5..50c9bf4 100755 --- a/scripts/script_wui.py +++ b/scripts/script_wui.py @@ -161,7 +161,7 @@ self.refs[item].get(force=True) # load all page contents except pywikibot.NoPage: pywikibot.error("The configuation page %s doesn't exists" - % self.refs[item].title(asLink=True)) + % self.refs[item].title(as_link=True)) raise # init background timer pywikibot.output(u'** Starting crontab background timer thread') diff --git a/scripts/selflink.py b/scripts/selflink.py index 2f77f27..38ee6df 100755 --- a/scripts/selflink.py +++ b/scripts/selflink.py @@ -39,7 +39,7 @@ self._page = page
def handle(self): - return "'''{0}'''".format(self._page.title(withSection=False)) + return "'''{0}'''".format(self._page.title(with_section=False))
class SelflinkBot(MultipleSitesBot, BaseUnlinkBot): @@ -68,7 +68,7 @@ if '<imagemap>' in self.current_page.text: pywikibot.output( u'Skipping page %s because it contains an image map.' - % self.current_page.title(asLink=True)) + % self.current_page.title(as_link=True)) return self.unlink(self.current_page)
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py index 81b2f05..50d4ce8 100755 --- a/scripts/solve_disambiguation.py +++ b/scripts/solve_disambiguation.py @@ -403,7 +403,7 @@ # TODO: start yielding before all referring pages have been found refs = [ page for page in self.disambPage.getReferences( - withTemplateInclusion=False, + with_template_inclusion=False, namespaces=0 if self.main_only else None ) ] @@ -486,7 +486,7 @@ @rtype: bool
""" - return self.enabled and refPage.title(asUrl=True) in self.ignorelist + return self.enabled and refPage.title(as_url=True) in self.ignorelist
def ignore(self, refPage): """Write page to ignorelist. @@ -499,11 +499,11 @@ # Skip this occurrence next time. filename = config.datafilepath( 'disambiguations', - self.disambPage.title(asUrl=True) + '.txt') + self.disambPage.title(as_url=True) + '.txt') try: # Open file for appending. If none exists, create a new one. with codecs.open(filename, 'a', 'utf-8') as f: - f.write(refPage.title(asUrl=True) + '\n') + f.write(refPage.title(as_url=True) + '\n') except IOError: pass
@@ -708,7 +708,7 @@ nochange = True
for page in chain( - (disambPage,), disambPage.getReferences(redirectsOnly=True) + (disambPage,), disambPage.getReferences(filter_redirects=True) ): treat_result = self.treat_disamb_only(refPage, page) if treat_result == 'nextpage': diff --git a/scripts/surnames_redirects.py b/scripts/surnames_redirects.py index e956ff8..5071c2d 100755 --- a/scripts/surnames_redirects.py +++ b/scripts/surnames_redirects.py @@ -84,10 +84,10 @@ new_page = pywikibot.Page(site, possible_name) if new_page.exists(): pywikibot.output('%s already exists, skipping...' - % new_page.title(asLink=True)) + % new_page.title(as_link=True)) else: pywikibot.output('%s doesn't exist' - % new_page.title(asLink=True)) + % new_page.title(as_link=True)) choice = pywikibot.input_yn( 'Do you want to create a redirect?') if choice: diff --git a/scripts/template.py b/scripts/template.py index de6db92..8732786 100755 --- a/scripts/template.py +++ b/scripts/template.py @@ -159,7 +159,7 @@ # The old syntax, {{msg:vfd}}, will also be found. templatePatterns = [] for template in self.templates: - templatePattern = template.title(withNamespace=False) + templatePattern = template.title(with_ns=False) if mysite.namespaces[10].case == 'first-letter': templatePattern = '[%s%s]%s' % (templatePattern[0].upper(), templatePattern[0].lower(), @@ -320,7 +320,7 @@ else: if not genFactory.handleArg(arg): templateName = pywikibot.Page(site, arg, ns=10) - templateNames.append(templateName.title(withNamespace=False)) + templateNames.append(templateName.title(with_ns=False))
if not templateNames: pywikibot.bot.suggest_help(missing_parameters=['templates']) diff --git a/scripts/templatecount.py b/scripts/templatecount.py index e22c726..ff383a0 100755 --- a/scripts/templatecount.py +++ b/scripts/templatecount.py @@ -141,7 +141,7 @@ for template in templates: transcluding_array = [] gen = pywikibot.Page(mysite, template, ns=mytpl).getReferences( - namespaces=namespaces, onlyTemplateInclusion=True) + namespaces=namespaces, only_template_inclusion=True) for page in gen: transcluding_array.append(page) yield template, transcluding_array diff --git a/scripts/touch.py b/scripts/touch.py index 7dc97cf..b5ee1bd 100755 --- a/scripts/touch.py +++ b/scripts/touch.py @@ -47,13 +47,13 @@ page.touch(botflag=self.getOption('botflag')) except pywikibot.NoPage: pywikibot.error(u"Page %s does not exist." - % page.title(asLink=True)) + % page.title(as_link=True)) except pywikibot.LockedPage: pywikibot.error(u"Page %s is locked." - % page.title(asLink=True)) + % page.title(as_link=True)) except pywikibot.PageNotSaved: pywikibot.error(u"Page %s not saved." - % page.title(asLink=True)) + % page.title(as_link=True))
class PurgeBot(MultipleSitesBot): @@ -63,7 +63,7 @@ def treat(self, page): """Purge the given page.""" pywikibot.output(u'Page %s%s purged' - % (page.title(asLink=True), + % (page.title(as_link=True), "" if page.purge() else " not"))
diff --git a/scripts/transferbot.py b/scripts/transferbot.py index 766489a..48d2846 100755 --- a/scripts/transferbot.py +++ b/scripts/transferbot.py @@ -136,33 +136,33 @@
for page in gen: target_title = (prefix + page.namespace().canonical_prefix() - + page.title(withNamespace=False)) + + page.title(with_ns=False)) targetpage = pywikibot.Page(tosite, target_title) edithistpage = pywikibot.Page(tosite, target_title + '/edithistory') summary = 'Moved page from {old} ([[{new}/edithistory|history]])'\ - .format(old=page.title(asLink=True, insite=tosite), + .format(old=page.title(as_link=True, insite=tosite), new=targetpage.title() if not targetpage.namespace().subpages else '')
if targetpage.exists() and not overwrite: pywikibot.output( u"Skipped %s (target page %s exists)" % ( - page.title(asLink=True), - targetpage.title(asLink=True) + page.title(as_link=True), + targetpage.title(as_link=True) ) ) continue
pywikibot.output(u"Moving %s to %s..." - % (page.title(asLink=True), - targetpage.title(asLink=True))) + % (page.title(as_link=True), + targetpage.title(as_link=True)))
pywikibot.log("Getting page text.") text = page.get(get_redirect=True) text += ("<noinclude>\n\n<small>This page was moved from %s. It's " "edit history can be viewed at %s</small></noinclude>" - % (page.title(asLink=True, insite=targetpage.site), - edithistpage.title(asLink=True, insite=targetpage.site))) + % (page.title(as_link=True, insite=targetpage.site), + edithistpage.title(as_link=True, insite=targetpage.site)))
pywikibot.log("Getting edit history.") historytable = page.getVersionHistoryTable() diff --git a/scripts/unusedfiles.py b/scripts/unusedfiles.py index 56629bd..3b7a428 100755 --- a/scripts/unusedfiles.py +++ b/scripts/unusedfiles.py @@ -72,7 +72,7 @@ u'http://' not in image.text): if self.template_image in image.text: pywikibot.output(u"%s done already" - % image.title(asLink=True)) + % image.title(as_link=True)) return
self.append_text(image, '\n\n' + self.template_image) diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py index e26f981..18c3d7a 100755 --- a/scripts/weblinkchecker.py +++ b/scripts/weblinkchecker.py @@ -803,14 +803,14 @@ pywikibot.output(color_format( '{lightaqua}** Reporting dead link on ' '{0}...{default}', - talkPage.title(asLink=True))) + talkPage.title(as_link=True))) try: content = talkPage.get() + '\n\n\n' if url in content: pywikibot.output(color_format( '{lightaqua}** Dead link seems to have ' 'already been reported on {0}{default}', - talkPage.title(asLink=True))) + talkPage.title(as_link=True))) continue except (pywikibot.NoPage, pywikibot.IsRedirectPage): content = '' @@ -853,7 +853,7 @@ pywikibot.output(color_format( '{lightaqua}** SpamfilterError while trying to ' 'change {0}: {1}{default}', - talkPage.title(asLink=True), error.url)) + talkPage.title(as_link=True), error.url))
class WeblinkCheckerRobot(SingleSiteBot, ExistingPageBot): diff --git a/scripts/wikisourcetext.py b/scripts/wikisourcetext.py index bbbbd5a..df3f5c4 100644 --- a/scripts/wikisourcetext.py +++ b/scripts/wikisourcetext.py @@ -200,7 +200,7 @@
gen = itertools.chain(*gen_list)
- pywikibot.output('\nUploading text to %s\n' % index.title(asLink=True)) + pywikibot.output('\nUploading text to %s\n' % index.title(as_link=True))
bot = UploadTextBot(gen, site=index.site, **options) bot.run() diff --git a/tests/api_tests.py b/tests/api_tests.py index e73fa9a..84157c7 100644 --- a/tests/api_tests.py +++ b/tests/api_tests.py @@ -722,7 +722,7 @@ """Test PropertyGenerator with prop 'info'.""" mainpage = self.get_mainpage() links = list(self.site.pagelinks(mainpage, total=10)) - titles = [l.title(withSection=False) + titles = [l.title(with_section=False) for l in links] gen = api.PropertyGenerator(site=self.site, prop="info", @@ -740,7 +740,7 @@ """Test PropertyGenerator with prop 'revisions'.""" mainpage = self.get_mainpage() links = list(self.site.pagelinks(mainpage, total=10)) - titles = [l.title(withSection=False) + titles = [l.title(with_section=False) for l in links] gen = api.PropertyGenerator(site=self.site, prop="revisions", @@ -760,7 +760,7 @@ """Test PropertyGenerator with prop 'revisions' and 'coordinates'.""" mainpage = self.get_mainpage() links = list(self.site.pagelinks(mainpage, total=10)) - titles = [l.title(withSection=False) + titles = [l.title(with_section=False) for l in links] gen = api.PropertyGenerator(site=self.site, prop="revisions|coordinates", @@ -781,7 +781,7 @@ """Test PropertyGenerator with many limited props.""" mainpage = self.get_mainpage() links = list(self.site.pagelinks(mainpage, total=30)) - titles = [l.title(withSection=False) + titles = [l.title(with_section=False) for l in links] gen = api.PropertyGenerator( site=self.site, @@ -809,7 +809,7 @@ # FIXME: test fails mainpage = self.get_mainpage() links = list(self.site.pagelinks(mainpage, total=30)) - titles = [l.title(withSection=False) + titles = [l.title(with_section=False) for l in links] gen = api.PropertyGenerator( site=self.site, prop='info|categoryinfo|langlinks|templates', @@ -830,7 +830,7 @@ """Long duration test, with total & step that are a real scenario.""" mainpage = self.get_mainpage() links = list(mainpage.backlinks(total=300)) - titles = [l.title(withSection=False) + titles = 
[l.title(with_section=False) for l in links] gen = api.PropertyGenerator( site=self.site, prop='info|categoryinfo|langlinks|templates', diff --git a/tests/category_tests.py b/tests/category_tests.py index 671317d..faf39b6 100644 --- a/tests/category_tests.py +++ b/tests/category_tests.py @@ -169,19 +169,19 @@ """Test the category's __init__.""" site = self.get_site() cat_normal = pywikibot.Category(site, 'Category:Foo') - self.assertEqual(cat_normal.title(withNamespace=False), 'Foo') + self.assertEqual(cat_normal.title(with_ns=False), 'Foo') self.assertEqual(cat_normal.namespace(), 14)
cat_missing = pywikibot.Category(site, 'Foo') - self.assertEqual(cat_missing.title(withNamespace=False), 'Foo') + self.assertEqual(cat_missing.title(with_ns=False), 'Foo') self.assertEqual(cat_missing.namespace(), 14)
cat_duplicate = pywikibot.Category(site, 'Category:Category:Foo') - self.assertEqual(cat_duplicate.title(withNamespace=False), 'Category:Foo') + self.assertEqual(cat_duplicate.title(with_ns=False), 'Category:Foo') self.assertEqual(cat_duplicate.namespace(), 14)
cat_dup_ns = pywikibot.Category(site, 'Category:Wikipedia:Test') - self.assertTrue(cat_dup_ns.title(withNamespace=False), 'Page:Foo') + self.assertTrue(cat_dup_ns.title(with_ns=False), 'Page:Foo') self.assertTrue(cat_dup_ns.namespace(), 14)
self.assertRaisesRegex(ValueError, self.NOCATEGORYNAMESPACE_RE, @@ -199,13 +199,14 @@ """Test the title method with asLink=True.""" site = self.get_site() cat = pywikibot.Category(site, 'Category:Wikipedia Categories') - self.assertEqual(cat.title(asLink=True, insite=cat.site), + self.assertEqual(cat.title(as_link=True, insite=cat.site), u'[[Category:Wikipedia Categories]]') cat_section = pywikibot.Category(site, 'Category:Wikipedia Categories#Foo') - self.assertEqual(cat_section.title(asLink=True, insite=cat_section.site), - u'[[Category:Wikipedia Categories#Foo]]') + self.assertEqual( + cat_section.title(as_link=True, insite=cat_section.site), + '[[Category:Wikipedia Categories#Foo]]') cat_dup = pywikibot.Category(site, 'Category:Wikipedia:Test') - self.assertEqual(cat_dup.title(asLink=True, insite=cat_dup.site), + self.assertEqual(cat_dup.title(as_link=True, insite=cat_dup.site), u'[[Category:Wikipedia:Test]]')
def test_sortkey(self): @@ -213,7 +214,8 @@ site = self.get_site() cat = pywikibot.Category(site, 'Category:Wikipedia categories', 'Example') self.assertEqual(cat.aslink(), '[[Category:Wikipedia categories|Example]]') - self.assertEqual(cat.aslink(sortKey='Foo'), '[[Category:Wikipedia categories|Foo]]') + self.assertEqual(cat.aslink(sort_key='Foo'), + '[[Category:Wikipedia categories|Foo]]')
class CategoryNewestPages(TestCase): diff --git a/tests/deletionbot_tests.py b/tests/deletionbot_tests.py index 80c3914..e4475f9 100644 --- a/tests/deletionbot_tests.py +++ b/tests/deletionbot_tests.py @@ -113,13 +113,13 @@
def delete_dummy(self, reason, prompt, mark, quit): """Dummy delete method.""" - TestDeletionBot.delete_args = [self.title(asLink=True), reason, prompt, + TestDeletionBot.delete_args = [self.title(as_link=True), reason, prompt, mark, quit]
def undelete_dummy(self, reason): """Dummy undelete method.""" - TestDeletionBot.undelete_args = [self.title(asLink=True), reason] + TestDeletionBot.undelete_args = [self.title(as_link=True), reason]
if __name__ == '__main__': # pragma: no cover diff --git a/tests/page_tests.py b/tests/page_tests.py index b95a196..f9d6601 100644 --- a/tests/page_tests.py +++ b/tests/page_tests.py @@ -204,29 +204,30 @@ ns_name + u":Test page#Testing") self.assertEqual(p1.title(underscore=True), ns_name + u":Test_page#Testing") - self.assertEqual(p1.title(withNamespace=False), + self.assertEqual(p1.title(with_ns=False), u"Test page#Testing") - self.assertEqual(p1.title(withSection=False), + self.assertEqual(p1.title(with_section=False), ns_name + u":Test page") - self.assertEqual(p1.title(withNamespace=False, withSection=False), + self.assertEqual(p1.title(with_ns=False, with_section=False), u"Test page") - self.assertEqual(p1.title(asUrl=True), + self.assertEqual(p1.title(as_url=True), ns_name + "%3ATest_page%23Testing") - self.assertEqual(p1.title(asLink=True, insite=site), + self.assertEqual(p1.title(as_link=True, insite=site), u"[[" + ns_name + u":Test page#Testing]]") - self.assertEqual(p1.title(asLink=True, forceInterwiki=True, insite=site), - u"[[en:" + ns_name + u":Test page#Testing]]") - self.assertEqual(p1.title(asLink=True, textlink=True, insite=site), - p1.title(asLink=True, textlink=False, insite=site)) - self.assertEqual(p1.title(asLink=True, withNamespace=False, insite=site), + self.assertEqual( + p1.title(as_link=True, force_interwiki=True, insite=site), + '[[en:' + ns_name + ':Test page#Testing]]') + self.assertEqual(p1.title(as_link=True, textlink=True, insite=site), + p1.title(as_link=True, textlink=False, insite=site)) + self.assertEqual(p1.title(as_link=True, with_ns=False, insite=site), u"[[" + ns_name + u":Test page#Testing|Test page]]") - self.assertEqual(p1.title(asLink=True, forceInterwiki=True, - withNamespace=False, insite=site), + self.assertEqual(p1.title(as_link=True, force_interwiki=True, + with_ns=False, insite=site), u"[[en:" + ns_name + ":Test page#Testing|Test page]]") - self.assertEqual(p1.title(asLink=True, textlink=True, - 
withNamespace=False, insite=site), - p1.title(asLink=True, textlink=False, - withNamespace=False, insite=site)) + self.assertEqual(p1.title(as_link=True, textlink=True, + with_ns=False, insite=site), + p1.title(as_link=True, textlink=False, + with_ns=False, insite=site))
def testFileTitle(self): """Test title() method options in File namespace.""" @@ -240,29 +241,30 @@ u"File:Jean-Léon Gérôme 003.jpg") self.assertEqual(p2.title(underscore=True), u"File:Jean-Léon_Gérôme_003.jpg") - self.assertEqual(p2.title(withNamespace=False), + self.assertEqual(p2.title(with_ns=False), u"Jean-Léon Gérôme 003.jpg") - self.assertEqual(p2.title(withSection=False), + self.assertEqual(p2.title(with_section=False), u"File:Jean-Léon Gérôme 003.jpg") - self.assertEqual(p2.title(withNamespace=False, withSection=False), + self.assertEqual(p2.title(with_ns=False, with_section=False), u"Jean-Léon Gérôme 003.jpg") - self.assertEqual(p2.title(asUrl=True), + self.assertEqual(p2.title(as_url=True), u"File%3AJean-L%C3%A9on_G%C3%A9r%C3%B4me_003.jpg") - self.assertEqual(p2.title(asLink=True, insite=site), + self.assertEqual(p2.title(as_link=True, insite=site), u"[[File:Jean-Léon Gérôme 003.jpg]]") - self.assertEqual(p2.title(asLink=True, forceInterwiki=True, insite=site), - u"[[en:File:Jean-Léon Gérôme 003.jpg]]") - self.assertEqual(p2.title(asLink=True, textlink=True, insite=site), + self.assertEqual( + p2.title(as_link=True, force_interwiki=True, insite=site), + '[[en:File:Jean-Léon Gérôme 003.jpg]]') + self.assertEqual(p2.title(as_link=True, textlink=True, insite=site), u"[[:File:Jean-Léon Gérôme 003.jpg]]") self.assertEqual(p2.title(as_filename=True), u"File_Jean-Léon_Gérôme_003.jpg") - self.assertEqual(p2.title(asLink=True, withNamespace=False, insite=site), + self.assertEqual(p2.title(as_link=True, with_ns=False, insite=site), u"[[File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]") - self.assertEqual(p2.title(asLink=True, forceInterwiki=True, - withNamespace=False, insite=site), + self.assertEqual(p2.title(as_link=True, force_interwiki=True, + with_ns=False, insite=site), u"[[en:File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]") - self.assertEqual(p2.title(asLink=True, textlink=True, - withNamespace=False, insite=site), + 
self.assertEqual(p2.title(as_link=True, textlink=True, + with_ns=False, insite=site), u"[[:File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]")
def testImageAndDataRepository(self): @@ -353,35 +355,35 @@ self.assertEqual(p2.title(underscore=True), u"Test_page") self.assertEqual(p2.title(), - p2.title(withNamespace=False)) + p2.title(with_ns=False)) self.assertEqual(p2.title(), - p2.title(withSection=False)) - self.assertEqual(p2.title(asUrl=True), + p2.title(with_section=False)) + self.assertEqual(p2.title(as_url=True), p2.title(underscore=True)) - self.assertEqual(p2.title(asLink=True, insite=site), + self.assertEqual(p2.title(as_link=True, insite=site), u"[[Test page]]") self.assertEqual(p2.title(as_filename=True), p2.title(underscore=True)) self.assertEqual(p2.title(underscore=True), - p2.title(underscore=True, withNamespace=False)) + p2.title(underscore=True, with_ns=False)) self.assertEqual(p2.title(underscore=True), - p2.title(underscore=True, withSection=False)) - self.assertEqual(p2.title(underscore=True, asUrl=True), + p2.title(underscore=True, with_section=False)) + self.assertEqual(p2.title(underscore=True, as_url=True), p2.title(underscore=True)) - self.assertEqual(p2.title(underscore=True, asLink=True, insite=site), - p2.title(asLink=True, insite=site)) + self.assertEqual(p2.title(underscore=True, as_link=True, insite=site), + p2.title(as_link=True, insite=site)) self.assertEqual(p2.title(underscore=True, as_filename=True), p2.title(underscore=True)) self.assertEqual(p2.title(), - p2.title(withNamespace=False, withSection=False)) - self.assertEqual(p2.title(asUrl=True), - p2.title(withNamespace=False, asUrl=True)) - self.assertEqual(p2.title(asLink=True, insite=site), - p2.title(withNamespace=False, asLink=True, insite=site)) + p2.title(with_ns=False, with_section=False)) + self.assertEqual(p2.title(as_url=True), + p2.title(with_ns=False, as_url=True)) + self.assertEqual(p2.title(as_link=True, insite=site), + p2.title(with_ns=False, as_link=True, insite=site)) self.assertEqual(p2.title(as_filename=True), - p2.title(withNamespace=False, as_filename=True)) - 
self.assertEqual(p2.title(withNamespace=False, asLink=True, - forceInterwiki=True, insite=site), + p2.title(with_ns=False, as_filename=True)) + self.assertEqual(p2.title(with_ns=False, as_link=True, + force_interwiki=True, insite=site), u"[[" + site.code + u":Test page|Test page]]")
def testSection(self): @@ -487,7 +489,7 @@ if count >= 10: break count = 0 - for p in mainpage.backlinks(followRedirects=False): + for p in mainpage.backlinks(follow_redirects=False): count += 1 self.assertIsInstance(p, pywikibot.Page) if count >= 10: diff --git a/tests/proofreadpage_tests.py b/tests/proofreadpage_tests.py index 826cdfc..a381dcd 100644 --- a/tests/proofreadpage_tests.py +++ b/tests/proofreadpage_tests.py @@ -194,7 +194,7 @@ source = pywikibot.Page(self.site, self.not_existing_invalid['title']) fixed_source = pywikibot.Page(self.site, - source.title(withNamespace=False), + source.title(with_ns=False), ns=self.site.proofread_page_ns) page = ProofreadPage(fixed_source) self.assertEqual(page.title(), fixed_source.title()) @@ -216,9 +216,9 @@ source = pywikibot.Link( self.valid['title'], source=self.site, - defaultNamespace=self.site.proofread_page_ns) + default_namespace=self.site.proofread_page_ns) page = ProofreadPage(source) - self.assertEqual(page.title(withNamespace=False), source.title) + self.assertEqual(page.title(with_ns=False), source.title) self.assertEqual(page.namespace(), source.namespace)
def test_valid_parsing(self): @@ -435,9 +435,9 @@ """Test IndexPage from valid Link as source.""" source = pywikibot.Link(self.valid_index_title, source=self.site, - defaultNamespace=self.site.proofread_page_ns) + default_namespace=self.site.proofread_page_ns) page = IndexPage(source) - self.assertEqual(page.title(withNamespace=False), source.title) + self.assertEqual(page.title(with_ns=False), source.title) self.assertEqual(page.namespace(), source.namespace)
diff --git a/tests/site_tests.py b/tests/site_tests.py index 8c7ffb1..63a462d 100644 --- a/tests/site_tests.py +++ b/tests/site_tests.py @@ -746,7 +746,7 @@ for page in mysite.alllinks(start="From", namespace=4, fromids=True, total=5): self.assertIsInstance(page, pywikibot.Page) - self.assertGreaterEqual(page.title(withNamespace=False), "From") + self.assertGreaterEqual(page.title(with_ns=False), 'From') self.assertTrue(hasattr(page, "_fromid")) errgen = mysite.alllinks(unique=True, fromids=True) self.assertRaises(pywikibot.Error, next, errgen) @@ -760,14 +760,14 @@ for cat in ac)) for cat in mysite.allcategories(total=5, start="Abc"): self.assertIsInstance(cat, pywikibot.Category) - self.assertGreaterEqual(cat.title(withNamespace=False), "Abc") + self.assertGreaterEqual(cat.title(with_ns=False), 'Abc') for cat in mysite.allcategories(total=5, prefix="Def"): self.assertIsInstance(cat, pywikibot.Category) - self.assertTrue(cat.title(withNamespace=False).startswith("Def")) + self.assertTrue(cat.title(with_ns=False).startswith('Def')) # Bug T17985 - reverse and start combined; fixed in v 1.14 for cat in mysite.allcategories(total=5, start="Hij", reverse=True): self.assertIsInstance(cat, pywikibot.Category) - self.assertLessEqual(cat.title(withNamespace=False), "Hij") + self.assertLessEqual(cat.title(with_ns=False), 'Hij')
def test_botusers(self): """Test the site.botusers() method.""" @@ -836,16 +836,16 @@ for impage in mysite.allimages(start="Ba", total=5): self.assertIsInstance(impage, pywikibot.FilePage) self.assertTrue(impage.exists()) - self.assertGreaterEqual(impage.title(withNamespace=False), "Ba") + self.assertGreaterEqual(impage.title(with_ns=False), 'Ba') # Bug T17985 - reverse and start combined; fixed in v 1.14 for impage in mysite.allimages(start="Da", reverse=True, total=5): self.assertIsInstance(impage, pywikibot.FilePage) self.assertTrue(impage.exists()) - self.assertLessEqual(impage.title(withNamespace=False), "Da") + self.assertLessEqual(impage.title(with_ns=False), 'Da') for impage in mysite.allimages(prefix="Ch", total=5): self.assertIsInstance(impage, pywikibot.FilePage) self.assertTrue(impage.exists()) - self.assertTrue(impage.title(withNamespace=False).startswith("Ch")) + self.assertTrue(impage.title(with_ns=False).startswith('Ch')) for impage in mysite.allimages(minsize=100, total=5): self.assertIsInstance(impage, pywikibot.FilePage) self.assertTrue(impage.exists()) @@ -1591,7 +1591,7 @@ if 'wiki' not in hit.title().lower(): self.assertTrue( any('wiki' in r.title().lower() - for r in hit.getReferences(redirectsOnly=True)), + for r in hit.getReferences(filter_redirects=True)), "'wiki' neither found in '{0}'.lower() " 'nor in its redirects'.format(hit.title())) except pywikibot.data.api.APIError as e: @@ -2604,14 +2604,14 @@ self.page = pywikibot.Page( self.site, 'File:Băieţi de Cartier - La Familia cover.jpg') - self.backlinks = list(self.page.backlinks(followRedirects=False, - filterRedirects=True, + self.backlinks = list(self.page.backlinks(follow_redirects=False, + filter_redirects=True, total=5)) self.references = list(self.page.getReferences(follow_redirects=True, - redirectsOnly=True, + filter_redirects=True, total=5)) self.nofollow = list(self.page.getReferences(follow_redirects=False, - redirectsOnly=True, + filter_redirects=True, total=5))
def test_backlinks_redirects_length(self): diff --git a/tests/textlib_tests.py b/tests/textlib_tests.py index 45c79c6..078f1d6 100644 --- a/tests/textlib_tests.py +++ b/tests/textlib_tests.py @@ -305,22 +305,22 @@ [pywikibot.page.Category(self.site, 'Foo')]) self.assertEqual(textlib.getCategoryLinks( '[[Category:{{P1|Foo}}|bar]]', self.site, expand_text=True), - [pywikibot.page.Category(self.site, 'Foo', sortKey='bar')]) + [pywikibot.page.Category(self.site, 'Foo', sort_key='bar')]) self.assertEqual(textlib.getCategoryLinks( '[[Category:{{P1|{{P2|L33t|Foo}}}}|bar]]', self.site, expand_text=True), - [pywikibot.page.Category(self.site, 'Foo', sortKey='bar')]) + [pywikibot.page.Category(self.site, 'Foo', sort_key='bar')]) self.assertEqual(textlib.getCategoryLinks( '[[Category:Foo{{!}}bar]]', self.site, expand_text=True), - [pywikibot.page.Category(self.site, 'Foo', sortKey='bar')]) + [pywikibot.page.Category(self.site, 'Foo', sort_key='bar')]) self.assertEqual(textlib.getCategoryLinks( '[[Category:Foo{{!}}bar]][[Category:Wiki{{P2||pedia}}]]', self.site, expand_text=True), - [pywikibot.page.Category(self.site, 'Foo', sortKey='bar'), + [pywikibot.page.Category(self.site, 'Foo', sort_key='bar'), pywikibot.page.Category(self.site, 'Wikipedia')]) self.assertEqual(textlib.getCategoryLinks( '[[Category:Foo{{!}}and{{!}}bar]]', self.site, expand_text=True), - [pywikibot.page.Category(self.site, 'Foo', sortKey='and|bar')]) + [pywikibot.page.Category(self.site, 'Foo', sort_key='and|bar')]) with mock.patch.object(pywikibot, 'warning', autospec=True) as warn: textlib.getCategoryLinks('[[Category:nasty{{{!}}]]', self.site) warn.assert_called_once_with( diff --git a/tests/user_tests.py b/tests/user_tests.py index 6691402..aed04a4 100644 --- a/tests/user_tests.py +++ b/tests/user_tests.py @@ -29,7 +29,7 @@ user = User(self.site, 'Xqt') with suppress_warnings('pywikibot.page.User.name', DeprecationWarning): self.assertEqual(user.name(), user.username) - 
self.assertEqual(user.title(withNamespace=False), user.username) + self.assertEqual(user.title(with_ns=False), user.username) self.assertTrue(user.isRegistered()) self.assertFalse(user.isAnonymous()) self.assertIsInstance(user.registration(), pywikibot.Timestamp) @@ -87,7 +87,7 @@ user = User(self.site, '123.45.67.89') with suppress_warnings('pywikibot.page.User.name', DeprecationWarning): self.assertEqual(user.name(), user.username) - self.assertEqual(user.title(withNamespace=False), user.username) + self.assertEqual(user.title(with_ns=False), user.username) self.assertFalse(user.isRegistered()) self.assertTrue(user.isAnonymous()) self.assertIsNone(user.registration()) @@ -101,7 +101,7 @@ user = User(self.site, 'This user name is not registered yet') with suppress_warnings('pywikibot.page.User.name', DeprecationWarning): self.assertEqual(user.name(), user.username) - self.assertEqual(user.title(withNamespace=False), user.username) + self.assertEqual(user.title(with_ns=False), user.username) self.assertFalse(user.isRegistered()) self.assertFalse(user.isAnonymous()) self.assertIsNone(user.registration()) @@ -115,7 +115,7 @@ user = User(self.site, 'Invalid char\x9f in Name') with suppress_warnings('pywikibot.page.User.name', DeprecationWarning): self.assertEqual(user.name(), user.username) - self.assertEqual(user.title(withNamespace=False), user.username) + self.assertEqual(user.title(with_ns=False), user.username) self.assertFalse(user.isRegistered()) self.assertFalse(user.isAnonymous()) self.assertIsNone(user.registration()) @@ -139,7 +139,7 @@ self.assertEqual('#1242976', user.username) with suppress_warnings('pywikibot.page.User.name is deprecated'): self.assertEqual(user.name(), user.username) - self.assertEqual(user.title(withNamespace=False), user.username[1:]) + self.assertEqual(user.title(with_ns=False), user.username[1:]) self.assertFalse(user.isRegistered()) self.assertFalse(user.isAnonymous()) self.assertIsNone(user.registration()) @@ -159,7 +159,7 @@ 
self.assertEqual('#1242976', user.username) with suppress_warnings('pywikibot.page.User.name is deprecated'): self.assertEqual(user.name(), user.username) - self.assertEqual(user.title(withNamespace=False), user.username[1:]) + self.assertEqual(user.title(with_ns=False), user.username[1:]) self.assertFalse(user.isRegistered()) self.assertFalse(user.isAnonymous()) self.assertIsNone(user.registration()) diff --git a/tests/wikibase_edit_tests.py b/tests/wikibase_edit_tests.py index 5c50893..d5d8a02 100644 --- a/tests/wikibase_edit_tests.py +++ b/tests/wikibase_edit_tests.py @@ -82,7 +82,7 @@
item.get(force=True)
- end_date = pywikibot.page.Claim(testsite, 'P88', isQualifier=True) + end_date = pywikibot.page.Claim(testsite, 'P88', is_qualifier=True) end_date.setTarget(pywikibot.WbTime(year=2012)) item.claims['P115'][0].addQualifier(end_date)
@@ -102,7 +102,7 @@
item.get(force=True)
- end_date = pywikibot.page.Claim(testsite, 'P88', isQualifier=True) + end_date = pywikibot.page.Claim(testsite, 'P88', is_qualifier=True) end_date.setTarget(pywikibot.WbTime(year=2012)) item.claims['P115'][0].addQualifier(end_date)
@@ -389,11 +389,11 @@
item.get(force=True)
- qual_1 = pywikibot.page.Claim(testsite, 'P88', isQualifier=True) + qual_1 = pywikibot.page.Claim(testsite, 'P88', is_qualifier=True) qual_1.setTarget(pywikibot.WbTime(year=2012)) item.claims['P115'][0].addQualifier(qual_1)
- qual_2 = pywikibot.page.Claim(testsite, 'P580', isQualifier=True) + qual_2 = pywikibot.page.Claim(testsite, 'P580', is_qualifier=True) qual_2.setTarget(pywikibot.ItemPage(testsite, 'Q67')) item.claims['P115'][0].addQualifier(qual_2)
diff --git a/tox.ini b/tox.ini index ddc9eda..7840119 100644 --- a/tox.ini +++ b/tox.ini @@ -180,7 +180,6 @@ pywikibot/fixes.py : E241 pywikibot/interwiki_graph.py : N803, N806 pywikibot/logging.py : N803 - pywikibot/page.py : N803, N806 pywikibot/pagegenerators.py : N803, N806 pywikibot/specialbots.py : N803, N806 pywikibot/textlib.py : E241, N801, N803, N806
pywikibot-commits@lists.wikimedia.org