jenkins-bot has submitted this change and it was merged.
Change subject: [doc] Add doc strings to methods (D102, D103) ......................................................................
[doc] Add doc strings to methods (D102, D103)
Change-Id: Idd1db70bd3315edb3bc0105bc175407f8e3393c9 --- M generate_family_file.py M pywikibot/cosmetic_changes.py M scripts/archivebot.py M scripts/casechecker.py M scripts/catall.py M scripts/cfd.py M scripts/checkimages.py M scripts/commons_link.py M scripts/freebasemappingupload.py M scripts/imagecopy.py M scripts/imagecopy_self.py M scripts/imageharvest.py M scripts/imagetransfer.py M scripts/interwiki.py M scripts/isbn.py M scripts/lonelypages.py M scripts/maintenance/cache.py M scripts/maintenance/compat2core.py M scripts/maintenance/wikimedia_sites.py M scripts/movepages.py M scripts/nowcommons.py M scripts/redirect.py M scripts/reflinks.py M scripts/replicate_wiki.py M scripts/revertbot.py M scripts/script_wui.py M scripts/solve_disambiguation.py M scripts/weblinkchecker.py M scripts/welcome.py M tests/deprecation_tests.py M tests/pagegenerators_tests.py M tests/site_tests.py M tests/textlib_tests.py M tests/wikibase_tests.py M tox.ini 35 files changed, 455 insertions(+), 59 deletions(-)
Approvals: Ladsgroup: Looks good to me, approved jenkins-bot: Verified
diff --git a/generate_family_file.py b/generate_family_file.py index b4675f8..81013ef 100755 --- a/generate_family_file.py +++ b/generate_family_file.py @@ -47,6 +47,7 @@ """Family file creator."""
def __init__(self, url=None, name=None, dointerwiki=None): + """Constructor.""" if url is None: url = raw_input("Please insert URL to wiki: ") if name is None: @@ -59,6 +60,7 @@ self.langs = [] # [Wiki('https://wiki/$1'), ...]
def run(self): + """Main method, generate family file.""" print("Generating family file from %s" % self.base_url)
w = Wiki(self.base_url) @@ -75,6 +77,7 @@ self.writefile()
def getlangs(self, w): + """Determine language of a site.""" print("Determining other languages...", end="") try: self.langs = w.langs @@ -113,6 +116,7 @@ if wiki[u'url'] == w.iwpath]
def getapis(self): + """Load other language pages.""" print("Loading wikis... ") for lang in self.langs: print(" * %s... " % (lang[u'prefix']), end="") @@ -126,6 +130,7 @@ print("in cache")
def writefile(self): + """Write the family file.""" fn = "pywikibot/families/%s_family.py" % self.name print("Writing %s... " % fn) try: diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py index e623e46..7ba4c8e 100755 --- a/pywikibot/cosmetic_changes.py +++ b/pywikibot/cosmetic_changes.py @@ -617,6 +617,7 @@ return text
def resolveHtmlEntities(self, text): + """Resolve html entities.""" ignore = [ 38, # Ampersand (&) 39, # Single quotation mark (") - bug T26093 @@ -697,6 +698,7 @@ return text
def replaceDeprecatedTemplates(self, text): + """Replace deprecated templates.""" exceptions = ['comment', 'math', 'nowiki', 'pre'] builder = _MultiTemplateMatchBuilder(self.site)
@@ -720,7 +722,9 @@
# from fixes.py def fixSyntaxSave(self, text): + """Convert weblinks to wikilink, fix link syntax.""" def replace_link(match): + """Create a string to replace a single link.""" replacement = '[[' + match.group('link') if match.group('title'): replacement += '|' + match.group('title') @@ -781,7 +785,9 @@ return text
def fixHtml(self, text): + """Replace html markups with wikitext markups.""" def replace_header(match): + """Create a header string for replacing.""" depth = int(match.group(1)) return r'{0} {1} {0}'.format('=' * depth, match.group(2))
@@ -811,6 +817,7 @@ return text
def fixReferences(self, text): + """Fix references tags.""" # See also https://en.wikipedia.org/wiki/User:AnomieBOT/source/tasks/OrphanReferenceFix... exceptions = ['nowiki', 'comment', 'math', 'pre', 'source', 'startspace'] @@ -827,9 +834,9 @@ return text
def fixStyle(self, text): + """Convert prettytable to wikitable class.""" exceptions = ['nowiki', 'comment', 'math', 'pre', 'source', 'startspace'] - # convert prettytable to wikitable class if self.site.code in ('de', 'en'): text = textlib.replaceExcept(text, r'(class="[^"]*)prettytable([^"]*")', @@ -837,6 +844,7 @@ return text
def fixTypo(self, text): + """Fix units.""" exceptions = ['nowiki', 'comment', 'math', 'pre', 'source', 'startspace', 'gallery', 'hyperlink', 'interwiki', 'link'] # change <number> ccm -> <number> cm³ @@ -855,6 +863,7 @@ return text
def fixArabicLetters(self, text): + """Fix Arabic and Persian letters.""" if self.site.code not in ['ckb', 'fa']: return text exceptions = [ diff --git a/scripts/archivebot.py b/scripts/archivebot.py index 53228df..ecd8701 100755 --- a/scripts/archivebot.py +++ b/scripts/archivebot.py @@ -218,6 +218,16 @@
def generate_transclusions(site, template, namespaces=[]): + """ + Generate transclusions. + + @param site: the site for the template transclusions + @type site: Site + @param template: normalized title of the template + @type template: unicode + @param namespaces: namespace filter for transcluded pages + @type namespaces: list + """ pywikibot.output(u'Fetching template transclusions...') transclusion_page = pywikibot.Page(site, template, ns=10) return transclusion_page.getReferences(onlyTemplateInclusion=True, @@ -253,15 +263,19 @@ """Class building a UTC tzinfo object."""
def utcoffset(self, dt): # pylint: disable=unused-argument + """Subclass implementation, return timedelta(0).""" return ZERO
def tzname(self, dt): # pylint: disable=unused-argument + """Subclass implementation.""" return 'UTC'
def dst(self, dt): # pylint: disable=unused-argument + """Subclass implementation, return timedelta(0).""" return ZERO
def __repr__(self): + """Return a string representation.""" return "%s()" % self.__class__.__name__
@@ -279,6 +293,7 @@ """
def __init__(self, title, now, timestripper): + """Constructor.""" self.title = title self.now = now self.ts = timestripper @@ -287,11 +302,13 @@ self.timestamp = None
def __repr__(self): + """Return a string representation.""" return '%s("%s",%d bytes)' \ % (self.__class__.__name__, self.title, len(self.content.encode('utf-8')))
def feed_line(self, line): + """Add a line to the content and find the newest timestamp.""" if not self.content and not line: return
@@ -305,13 +322,21 @@ self.timestamp = max(self.timestamp, timestamp)
def size(self): + """Return size of discussion thread.""" return len(self.title.encode('utf-8')) + len( self.content.encode('utf-8')) + 12
def to_text(self): + """Return wikitext discussion thread.""" return u"== %s ==\n\n%s" % (self.title, self.content)
def should_be_archived(self, archiver): + """ + Check whether thread has to be archived. + + @return: archiving reason i18n string or empty string. + @rtype: string + """ algo = archiver.get_attr('algo') re_t = re.search(r'^old((.*))$', algo) if re_t: @@ -337,6 +362,7 @@ """
def __init__(self, source, archiver, params=None): + """Constructor.""" super(DiscussionPage, self).__init__(source) self.threads = [] self.full = False @@ -393,6 +419,7 @@ % (len(self.threads), self))
def feed_thread(self, thread, max_archive_size=(250 * 1024, 'B')): + """Check whether archive size exceeded.""" self.threads.append(thread) self.archived_threads += 1 if max_archive_size[1] == 'B': @@ -404,10 +431,12 @@ return self.full
def size(self): + """Return size of talk page threads.""" return len(self.header.encode('utf-8')) + sum(t.size() for t in self.threads)
def update(self, summary, sort_threads=False): + """Recombine threads and save page.""" if sort_threads: pywikibot.output(u'Sorting threads...') self.threads.sort(key=lambda t: t.timestamp) @@ -423,7 +452,8 @@
class PageArchiver(object):
- """A class that encapsulates all archiving methods. + """ + A class that encapsulates all archiving methods.
__init__ expects a pywikibot.Page object. Execute by running the .run() method. @@ -432,6 +462,7 @@ algo = 'none'
def __init__(self, page, tpl, salt, force=False): + """Constructor.""" self.attributes = { 'algo': ['old(24h)', False], 'archive': ['', False], @@ -456,30 +487,36 @@ self.month_num2orig_names[n + 1] = {"long": _long, "short": _short}
def get_attr(self, attr, default=''): + """Get an archiver attribute.""" return self.attributes.get(attr, [default])[0]
def set_attr(self, attr, value, out=True): + """Set an archiver attribute.""" if attr == 'archive': value = value.replace('_', ' ') self.attributes[attr] = [value, out]
def saveables(self): + """Return a list of saveable attributes.""" return [a for a in self.attributes if self.attributes[a][1] and a != 'maxage']
def attr2text(self): + """Return a template with archiver saveable attributes.""" return '{{%s\n%s\n}}' \ % (self.tpl.title(withNamespace=(self.tpl.namespace() != 10)), '\n'.join('|%s = %s' % (a, self.get_attr(a)) for a in self.saveables()))
def key_ok(self): + """Return whether key is valid.""" s = md5() s.update(self.salt + '\n') s.update(self.page.title().encode('utf8') + '\n') return self.get_attr('key') == s.hexdigest()
def load_config(self): + """Load and validate archiver template.""" pywikibot.output(u'Looking for: {{%s}} in %s' % (self.tpl.title(), self.page)) for tpl in self.page.templatesWithParams(): if tpl[0] == pywikibot.Page(self.site, self.tpl.title(), ns=10): @@ -493,13 +530,13 @@ raise MissingConfigError('Missing argument "algo" in template')
def feed_archive(self, archive, thread, max_archive_size, params=None): - """Feed the thread to one of the archives. + """ + Feed the thread to one of the archives.
If it doesn't exist yet, create it. If archive name is an empty string (or None), discard the thread. Also checks for security violations. - """ title = archive.title() if not title: @@ -515,6 +552,7 @@ return self.archives[title].feed_thread(thread, max_archive_size)
def analyze_page(self): + """Analyze DiscussionPage.""" max_arch_size = str2size(self.get_attr('maxarchivesize')) arch_counter = int(self.get_attr('counter', '1')) oldthreads = self.page.threads @@ -556,6 +594,7 @@ return set(whys)
def run(self): + """Run the bot.""" if not self.page.botMayEdit(): return whys = self.analyze_page() diff --git a/scripts/casechecker.py b/scripts/casechecker.py index 30f4c8c..bfe286d 100755 --- a/scripts/casechecker.py +++ b/scripts/casechecker.py @@ -55,6 +55,7 @@
def SetColor(color): + """Set Windows color.""" if std_out_handle: try: return ctypes.windll.kernel32.SetConsoleTextAttribute( @@ -128,7 +129,7 @@ filterredir = 'nonredirects'
def __init__(self): - + """Constructor with arg parsing.""" for arg in pywikibot.handle_args(): if arg.startswith('-from'): if arg.startswith('-from:'): @@ -281,6 +282,7 @@ % self.site.code)
def RunQuery(self, params): + """API query.""" while True: # Get data req = api.Request(**params) @@ -314,6 +316,7 @@ continue
def Run(self): + """Run the bot.""" try: self.lastLetter = ''
@@ -343,6 +346,7 @@ raise
def ProcessDataBlock(self, data): + """Process data block given by RunQuery().""" if 'query' not in data or 'pages' not in data['query']: return
@@ -489,15 +493,18 @@ raise ValueError(u'Stopping because we are done')
def WikiLog(self, text): + """Write log.""" pywikibot.output(text) self.wikilog.write(text + u'\n') self.wikilog.flush()
def FindBadWords(self, title): + """Retrieve bad words.""" for m in self.badWordPtrn.finditer(title): yield title[m.span()[0]:m.span()[1]]
def ProcessTitle(self, title): + """Process title.""" badWords = list(self.FindBadWords(title)) if len(badWords) > 0: # Allow known words, allow any roman numerals with local suffixes @@ -612,6 +619,7 @@ return (infoText, possibleAlternatives)
def PickTarget(self, title, original, candidates): + """Pick target from candidates.""" if len(candidates) == 0: return if len(candidates) == 1: @@ -666,6 +674,7 @@ return candidates[int(choice) - 1]
def ColorCodeWord(self, word, toScreen=False): + """Colorize code word.""" if not toScreen: res = u"<b>" lastIsCyr = word[0] in self.localLtr @@ -706,6 +715,7 @@ return res + self.suffixClr + u"</b>"
def AddNoSuggestionTitle(self, title): + """Add backlinks to log.""" if title in self.seenUnresolvedLinks: return True self.seenUnresolvedLinks.add(title) @@ -737,6 +747,7 @@ return False
def PutNewPage(self, pageObj, pageTxt, msg): + """Save new page.""" title = pageObj.title(asLink=True, textlink=True) coloredMsg = u', '.join([self.ColorCodeWord(m) for m in msg]) if pageObj.text == pageTxt: @@ -761,29 +772,34 @@ return False
def MakeMoveSummary(self, fromTitle, toTitle): + """Move summary from i18n.""" return i18n.twtranslate(self.site, "casechecker-replacement-linklist", {'source': fromTitle, 'target': toTitle})
def MakeLink(self, title, colorcode=True): + """Create a colored link string.""" prf = u'' if self.Page(title).namespace() == 0 else u':' cc = u'|««« %s »»»' % self.ColorCodeWord(title) if colorcode else u'' return u"[[%s%s%s]]" % (prf, title, cc)
def OpenLogFile(self, filename): + """Open logfile.""" try: return codecs.open(filename, 'a', 'utf-8') except IOError: return codecs.open(filename, 'w', 'utf-8')
def AppendLineToLog(self, filename, text): + """Write text to logfile.""" with self.OpenLogFile(filename) as f: f.write(text + u'\n')
def Page(self, title): + """Create Page object from title.""" return pywikibot.Page(self.site, title)
def ReplaceLink(self, text, oldtxt, newtxt): - + """Replace links.""" frmParts = [s.strip(self.stripChars) for s in self.wordBreaker.split(oldtxt)] toParts = [s.strip(self.stripChars) diff --git a/scripts/catall.py b/scripts/catall.py index 5ddd6d7..48c9f17 100755 --- a/scripts/catall.py +++ b/scripts/catall.py @@ -35,6 +35,7 @@
def choosecats(pagetext): + """Choose categories.""" chosen = [] done = False length = 1000 @@ -71,6 +72,7 @@
def make_categories(page, list, site=None): + """Make categories.""" if site is None: site = pywikibot.Site() pllist = [] diff --git a/scripts/cfd.py b/scripts/cfd.py index 63ca86b..639252e 100755 --- a/scripts/cfd.py +++ b/scripts/cfd.py @@ -63,9 +63,11 @@ """Helper class."""
def __init__(self): + """Constructor.""" self.result = None
def check(self, pattern, text): + """Search pattern.""" self.result = pattern.search(text) return self.result
@@ -175,12 +177,16 @@ robot = None
-# This function grabs the wiki source of a category page and attempts to -# extract a link to the CFD per-day discussion page from the CFD template. -# If the CFD template is not there, it will return the value of the second -# parameter, which is essentially a fallback that is extracted from the -# per-day subheadings on the working page. def findDay(pageTitle, oldDay): + """ + Find day link from CFD template. + + This function grabs the wiki source of a category page and attempts to + extract a link to the CFD per-day discussion page from the CFD template. + If the CFD template is not there, it will return the value of the second + parameter, which is essentially a fallback that is extracted from the + per-day subheadings on the working page. + """ page = pywikibot.Page(pywikibot.Site(), u"Category:" + pageTitle) try: pageSrc = page.text diff --git a/scripts/checkimages.py b/scripts/checkimages.py index f478b9b..76bb4bb 100755 --- a/scripts/checkimages.py +++ b/scripts/checkimages.py @@ -575,21 +575,25 @@ @property @deprecated def project(self): + """Return family name.""" return self.site.family.name
@property @deprecated def botolist(self): + """Return bots.""" return self.bots
@botolist.setter @deprecated def botolist(self, value): + """Set bots.""" self.bots = value
@property @deprecated def botnick(self): + """Return username.""" return self.site.username()
def setParameters(self, imageName): diff --git a/scripts/commons_link.py b/scripts/commons_link.py index f00c3fd..f83e20f 100755 --- a/scripts/commons_link.py +++ b/scripts/commons_link.py @@ -51,6 +51,7 @@ """Commons linking bot."""
def __init__(self, generator, **kwargs): + """Constructor.""" self.availableOptions.update({ 'action': None, }) @@ -62,6 +63,7 @@ self.findTemplate3 = re.compile(r'{{[Cc]ommons')
def run(self): + """Run the bot.""" if not all((self.getOption('action'), self.generator)): return catmode = (self.getOption('action') == 'categories') diff --git a/scripts/freebasemappingupload.py b/scripts/freebasemappingupload.py index 1e57f37..9ed598f 100755 --- a/scripts/freebasemappingupload.py +++ b/scripts/freebasemappingupload.py @@ -36,6 +36,7 @@ """Freebase Mapping bot."""
def __init__(self, filename): + """Constructor.""" self.repo = pywikibot.Site('wikidata', 'wikidata').data_repository() self.filename = filename if not os.path.exists(self.filename): @@ -44,6 +45,7 @@ sys.exit(1)
def run(self): + """Run the bot.""" # Set up some items we will use a lot. self.claim = pywikibot.Claim(self.repo, 'P646') # freebase mapping # And sources! @@ -58,6 +60,7 @@ self.processLine(line.strip())
def processLine(self, line): + """Process a single line.""" if not line or line.startswith('#'): return mid, sameas, qid, dot = line.split() diff --git a/scripts/imagecopy.py b/scripts/imagecopy.py index d1b5d6f..6882ebb 100644 --- a/scripts/imagecopy.py +++ b/scripts/imagecopy.py @@ -228,6 +228,7 @@
def pageTextPost(url, parameters): + """Get data from commons helper page.""" gotInfo = False while not gotInfo: try: @@ -247,12 +248,14 @@ """Facilitate transfer of image/file to commons."""
def __init__(self, imagePage, newname, category): + """Constructor.""" self.imagePage = imagePage self.newname = newname self.category = category threading.Thread.__init__(self)
def run(self): + """Run the bot.""" tosend = {'language': self.imagePage.site.language().encode('utf-8'), 'image': self.imagePage.title( withNamespace=False).encode('utf-8'), @@ -395,6 +398,7 @@
def __init__(self, image_title, content, uploader, url, templates, commonsconflict=0): + """Constructor.""" super(TkdialogIC, self).__init__() self.root = Tkinter.Tk() # "%dx%d%+d%+d" % (width, height, xoffset, yoffset) diff --git a/scripts/imagecopy_self.py b/scripts/imagecopy_self.py index ff5a386..d24afac 100644 --- a/scripts/imagecopy_self.py +++ b/scripts/imagecopy_self.py @@ -328,12 +328,14 @@ """Tries to fetch information for all images in the generator."""
def __init__(self, pagegenerator, prefetchQueue): + """Constructor.""" self.pagegenerator = pagegenerator self.prefetchQueue = prefetchQueue imagerecat.initLists() threading.Thread.__init__(self)
def run(self): + """Run imageFetcher.""" for page in self.pagegenerator: self.processImage(page) self.prefetchQueue.put(None) @@ -605,12 +607,14 @@ """Prompt all images to the user."""
def __init__(self, prefetchQueue, uploadQueue): + """Constructor.""" self.prefetchQueue = prefetchQueue self.uploadQueue = uploadQueue self.autonomous = False threading.Thread.__init__(self)
def run(self): + """Run thread.""" while True: fields = self.prefetchQueue.get() if fields: @@ -673,6 +677,7 @@ imagepage, description, date, source, author, licensetemplate, categories """ + """Constructor.""" self.root = Tkinter.Tk() # "%dx%d%+d%+d" % (width, height, xoffset, yoffset) # Always appear the same size and in the bottom-left corner @@ -848,11 +853,13 @@ """Upload all images."""
def __init__(self, uploadQueue): + """Constructor.""" self.uploadQueue = uploadQueue self.checktemplate = True threading.Thread.__init__(self)
def run(self): + """Run uploader.""" while True: # Change later fields = self.uploadQueue.get() if fields: @@ -1001,6 +1008,7 @@
def main(*args): + """Process command line arguments and invoke bot.""" generator = None autonomous = False checkTemplate = True diff --git a/scripts/imageharvest.py b/scripts/imageharvest.py index c2d0705..6e5e366 100644 --- a/scripts/imageharvest.py +++ b/scripts/imageharvest.py @@ -67,6 +67,7 @@
def main(give_url, image_url, desc): + """Run the bot.""" url = give_url image_url = '' if url == '': diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py index f13f4cd..7c2a0fe 100755 --- a/scripts/imagetransfer.py +++ b/scripts/imagetransfer.py @@ -121,6 +121,7 @@
def __init__(self, generator, targetSite=None, interwiki=False, keep_name=False, ignore_warning=False): + """Constructor.""" self.generator = generator self.interwiki = interwiki self.targetSite = targetSite @@ -197,6 +198,7 @@ summary=reason)
def showImageList(self, imagelist): + """Print image list.""" for i in range(len(imagelist)): image = imagelist[i] print("-" * 60) @@ -232,6 +234,7 @@ print("=" * 60)
def run(self): + """Run the bot.""" for page in self.generator: if self.interwiki: imagelist = [] diff --git a/scripts/interwiki.py b/scripts/interwiki.py index 7b500ca..4a5016a 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -670,12 +670,14 @@ '_deletedRevs']
def SPdeleteStore(): + """Delete SPStore.""" if StoredPage.SPpath: del StoredPage.SPstore os.unlink(StoredPage.SPpath) SPdeleteStore = staticmethod(SPdeleteStore)
def __init__(self, page): + """Constructor.""" for attr in StoredPage.SPcopy: setattr(self, attr, getattr(page, attr))
@@ -693,13 +695,16 @@ self.SPcontentSet = False
def SPgetContents(self): + """Get stored content.""" return StoredPage.SPstore[self.SPkey]
def SPsetContents(self, contents): + """Store content.""" self.SPcontentSet = True StoredPage.SPstore[self.SPkey] = contents
def SPdelContents(self): + """Delete stored content.""" if self.SPcontentSet: del StoredPage.SPstore[self.SPkey]
@@ -715,26 +720,23 @@ """
def __init__(self): - # self.tree : - # Dictionary: - # keys: Site - # values: list of pages - # All pages found within Site are kept in - # self.tree[site] + """Constructor.
- # While using dict values would be faster for - # the remove() operation, - # keeping list values is important, because - # the order in which the pages were found matters: - # the earlier a page is found, the closer it is to the - # Subject.originPage. Chances are that pages found within - # 2 interwiki distance from the originPage are more related - # to the original topic than pages found later on, after - # 3, 4, 5 or more interwiki hops. + While using dict values would be faster for the remove() operation, + keeping list values is important, because the order in which the pages + were found matters: the earlier a page is found, the closer it is to the + Subject.originPage. Chances are that pages found within 2 interwiki + distance from the originPage are more related to the original topic + than pages found later on, after 3, 4, 5 or more interwiki hops.
- # Keeping this order is hence important to display an ordered - # list of pages to the user when he'll be asked to resolve - # conflicts. + Keeping this order is hence important to display an ordered + list of pages to the user when he'll be asked to resolve + conflicts. + + @ivar tree: dictionary with Site as keys and list of page as values. + All pages found within Site are kept in self.tree[site]. + @type tree: dict + """ self.tree = {} self.size = 0
@@ -747,9 +749,11 @@ pass
def __len__(self): + """Return length of the object.""" return self.size
def add(self, page): + """Add a page to the tree.""" site = page.site if site not in self.tree: self.tree[site] = [] @@ -757,6 +761,7 @@ self.size += 1
def remove(self, page): + """Remove a page from the tree.""" try: self.tree[page.site].remove(page) self.size -= 1 @@ -777,6 +782,7 @@ yield site, len(d)
def __iter__(self): + """Iterate through all items of the tree.""" for site, plist in self.tree.items(): for page in plist: yield page @@ -784,7 +790,7 @@
class Subject(interwiki_graph.Subject):
- u""" + """ Class to follow the progress of a single 'subject'.
(i.e. a page with all its translations) @@ -1033,6 +1039,7 @@ return True
def skipPage(self, page, target, counter): + """Return whether page has to be skipped.""" return self.isIgnored(target) or \ self.namespaceMismatch(page, target, counter) or \ self.wiktionaryMismatch(target) @@ -1114,6 +1121,7 @@ return False
def wiktionaryMismatch(self, page): + """Check for ignoring pages.""" if self.originPage and globalvar.same == 'wiktionary': if page.title().lower() != self.originPage.title().lower(): pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode" @@ -1205,6 +1213,7 @@ return (False, None)
def isIgnored(self, page): + """Return True if page is to be ignored.""" if page.site.lang in globalvar.neverlink: pywikibot.output(u"Skipping link %s to an ignored language" % page) return True @@ -1214,6 +1223,7 @@ return False
def reportInterwikilessPage(self, page): + """Report interwikiless page.""" if not globalvar.quiet: pywikibot.output(u"NOTE: %s does not have any interwiki links" % self.originPage) @@ -1225,6 +1235,7 @@ f.close()
def askForHints(self, counter): + """Ask for hints to other sites.""" if not self.workonme: # Do not ask hints for pages that we don't work on anyway return @@ -1510,6 +1521,7 @@ self.problemfound = True
def whereReport(self, page, indent=4): + """Report found interlanguage links with conflicts.""" for page2 in sorted(self.foundIn[page]): if page2 is None: pywikibot.output(u" " * indent + "Given as a hint.") @@ -1517,6 +1529,7 @@ pywikibot.output(u" " * indent + unicode(page2))
def assemble(self): + """Assemble language links.""" # No errors have been seen so far, except.... errorCount = self.problemfound # Build up a dictionary of all pages found, with the site as key. @@ -2081,6 +2094,7 @@ self.generateUntil = until
def dump(self, append=True): + """Write dump file.""" site = pywikibot.Site() dumpfn = pywikibot.config.datafilepath( 'data', @@ -2271,8 +2285,8 @@ return True
def queryStep(self): + """Delete the ones that are done now.""" self.oneQuery() - # Delete the ones that are done now. for i in range(len(self.subjects) - 1, -1, -1): subj = self.subjects[i] if subj.isDone(): @@ -2301,11 +2315,12 @@ self.queryStep()
def __len__(self): + """Return length of subjects.""" return len(self.subjects)
def compareLanguages(old, new, insite): - + """Compare changes and setup i18n message.""" oldiw = set(old) newiw = set(new)
@@ -2357,6 +2372,7 @@
def botMayEdit(page): + """Test for allowed edits.""" tmpl = [] try: tmpl, loc = moved_links[page.site.code] @@ -2378,6 +2394,7 @@
def readWarnfile(filename, bot): + """Read old interlanguage conflicts.""" import warnfile reader = warnfile.WarnfileReader(filename) # we won't use removeHints diff --git a/scripts/isbn.py b/scripts/isbn.py index 483514a..b96676c 100755 --- a/scripts/isbn.py +++ b/scripts/isbn.py @@ -1229,12 +1229,14 @@ """ISBN 13."""
def __init__(self, code, checksumMissing=False): + """Constructor.""" self.code = code if checksumMissing: self.code += str(self.calculateChecksum()) self.checkValidity()
def possiblePrefixes(self): + """Return possible prefixes.""" return ['978', '979']
def digits(self): @@ -1249,6 +1251,7 @@ return result
def checkValidity(self): + """Check validity of ISBN.""" if len(self.digits()) != 13: raise InvalidIsbnException('The ISBN %s is not 13 digits long.' % self.code) @@ -1257,7 +1260,11 @@ % self.code)
def calculateChecksum(self): - # See https://en.wikipedia.org/wiki/ISBN#Check_digit_in_ISBN_13 + """ + Calculate checksum. + + See https://en.wikipedia.org/wiki/ISBN#Check_digit_in_ISBN_13 + """ sum = 0 for i in range(0, 13 - 1, 2): sum += self.digits()[i] @@ -1271,10 +1278,12 @@ """ISBN 10."""
def __init__(self, code): + """Constructor.""" self.code = code self.checkValidity()
def possiblePrefixes(self): + """Return possible prefixes.""" return []
def digits(self): @@ -1302,6 +1311,7 @@ % self.code)
def checkValidity(self): + """Check validity of ISBN.""" if len(self.digits()) != 10: raise InvalidIsbnException('The ISBN %s is not 10 digits long.' % self.code) @@ -1325,6 +1335,7 @@ return ISBN13(code, checksumMissing=True)
def format(self): + """Format ISBN number.""" # load overridden superclass method ISBN.format(self) # capitalize checksum @@ -1471,6 +1482,7 @@ """ISBN bot."""
def __init__(self, generator, **kwargs): + """Constructor.""" self.availableOptions.update({ 'to13': False, 'format': False, @@ -1482,6 +1494,7 @@ self.comment = i18n.twtranslate(pywikibot.Site(), 'isbn-formatting')
def treat(self, page): + """Treat a page.""" try: old_text = page.get() for match in self.isbnR.finditer(old_text): @@ -1517,6 +1530,7 @@ % page.title(asLink=True))
def run(self): + """Run the bot.""" for page in self.generator: self.treat(page)
@@ -1526,6 +1540,7 @@ """ISBN bot to be run on Wikibase sites."""
def __init__(self, generator, **kwargs): + """Constructor.""" self.availableOptions.update({ 'to13': False, 'format': False, @@ -1543,6 +1558,7 @@ self.comment = i18n.twtranslate(pywikibot.Site(), 'isbn-formatting')
def treat(self, page, item): + """Treat a page.""" change_messages = []
if self.isbn_10_prop_id in item.claims: diff --git a/scripts/lonelypages.py b/scripts/lonelypages.py index 8097a48..ef2f0e4 100755 --- a/scripts/lonelypages.py +++ b/scripts/lonelypages.py @@ -62,6 +62,7 @@ """The orphan template configuration."""
def __init__(self, site, name, parameters, aliases=None, subst=False): + """Constructor.""" self._name = name if not aliases: aliases = [] @@ -105,6 +106,7 @@ """Orphan page tagging bot."""
def __init__(self, generator, **kwargs): + """Constructor.""" self.availableOptions.update({ 'enablePage': None, # Check if someone set an enablePage or not 'disambigPage': None, # If no disambigPage given, not use it. @@ -151,6 +153,7 @@ return self._settings
def enable_page(self): + """Enable or disable bot via wiki page.""" enable = self.getOption('enablePage') if enable is not None: try: @@ -167,6 +170,7 @@ return True
def run(self): + """Run the bot.""" # If the enable page is set to disable, turn off the bot # (useful when the bot is run on a server) if not self.enable_page(): diff --git a/scripts/maintenance/cache.py b/scripts/maintenance/cache.py index a705340..1edd367 100755 --- a/scripts/maintenance/cache.py +++ b/scripts/maintenance/cache.py @@ -95,9 +95,11 @@ self.filename = filename
def __str__(self): + """Return string equivalent of object.""" return self.filename
def __repr__(self): + """Representation of object.""" return self._cachefile_path()
def _create_file_name(self): @@ -109,6 +111,7 @@ return self.directory
def _cachefile_path(self): + """Return cache file path.""" return os.path.join(self._get_cache_dir(), self._create_file_name())
@@ -286,6 +289,7 @@
def _parse_command(command, name): + """Parse command.""" obj = globals().get(command) if callable(obj): return obj @@ -329,26 +333,31 @@
def incorrect_hash(entry): + """Incorrect hash.""" if hashlib.sha256(entry.key.encode('utf-8')).hexdigest() != entry.filename: return entry
def older_than(entry, interval): + """Find older entries.""" if entry._cachetime + interval < datetime.datetime.now(): return entry
def newer_than(entry, interval): + """Find newer entries.""" if entry._cachetime + interval >= datetime.datetime.now(): return entry
def older_than_one_day(entry): + """Find more than one day old entries.""" if older_than(entry, datetime.timedelta(days=1)): return entry
def recent(entry): + """Find entries newer than one hour.""" if newer_than(entry, datetime.timedelta(hours=1)): return entry
@@ -369,6 +378,7 @@
def main(): + """Process command line arguments and invoke bot.""" local_args = pywikibot.handleArgs() cache_paths = None delete = False diff --git a/scripts/maintenance/compat2core.py b/scripts/maintenance/compat2core.py index a3b0168..de59ee0 100755 --- a/scripts/maintenance/compat2core.py +++ b/scripts/maintenance/compat2core.py @@ -126,10 +126,12 @@ """Script conversion bot."""
def __init__(self, filename=None, warnonly=False): + """Constructor.""" self.source = filename self.warnonly = warnonly
def run(self): + """Run the bot.""" self.get_source() self.get_dest() if not self.warnonly: @@ -137,6 +139,7 @@ self.warning()
def get_source(self): + """Get source script.""" while True: if self.source is None: self.source = pywikibot.input( @@ -155,6 +158,7 @@ self.source = None
def get_dest(self): + """Ask for destination script name.""" self.dest = u'%s-core.%s' % tuple(self.source.rsplit(u'.', 1)) if not self.warnonly and not pywikibot.input_yn( u'Destination file is %s.' % self.dest, @@ -163,6 +167,7 @@ exit()
def convert(self): + """Convert script.""" f = codecs.open(self.source, "r", "utf-8") text = f.read() f.close() @@ -173,6 +178,7 @@ g.close()
def warning(self): + """Show warnings and hints.""" filename = self.source if self.warnonly else self.dest g = codecs.open(filename, "r", "utf-8") for i, line in enumerate(g, start=1): @@ -183,6 +189,7 @@
def main(): + """Process command line arguments and invoke bot.""" filename = None warnonly = False
diff --git a/scripts/maintenance/wikimedia_sites.py b/scripts/maintenance/wikimedia_sites.py index d26c958..9ba8a32 100755 --- a/scripts/maintenance/wikimedia_sites.py +++ b/scripts/maintenance/wikimedia_sites.py @@ -45,6 +45,7 @@
def update_family(families): + """Update family files.""" for family in families or familiesDict.keys(): pywikibot.output('\nChecking family %s:' % family)
diff --git a/scripts/movepages.py b/scripts/movepages.py index 78c62bf..bf07c1b 100755 --- a/scripts/movepages.py +++ b/scripts/movepages.py @@ -63,6 +63,7 @@ """Page move bot."""
def __init__(self, generator, **kwargs): + """Constructor.""" self.availableOptions.update({ 'prefix': None, 'noredirect': False, @@ -78,6 +79,7 @@ self.noNamespace = False
def moveOne(self, page, newPageTitle): + """Move one page to newPageTitle.""" try: msg = self.getOption('summary') if not msg: @@ -91,6 +93,7 @@ pywikibot.output(error)
def treat(self, page): + """Treat a single page.""" self.current_page = page if self.getOption('skipredirects') and page.isRedirectPage(): pywikibot.output(u'Page %s is a redirect; skipping.' % page.title()) diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py index 2b79247..166b776 100755 --- a/scripts/nowcommons.py +++ b/scripts/nowcommons.py @@ -193,6 +193,7 @@ """Bot to delete migrated files."""
def __init__(self, **kwargs): + """Constructor.""" self.availableOptions.update({ 'replace': False, 'replacealways': False, @@ -207,6 +208,7 @@ sys.exit('Do not run this bot on Commons!')
def ncTemplates(self): + """Return nowcommons templates.""" if self.site.lang in nowCommons: return nowCommons[self.site.lang] else: @@ -221,6 +223,7 @@ return self._nc_templates
def useHashGenerator(self): + """Use hash generator.""" # https://toolserver.org/~multichill/nowcommons.php?language=it&page=2&... lang = self.site.lang num_page = 0 @@ -280,6 +283,7 @@ break
def getPageGenerator(self): + """Generator method.""" if self.getOption('use_hash'): gen = self.useHashGenerator() else: @@ -292,6 +296,7 @@ return gen
def findFilenameOnCommons(self, localImagePage): + """Find filename on Commons.""" filenameOnCommons = None for templateName, params in localImagePage.templatesWithParams(): if templateName in self.nc_templates: @@ -320,6 +325,7 @@ return filenameOnCommons
def run(self): + """Run the bot.""" commons = pywikibot.Site('commons', 'commons') comment = i18n.twtranslate(self.site, 'imagetransfer-nowcommons_notice')
diff --git a/scripts/redirect.py b/scripts/redirect.py index 678a0e5..6691a49 100755 --- a/scripts/redirect.py +++ b/scripts/redirect.py @@ -106,6 +106,7 @@ def __init__(self, xmlFilename=None, namespaces=[], offset=-1, use_move_log=False, use_api=False, start=None, until=None, number=None, step=None, page_title=None): + """Constructor.""" self.site = pywikibot.Site() self.xmlFilename = xmlFilename self.namespaces = namespaces @@ -274,6 +275,7 @@ yield (redirect, result, target, final)
def retrieve_broken_redirects(self): + """Retrieve broken redirects.""" if self.use_api: count = 0 for (pagetitle, type, target, final) \ @@ -302,6 +304,7 @@ yield page
def retrieve_double_redirects(self): + """Retrieve double redirects.""" if self.use_move_log: gen = self.get_moved_pages_redirects() for redir_page in gen: @@ -384,6 +387,7 @@ """Redirect bot."""
def __init__(self, action, generator, **kwargs): + """Constructor.""" self.availableOptions.update({ 'number': None, 'delete': False, @@ -397,11 +401,13 @@ self.exiting = False
def delete_broken_redirects(self): + """Process all broken redirects.""" # get reason for deletion text for redir_name in self.generator.retrieve_broken_redirects(): self.delete_1_broken_redirect(redir_name)
def delete_1_broken_redirect(self, redir_name): + """Treat one broken redirect.""" if isinstance(redir_name, basestring): redir_page = pywikibot.Page(self.site, redir_name) else: @@ -526,10 +532,12 @@ if self.getOption('delete') else "Skipping."))
def fix_double_redirects(self): + """Process double redirects.""" for redir_name in self.generator.retrieve_double_redirects(): self.fix_1_double_redirect(redir_name)
def fix_1_double_redirect(self, redir_name): + """Treat one double redirect.""" if isinstance(redir_name, basestring): redir = pywikibot.Page(self.site, redir_name) else: @@ -684,6 +692,7 @@ break
def fix_double_or_delete_broken_redirects(self): + """Process all redirects for 'both' action.""" # TODO: part of this should be moved to generator, the rest merged into # self.run() count = 0 diff --git a/scripts/reflinks.py b/scripts/reflinks.py index be630cb..4fe758a 100755 --- a/scripts/reflinks.py +++ b/scripts/reflinks.py @@ -196,6 +196,7 @@ """Container to handle a single bare reference."""
def __init__(self, link, name): + """Constructor.""" self.refname = name self.link = link self.site = pywikibot.Site() @@ -273,6 +274,7 @@ """
def __init__(self): + """Constructor.""" # Match references self.REFS = re.compile( r'(?i)<ref(?P<params>[^>/]*)>(?P<content>.*?)</ref>') @@ -283,6 +285,7 @@ self.autogen = i18n.twtranslate(pywikibot.Site(), 'reflinks-autogen')
def process(self, text): + """Process the page.""" # keys are ref groups # values are a dict where : # keys are ref content diff --git a/scripts/replicate_wiki.py b/scripts/replicate_wiki.py index 784b6d3..44b37af 100755 --- a/scripts/replicate_wiki.py +++ b/scripts/replicate_wiki.py @@ -75,6 +75,7 @@ """Work is done in here."""
def __init__(self, options): + """Constructor.""" self.options = options
if options.original_wiki: @@ -184,6 +185,7 @@ sync_overview_page.save(self.put_message(site))
def put_message(self, site): + """Return synchronization message.""" return ('%s replicate_wiki.py synchronization from %s' % (site.user(), str(self.original)))
@@ -234,6 +236,14 @@
def main(*args): + """ + Process command line arguments and invoke bot. + + If args is an empty list, sys.argv is used. + + @param args: command line arguments + @type args: list of unicode + """ my_args = pywikibot.handle_args(args)
parser = ArgumentParser(add_help=False) diff --git a/scripts/revertbot.py b/scripts/revertbot.py index 6a47536..d635963 100755 --- a/scripts/revertbot.py +++ b/scripts/revertbot.py @@ -49,6 +49,7 @@ """
def __init__(self, site, user=None, comment=None, rollback=False): + """Constructor.""" self.site = site self.comment = comment self.user = user @@ -57,6 +58,7 @@ self.rollback = rollback
def get_contributions(self, max=500, ns=None): + """Get contributions.""" count = 0 iterator = pywikibot.tools.empty_iterator() never_continue = False @@ -73,7 +75,7 @@ yield item
def revert_contribs(self, callback=None): - + """Revert contributions.""" if callback is None: callback = self.callback
@@ -92,9 +94,11 @@ return
def callback(self, item): + """Callback function.""" return 'top' in item
def revert(self, item): + """Revert a single item.""" page = pywikibot.Page(self.site, item['title']) history = list(page.revisions(total=2)) if len(history) > 1: @@ -133,6 +137,7 @@ return u"The edit(s) made in %s by %s was rollbacked" % (page.title(), self.user)
def log(self, msg): + """Log the message msg.""" pywikibot.output(msg)
@@ -141,6 +146,7 @@ """Example revert bot."""
def callback(self, item): + """Callback function for 'private' revert bot.""" if 'top' in item: page = pywikibot.Page(self.site, item['title']) text = page.get(get_redirect=True) diff --git a/scripts/script_wui.py b/scripts/script_wui.py index e247701..81441bc 100755 --- a/scripts/script_wui.py +++ b/scripts/script_wui.py @@ -125,6 +125,7 @@ """WikiUserInterface bot."""
def __init__(self, *arg): + """Constructor.""" pywikibot.output(color_format( '{lightgreen}* Initialization of bot{default}'))
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py index 5886164..c0003fa 100755 --- a/scripts/solve_disambiguation.py +++ b/scripts/solve_disambiguation.py @@ -352,8 +352,12 @@
def correctcap(link, text): - # If text links to a page with title link uncapitalized, uncapitalize link, - # otherwise capitalize it + """ + Capitalize link. + + If text links to a page with title link uncapitalized, uncapitalize link, + otherwise capitalize it + """ linkupper = link.title() linklower = first_lower(linkupper) if "[[%s]]" % linklower in text or "[[%s|" % linklower in text: @@ -367,6 +371,7 @@ """Referring Page generator, with an ignore manager."""
def __init__(self, disambPage, primary=False, minimum=0, main_only=False): + """Constructor.""" self.disambPage = disambPage # if run with the -primary argument, enable the ignore manager self.primaryIgnoreManager = PrimaryIgnoreManager(disambPage, @@ -375,6 +380,7 @@ self.main_only = main_only
def __iter__(self): + """Yield pages.""" # TODO: start yielding before all referring pages have been found refs = [ page for page in self.disambPage.getReferences( @@ -417,6 +423,7 @@ """
def __init__(self, disambPage, enabled=False): + """Constructor.""" self.disambPage = disambPage self.enabled = enabled self.ignorelist = [] @@ -445,9 +452,11 @@ pass
def isIgnored(self, refPage): + """Return if refPage is to be ignored.""" return self.enabled and refPage.title(asUrl=True) in self.ignorelist
def ignore(self, refPage): + """Write page to ignorelist.""" if self.enabled: # Skip this occurrence next time. filename = config.datafilepath( @@ -559,6 +568,7 @@
def __init__(self, always, alternatives, getAlternatives, dnSkip, generator, primary, main_only, minimum=0): + """Constructor.""" super(DisambiguationRobot, self).__init__() self.always = always self.alternatives = alternatives @@ -594,18 +604,19 @@ return None
def makeAlternativesUnique(self): - # remove duplicate entries stable + """Remove duplicate entries stable.""" unique = set(self.alternatives) self.alternatives = [alt for alt in self.alternatives if alt in unique]
def listAlternatives(self): + """Show a list of alternatives.""" list = u'\n' for i in range(len(self.alternatives)): list += (u"%3i - %s\n" % (i, self.alternatives[i])) pywikibot.output(list)
def setupRegexes(self): - # compile regular expressions + """Compile regular expressions.""" self.ignore_contents_regexes = [] if self.mylang in self.ignore_contents: for ig in self.ignore_contents[self.mylang]: @@ -893,6 +904,7 @@ return True
def findAlternatives(self, disambPage): + """Look for alternative links of disambiguation pages.""" if disambPage.isRedirectPage() and not self.primary: if (disambPage.site.lang in self.primary_redir_template and self.primary_redir_template[disambPage.site.lang] @@ -969,6 +981,7 @@
def setSummaryMessage(self, disambPage, new_targets=[], unlink_counter=0, dn=False): + """Setup i18n summary message.""" # make list of new targets comma = self.mysite.mediawiki_message(u"comma-separator") targets = comma.join(u'[[%s]]' % page_title @@ -1031,7 +1044,7 @@ 'count': len(new_targets)})
def run(self): - + """Run the bot.""" for disambPage in self.generator: self.primaryIgnoreManager = PrimaryIgnoreManager( disambPage, enabled=self.primary) diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py index 99bef29..62e2b09 100755 --- a/scripts/weblinkchecker.py +++ b/scripts/weblinkchecker.py @@ -309,6 +309,7 @@ self.HTTPignore = HTTPignore
def getConnection(self): + """Get a connection.""" if self.scheme == 'http': return httplib.HTTPConnection(self.host) elif self.scheme == 'https': @@ -317,6 +318,7 @@ raise NotAnURLError(self.url)
def getEncodingUsedByServer(self): + """Get encoding used by server.""" if not self.serverEncoding: try: pywikibot.output( @@ -338,6 +340,7 @@ return self.serverEncoding
def readEncodingFromResponse(self, response): + """Read encoding from response.""" if not self.serverEncoding: try: ct = response.getheader('Content-Type') @@ -348,6 +351,7 @@ pass
def changeUrl(self, url): + """Change url.""" self.url = url # we ignore the fragment (self.scheme, self.host, self.path, self.query, @@ -534,6 +538,7 @@ """
def __init__(self, page, url, history, HTTPignore, day): + """Constructor.""" threading.Thread.__init__(self) self.page = page self.url = url @@ -555,6 +560,7 @@ self.day = day
def run(self): + """Run the bot.""" ok = False try: header = self.header @@ -609,6 +615,7 @@ """
def __init__(self, reportThread, site=None): + """Constructor.""" self.reportThread = reportThread if not site: self.site = pywikibot.Site() @@ -723,6 +730,7 @@ """
def __init__(self): + """Constructor.""" threading.Thread.__init__(self) self.semaphore = threading.Semaphore() self.queue = [] @@ -736,13 +744,16 @@ self.semaphore.release()
def shutdown(self): + """Finish thread.""" self.finishing = True
def kill(self): + """Kill thread.""" # TODO: remove if unneeded self.killed = True
def run(self): + """Run thread.""" while not self.killed: if len(self.queue) == 0: if self.finishing: diff --git a/scripts/welcome.py b/scripts/welcome.py index b531656..60c981d 100755 --- a/scripts/welcome.py +++ b/scripts/welcome.py @@ -467,6 +467,7 @@ % (self.site, self.site.code, self.site.family.name))
def badNameFilter(self, name, force=False): + """Check for bad names.""" if not globalvar.filtBadName: return False
@@ -562,6 +563,7 @@ return False
def reportBadAccount(self, name=None, final=False): + """Report bad account.""" # Queue process if name: if globalvar.confirm: @@ -625,6 +627,7 @@ return True
def makelogpage(self, queue=None): + """Make log page.""" if queue is None: queue = [] if not globalvar.makeWelcomeLog or len(queue) == 0: @@ -672,9 +675,11 @@ time.sleep(10)
def parseNewUserLog(self): + """Retrieve new users.""" return self.site.logevents('newusers', total=globalvar.queryLimit)
def defineSign(self, force=False): + """Setup signature.""" if hasattr(self, '_randomSignature') and not force: return self._randomSignature
@@ -716,6 +721,7 @@ return self._randomSignature
def run(self): + """Run the bot.""" while True: welcomed_count = 0 us = (pywikibot.User(self.site, users.user()) diff --git a/tests/deprecation_tests.py b/tests/deprecation_tests.py index cd25d38..98161ec 100644 --- a/tests/deprecation_tests.py +++ b/tests/deprecation_tests.py @@ -62,6 +62,7 @@ net = False
def test_add_full_name_decorator(self): + """Test add_decorated_full_name() method.""" self.assertRaisesRegex( Exception, __name__ + '.decorated_func', @@ -171,28 +172,34 @@ @classmethod @deprecated() def class_method(cls, foo=None): + """Deprecated class method.""" return foo
@staticmethod @deprecated() def static_method(foo=None): + """Deprecated static method.""" return foo
@deprecated() def instance_method(self, foo=None): + """Deprecated instance method.""" self.foo = foo return foo
@deprecated def instance_method2(self, foo=None): + """Another deprecated instance method.""" self.foo = foo return foo
def undecorated_method(self, foo=None): + """Not deprecated instance method.""" return foo
@deprecate_arg('bah', 'foo') def deprecated_instance_method_arg(self, foo=None): + """Instance method with deprecated parameters.""" self.foo = foo return foo
@@ -208,6 +215,7 @@
@deprecated_args(bah='foo', bah2='foo2') def deprecated_instance_method_args_multi(self, foo, foo2): + """Instance method with multiple deprecated parameters.""" self.foo = foo self.foo2 = foo2 return (foo, foo2) @@ -215,21 +223,25 @@ @deprecated() @deprecate_arg('bah', 'foo') def deprecated_instance_method_and_arg(self, foo): + """Deprecated instance method with deprecated parameters.""" self.foo = foo return foo
@deprecate_arg('bah', 'foo') @deprecated() def deprecated_instance_method_and_arg2(self, foo): + """Deprecating decorators in reverse order.""" self.foo = foo return foo
@remove_last_args(['foo', 'bar']) def deprecated_all(self): + """Deprecating positional parameters.""" return None
@remove_last_args(['bar']) def deprecated_all2(self, foo): + """Deprecating last positional parameter.""" return foo
@@ -247,6 +259,7 @@ """Deprecated class."""
def __init__(self, foo=None): + """Constructor.""" self.foo = foo
@@ -310,6 +323,7 @@ self.assertEqual(rv.__doc__, doc)
def test_deprecated_function_bad_args(self): + """Test @deprecated function with bad arguments.""" rv = deprecated_func_bad_args(None) self.assertEqual(rv, None) self.assertOneDeprecationParts(__name__ + '.deprecated_func_bad_args') @@ -328,6 +342,7 @@ self.assertOneDeprecationParts(__name__ + '.deprecated_func_bad_args')
def test_deprecated_instance_method(self): + """Test @deprecated instance method.""" f = DeprecatedMethodClass()
rv = f.instance_method() @@ -349,6 +364,7 @@ __name__ + '.DeprecatedMethodClass.instance_method')
def test_deprecated_instance_method2(self): + """Test @deprecated instance method 2.""" f = DeprecatedMethodClass()
rv = f.instance_method2() @@ -410,7 +426,9 @@ self.assertOneDeprecationParts(__name__ + '.DeprecatedClass')
def test_deprecate_function_arg(self): + """Test @deprecated function argument.""" def tests(func): + """Test function.""" rv = func() self.assertEqual(rv, None) self.assertNoDeprecation() @@ -442,6 +460,7 @@ tests(deprecated_func_arg2)
def test_deprecate_and_remove_function_args(self): + """Test @deprecated and removed function argument.""" rv = deprecated_func_arg3() self.assertEqual(rv, None) self.assertNoDeprecation() @@ -585,6 +604,7 @@ "The value(s) provided for 'bar' have been dropped." % __name__)
def test_remove_last_args_invalid(self): + """Test invalid @remove_last_args on functions.""" self.assertRaisesRegex( TypeError, r"(deprecated_all2() missing 1 required positional argument: 'foo'|" # Python 3 diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py index 76d1c28..9d8397c 100755 --- a/tests/pagegenerators_tests.py +++ b/tests/pagegenerators_tests.py @@ -71,22 +71,27 @@ titles = en_wp_page_titles + en_wp_nopage_titles
def setUp(self): + """Setup test.""" super(TestDryPageGenerators, self).setUp() self.site = self.get_site()
def assertFunction(self, obj): + """Assert function test.""" self.assertTrue(hasattr(pagegenerators, obj)) self.assertTrue(hasattr(getattr(pagegenerators, obj), '__call__'))
def test_module_import(self): + """Test module import.""" self.assertIn("pywikibot.pagegenerators", sys.modules)
def test_PagesFromTitlesGenerator(self): + """Test PagesFromTitlesGenerator.""" self.assertFunction("PagesFromTitlesGenerator") gen = pagegenerators.PagesFromTitlesGenerator(self.titles, self.site) self.assertPagelistTitles(gen, self.titles)
def test_NamespaceFilterPageGenerator(self): + """Test NamespaceFilterPageGenerator.""" self.assertFunction("NamespaceFilterPageGenerator") site = self.site gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site) @@ -108,6 +113,7 @@ self.assertEqual(len(tuple(gen)), 10)
def test_RegexFilterPageGenerator(self): + """Test RegexFilterPageGenerator.""" self.assertFunction("RegexFilterPageGenerator") gen = pagegenerators.PagesFromTitlesGenerator(self.titles, self.site) gen = pagegenerators.RegexFilterPageGenerator(gen, '/doc') @@ -166,6 +172,7 @@ self.assertEqual(len(tuple(gen)), 7)
def test_RegexBodyFilterPageGenerator(self): + """Test RegexBodyFilterPageGenerator.""" self.assertFunction("RegexBodyFilterPageGenerator") gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site=self.site) @@ -194,12 +201,14 @@ category_list = ['Category:Validated']
def setUp(self): + """Setup tests.""" super(TestCategoryFilterPageGenerator, self).setUp() self.site = self.get_site() self.titles = [self.base_title % i for i in range(1, 11)] self.catfilter_list = [pywikibot.Category(self.site, cat) for cat in self.category_list]
def test_CategoryFilterPageGenerator(self): + """Test CategoryFilterPageGenerator.""" site = self.site gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site) gen = pagegenerators.CategoryFilterPageGenerator(gen, self.catfilter_list, site) @@ -218,11 +227,13 @@ base_title = 'Page:Popular Science Monthly Volume 1.djvu/%s'
def setUp(self): + """Setup tests.""" super(TestQualityFilterPageGenerator, self).setUp() self.site = self.get_site() self.titles = [self.base_title % i for i in range(1, 11)]
def test_QualityFilterPageGenerator(self): + """Test QualityFilterPageGenerator.""" site = self.site gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site) gen = pagegenerators.QualityFilterPageGenerator(gen, [0]) @@ -243,6 +254,7 @@ titles = en_wp_page_titles
def test_first_edit(self): + """Test first edit.""" expect = ( u'The Addams Family (pinball)', u'Talk:Nowy Sącz', @@ -262,6 +274,7 @@ self.assertTrue(all(p.title not in expect for p in opposite_pages))
def test_last_edit(self): + """Test last edit.""" two_days_ago = datetime.datetime.now() - datetime.timedelta(days=2) nine_days_ago = datetime.datetime.now() - datetime.timedelta(days=9)
@@ -294,6 +307,7 @@ code = 'test'
def test_subpage_filter(self): + """Test SubpageFilterGenerator.""" site = self.get_site() test_cat = pywikibot.Category(site, 'Subpage testing')
@@ -318,6 +332,7 @@ """Test RepeatingGenerator."""
def test_RepeatingGenerator(self): + """Test RepeatingGenerator.""" items = list( pagegenerators.RepeatingGenerator(self.site.recentchanges, key_func=lambda x: x['revid'], @@ -354,6 +369,7 @@ )
def test_brackets(self): + """Test TextfilePageGenerator with brackets.""" filename = join_data_path('pagelist-brackets.txt') site = self.get_site() titles = list(pagegenerators.TextfilePageGenerator(filename, site)) @@ -364,6 +380,7 @@ self.assertPageTitlesEqual(titles, expected_titles)
def test_lines(self): + """Test TextfilePageGenerator with newlines.""" filename = join_data_path('pagelist-lines.txt') site = self.get_site() titles = list(pagegenerators.TextfilePageGenerator(filename, site)) @@ -379,6 +396,7 @@ """Test the year page generator."""
def test_basic(self): + """Test YearPageGenerator.""" site = self.get_site() # Some languages are missing (T85681) if (site.lang not in date.formats['YearBC']) or (site.lang not in date.formats['YearAD']): @@ -403,11 +421,13 @@
@classmethod def setUpClass(cls): + """Setup class for tests.""" super(TestDayPageGenerator, cls).setUpClass() cls.site = cls.get_site() cls.fd = date.FormatDate(cls.site)
def _run_test(self, startMonth=1, endMonth=12, year=2000): + """Test method for DayPageGenerator.""" params = { 'startMonth': startMonth, 'endMonth': endMonth, @@ -535,27 +555,32 @@ dry = True
def test_one_namespace(self): + """Test one namespace.""" gf = pagegenerators.GeneratorFactory(site=self.get_site()) gf.handleArg('-ns:2') self.assertEqual(gf.namespaces, set([2]))
def test_two_namespaces(self): + """Test two namespaces.""" gf = pagegenerators.GeneratorFactory(site=self.get_site()) gf.handleArg('-ns:2') gf.handleArg('-ns:Talk') self.assertEqual(gf.namespaces, set([2, 1]))
def test_two_named_namespaces(self): + """Test two named namespaces.""" gf = pagegenerators.GeneratorFactory(site=self.get_site()) gf.handleArg('-ns:Talk,File') self.assertEqual(gf.namespaces, set([1, 6]))
def test_two_numeric_namespaces(self): + """Test two namespaces delimited by colon.""" gf = pagegenerators.GeneratorFactory(site=self.get_site()) gf.handleArg('-ns:1,6') self.assertEqual(gf.namespaces, set([1, 6]))
def test_immutable_namespaces_on_read(self): + """Test immutable namespaces on read.""" gf = pagegenerators.GeneratorFactory(site=self.get_site()) gf.handleArg('-ns:1,6') self.assertEqual(gf.namespaces, set([1, 6])) @@ -564,6 +589,7 @@ self.assertEqual(gf.namespaces, set([1, 6]))
def test_unsupported_quality_level_filter(self): + """Test unsupported option.""" gf = pagegenerators.GeneratorFactory(site=self.get_site()) self.assertRaises(UnknownExtension, gf.handleArg, '-ql:2')
@@ -644,12 +670,14 @@ """Test pagegenerators.GeneratorFactory."""
def test_ns(self): + """Test namespace option.""" gf = pagegenerators.GeneratorFactory() gf.handleArg('-ns:1') gen = gf.getCombinedGenerator() self.assertIsNone(gen)
def test_allpages_default(self): + """Test allpages generator.""" gf = pagegenerators.GeneratorFactory() self.assertTrue(gf.handleArg('-start:!')) gf.handleArg('-limit:10') @@ -663,6 +691,7 @@ self.assertEqual(page.namespace(), 0)
def test_allpages_ns(self): + """Test allpages generator with namespace argument.""" gf = pagegenerators.GeneratorFactory() self.assertTrue(gf.handleArg('-start:!')) gf.handleArg('-limit:10') @@ -674,6 +703,7 @@ self.assertPagesInNamespaces(gen, 1)
def test_regexfilter_default(self): + """Test allpages generator with titleregex filter.""" gf = pagegenerators.GeneratorFactory() # Matches titles with the same two or more continous characters self.assertTrue(gf.handleArg('-start')) @@ -688,6 +718,7 @@ self.assertRegex(page.title().lower(), '(.)\1+')
def test_regexfilter_ns_after(self): + """Test allpages generator with titleregex and namespace filter.""" gf = pagegenerators.GeneratorFactory() self.assertTrue(gf.handleArg('-start')) self.assertTrue(gf.handleArg('-titleregex:.*')) @@ -699,6 +730,7 @@ self.assertPagesInNamespaces(pages, 1)
def test_regexfilter_ns_before(self): + """Test allpages generator with namespace and titleregex filter.""" gf = pagegenerators.GeneratorFactory() self.assertTrue(gf.handleArg('-start')) gf.handleArg('-ns:1') @@ -723,6 +755,7 @@ gf.getCombinedGenerator)
def test_prefixing_default(self): + """Test prefixindex generator.""" gf = pagegenerators.GeneratorFactory() self.assertTrue(gf.handleArg('-prefixindex:a')) gf.handleArg('-limit:10') @@ -736,6 +769,7 @@ self.assertTrue(page.title().lower().startswith('a'))
def test_prefixing_ns(self): + """Test prefixindex generator with namespace filter.""" gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-ns:1') gf.handleArg('-prefixindex:a') @@ -745,6 +779,7 @@ self.assertPagesInNamespaces(gen, 1)
def test_newpages_default(self): + """Test newpages generator.""" gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-newpages') gen = gf.getCombinedGenerator() @@ -754,6 +789,7 @@ self.assertLessEqual(len(pages), 60)
def test_newpages_ns_default(self): + """Test newpages generator with limit argument.""" gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-newpages:10') gen = gf.getCombinedGenerator() @@ -761,6 +797,7 @@ self.assertPagesInNamespaces(gen, 0)
def test_newpages_ns(self): + """Test newpages generator with limit argument and namespace filter.""" gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-ns:1') gf.handleArg('-newpages:10') @@ -769,6 +806,7 @@ self.assertPagesInNamespaces(gen, 1)
def test_recentchanges_ns_default(self): + """Test recentchanges generator.""" gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-recentchanges:50') gen = gf.getCombinedGenerator() @@ -776,6 +814,7 @@ self.assertPagesInNamespacesAll(gen, set([0, 1, 2]), skip=True)
def test_recentchanges_ns(self): + """Test recentchanges generator with namespace.""" gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-ns:1') gf.handleArg('-recentchanges:10') @@ -784,6 +823,7 @@ self.assertPagesInNamespaces(gen, 1)
def test_recentchanges_ns_multi(self): + """Test recentchanges generator with multiple namespaces.""" gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-ns:1') gf.handleArg('-ns:3') @@ -858,6 +898,7 @@
@unittest.expectedFailure def test_logevents_parse(self): + """Test wrong logevents option.""" gf = pagegenerators.GeneratorFactory() self.assertFalse(gf.handleArg("-log")) self.assertFalse(gf.handleArg("-log:text_here")) @@ -866,6 +907,7 @@ Exception)
def test_logevents_default(self): + """Test old logevents option handling.""" gf = pagegenerators.GeneratorFactory(site=self.site) self.assertTrue(gf.handleArg('-newuserslog')) self.assertOneDeprecationParts('The usage of "-newuserslog"', @@ -877,6 +919,7 @@ self.assertTrue(all(isinstance(item, pywikibot.Page) for item in pages))
def test_logevents_default_multi(self): + """Test old logevents option handling with limit argument.""" gf = pagegenerators.GeneratorFactory(site=self.site) self.assertTrue(gf.handleArg('-newuserslog:10')) gen = gf.getCombinedGenerator() @@ -886,6 +929,7 @@ self.assertTrue(all(isinstance(item, pywikibot.Page) for item in pages))
def test_logevents_ns(self): + """Test old logevents option with limit argument and namespace.""" gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-ns:1') gf.handleArg('-newuserslog:10') @@ -895,6 +939,7 @@ self.assertTrue(all(isinstance(item, pywikibot.Page) for item in gen))
def test_logevents_user_multi(self): + """Test old logevents option for a given user.""" gf = pagegenerators.GeneratorFactory(site=self.site) user = self.get_site().user() self.assertTrue(gf.handleArg('-newuserslog:' + user + ';10')) @@ -918,12 +963,14 @@ """Page intersect_generators test cases."""
def test_intersect_newpages_twice(self): + """Test newpages intersection.""" site = self.get_site() self.assertEqualItertools( [pagegenerators.NewpagesPageGenerator(site=site, total=10), pagegenerators.NewpagesPageGenerator(site=site, total=10)])
def test_intersect_newpages_and_recentchanges(self): + """Test intersection between newpages and recentchanges.""" site = self.get_site() self.assertEqualItertools( [pagegenerators.NewpagesPageGenerator(site=site, total=50), @@ -939,6 +986,7 @@ code = 'en'
def test_intersect_newpages_csd(self): + """Test intersection between newpages and csd candidates.""" site = self.get_site() self.assertEqualItertools([ pagegenerators.NewpagesPageGenerator(site=site, total=10), @@ -954,6 +1002,7 @@
@classmethod def setUpClass(cls): + """Setup test class.""" super(LiveRCPageGeneratorTestCase, cls).setUpClass() try: import socketIO_client # noqa @@ -966,6 +1015,7 @@ % socketIO_client.__version__)
def test_RC_pagegenerator_result(self): + """Test RC pagegenerator.""" import logging lgr = logging.getLogger('socketIO_client') lgr.setLevel(logging.DEBUG) diff --git a/tests/site_tests.py b/tests/site_tests.py index 989e940..327509f 100644 --- a/tests/site_tests.py +++ b/tests/site_tests.py @@ -251,6 +251,7 @@ cached = True
def testPickleAbility(self): + """Test pickle ability.""" mysite = self.get_site() mysite_str = pickle.dumps(mysite, protocol=config.pickle_protocol) mysite_pickled = pickle.loads(mysite_str) @@ -589,6 +590,7 @@
@allowed_failure # T78276 def test_allpages_langlinks_enabled(self): + """Test allpages with langlinks enabled.""" mysite = self.get_site() for page in mysite.allpages(filterlanglinks=True, total=5): self.assertIsInstance(page, pywikibot.Page) @@ -597,6 +599,7 @@ self.assertNotEqual(page.langlinks(), [])
def test_allpages_langlinks_disabled(self): + """Test allpages with langlinks disabled.""" mysite = self.get_site() for page in mysite.allpages(filterlanglinks=False, total=5): self.assertIsInstance(page, pywikibot.Page) @@ -605,6 +608,7 @@ self.assertEqual(page.langlinks(), [])
def test_allpages_pagesize(self): + """Test allpages with page maxsize parameter.""" mysite = self.get_site() for page in mysite.allpages(minsize=100, total=5): self.assertIsInstance(page, pywikibot.Page) @@ -624,6 +628,7 @@ 200)
def test_allpages_protection(self): + """Test allpages with protect_type parameter.""" mysite = self.get_site() for page in mysite.allpages(protect_type="edit", total=5): self.assertIsInstance(page, pywikibot.Page) @@ -924,6 +929,7 @@ self.assertLessEqual(len(pages), 10)
def test_protectedpages_edit_level(self): + """Test protectedpages protection level.""" site = self.get_site() levels = set() all_levels = site.protection_levels().difference(['']) @@ -1058,6 +1064,7 @@ user = True
def test_methods(self): + """Test user related methods.""" mysite = self.get_site() self.assertIsInstance(mysite.is_blocked(), bool) self.assertIsInstance(mysite.messages(), bool) @@ -1588,6 +1595,7 @@ sysop = True
def test_methods(self): + """Test sysop related methods.""" mysite = self.get_site() self.assertIsInstance(mysite.is_blocked(True), bool) self.assertIsInstance(mysite.has_right("edit", True), bool) @@ -1916,6 +1924,7 @@ self.mysite.version = self.orig_version
def _test_tokens(self, version, test_version, additional_token): + """Test tokens.""" if version and self._version < MediaWikiVersion(version): raise unittest.SkipTest( u'Site %s version %s is too low for this tests.' @@ -1974,6 +1983,7 @@ self._test_tokens('1.24wmf19', '1.24wmf20', 'deleteglobalaccount')
def testInvalidToken(self): + """Test invalid token.""" self.assertRaises(pywikibot.Error, lambda t: self.mysite.tokens[t], "invalidtype")
@@ -2033,6 +2043,7 @@ cached = True
def testExtensions(self): + """Test Extensions.""" mysite = self.get_site() # test automatically getting extensions cache if 'extensions' in mysite.siteinfo: @@ -2055,7 +2066,7 @@ cached = True
def test_API_limits_with_site_methods(self): - # test step/total parameters for different sitemethods + """Test step/total parameters for different sitemethods.""" mysite = self.get_site() mypage = pywikibot.Page(mysite, 'Albert Einstein') mycat = pywikibot.Page(mysite, 'Category:1879 births') @@ -2156,6 +2167,7 @@ """Test asynchronous siteinfo fetch."""
def test_async_request(self): + """Test async request.""" self.assertTrue(page_put_queue.empty()) self.assertNotIn('statistics', self.site.siteinfo) async_request(self.site.siteinfo.get, 'statistics') @@ -2169,6 +2181,7 @@ """Test site.loadrevisions() caching."""
def setUp(self): + """Setup tests.""" self._page = self.get_mainpage(force=True) super(TestSiteLoadRevisionsCaching, self).setUp()
@@ -2189,6 +2202,7 @@ # Implemented without setUpClass(cls) and global variables as objects # were not completely disposed and recreated but retained 'memory' def setUp(self): + """Setup tests.""" super(TestSiteLoadRevisions, self).setUp() self.mysite = self.get_site() self.mainpage = pywikibot.Page(pywikibot.Link("Main Page", self.mysite)) @@ -2327,6 +2341,7 @@ cached = True
def testInterWikiForward(self): + """Test interwiki forward.""" self.site = self.get_site() self.mainpage = pywikibot.Page(pywikibot.Link("Main Page", self.site)) # test pagelanglinks on commons, @@ -2346,6 +2361,7 @@ cached = True
def testNamespaceCase(self): + """Test namespace case.""" site = self.get_site()
main_namespace = site.namespaces[0] @@ -2364,6 +2380,7 @@ cached = True
def testNamespaceAliases(self): + """Test namespace aliases.""" site = self.get_site()
namespaces = site.namespaces @@ -2402,6 +2419,7 @@ user = True
def test_is_uploaddisabled(self, key): + """Test is_uploaddisabled().""" site = self.get_site(key) if self.sites[key]['enabled']: self.assertFalse(site.is_uploaddisabled()) @@ -2816,11 +2834,13 @@ }
def test_enwp(self): + """Test sametitle for enwp.""" self.assertTrue(self.get_site('enwp').sametitle('Foo', 'foo')) self.assertFalse(self.get_site('enwp').sametitle( 'Template:Test template', 'Template:Test Template'))
def test_dewp(self): + """Test sametitle for dewp.""" site = self.get_site('dewp') self.assertTrue(site.sametitle('Foo', 'foo')) self.assertTrue(site.sametitle('Benutzer:Foo', 'User:Foo')) @@ -2828,9 +2848,11 @@ self.assertTrue(site.sametitle('Benutzerin:Foo', 'Benutzer:Foo'))
def test_enwt(self): + """Test sametitle for enwt.""" self.assertFalse(self.get_site('enwt').sametitle('Foo', 'foo'))
def test_general(self, code): + """Test sametitle.""" site = self.get_site(code) self.assertTrue(site.sametitle('File:Foo', 'Image:Foo')) self.assertTrue(site.sametitle(':Foo', 'Foo')) @@ -3049,6 +3071,7 @@ self._run_test("http://www.tvtropes.org/pmwiki/pmwiki.php/Main/$1")
def _run_test(self, url): + """Run test method.""" site = pywikibot.site.NonMWAPISite(url) with self.assertRaises(NotImplementedError): site.attr diff --git a/tests/textlib_tests.py b/tests/textlib_tests.py index c822d67..a9be159 100644 --- a/tests/textlib_tests.py +++ b/tests/textlib_tests.py @@ -42,24 +42,30 @@ net = False
def setUp(self): + """Setup tests.""" self.catresult1 = ('[[Category:Cat1]]%(LS)s[[Category:Cat2]]%(LS)s' % {'LS': config.LS}) super(TestSectionFunctions, self).setUp()
def contains(self, fn, sn): + """Invoke does_text_contain_section().""" return textlib.does_text_contain_section( files[fn], sn)
def assertContains(self, fn, sn, *args, **kwargs): + """Test that files[fn] contains sn.""" self.assertEqual(self.contains(fn, sn), True, *args, **kwargs)
def assertNotContains(self, fn, sn, *args, **kwargs): + """Test that files[fn] does not contain sn.""" self.assertEqual(self.contains(fn, sn), False, *args, **kwargs)
def testCurrentBehaviour(self): + """Test that 'Editing' is found.""" self.assertContains("enwiki_help_editing", u"Editing")
def testSpacesInSection(self): + """Test with spaces in section.""" self.assertContains("enwiki_help_editing", u"Minor_edits") self.assertNotContains('enwiki_help_editing', '#Minor edits', "Incorrect, '#Minor edits' does not work") @@ -70,17 +76,20 @@
@unittest.expectedFailure def testNonAlphabeticalCharactersInSection(self): + """Test with non-alphabetical chars in section.""" self.assertContains('enwiki_help_editing', 'Talk_.28discussion.29_pages', 'As used in the TOC') self.assertContains('enwiki_help_editing', 'Talk_(discussion)_pages', 'Understood by mediawiki')
def test_spaces_outside_section(self): + """Test with spaces around section.""" self.assertContains("enwiki_help_editing", u"Naming and_moving") self.assertContains("enwiki_help_editing", u" Naming and_moving ") self.assertContains("enwiki_help_editing", u" Naming and_moving_")
def test_link_in_section(self): + """Test with link inside section.""" # section is ==[[Wiki markup]]== self.assertContains("enwiki_help_editing", u"[[Wiki markup]]", "Link as section header") self.assertContains('enwiki_help_editing', '[[:Wiki markup]]', diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py index 164bbd9..ed3cd7e 100644 --- a/tests/wikibase_tests.py +++ b/tests/wikibase_tests.py @@ -52,6 +52,7 @@ """Test site.loadrevisions() caching."""
def setUp(self): + """Setup test.""" self._page = ItemPage(self.get_repo(), 'Q60') super(TestLoadRevisionsCaching, self).setUp()
@@ -100,11 +101,13 @@
@classmethod def setUpClass(cls): + """Setup test class.""" super(TestGeneral, cls).setUpClass() enwiki = pywikibot.Site('en', 'wikipedia') cls.mainpage = pywikibot.Page(pywikibot.page.Link("Main Page", enwiki))
def testWikibase(self): + """Wikibase tests.""" repo = self.get_repo() item_namespace = repo.namespaces[0] self.assertEqual(item_namespace.defaultcontentmodel, 'wikibase-item') @@ -142,6 +145,7 @@ dry = True
def test_WbTime(self): + """Test WbTime.""" repo = self.get_repo() t = pywikibot.WbTime(site=repo, year=2010, hour=12, minute=43) self.assertEqual(t.toTimestr(), '+00000002010-01-01T12:43:00Z') @@ -149,6 +153,7 @@ self.assertRaises(ValueError, pywikibot.WbTime, site=repo, precision='invalid_precision')
def test_WbQuantity_integer(self): + """Test WbQuantity for integer value.""" q = pywikibot.WbQuantity(amount=1234, error=1) self.assertEqual(q.toWikibase(), {'amount': '+1234', 'lowerBound': '+1233', @@ -167,31 +172,35 @@ 'upperBound': '-3', 'unit': '1', })
def test_WbQuantity_float_27(self): + """Test WbQuantity for float value.""" q = pywikibot.WbQuantity(amount=0.044405586) q_dict = {'amount': '+0.044405586', 'lowerBound': '+0.044405586', 'upperBound': '+0.044405586', 'unit': '1', } self.assertEqual(q.toWikibase(), q_dict)
def test_WbQuantity_scientific(self): + """Test WbQuantity for scientific notation.""" q = pywikibot.WbQuantity(amount='1.3e-13', error='1e-14') q_dict = {'amount': '+1.3e-13', 'lowerBound': '+1.2e-13', 'upperBound': '+1.4e-13', 'unit': '1', } self.assertEqual(q.toWikibase(), q_dict)
def test_WbQuantity_decimal(self): + """Test WbQuantity for decimal value.""" q = pywikibot.WbQuantity(amount=Decimal('0.044405586')) q_dict = {'amount': '+0.044405586', 'lowerBound': '+0.044405586', 'upperBound': '+0.044405586', 'unit': '1', } self.assertEqual(q.toWikibase(), q_dict)
def test_WbQuantity_string(self): + """Test WbQuantity for decimal notation.""" q = pywikibot.WbQuantity(amount='0.044405586') q_dict = {'amount': '+0.044405586', 'lowerBound': '+0.044405586', 'upperBound': '+0.044405586', 'unit': '1', } self.assertEqual(q.toWikibase(), q_dict)
def test_WbQuantity_formatting(self): - # test other WbQuantity methods + """Test other WbQuantity methods.""" q = pywikibot.WbQuantity(amount='0.044405586') self.assertEqual("%s" % q, '{\n' @@ -206,11 +215,12 @@ "unit=1)" % {'val': '0.044405586'})
def test_WbQuantity_equality(self): + """Test WbQuantity equality.""" q = pywikibot.WbQuantity(amount='0.044405586') self.assertEqual(q, q)
def test_WbQuantity_fromWikibase(self): - # test WbQuantity.fromWikibase() instantiating + """Test WbQuantity.fromWikibase() instantiating.""" q = pywikibot.WbQuantity.fromWikibase({u'amount': u'+0.0229', u'lowerBound': u'0', u'upperBound': u'1', @@ -221,33 +231,36 @@ 'upperBound': '+1.0000', 'unit': '1', })
def test_WbQuantity_errors(self): - # test WbQuantity error handling + """Test WbQuantity error handling.""" self.assertRaises(ValueError, pywikibot.WbQuantity, amount=None, error=1)
def test_WbMonolingualText_string(self): + """Test WbMonolingualText string.""" q = pywikibot.WbMonolingualText(text='Test that basics work', language='en') q_dict = {'text': 'Test that basics work', 'language': 'en'} self.assertEqual(q.toWikibase(), q_dict)
def test_WbMonolingualText_unicode(self): + """Test WbMonolingualText unicode.""" q = pywikibot.WbMonolingualText(text='Testa det här', language='sv') q_dict = {'text': 'Testa det här', 'language': 'sv'} self.assertEqual(q.toWikibase(), q_dict)
def test_WbMonolingualText_equality(self): + """Test WbMonolingualText equality.""" q = pywikibot.WbMonolingualText(text='Thou shall test this!', language='en-gb') self.assertEqual(q, q)
def test_WbMonolingualText_fromWikibase(self): - # test WbMonolingualText.fromWikibase() instantiating + """Test WbMonolingualText.fromWikibase() instantiating.""" q = pywikibot.WbMonolingualText.fromWikibase({'text': 'Test this!', 'language': u'en'}) self.assertEqual(q.toWikibase(), {'text': 'Test this!', 'language': 'en'})
def test_WbMonolingualText_errors(self): - # test WbMonolingualText error handling + """Test WbMonolingualText error handling.""" self.assertRaises(ValueError, pywikibot.WbMonolingualText, text='', language='sv') self.assertRaises(ValueError, pywikibot.WbMonolingualText, @@ -266,7 +279,11 @@ dry = True
def test_ItemPage_extensibility(self): + """Test ItemPage extensibility.""" class MyItemPage(pywikibot.ItemPage): + + """Dummy ItemPage subclass.""" + pass page = pywikibot.Page(self.site, 'foo') self.assertIsInstance(MyItemPage.fromPage(page, lazy_load=True), @@ -303,11 +320,13 @@
@classmethod def setUpClass(cls): + """Setup test class.""" super(TestItemLoad, cls).setUpClass() cls.site = cls.get_site('enwiki') cls.nyc = pywikibot.Page(pywikibot.page.Link("New York City", cls.site))
def test_item_normal(self): + """Test normal wikibase item.""" wikidata = self.get_repo() item = pywikibot.ItemPage(wikidata, 'Q60') self.assertEqual(item._link._title, 'Q60') @@ -365,13 +384,18 @@ # self.assertTrue(item.labels['en'].lower().endswith('main page'))
def test_empty_item(self): - # should not raise an error as the constructor only requires - # the site parameter, with the title parameter defaulted to None + """ + Test empty wikibase item. + + should not raise an error as the constructor only requires + the site parameter, with the title parameter defaulted to None. + """ wikidata = self.get_repo() item = pywikibot.ItemPage(wikidata) self.assertEqual(item._link._title, '-1')
def test_item_invalid_titles(self): + """Test invalid titles of wikibase items.""" wikidata = self.get_repo() for title in ['null', 'NULL', 'None', '', '-2', '1', '0', '+1', @@ -380,14 +404,19 @@ pywikibot.ItemPage, wikidata, title)
def test_item_untrimmed_title(self): + """ + Test untrimmed titles of wikibase items. + + Spaces in the title should not cause an error. + """ wikidata = self.get_repo() - # spaces in the title should not cause an error item = pywikibot.ItemPage(wikidata, ' Q60 ') self.assertEqual(item._link._title, 'Q60') self.assertEqual(item.title(), 'Q60') item.get()
def test_item_missing(self): + """Test missing item.""" wikidata = self.get_repo() # this item has never existed item = pywikibot.ItemPage(wikidata, 'Q7') @@ -413,6 +442,7 @@ self.assertEqual(item.title(), 'Q7')
def test_item_never_existed(self): + """Test non-existent item.""" wikidata = self.get_repo() # this item has not been created item = pywikibot.ItemPage(wikidata, 'Q9999999999999999999') @@ -421,6 +451,7 @@ self.assertRaises(pywikibot.NoPage, item.get)
def test_fromPage_noprops(self): + """Test item from page without properties.""" page = self.nyc item = pywikibot.ItemPage.fromPage(page) self.assertEqual(item._link._title, '-1') @@ -435,6 +466,7 @@ self.assertTrue(item.exists())
def test_fromPage_noprops_with_section(self): + """Test item from page with section.""" page = pywikibot.Page(self.nyc.site, self.nyc.title() + '#foo') item = pywikibot.ItemPage.fromPage(page) self.assertEqual(item._link._title, '-1') @@ -449,6 +481,7 @@ self.assertTrue(item.exists())
def test_fromPage_props(self): + """Test item from page with properties.""" page = self.nyc # fetch page properties page.properties() @@ -467,6 +500,7 @@ self.assertTrue(item.exists())
def test_fromPage_lazy(self): + """Test item from page with lazy_load.""" page = pywikibot.Page(pywikibot.page.Link("New York City", self.site)) item = pywikibot.ItemPage.fromPage(page, lazy_load=True) self.assertEqual(item._defined_by(), @@ -483,6 +517,7 @@ self.assertTrue(item.exists())
def test_fromPage_invalid_title(self): + """Test item from page with invalid title.""" page = pywikibot.Page(pywikibot.page.Link("[]", self.site)) self.assertRaises(pywikibot.InvalidTitle, pywikibot.ItemPage.fromPage, page)
@@ -539,17 +574,29 @@ pywikibot.ItemPage.fromPage, page)
def test_fromPage_redirect(self): - # this is a redirect, and should not have a wikidata item + """ + Test item from redirect page. + + A redirect should not have a wikidata item. + """ link = pywikibot.page.Link("Main page", self.site) self._test_fromPage_noitem(link)
def test_fromPage_missing(self): - # this is a deleted page, and should not have a wikidata item + """ + Test item from deleted page. + + A deleted page should not have a wikidata item. + """ link = pywikibot.page.Link("Test page", self.site) self._test_fromPage_noitem(link)
def test_fromPage_noitem(self): - # this is a new page, and should not have a wikidata item yet + """ + Test item from new page. + + A newly created page should not have a wikidata item yet. + """ page = _get_test_unconnected_page(self.site) link = page._link self._test_fromPage_noitem(link) @@ -582,12 +629,14 @@ """Test redirect and non-redirect items."""
def test_normal_item(self): + """Test normal item.""" wikidata = self.get_repo() item = pywikibot.ItemPage(wikidata, 'Q1') self.assertFalse(item.isRedirectPage()) self.assertRaises(pywikibot.IsNotRedirectPage, item.getRedirectTarget)
def test_redirect_item(self): + """Test redirect item.""" wikidata = self.get_repo() item = pywikibot.ItemPage(wikidata, 'Q10008448') item.get(get_redirect=True) @@ -619,6 +668,7 @@ self.assertEqual(claim.getType(), 'globecoordinate')
def test_get(self): + """Test PropertyPage.get() method.""" wikidata = self.get_repo() property_page = pywikibot.PropertyPage(wikidata, 'P625') property_page.get() @@ -703,6 +753,7 @@ """Test behavior of ItemPage methods inherited from BasePage."""
def setUp(self): + """Setup tests.""" self._page = ItemPage(self.get_repo(), 'Q60') super(TestItemBasePageMethods, self).setUp()
@@ -717,6 +768,7 @@ """Test behavior of Page methods for wikibase item."""
def setUp(self): + """Setup tests.""" self._page = pywikibot.Page(self.site, 'Q60') super(TestPageMethodsWithItemTitle, self).setUp()
@@ -781,6 +833,7 @@ }
def setUp(self): + """Setup tests.""" super(TestLinks, self).setUp() self.wdp = pywikibot.ItemPage(self.get_repo(), 'Q60') self.wdp.id = 'Q60' @@ -789,10 +842,12 @@ self.wdp.get()
def test_iterlinks_page_object(self): + """Test iterlinks for page objects.""" page = [pg for pg in self.wdp.iterlinks() if pg.site.code == 'af'][0] self.assertEqual(page, pywikibot.Page(self.get_site('afwiki'), u'New York Stad'))
def test_iterlinks_filtering(self): + """Test iterlinks for a given family.""" wikilinks = list(self.wdp.iterlinks('wikipedia')) wvlinks = list(self.wdp.iterlinks('wikivoyage'))
@@ -814,17 +869,20 @@ dry = True
def setUp(self): + """Setup tests.""" super(TestWriteNormalizeLang, self).setUp() self.site = self.get_site() self.lang_out = {'en': 'foo'}
def test_normalize_lang(self): + """Test _normalizeLanguages() method.""" lang_in = {self.site: 'foo'}
response = WikibasePage._normalizeLanguages(lang_in) self.assertEqual(response, self.lang_out)
def test_normalized_lang(self): + """Test _normalizeData() method.""" response = WikibasePage._normalizeData( copy.deepcopy(self.lang_out)) self.assertEqual(response, self.lang_out) @@ -841,6 +899,7 @@ net = False
def setUp(self): + """Setup tests.""" super(TestWriteNormalizeData, self).setUp() self.data_out = { 'aliases': {'en': [{'language': 'en', 'value': 'Bah'}]}, @@ -848,6 +907,7 @@ }
def test_normalize_data(self): + """Test _normalizeData() method.""" data_in = { 'aliases': {'en': ['Bah']}, 'labels': {'en': 'Foo'}, @@ -857,6 +917,7 @@ self.assertEqual(response, self.data_out)
def test_normalized_data(self): + """Test _normalizeData() method for normalized data.""" response = WikibasePage._normalizeData( copy.deepcopy(self.data_out)) self.assertEqual(response, self.data_out) @@ -883,8 +944,12 @@ """Test cases to test namespaces of Wikibase entities."""
def test_empty_wikibase_page(self): - # As a base class it should be able to instantiate - # it with minimal arguments + """ + Test empty wikibase page. + + As a base class it should be able to instantiate + it with minimal arguments. + """ wikidata = self.get_repo() page = pywikibot.page.WikibasePage(wikidata) self.assertRaises(AttributeError, page.namespace) @@ -993,6 +1058,7 @@
@classmethod def setUpClass(cls): + """Setup test class.""" super(TestAlternateNamespaces, cls).setUpClass()
cls.get_repo()._namespaces = NamespacesDict({ @@ -1007,6 +1073,7 @@ })
def test_alternate_item_namespace(self): + """Test alternate item namespace.""" item = pywikibot.ItemPage(self.repo, 'Q60') self.assertEqual(item.namespace(), 90) self.assertEqual(item.id, 'Q60') @@ -1020,6 +1087,7 @@ self.assertEqual(item._defined_by(), {'ids': 'Q60'})
def test_alternate_property_namespace(self): + """Test alternate property namespace.""" prop = pywikibot.PropertyPage(self.repo, 'P21') self.assertEqual(prop.namespace(), 92) self.assertEqual(prop.id, 'P21') @@ -1099,6 +1167,7 @@ dry = True
def setUp(self): + """Setup test.""" super(TestJSON, self).setUp() wikidata = self.get_repo() self.wdp = pywikibot.ItemPage(wikidata, 'Q60') @@ -1111,12 +1180,14 @@ del self.wdp._content['lastrevid']
def test_itempage_json(self): + """Test itempage json.""" old = json.dumps(self.wdp._content, indent=2, sort_keys=True) new = json.dumps(self.wdp.toJSON(), indent=2, sort_keys=True)
self.assertEqual(old, new)
def test_json_diff(self): + """Test json diff.""" del self.wdp.labels['en'] del self.wdp.claims['P213'] expected = { diff --git a/tox.ini b/tox.ini index 0708150..5c47069 100644 --- a/tox.ini +++ b/tox.ini @@ -117,22 +117,20 @@ require-code = true putty-ignore = pwb.py,scripts/interwiki.py,scripts/flickrripper.py,pywikibot/site.py, /# noqa: E731/ : +E731 - generate_family_file.py : +D102,T001 + generate_family_file.py : +T001 pwb.py : +T001 - pywikibot/cosmetic_changes.py : +D102, D103 pywikibot/date.py,pywikibot/family.py,pywikibot/fixes.py,pywikibot/textlib.py,pywikibot/userinterfaces/terminal_interface_unix.py,pywikibot/userinterfaces/terminal_interface_win32.py,pywikibot/families/wikipedia_family.py : +E241 tests/page_tests.py : +E241 scripts/,/pagegenerators.parameterHelp/ : +E241 scripts/blockreview.py,scripts/imagetransfer.py,scripts/interwiki.py,scripts/maintenance/wikimedia_sites.py : +E241 - tests/deprecation_tests.py,tests/pagegenerators_tests.py,tests/site_tests.py,tests/textlib_tests.py,tests/wikibase_tests.py : +D102 tests/ui_tests.py : +D102, D103 tests/__init__.py,tests/aspects.py,tests/script_tests.py,tests/site_detect_tests.py : +T001 tests/pwb/ : +T001 scripts/casechecker.py,scripts/imagetransfer.py : +T001 scripts/maintenance/make_i18n_dict.py : +T001 - scripts/archivebot.py,scripts/casechecker.py,scripts/commons_link.py,scripts/cfd.py,scripts/imagecopy.py,scripts/imagecopy_self.py,scripts/interwiki.py,scripts/replicate_wiki.py,scripts/solve_disambiguation.py,scripts/maintenance/compat2core.py,scripts/archive/ : +D102, D103 - scripts/checkimages.py,scripts/freebasemappingupload.py,scripts/imagetransfer.py,scripts/lonelypages.py,scripts/movepages.py,scripts/nowcommons.py,scripts/redirect.py,scripts/isbn.py,scripts/reflinks.py,scripts/script_wui.py,scripts/revertbot.py,scripts/weblinkchecker.py,scripts/welcome.py : +D102 - 
scripts/catall.py,scripts/imageharvest.py,scripts/makecat.py,scripts/maintenance/cache.py,scripts/maintenance/wikimedia_sites.py : +D103 + scripts/archive/ : +D102, D103 + scripts/script_wui.py : +D102 + scripts/makecat.py : +D103 scripts/interwiki.py,/""/ : +P102 pywikibot/__init__.py,/link_regex/ : +P103 tests/textlib_tests.py,/self.assert.*{{/ : +P103
pywikibot-commits@lists.wikimedia.org