jenkins-bot has submitted this change and it was merged.
Change subject: rename ImagePage to FilePage
......................................................................
rename ImagePage to FilePage
for consistency with the MediaWiki canonical namespace name; also, ImagePage did not actually expose any methods peculiar to images
- replaced all occurrences
- added a notice to README-conversion.txt
- updated compat2core.py to make replacements easier

Change-Id: I289da42fa4b5f78e4837c8e4c1a3a72e9d322990
---
M README-conversion.txt
M pywikibot/__init__.py
M pywikibot/data/api.py
M pywikibot/page.py
M pywikibot/pagegenerators.py
M pywikibot/site.py
M scripts/checkimages.py
M scripts/harvest_template.py
M scripts/illustrate_wikidata.py
M scripts/image.py
M scripts/imagerecat.py
M scripts/imagetransfer.py
M scripts/maintenance/compat2core.py
M scripts/nowcommons.py
M scripts/upload.py
M tests/page_tests.py
M tests/site_tests.py
17 files changed, 119 insertions(+), 115 deletions(-)

Approvals:
  John Vandenberg: Looks good to me, approved
  jenkins-bot: Verified
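
From a calling script's point of view, the rename plays out roughly as in the
minimal sketch below (the site and the title "File:Example.jpg" are placeholder
values for illustration, not taken from this change):

    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')

    # New spelling introduced by this change:
    page = pywikibot.FilePage(site, 'File:Example.jpg')

    # The old name is kept as an alias (ImagePage = FilePage in page.py),
    # so existing scripts keep working while they are migrated:
    assert pywikibot.ImagePage is pywikibot.FilePage

    # Per README-conversion.txt, prefer the SHA1 accessor; getFileMd5Sum()
    # is deprecated:
    print(page.getFileSHA1Sum())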
diff --git a/README-conversion.txt b/README-conversion.txt index b344c33..b137a28 100644 --- a/README-conversion.txt +++ b/README-conversion.txt @@ -73,7 +73,7 @@ this below) handles link parsing and interpretation that doesn't require access to the wiki server.
-A third syntax allows easy conversion from a Page object to an ImagePage or +A third syntax allows easy conversion from a Page object to a FilePage or Category, or vice versa: e.g., Category(pageobj) converts a Page to a Category, as long as the page is in the category namespace.
@@ -99,9 +99,10 @@
- getVersionHistory(): Returns a pywikibot.Timestamp object instead of a MediaWiki one
-=== ImagePage objects ===
+=== FilePage objects ===

-For ImagePage objects, the getFileMd5Sum() method is deprecated; it is
+The old ImagePage class has been renamed into FilePage.
+For FilePage objects, the getFileMd5Sum() method is deprecated; it is
 recommended to replace it with getFileSHA1Sum(), because MediaWiki now
 stores the SHA1 hash of images.
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py index e29691f..b3ff554 100644 --- a/pywikibot/__init__.py +++ b/pywikibot/__init__.py @@ -55,7 +55,7 @@
__all__ = ( 'config', 'ui', 'UnicodeMixin', 'translate', - 'Page', 'ImagePage', 'Category', 'Link', 'User', + 'Page', 'FilePage', 'ImagePage', 'Category', 'Link', 'User', 'ItemPage', 'PropertyPage', 'Claim', 'TimeStripper', 'html2unicode', 'url2unicode', 'unicode2html', 'stdout', 'output', 'warning', 'error', 'critical', 'debug', 'exception', @@ -533,7 +533,7 @@ getSite = Site # alias for backwards-compability
-from .page import Page, ImagePage, Category, Link, User, ItemPage, PropertyPage, Claim
+from .page import Page, FilePage, ImagePage, Category, Link, User, ItemPage, PropertyPage, Claim
 from .page import html2unicode, url2unicode, unicode2html
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py index 9ff7ba7..b02c263 100644 --- a/pywikibot/data/api.py +++ b/pywikibot/data/api.py @@ -927,14 +927,14 @@
class ImagePageGenerator(PageGenerator):
- """Like PageGenerator, but yields ImagePage objects instead of Pages.""" + """Like PageGenerator, but yields FilePage objects instead of Pages."""
def result(self, pagedata): p = PageGenerator.result(self, pagedata) - image = pywikibot.ImagePage(p) + filepage = pywikibot.FilePage(p) if 'imageinfo' in pagedata: - image._imageinfo = pagedata['imageinfo'][0] - return image + filepage._imageinfo = pagedata['imageinfo'][0] + return filepage
class PropertyGenerator(QueryGenerator): diff --git a/pywikibot/page.py b/pywikibot/page.py index 7108a2b..fb5bffe 100644 --- a/pywikibot/page.py +++ b/pywikibot/page.py @@ -71,7 +71,7 @@
- If the first argument is a Page, create a copy of that object. This can be used to convert an existing Page into a subclass - object, such as Category or ImagePage. (If the title is also + object, such as Category or FilePage. (If the title is also given as the second argument, creates a copy with that title; this is used when pages are moved.) - If the first argument is a Site, create a Page on that Site @@ -1230,13 +1230,13 @@ @deprecate_arg("followRedirects", None) @deprecate_arg("loose", None) def imagelinks(self, step=None, total=None, content=False): - """Iterate ImagePage objects for images displayed on this Page. + """Iterate FilePage objects for images displayed on this Page.
@param step: limit each API call to this number of pages @param total: iterate no more than this number of pages in total @param content: if True, retrieve the content of the current version of each image description page (default False) - @return: a generator that yields ImagePage objects. + @return: a generator that yields FilePage objects.
""" return self.site.pageimages(self, step=step, total=total, @@ -1805,23 +1805,11 @@ pywikibot.warning(u"Page.replaceImage() is no longer supported.")
-class ImagePage(Page):
+class FilePage(Page):

-    """A subclass of Page representing an image descriptor wiki page.
+    """A subclass of Page representing a file description page.

-    Supports the same interface as Page, with the following added methods:
-
-    getImagePageHtml          : Download image page and return raw HTML text.
-    fileURL                   : Return the URL for the image described on this
-                                page.
-    fileIsShared              : Return True if image stored on a shared
-                                repository like Wikimedia Commons or Wikitravel.
-    getFileMd5Sum             : Return image file's MD5 checksum.
-    getFileVersionHistory     : Return the image file's version history.
-    getFileVersionHistoryTable: Return the version history in the form of a
-                                wiki table.
-    usingPages                : Iterate Pages on which the image is displayed.
-
+    Supports the same interface as Page, with some added methods.
     """
@deprecate_arg("insite", None) @@ -1829,14 +1817,14 @@ """Constructor.""" Page.__init__(self, source, title, 6) if self.namespace() != 6: - raise ValueError(u"'%s' is not in the image namespace!" % title) + raise ValueError(u"'%s' is not in the file namespace!" % title)
def getImagePageHtml(self): """ - Download the image page, and return the HTML, as a unicode string. + Download the file page, and return the HTML, as a unicode string.
Caches the HTML code, so that if you run this method twice on the - same ImagePage object, the page will only be downloaded once. + same FilePage object, the page will only be downloaded once. """ if not hasattr(self, '_imagePageHtml'): from pywikibot.comms import http @@ -1846,7 +1834,7 @@ return self._imagePageHtml
def fileUrl(self): - """Return the URL for the image described on this page.""" + """Return the URL for the file described on this page.""" # TODO add scaling option? if not hasattr(self, '_imageinfo'): self._imageinfo = self.site.loadimageinfo(self) @@ -1861,7 +1849,7 @@ return self.fileIsShared()
def fileIsShared(self): - """Check if the image is stored on any known shared repository. + """Check if the file is stored on any known shared repository.
@return: bool """ @@ -1878,7 +1866,7 @@ return self.fileUrl().startswith( 'https://upload.wikimedia.org/wikipedia/commons/')
- @deprecated("ImagePage.getFileSHA1Sum()") + @deprecated("FilePage.getFileSHA1Sum()") def getFileMd5Sum(self): """Return image file's MD5 checksum.""" # FIXME: MD5 might be performed on incomplete file due to server disconnection @@ -1892,13 +1880,13 @@ return md5Checksum
def getFileSHA1Sum(self): - """Return image file's SHA1 checksum.""" + """Return the file's SHA1 checksum.""" if not hasattr(self, '_imageinfo'): self._imageinfo = self.site.loadimageinfo(self) return self._imageinfo['sha1']
def getFileVersionHistory(self): - """Return the image file's version history. + """Return the file's version history.
@return: An iterator yielding tuples containing (timestamp, username, resolution, filesize, comment). @@ -1921,7 +1909,7 @@ u'\n|----\n'.join(lines) + '\n|}'
     def usingPages(self, step=None, total=None, content=False):
-        """Yield Pages on which the image is displayed.
+        """Yield Pages on which the file is displayed.

         @param step: limit each API call to this number of pages
         @param total: iterate no more than this number of pages in total
@@ -1931,6 +1919,9 @@
         """
         return self.site.imageusage(
             self, step=step, total=total, content=content)
+
+
+ImagePage = FilePage
class Category(Page): @@ -2606,7 +2597,7 @@ raise StopIteration for item in self.site.logevents( logtype='upload', user=self.username, total=total): - yield (ImagePage(self.site, item.title().title()), + yield (FilePage(self.site, item.title().title()), unicode(item.timestamp()), item.comment(), item.pageid() > 0 @@ -3085,7 +3076,7 @@
types = {'wikibase-item': ItemPage, 'string': basestring, - 'commonsMedia': ImagePage, + 'commonsMedia': FilePage, 'globe-coordinate': pywikibot.Coordinate, 'url': basestring, 'time': pywikibot.WbTime, @@ -3254,7 +3245,7 @@ if claim.type == 'wikibase-item': claim.target = ItemPage(site, 'Q' + str(value['numeric-id'])) elif claim.type == 'commonsMedia': - claim.target = ImagePage(site.image_repository(), value) + claim.target = FilePage(site.image_repository(), value) elif claim.type == 'globe-coordinate': claim.target = pywikibot.Coordinate.fromWikibase(value, site) elif claim.type == 'time': diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py index 0220107..3a8ed2c 100644 --- a/pywikibot/pagegenerators.py +++ b/pywikibot/pagegenerators.py @@ -311,12 +311,12 @@ 'pywikibot-enter-file-links-processing') if fileLinksPageTitle.startswith(self.site.namespace(6) + ":"): - fileLinksPage = pywikibot.ImagePage(self.site, - fileLinksPageTitle) + fileLinksPage = pywikibot.FilePage(self.site, + fileLinksPageTitle) else: - fileLinksPage = pywikibot.ImagePage(self.site, - 'Image:' + - fileLinksPageTitle) + fileLinksPage = pywikibot.FilePage(self.site, + 'Image:' + + fileLinksPageTitle) gen = FileLinksGenerator(fileLinksPage) elif arg.startswith('-unusedfiles'): if len(arg) == 12: @@ -663,8 +663,8 @@ yield pywikibot.Page(pywikibot.Link(item["title"], site))
-def FileLinksGenerator(referredImagePage, step=None, total=None, content=False):
-    return referredImagePage.usingPages(step=step, total=total, content=content)
+def FileLinksGenerator(referredFilePage, step=None, total=None, content=False):
+    return referredFilePage.usingPages(step=step, total=total, content=content)
def ImagesPageGenerator(pageWithImages, step=None, total=None, content=False): @@ -956,14 +956,17 @@ yield pywikibot.Category(page)
-def ImageGenerator(generator):
+def FileGenerator(generator):
     """
-    Wraps around another generator. Yields the same pages, but as ImagePage
+    Wraps around another generator. Yields the same pages, but as FilePage
    objects instead of Page objects. Makes sense only if it is ascertained
    that only images are being retrieved.
    """
    for page in generator:
-        yield pywikibot.ImagePage(page)
+        yield pywikibot.FilePage(page)
+
+
+ImageGenerator = FileGenerator
def PageWithTalkPageGenerator(generator): @@ -1064,7 +1067,7 @@ if site is None: site = pywikibot.Site() for page in site.unusedfiles(total=total): - yield pywikibot.ImagePage(page.site, page.title()) + yield pywikibot.FilePage(page.site, page.title())
@deprecate_arg("number", "total") diff --git a/pywikibot/site.py b/pywikibot/site.py index 30b07c9..b319a99 100644 --- a/pywikibot/site.py +++ b/pywikibot/site.py @@ -2743,7 +2743,7 @@ total=None, content=False): """Iterate all images, ordered by image title.
- Yields ImagePages, but these pages need not exist on the wiki. + Yields FilePages, but these pages need not exist on the wiki.
@param start: start at this title (name need not exist) @param prefix: only iterate titles starting with this substring @@ -2835,11 +2835,11 @@
def imageusage(self, image, namespaces=None, filterredir=None, step=None, total=None, content=False): - """Iterate Pages that contain links to the given ImagePage. + """Iterate Pages that contain links to the given FilePage.
- @param image: the image to search for (ImagePage need not exist on + @param image: the image to search for (FilePage need not exist on the wiki) - @type image: ImagePage + @type image: FilePage @param filterredir: if True, only yield redirects; if False (and not None), only yield non-redirects (default: yield both) @param content: if True, load the current content of each iterated page @@ -3812,22 +3812,23 @@ def getImagesFromAnHash(self, hash_found=None): return self.getFilesFromAnHash(hash_found)
-    def upload(self, imagepage, source_filename=None, source_url=None,
+    @deprecate_arg('imagepage', 'filepage')
+    def upload(self, filepage, source_filename=None, source_url=None,
                comment=None, text=None, watch=False, ignore_warnings=False):
         """Upload a file to the wiki.
Either source_filename or source_url, but not both, must be provided.
- @param imagepage: an ImagePage object from which the wiki-name of the + @param filepage: a FilePage object from which the wiki-name of the file will be obtained. @param source_filename: path to the file to be uploaded @param source_url: URL of the file to be uploaded @param comment: Edit summary; if this is not provided, then - imagepage.text will be used. An empty summary is not permitted. + filepage.text will be used. An empty summary is not permitted. This may also serve as the initial page text (see below). @param text: Initial page text; if this is not set, then - imagepage.text will be used, or comment. - @param watch: If true, add imagepage to the bot user's watchlist + filepage.text will be used, or comment. + @param watch: If true, add filepage to the bot user's watchlist @param ignore_warnings: if true, ignore API warnings and force upload (for example, to overwrite an existing file); default False
@@ -3855,15 +3856,15 @@ raise ValueError("APISite.upload: must provide either " "source_filename or source_url, not both.") if comment is None: - comment = imagepage.text + comment = filepage.text if not comment: raise ValueError("APISite.upload: cannot upload file without " "a summary/description.") if text is None: - text = imagepage.text + text = filepage.text if not text: text = comment - token = self.token(imagepage, "edit") + token = self.token(filepage, "edit") if source_filename: # upload local file # make sure file actually exists @@ -3873,7 +3874,7 @@ # TODO: if file size exceeds some threshold (to be determined), # upload by chunks (--> os.path.getsize(source_filename)) req = api.Request(site=self, action="upload", token=token, - filename=imagepage.title(withNamespace=False), + filename=filepage.title(withNamespace=False), file=source_filename, comment=comment, text=text, mime=True) else: @@ -3883,7 +3884,7 @@ "User '%s' is not authorized to upload by URL on site %s." % (self.user(), self)) req = api.Request(site=self, action="upload", token=token, - filename=imagepage.title(withNamespace=False), + filename=filepage.title(withNamespace=False), url=source_url, comment=comment, text=text) if watch: req["watch"] = "" @@ -3905,7 +3906,7 @@ pywikibot.output(u"Upload: unrecognized response: %s" % result) if result["result"] == "Success": pywikibot.output(u"Upload successful.") - imagepage._imageinfo = result["imageinfo"] + filepage._imageinfo = result["imageinfo"] return
@deprecate_arg("number", "step") @@ -3952,13 +3953,11 @@ yield (newpage, pageitem['timestamp'], pageitem['newlen'], u'', pageitem['user'], pageitem['comment'])
- @deprecate_arg("number", None) - @deprecate_arg("repeat", None) - def newimages(self, user=None, start=None, end=None, reverse=False, - step=None, total=None): - """Yield information about newly uploaded images. + def newfiles(self, user=None, start=None, end=None, reverse=False, + step=None, total=None): + """Yield information about newly uploaded files.
- Yields a tuple of ImagePage, Timestamp, user(unicode), comment(unicode). + Yields a tuple of FilePage, Timestamp, user(unicode), comment(unicode).
N.B. the API does not provide direct access to Special:Newimages, so this is derived from the "upload" log events instead. @@ -3969,11 +3968,17 @@ start=start, end=end, reverse=reverse, step=step, total=total): # event.title() actually returns a Page - image = pywikibot.ImagePage(event.title()) + filepage = pywikibot.FilePage(event.title()) date = event.timestamp() user = event.user() comment = event.comment() or u'' - yield (image, date, user, comment) + yield (filepage, date, user, comment) + + @deprecated("Site().newfiles()") + @deprecate_arg("number", None) + @deprecate_arg("repeat", None) + def newimages(self, *args, **kwargs): + return self.newfiles(*args, **kwargs)
@deprecate_arg("number", None) @deprecate_arg("repeat", None) @@ -4069,7 +4074,7 @@ @deprecate_arg("repeat", None) def uncategorizedimages(self, number=None, repeat=True, step=None, total=None): - """Yield ImagePages from Special:Uncategorizedimages.""" + """Yield FilePages from Special:Uncategorizedimages.""" uigen = self._generator(api.ImagePageGenerator, type_arg="querypage", gqppage="Uncategorizedimages", @@ -4111,18 +4116,19 @@ step=step, total=total) return ucgen
- @deprecate_arg("number", None) - @deprecate_arg("repeat", None) def unusedfiles(self, step=None, total=None): - """Yield ImagePage objects from Special:Unusedimages.""" + """Yield FilePage objects from Special:Unusedimages.""" uigen = self._generator(api.ImagePageGenerator, type_arg="querypage", gqppage="Unusedimages", step=step, total=total) return uigen
-    # synonym
-    unusedimages = unusedfiles
+    @deprecated("Site().unusedfiles()")
+    @deprecate_arg("number", None)
+    @deprecate_arg("repeat", None)
+    def unusedimages(self, *args, **kwargs):
+        return self.unusedfiles(*args, **kwargs)
@deprecate_arg("number", None) @deprecate_arg("repeat", None) diff --git a/scripts/checkimages.py b/scripts/checkimages.py index 75bff69..c1ba117 100644 --- a/scripts/checkimages.py +++ b/scripts/checkimages.py @@ -637,7 +637,7 @@
""" self.imageName = imageName - self.image = pywikibot.ImagePage(self.site, self.imageName) + self.image = pywikibot.FilePage(self.site, self.imageName) self.timestamp = None self.uploader = None
@@ -705,7 +705,7 @@
""" # Get the image's description - reportPageObject = pywikibot.ImagePage(self.site, self.image_to_report) + reportPageObject = pywikibot.FilePage(self.site, self.image_to_report)
try: reportPageText = reportPageObject.get() @@ -847,7 +847,7 @@ text) if results: for result in results: - wikiPage = pywikibot.ImagePage(self.site, result) + wikiPage = pywikibot.FilePage(self.site, result) yield wikiPage else: pywikibot.output(link) @@ -863,7 +863,7 @@ regex = re.compile(r'%s' % regexp, re.UNICODE | re.DOTALL) results = regex.findall(textrun) for image in results: - yield pywikibot.ImagePage(self.site, image) + yield pywikibot.FilePage(self.site, image)
def loadHiddenTemplates(self): """ Function to load the white templates """ @@ -891,7 +891,7 @@ max_usage = 0 for element in listGiven: imageName = element[1] - imagePage = pywikibot.ImagePage(self.site, imageName) + imagePage = pywikibot.FilePage(self.site, imageName) imageUsage = [page for page in imagePage.usingPages()] if len(imageUsage) > 0 and len(imageUsage) > max_usage: max_usage = len(imageUsage) @@ -1009,7 +1009,7 @@ duplicates_comment_image) duplicateRegex = r'[[:File:%s]] has the following duplicates' \ % re.escape(self.convert_to_url(self.imageName)) - imagePage = pywikibot.ImagePage(self.site, self.imageName) + imagePage = pywikibot.FilePage(self.site, self.imageName) hash_found = imagePage.getHash() duplicates = self.site.getFilesFromAnHash(hash_found)
@@ -1029,7 +1029,7 @@ time_list = []
for duplicate in duplicates: - DupePage = pywikibot.ImagePage(self.site, duplicate) + DupePage = pywikibot.FilePage(self.site, duplicate)
if DupePage.title(asUrl=True) != self.image.title(asUrl=True) or \ self.timestamp is None: @@ -1040,16 +1040,16 @@ time_list.append(data_seconds) older_image = self.returnOlderTime(time_image_list, time_list) # And if the images are more than two? - Page_oder_image = pywikibot.ImagePage(self.site, older_image) + Page_oder_image = pywikibot.FilePage(self.site, older_image) string = '' images_to_tag_list = []
for duplicate in duplicates: - if pywikibot.ImagePage(self.site, duplicate) \ - == pywikibot.ImagePage(self.site, older_image): + if pywikibot.FilePage(self.site, duplicate) \ + == pywikibot.FilePage(self.site, older_image): # the older image, not report also this as duplicate continue - DupePage = pywikibot.ImagePage(self.site, duplicate) + DupePage = pywikibot.FilePage(self.site, duplicate) try: DupPageText = DupePage.get() older_page_text = Page_oder_image.get() diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py index ac9bc01..1f09720 100755 --- a/scripts/harvest_template.py +++ b/scripts/harvest_template.py @@ -172,9 +172,9 @@ elif claim.type == 'commonsMedia': commonssite = pywikibot.Site("commons", "commons") imagelink = pywikibot.Link(value, source=commonssite, defaultNamespace=6) - image = pywikibot.ImagePage(imagelink) + image = pywikibot.FilePage(imagelink) if image.isRedirectPage(): - image = pywikibot.ImagePage(image.getRedirectTarget()) + image = pywikibot.FilePage(image.getRedirectTarget()) if not image.exists(): pywikibot.output('[[%s]] doesn't exist so I can't link to it' % (image.title(),)) continue diff --git a/scripts/illustrate_wikidata.py b/scripts/illustrate_wikidata.py index 01504de..cf6cdaf 100644 --- a/scripts/illustrate_wikidata.py +++ b/scripts/illustrate_wikidata.py @@ -67,9 +67,9 @@ newclaim = pywikibot.Claim(self.repo, self.wdproperty) commonssite = pywikibot.Site("commons", "commons") imagelink = pywikibot.Link(imagename, source=commonssite, defaultNamespace=6) - image = pywikibot.ImagePage(imagelink) + image = pywikibot.FilePage(imagelink) if image.isRedirectPage(): - image = pywikibot.ImagePage(image.getRedirectTarget()) + image = pywikibot.FilePage(image.getRedirectTarget()) if not image.exists(): pywikibot.output('[[%s]] doesn't exist so I can't link to it' % (image.title(),)) continue diff --git a/scripts/image.py b/scripts/image.py index 3189d71..9d2ffc8 100644 --- a/scripts/image.py +++ b/scripts/image.py @@ -191,7 +191,7 @@
if old_image: site = pywikibot.Site() - old_imagepage = pywikibot.ImagePage(site, old_image) + old_imagepage = pywikibot.FilePage(site, old_image) gen = pagegenerators.FileLinksGenerator(old_imagepage) preloadingGen = pagegenerators.PreloadingGenerator(gen) bot = ImageRobot(preloadingGen, old_image, new_image, **options) diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py index 2956bc1..88c2318 100644 --- a/scripts/imagerecat.py +++ b/scripts/imagerecat.py @@ -76,7 +76,7 @@ for page in generator: if page.exists() and (page.namespace() == 6) and \ (not page.isRedirectPage()): - imagepage = pywikibot.ImagePage(page.site, page.title()) + imagepage = pywikibot.FilePage(page.site, page.title()) pywikibot.output(u'Working on ' + imagepage.title())
if onlyUncat and not(u'Uncategorized' in imagepage.templates()): diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py index aff1a39..2706554 100644 --- a/scripts/imagetransfer.py +++ b/scripts/imagetransfer.py @@ -265,7 +265,7 @@ for linkedPage in page.interwiki(): imagelist.append(linkedPage.imagelinks(followRedirects=True)) elif page.isImage(): - imagePage = pywikibot.ImagePage(page.site, page.title()) + imagePage = pywikibot.FilePage(page.site, page.title()) imagelist = [imagePage] else: imagePage = (page.imagelinks(followRedirects=True)).result( diff --git a/scripts/maintenance/compat2core.py b/scripts/maintenance/compat2core.py index cad299f..bc74cf3 100644 --- a/scripts/maintenance/compat2core.py +++ b/scripts/maintenance/compat2core.py @@ -4,9 +4,9 @@ This is a helper script to convert compat 1.0 scripts to the new core 2.0 framework.
-NOTE: Please be aware that this script is not be able to convert your codes +NOTE: Please be aware that this script is not able to convert your codes completely. It may support you with some automatic replacements and it gives -some warnings and hints for converting. Please refer the converting guide +some warnings and hints for converting. Please refer to the converting guide README-conversion.txt in the core framework folder and check your codes finally.
The scripts asks for the .py file and converts it to @@ -36,7 +36,7 @@ import codecs import pywikibot
-# be carefull with replacement order! +# be careful with replacement order! replacements = ( # doc strings ('#\r?\n__version__', @@ -63,6 +63,8 @@ ('catlib.change_category\s*((\s*)(?P<article>.+?),\s*(?P<oldcat>.+?),', r'\g<article>.change_category(\1\g<oldcat>,'), ('userlib.User\s*(\s*', 'pywikibot.User('), + # change ImagePage to FilePage + ('pywikibot.ImagePage\s*(\s*', 'pywikibot.FilePage('), # deprecated title methods ('.urlname\s*(\s*)', '.title(asUrl=True)'), ('.urlname\s*(\s*(?:withNamespace\s*=\s*)?(True|False)+\s*)', @@ -72,9 +74,10 @@ ('.aslink\s*(\s*)', '.title(asLink=True)'), # other deprecated methods ('(?<!site).encoding\s*(\s*)', '.site.encoding()'), + ('.newimages\s*(', '.newfiles('), # new core methods ('.get\s*(\s*get_redirect\s*=\s*True\s*)', '.text'), - # stopme() is doen by the framework itself + # stopme() is done by the framework itself ('(\s*)try:\s*\r?\n\s+main()\s*\r?\n\s*finally:\s*\r?\n\s+pywikibot.stopme()', r'\1main()'), ) @@ -95,7 +98,7 @@ 'User.contributions() returns a pywikibot.Timestamp object instead of a\n' 'MediaWiki one'), ('.getFileMd5Sum(', - 'ImagePage.getFileMd5Sum() is deprecated should be replaced by ' + 'FilePage.getFileMd5Sum() is deprecated should be replaced by ' 'getFileSHA1Sum()'), (' wikipedia.', '"wikipedia" library has been changed to "pywikibot".'), diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py index 19de9ac..14c3f06 100644 --- a/scripts/nowcommons.py +++ b/scripts/nowcommons.py @@ -320,7 +320,7 @@ pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title()) try: - localImagePage = pywikibot.ImagePage(self.site, page.title()) + localImagePage = pywikibot.FilePage(self.site, page.title()) if localImagePage.fileIsShared(): pywikibot.output(u'File is already on Commons.') continue @@ -333,7 +333,7 @@ if not filenameOnCommons and not self.getOption('use_hash'): pywikibot.output(u'NowCommons template not found.') continue - commonsImagePage = pywikibot.ImagePage(commons, 'Image:%s' + commonsImagePage = pywikibot.FilePage(commons, 'Image:%s' % filenameOnCommons) if localImagePage.title(withNamespace=False) == \ commonsImagePage.title(withNamespace=False) and self.getOption('use_hash'): @@ -362,7 +362,7 @@ oImageRobot.run() # If the image is used with the urlname the # previous function won't work - if len(list(pywikibot.ImagePage(self.site, + if len(list(pywikibot.FilePage(self.site, page.title()).usingPages())) > 0 and \ self.getOption('replaceloose'): oImageRobot = image.ImageRobot( @@ -376,7 +376,7 @@ self.getOption('replaceloose')) oImageRobot.run() # refresh because we want the updated list - usingPages = len(list(pywikibot.ImagePage( + usingPages = len(list(pywikibot.FilePage( self.site, page.title()).usingPages())) if usingPages > 0 and self.getOption('use_hash'): # just an enter diff --git a/scripts/upload.py b/scripts/upload.py index 8a694ae..5c39065 100755 --- a/scripts/upload.py +++ b/scripts/upload.py @@ -198,7 +198,7 @@ filename = self.process_filename()
site = self.targetSite - imagepage = pywikibot.ImagePage(site, filename) # normalizes filename + imagepage = pywikibot.FilePage(site, filename) # normalizes filename imagepage.text = self.description
pywikibot.output(u'Uploading file to %s via API....' % site) diff --git a/tests/page_tests.py b/tests/page_tests.py index 4948638..dca5d2f 100644 --- a/tests/page_tests.py +++ b/tests/page_tests.py @@ -352,7 +352,7 @@ for p in mainpage.langlinks(): self.assertType(p, pywikibot.Link) for p in mainpage.imagelinks(): - self.assertType(p, pywikibot.ImagePage) + self.assertType(p, pywikibot.FilePage) for p in mainpage.templates(): self.assertType(p, pywikibot.Page) for t, params in mainpage.templatesWithParams(): diff --git a/tests/site_tests.py b/tests/site_tests.py index 621da4e..241bcbe 100644 --- a/tests/site_tests.py +++ b/tests/site_tests.py @@ -312,7 +312,7 @@ for cm in mysite.categorymembers(cat): self.assertType(cat, pywikibot.Page) # test pageimages - self.assertTrue(all(isinstance(im, pywikibot.ImagePage) + self.assertTrue(all(isinstance(im, pywikibot.FilePage) for im in mysite.pageimages(mainpage))) # test pagetemplates self.assertTrue(all(isinstance(te, pywikibot.Page) @@ -473,27 +473,27 @@
ai = list(mysite.allimages(total=10)) self.assertTrue(len(ai) <= 10) - self.assertTrue(all(isinstance(image, pywikibot.ImagePage) + self.assertTrue(all(isinstance(image, pywikibot.FilePage) for image in ai)) for impage in mysite.allimages(start="Ba", total=5): - self.assertType(impage, pywikibot.ImagePage) + self.assertType(impage, pywikibot.FilePage) self.assertTrue(mysite.page_exists(impage)) self.assertTrue(impage.title(withNamespace=False) >= "Ba") # # Bug # 15985 # for impage in mysite.allimages(start="Da", reverse=True, total=5): -# self.assertType(impage, pywikibot.ImagePage) +# self.assertType(impage, pywikibot.FilePage) # self.assertTrue(mysite.page_exists(impage)) # self.assertTrue(impage.title() <= "Da") for impage in mysite.allimages(prefix="Ch", total=5): - self.assertType(impage, pywikibot.ImagePage) + self.assertType(impage, pywikibot.FilePage) self.assertTrue(mysite.page_exists(impage)) self.assertTrue(impage.title(withNamespace=False).startswith("Ch")) for impage in mysite.allimages(minsize=100, total=5): - self.assertType(impage, pywikibot.ImagePage) + self.assertType(impage, pywikibot.FilePage) self.assertTrue(mysite.page_exists(impage)) self.assertTrue(impage._imageinfo["size"] >= 100) for impage in mysite.allimages(maxsize=2000, total=5): - self.assertType(impage, pywikibot.ImagePage) + self.assertType(impage, pywikibot.FilePage) self.assertTrue(mysite.page_exists(impage)) self.assertTrue(impage._imageinfo["size"] <= 2000)
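
The renamed Site entry points touched by this change can be exercised roughly
as in the sketch below (assumes a logged-in account with upload rights; the
site, file title, and local path are placeholders, not part of the patch):

    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')

    # newfiles()/unusedfiles() are the new names; newimages()/unusedimages()
    # remain as deprecated wrappers around them.
    for filepage, timestamp, user, comment in site.newfiles(total=5):
        pywikibot.output(u'%s uploaded %s at %s' % (user, filepage.title(), timestamp))

    for unused in site.unusedfiles(total=5):
        pywikibot.output(u'Unused file: %s' % unused.title())

    # upload() now takes a FilePage as "filepage"; the old "imagepage"
    # keyword is remapped by @deprecate_arg, so positional callers are
    # unaffected.
    target = pywikibot.FilePage(site, 'File:Example.jpg')
    target.text = u'A placeholder description'
    site.upload(target, source_filename='/tmp/example.jpg',
                comment=u'Uploading a placeholder example')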