[Pywikipedia-l] SVN: [5305] trunk/pywikipedia/wikipedia.py

wikipedian at svn.wikimedia.org
Mon May 5 10:15:22 UTC 2008


Revision: 5305
Author:   wikipedian
Date:     2008-05-05 10:15:22 +0000 (Mon, 05 May 2008)

Log Message:
-----------
applied portions of patch [ 1911954 ] "wikipedia.py recursive redirect + various fixes" by AndreasJS

These changes were applied:

Page.title(): Esperanto decoding; fixes bug 1425206 and applies patch 1880140
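
On eo: sites the x-convention digraphs in a title are turned back into accented letters (cx -> ĉ, gx -> ĝ, and so on). Below is a minimal sketch of that decoding, assuming only the plain digraph mapping; the real decodeEsperantoX in wikipedia.py is the authoritative implementation and covers edge cases this sketch ignores:

    # Illustrative sketch of x-convention decoding; not the actual
    # decodeEsperantoX implementation from wikipedia.py.
    import re

    _ESPERANTO_X = {
        u'cx': u'\u0109', u'gx': u'\u011d', u'hx': u'\u0125',
        u'jx': u'\u0135', u'sx': u'\u015d', u'ux': u'\u016d',
        u'Cx': u'\u0108', u'Gx': u'\u011c', u'Hx': u'\u0124',
        u'Jx': u'\u0134', u'Sx': u'\u015c', u'Ux': u'\u016c',
    }

    def decode_esperanto_x_sketch(title):
        # Replace each c/g/h/j/s/u + x digraph with the accented letter.
        return re.sub(u'[cghjsuCGHJSU]x',
                      lambda m: _ESPERANTO_X[m.group(0)], title)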

Page._putPage(): include the page name in the ServerError retry message
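
The surrounding retry loop doubles the wait up to a 30-minute cap, as the hunk below shows. A standalone sketch of that backoff pattern with the page name in the message (save_page and pagename are hypothetical stand-ins; ServerError is the exception class defined in wikipedia.py):

    # Sketch of the exponential backoff around _putPage's ServerError handling.
    import time

    def put_with_retry(save_page, pagename, retry_delay=1, max_delay=30):
        while True:
            try:
                return save_page()
            except ServerError:
                print (u'Got a server error when putting %s; '
                       u'will retry in %i minute%s.'
                       % (pagename, retry_delay,
                          retry_delay != 1 and u's' or u''))
                time.sleep(60 * retry_delay)
                retry_delay = min(retry_delay * 2, max_delay)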

Page.move(): raise NoPage when the source page is missing and PageNotSaved when the target page already exists
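
Callers can now catch the specific failure instead of scanning output. A hedged usage sketch; the titles and reason are invented, and the keyword argument assumes the usual Page.move() signature:

    # Usage sketch for the new Page.move() exceptions.
    import wikipedia

    site = wikipedia.getSite()
    page = wikipedia.Page(site, u'Old title')
    try:
        page.move(u'New title', reason=u'harmonising naming')
    except wikipedia.NoPage:
        wikipedia.output(u'Source page does not exist; nothing to move.')
    except wikipedia.PageNotSaved:
        wikipedia.output(u'Target page already exists; move skipped.')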

Page.delete(mark=True): pass the deletion reason as a parameter to the {{delete}} tag
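
A small sketch of the wikitext that gets written when the page is only marked for deletion; deletion_mark_text is a hypothetical helper mirroring the new put() call in the hunk below:

    # Illustrative helper showing the marked-for-deletion wikitext
    # (hypothetical, not library code).
    def deletion_mark_text(reason, old_text):
        return u'{{delete|%s}}\n%s ~~~~\n----\n\n%s' % (reason, reason, old_text)

    print deletion_mark_text(u'copyright violation', u'Old page content ...')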

getLanguageLinks(): allow interwiki links with an empty title, such as [[:en:]], which point to the Main Page
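
Such links have an empty title after the language prefix; they used to be reported as impossible and skipped, and are now handed to Page() like any other interwiki link. A hedged usage sketch with invented text and sites:

    # Usage sketch: getLanguageLinks() now also yields entries for links
    # such as [[en:]] that point to the target wiki's Main Page.
    import wikipedia

    text = u"Article text.\n[[en:]]\n[[de:Hauptseite]]"
    links = wikipedia.getLanguageLinks(text, insite=wikipedia.getSite('eo'))
    for site, page in links.iteritems():
        wikipedia.output(u'%s -> %s' % (site.lang, page.aslink()))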


This change was skipped:

Page.getRedirectTarget(recursive=True): follow redirect chains; can raise an
InfiniteLoop exception
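
For context, the skipped change would follow the whole redirect chain and abort on a cycle. A minimal, self-contained sketch of that idea (InfiniteLoop here is a local stand-in, since the exception was not added to wikipedia.py; get_target is a hypothetical callback):

    # Minimal sketch of recursive redirect resolution with loop detection.
    class InfiniteLoop(Exception):
        pass

    def resolve_redirects(title, get_target):
        # get_target(title) returns the redirect target title or None.
        seen = set()
        while True:
            if title in seen:
                raise InfiniteLoop(u'Redirect loop detected at %s' % title)
            seen.add(title)
            target = get_target(title)
            if target is None:
                return title
            title = target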

Modified Paths:
--------------
    trunk/pywikipedia/wikipedia.py

Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py	2008-05-04 23:24:23 UTC (rev 5304)
+++ trunk/pywikipedia/wikipedia.py	2008-05-05 10:15:22 UTC (rev 5305)
@@ -474,6 +474,8 @@
             title = title.replace(u"''", u'%27%27')
         if underscore:
             title = title.replace(' ', '_')
+        if self.site().lang == 'eo':
+            title = decodeEsperantoX(title)
         return title
 
     def titleWithoutNamespace(self, underscore=False):
@@ -1319,8 +1321,8 @@
             except ServerError:
                 output(u''.join(traceback.format_exception(*sys.exc_info())))
                 output(
-            u'Got a server error when putting; will retry in %i minute%s.'
-                       % (retry_delay, retry_delay != 1 and "s" or ""))
+            u'Got a server error when putting %s; will retry in %i minute%s.'
+                       % (self.aslink(), retry_delay, retry_delay != 1 and "s" or ""))
                 time.sleep(60 * retry_delay)
                 retry_delay *= 2
                 if retry_delay > 30:
@@ -2044,7 +2046,7 @@
         else:
             if self.site().mediawiki_message('articleexists') in data or self.site().mediawiki_message('delete_and_move') in data:
                 if safe:
-                    output(u'Page moved failed: Target page [[%s]] already exists.' % newtitle)
+                    output(u'Page move failed: Target page [[%s]] already exists.' % newtitle)
                     return False
                 else:
                     try:
@@ -2053,6 +2055,12 @@
                     except NoUsername:
                         output(u'Page moved failed: Target page [[%s]] already exists.' % newtitle)
                         return False
+            elif not self.exists():
+                raise NoPage(u'Page move failed: Source page [[%s]] does not exist.' % newtitle)
+                return False
+            elif  Page(self.site(),newtitle).exists():
+                raise PageNotSaved(u'Page move failed: Target page [[%s]] already exists.' % newtitle)
+                return False
             else:
                 output(u'Page move failed for unknown reason.')
                 try:
@@ -2084,7 +2092,7 @@
              if mark and self.exists():
                  text = self.get(get_redirect = True)
                  output(u'Cannot delete page %s - marking the page for deletion instead:' % self.aslink())
-                 self.put(u'{{delete}}\n%s ~~~~\n----\n\n%s' % (reason, text), comment = reason)
+                 self.put(u'{{delete|%s}}\n%s ~~~~\n----\n\n%s' % (reason, reason, text), comment = reason)
                  return
              else:
                  raise
@@ -3195,19 +3203,15 @@
             if '|' in pagetitle:
                 # ignore text after the pipe
                 pagetitle = pagetitle[:pagetitle.index('|')]
-            if not pagetitle:
-                output(u"ERROR: %s - ignoring impossible link to %s:%s"
-                       % (pageLink, lang, pagetitle))
-            else:
-                # we want the actual page objects rather than the titles
-                site = insite.getSite(code = lang)
-                try:
-                    result[site] = Page(site, pagetitle, insite = insite)
-                except Error:
-                    output(
+            # we want the actual page objects rather than the titles
+            site = insite.getSite(code = lang)
+            try:
+                result[site] = Page(site, pagetitle, insite = insite)
+            except Error:
+                output(
         u"[getLanguageLinks] Text contains invalid interwiki link [[%s:%s]]."
                            % (lang, pagetitle))
-                    continue
+                continue
     return result
 
 def removeLanguageLinks(text, site = None, marker = ''):




