Revision: 8566
Author: xqt
Date: 2010-09-16 11:14:05 +0000 (Thu, 16 Sep 2010)
Log Message:
-----------
Follow up for r8562, r8565
Modified Paths:
--------------
trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2010-09-15 18:16:33 UTC (rev 8565)
+++ trunk/pywikipedia/wikipedia.py 2010-09-16 11:14:05 UTC (rev 8566)
@@ -3910,7 +3910,8 @@
successful = False
for page2 in self.pages:
if page2.sectionFreeTitle() == page.sectionFreeTitle():
- if not (hasattr(page2,'_contents') or hasattr(page2, '_getexception')) or self.force:
+ if not (hasattr(page2,'_contents') or \
+ hasattr(page2, '_getexception')) or self.force:
page2.editRestriction = entry.editRestriction
page2.moveRestriction = entry.moveRestriction
if editRestriction == 'autoconfirmed':
@@ -3920,8 +3921,12 @@
page2._ipedit = ipedit
page2._revisionId = revisionId
page2._editTime = timestamp
-## leads to a bug with python 2.4.3 tracker 3066934
-## page2._versionhistory = [(revisionId, str(Timestamp.fromtimestampformat(timestamp)), username, entry.comment)]
+ page2._versionhistory = [
+ (revisionId,
+ time.strftime("%Y-%m-%dT%H:%M:%SZ",
+ time.strptime(str(timestamp),
+ "%Y%m%d%H%M%S")),
+ username, entry.comment)]
section = page2.section()
# Store the content
page2._contents = text
@@ -3936,13 +3941,16 @@
# This is used for checking deletion conflict.
# Use the data loading time.
- page2._startTime = time.strftime('%Y%m%d%H%M%S', time.gmtime())
+ page2._startTime = time.strftime('%Y%m%d%H%M%S',
+ time.gmtime())
if section:
- m = re.search("\.3D\_*(\.27\.27+)?(\.5B\.5B)?\_*%s\_*(\.5B\.5B)?(\.27\.27+)?\_*\.3D" % re.escape(section), sectionencode(text,page2.site().encoding()))
+ m = re.search("\.3D\_*(\.27\.27+)?(\.5B\.5B)?\_*%s\_*(\.5B\.5B)?(\.27\.27+)?\_*\.3D"
+ % re.escape(section), sectionencode(text,page2.site().encoding()))
if not m:
try:
page2._getexception
- output(u"WARNING: Section not found: %s" % page2.aslink(forceInterwiki = True))
+ output(u"WARNING: Section not found: %s"
+ % page2.aslink(forceInterwiki = True))
except AttributeError:
# There is no exception yet
page2._getexception = SectionError
@@ -3950,8 +3958,10 @@
# Note that there is no break here. The reason is that there
# might be duplicates in the pages list.
if not successful:
- output(u"BUG>> title %s (%s) not found in list" % (title, page.aslink(forceInterwiki=True)))
- output(u'Expected one of: %s' % u','.join([page2.aslink(forceInterwiki=True) for page2 in self.pages]))
+ output(u"BUG>> title %s (%s) not found in list"
+ % (title, page.aslink(forceInterwiki=True)))
+ output(u'Expected one of: %s'
+ % u','.join([page2.aslink(forceInterwiki=True) for page2 in self.pages]))
raise PageNotFound
def headerDone(self, header):
Revision: 8562
Author: xqt
Date: 2010-09-15 12:54:46 +0000 (Wed, 15 Sep 2010)
Log Message:
-----------
pre"loading" version history, submitted by DrTrigon.
Fix for old bug: VH was never reloaded if len(VH) < revCount.
Modified Paths:
--------------
trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2010-09-15 11:32:15 UTC (rev 8561)
+++ trunk/pywikipedia/wikipedia.py 2010-09-15 12:54:46 UTC (rev 8562)
@@ -2559,8 +2559,11 @@
dataQuery = self._versionhistoryearliest
else:
thisHistoryDone = True
- elif not hasattr(self, '_versionhistory') or forceReload:
+ elif not hasattr(self, '_versionhistory') or forceReload or \
+ len(self._versionhistory) < revCount:
self._versionhistory = []
+ # ?? does not load if len(self._versionhistory) > revCount
+ # shouldn't it
elif getAll and len(self._versionhistory) == revCount:
# Cause a reload, or at least make the loop run
thisHistoryDone = False
@@ -3907,7 +3910,7 @@
successful = False
for page2 in self.pages:
if page2.sectionFreeTitle() == page.sectionFreeTitle():
- if not (hasattr(page2,'_contents') or hasattr(page2,'_getexception')) or self.force:
+ if not (hasattr(page2,'_contents') or hasattr(page2, '_getexception')) or self.force:
page2.editRestriction = entry.editRestriction
page2.moveRestriction = entry.moveRestriction
if editRestriction == 'autoconfirmed':
@@ -3917,6 +3920,7 @@
page2._ipedit = ipedit
page2._revisionId = revisionId
page2._editTime = timestamp
+ page2._versionhistory = [(revisionId, str(Timestamp.fromtimestampformat(timestamp)), username, entry.comment)]
section = page2.section()
# Store the content
page2._contents = text
Revision: 8561
Author: xqt
Date: 2010-09-15 11:32:15 +0000 (Wed, 15 Sep 2010)
Log Message:
-----------
expandtemplates parameter for Page.get() - fully resolve templates within page content; submitted by DrTrigon. Thanks.
Modified Paths:
--------------
trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2010-09-15 10:11:05 UTC (rev 8560)
+++ trunk/pywikipedia/wikipedia.py 2010-09-15 11:32:15 UTC (rev 8561)
@@ -585,7 +585,7 @@
return self.autoFormat()[0] is not None
def get(self, force=False, get_redirect=False, throttle=True,
- sysop=False, change_edit_time=True):
+ sysop=False, change_edit_time=True, expandtemplates=False):
"""Return the wiki-text of the page.
This will retrieve the page from the server if it has not been
@@ -604,6 +604,8 @@
If change_edit_time is False, do not check this version for changes
before saving. This should be used only if the page has been loaded
previously.
+ If expandtemplates is True, all templates in the page content are
+ fully resolved too (if API is used).
"""
# NOTE: The following few NoPage exceptions could already be thrown at
@@ -622,7 +624,7 @@
if self.site().isInterwikiLink(self.title()):
raise NoPage('%s is not a local page on %s!'
% (self.aslink(), self.site()))
- if force:
+ if force or expandtemplates:
# When forcing, we retry the page no matter what. Old exceptions
# and contents do not apply any more.
for attr in ['_redirarg', '_getexception', '_contents']:
@@ -640,7 +642,8 @@
# Make sure we did try to get the contents once
if not hasattr(self, '_contents'):
try:
- self._contents = self._getEditPage(get_redirect = get_redirect, throttle = throttle, sysop = sysop)
+ self._contents = self._getEditPage(get_redirect=get_redirect, throttle=throttle, sysop=sysop,
+ expandtemplates = expandtemplates)
hn = self.section()
if hn:
m = re.search("=+ *%s *=+" % hn, self._contents)
@@ -669,7 +672,7 @@
return self._contents
def _getEditPage(self, get_redirect=False, throttle=True, sysop=False,
- oldid=None, change_edit_time=True):
+ oldid=None, change_edit_time=True, expandtemplates=False):
"""Get the contents of the Page via API query
Do not use this directly, use get() instead.
@@ -677,6 +680,8 @@
Arguments:
oldid - Retrieve an old revision (by id), not the current one
get_redirect - Get the contents, even if it is a redirect page
+ expandtemplates - Fully resolve templates within page content
+ (if API is used)
This method returns the raw wiki text as a unicode string.
"""
@@ -694,6 +699,8 @@
}
if oldid:
params['rvstartid'] = oldid
+ if expandtemplates:
+ params[u'rvexpandtemplates'] = u''
if throttle:
get_throttle()