Revision: 5914
Author: wikipedian
Date: 2008-09-22 16:15:37 +0000 (Mon, 22 Sep 2008)
Log Message:
-----------
Fixed the Esperanto X-convention bug [ 2006208 ] by rolling back many changes that
concerned Esperanto X-conv.
I already fixed this on 2008-08-21, but somehow my commit seems to have failed
(sorry), so I am now retrying the commit.
Modified Paths:
--------------
trunk/pywikipedia/families/wikipedia_family.py
trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/families/wikipedia_family.py
===================================================================
--- trunk/pywikipedia/families/wikipedia_family.py 2008-09-22 09:58:41 UTC (rev 5913)
+++ trunk/pywikipedia/families/wikipedia_family.py 2008-09-22 16:15:37 UTC (rev 5914)
@@ -966,16 +966,4 @@
return self.code2encoding(code),
def shared_image_repository(self, code):
- return ('commons', 'commons')
-
- def post_get_convert(self, site, getText):
- if site.lang == 'eo':
- return wikipedia.decodeEsperantoX(getText)
- else:
- return getText
-
- def pre_put_convert(self, site, getText):
- if site.lang == 'eo':
- return wikipedia.encodeEsperantoX(getText)
- else:
- return getText
+ return ('commons', 'commons')
\ No newline at end of file
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2008-09-22 09:58:41 UTC (rev 5913)
+++ trunk/pywikipedia/wikipedia.py 2008-09-22 16:15:37 UTC (rev 5914)
@@ -799,12 +799,12 @@
else:
self._isWatched = False
# Now process the contents of the textarea
- # Unescape HTML characters, strip whitespace and postconvert
- pagetext = text[i1:i2]
- pagetext = unescape(pagetext)
- pagetext = pagetext.rstrip()
- pagetext = self.site().post_get_convert(pagetext)
-
+ # Unescape HTML characters, strip whitespace
+ pagetext = text[i1:i2]
+ pagetext = unescape(pagetext)
+ pagetext = pagetext.rstrip()
+ if self.site().lang == 'eo':
+ pagetext = decodeEsperantoX(pagetext)
m = self.site().redirectRegex().match(pagetext)
if m:
# page text matches the redirect pattern
@@ -1295,7 +1295,12 @@
import watchlist
watchArticle = watchlist.isWatched(self.title(), site = self.site())
newPage = not self.exists()
- newtext = self.site().pre_put_convert(newtext)
+ # if posting to an Esperanto wiki, we must e.g. write Bordeauxx instead
+ # of Bordeaux
+ if self.site().lang == 'eo':
+ newtext = encodeEsperantoX(newtext)
+ comment = encodeEsperantoX(comment)
+
return self._putPage(newtext, comment, watchArticle, minorEdit,
newPage, self.site().getToken(sysop = sysop), sysop = sysop)
@@ -2237,7 +2242,7 @@
reason = input(u'Please enter a reason for the deletion:')
answer = 'y'
if prompt and not hasattr(self.site(), '_noDeletePrompt'):
- answer = inputChoice(u'Do you want to delete %s?' % self.aslink(forceInterwiki = True), ['Yes', 'No', 'All'], ['Y', 'N', 'A'], 'N')
+ answer = inputChoice(u'Do you want to delete %s?' % self.aslink(forceInterwiki = True), ['yes', 'no', 'all'], ['y', 'N', 'a'], 'N')
if answer == 'a':
answer = 'y'
self.site()._noDeletePrompt = True
@@ -2939,6 +2944,9 @@
def getData(self):
address = self.site.export_address()
pagenames = [page.sectionFreeTitle() for page in self.pages]
+ # We need to use X convention for requested page titles.
+ if self.site.lang == 'eo':
+ pagenames = [encodeEsperantoX(pagetitle) for pagetitle in pagenames]
pagenames = u'\r\n'.join(pagenames)
if type(pagenames) is not unicode:
output(u'Warning: xmlreader.WikipediaXMLHandler.getData() got non-unicode page names. Please report this.')
@@ -3995,11 +4003,6 @@
linktrail: Return regex for trailing chars displayed as part of a link.
disambcategory: Category in which disambiguation pages are listed.
- post_get_convert: Converts text data from the site immediatly after get
- i.e. EsperantoX -> unicode
- pre_put_convert: Converts text data from the site immediatly before put
- i.e. unicode -> EsperantoX
-
Methods that yield Page objects derived from a wiki's Special: pages
(note, some methods yield other information in a tuple along with the
Pages; see method docs for details) --
@@ -5840,12 +5843,6 @@
"""Return regex for trailing chars displayed as part of a link."""
return self.family.linktrail(self.lang)
- def post_get_convert(self, getText):
- return self.family.post_get_convert(self, getText)
-
- def pre_put_convert(self, putText):
- return self.family.pre_put_convert(self, putText)
-
def language(self):
"""Return Site's language code."""
return self.lang
Patches item #2477551, was opened at 2008-12-30 18:56
Message generated for change (Tracker Item Submitted) made by Item Submitter
You can respond by visiting:
https://sourceforge.net/tracker/?func=detail&atid=603140&aid=2477551&group_…
Please note that this message will contain a full copy of the comment thread,
including the initial issue submission, for this request,
not just the latest update.
Category: None
Group: None
Status: Open
Resolution: None
Priority: 5
Private: No
Submitted By: Nobody/Anonymous (nobody)
Assigned to: Nobody/Anonymous (nobody)
Summary: Pass "sysop" through wikipedia.put()
Initial Comment:
This allows the use of the sysop account to be forced; useful, e.g., for closing XfD discussions where for transparency the admin account must be seen to make the change.
Changes:
wikipedia.py:
@@ line 1326:
-- force=False):
++ force=False, sysop=sysop):
@@ line 1342:
-- sysop = self._getActionUser(action = 'edit', restriction = self.editRestriction, sysop = False)
++ sysop = self._getActionUser(action = 'edit', restriction = self.editRestriction, sysop = sysop)
Default behavior is unchanged.
Feel free to contact me: happy <underscore> melon <at> hotmail <dot> co <dot> uk
----------------------------------------------------------------------
You can respond by visiting:
https://sourceforge.net/tracker/?func=detail&atid=603140&aid=2477551&group_…
Hello,
I am using Python 3.0, and I have a problem when I try to use my bot. When I try to log in (login.py), why does the system show that there is a syntax error in line 55? Thank you.
_________________________________________________________________
More than messages–check out the rest of the Windows Live™.
http://www.microsoft.com/windows/windowslive/
Revision: 6212
Author: cydeweys
Date: 2008-12-27 17:55:58 +0000 (Sat, 27 Dec 2008)
Log Message:
-----------
Added the functionality to process pages from a list of categories.
Modified Paths:
--------------
trunk/pywikipedia/replace.py
Modified: trunk/pywikipedia/replace.py
===================================================================
--- trunk/pywikipedia/replace.py 2008-12-27 15:20:35 UTC (rev 6211)
+++ trunk/pywikipedia/replace.py 2008-12-27 17:55:58 UTC (rev 6212)
@@ -16,6 +16,10 @@
Argument can also be given as "-page:pagetitle". You can
give this parameter multiple times to edit multiple pages.
+-category Works on all of the pages in a specific category. Specify
+ this argument multiple times to work on multiple categories
+ simultaneously.
+
Furthermore, the following command line parameters are supported:
-regex Make replacements using regular expressions. If this argument
@@ -501,6 +505,8 @@
# Between a regex and another (using -fix) sleep some time (not to waste
# too much CPU
sleep = None
+ # A list of categories whose pages we should process.
+ categories = []
# Read commandline parameters.
for arg in wikipedia.handleArgs():
@@ -520,6 +526,8 @@
xmlFilename = arg[5:]
elif arg =='-sql':
useSql = True
+ elif arg.startswith('-category'):
+ categories.append(arg[len('-category:'):])
elif arg.startswith('-page'):
if len(arg) == 5:
PageTitles.append(wikipedia.input(
@@ -547,14 +555,14 @@
elif arg == '-nocase':
caseInsensitive = True
elif arg.startswith('-addcat:'):
- add_cat = arg[8:]
+ add_cat = arg[len('addcat:'):]
elif arg.startswith('-namespace:'):
try:
namespaces.append(int(arg[11:]))
except ValueError:
namespaces.append(arg[11:])
elif arg.startswith('-summary:'):
- wikipedia.setAction(arg[9:])
+ wikipedia.setAction(arg[len('-summary:'):])
summary_commandline = True
elif arg.startswith('-allowoverlap'):
allowoverlap = True
@@ -632,7 +640,7 @@
exceptions = fix['exceptions']
replacements = fix['replacements']
- # already compile all regular expressions here to save time later
+ # Pre-compile all regular expressions here to save time later
for i in range(len(replacements)):
old, new = replacements[i]
if not regex:
@@ -681,7 +689,9 @@
%s
LIMIT 200""" % (whereClause, exceptClause)
gen = pagegenerators.MySQLPageGenerator(query)
-
+ elif categories:
+ gens = [pagegenerators.CategorizedPageGenerator(catlib.Category(wikipedia.getSite(), 'Category:' + t)) for t in categories]
+ gen = pagegenerators.DuplicateFilterPageGenerator(pagegenerators.CombinedPageGenerator(gens))
elif PageTitles:
pages = [wikipedia.Page(wikipedia.getSite(), PageTitle)
for PageTitle in PageTitles]
Revision: 6211
Author: russblau
Date: 2008-12-27 15:20:35 +0000 (Sat, 27 Dec 2008)
Log Message:
-----------
-.has_key() +in; for future maintainability
Modified Paths:
--------------
branches/rewrite/pywikibot/__init__.py
branches/rewrite/pywikibot/comms/threadedhttp.py
branches/rewrite/pywikibot/config2.py
branches/rewrite/pywikibot/family.py
branches/rewrite/pywikibot/login.py
branches/rewrite/pywikibot/page.py
branches/rewrite/pywikibot/scripts/replace.py
branches/rewrite/pywikibot/site.py
branches/rewrite/pywikibot/textlib.py
Modified: branches/rewrite/pywikibot/__init__.py
===================================================================
--- branches/rewrite/pywikibot/__init__.py 2008-12-27 01:24:33 UTC (rev 6210)
+++ branches/rewrite/pywikibot/__init__.py 2008-12-27 15:20:35 UTC (rev 6211)
@@ -90,7 +90,7 @@
except ImportError:
raise ValueError("Invalid interface name '%(interface)s'" % locals())
key = '%s:%s:%s' % (fam, code, user)
- if not _sites.has_key(key):
+ if not key in _sites:
_sites[key] = __Site(code=code, fam=fam, user=user, sysop=sysop)
pywikibot.output("Instantiating Site object '%(site)s'"
% {'site': _sites[key]}, level=DEBUG)
Modified: branches/rewrite/pywikibot/comms/threadedhttp.py
===================================================================
--- branches/rewrite/pywikibot/comms/threadedhttp.py 2008-12-27 01:24:33 UTC (rev 6210)
+++ branches/rewrite/pywikibot/comms/threadedhttp.py 2008-12-27 15:20:35 UTC (rev 6211)
@@ -246,12 +246,12 @@
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
- if not response.has_key('location') and response.status != 300:
+ if "location" not in response and response.status != 300:
raise httplib2.RedirectMissingLocation(
"Redirected but the response is missing a Location: header.",
response, content)
# Fix-up relative redirects (which violate an RFC 2616 MUST)
- if response.has_key('location'):
+ if "location" in response:
location = response['location']
(scheme, authority, path, query, fragment) = httplib2.parse_uri(
location)
@@ -261,7 +261,7 @@
% (location, response['location']))
if response.status == 301 and method in ["GET", "HEAD"]:
response['-x-permanent-redirect-url'] = response['location']
- if not response.has_key('content-location'):
+ if "content-location" not in response:
response['content-location'] = absolute_uri
httplib2._updateCache(headers, response, content, self.cache,
cachekey)
@@ -269,7 +269,7 @@
headers.pop('if-none-match', None)
headers.pop('if-modified-since', None)
- if response.has_key('location'):
+ if "location" in response:
location = response['location']
redirect_method = ((response.status == 303) and
(method not in ["GET", "HEAD"])
Modified: branches/rewrite/pywikibot/config2.py
===================================================================
--- branches/rewrite/pywikibot/config2.py 2008-12-27 01:24:33 UTC (rev 6210)
+++ branches/rewrite/pywikibot/config2.py 2008-12-27 15:20:35 UTC (rev 6211)
@@ -94,7 +94,7 @@
__sys.argv.remove(arg)
break
else:
- if os.environ.has_key("PYWIKIBOT2_DIR"):
+ if "PYWIKIBOT2_DIR" in os.environ:
base_dir = os.environ["PYWIKIBOT2_DIR"]
else:
is_windows = __sys.platform == 'win32'
Modified: branches/rewrite/pywikibot/family.py
===================================================================
--- branches/rewrite/pywikibot/family.py 2008-12-27 01:24:33 UTC (rev 6210)
+++ branches/rewrite/pywikibot/family.py 2008-12-27 15:20:35 UTC (rev 6211)
@@ -625,7 +625,7 @@
return self.known_families
def linktrail(self, code, fallback = '_default'):
- if self.linktrails.has_key(code):
+ if code in self.linktrails:
return self.linktrails[code]
elif fallback:
return self.linktrails[fallback]
@@ -751,7 +751,7 @@
## return None
##
def disambig(self, code, fallback = '_default'):
- if self.disambiguationTemplates.has_key(code):
+ if code in self.disambiguationTemplates:
return self.disambiguationTemplates[code]
elif fallback:
return self.disambiguationTemplates[fallback]
Modified: branches/rewrite/pywikibot/login.py
===================================================================
--- branches/rewrite/pywikibot/login.py 2008-12-27 01:24:33 UTC (rev 6210)
+++ branches/rewrite/pywikibot/login.py 2008-12-27 15:20:35 UTC (rev 6211)
@@ -110,8 +110,8 @@
"""
return True # DEBUG
- if botList.has_key(self.site.family.name)\
- and botList[self.site.family.name].has_key(self.site.code):
+ if self.site.family.name in botList \
+ and self.site.code in botList[self.site.family.name]:
botListPageTitle = botList[self.site.family.name][self.site.code]
botListPage = pywikibot.Page(self.site, botListPageTitle)
for linkedPage in botListPage.linkedPages():
Modified: branches/rewrite/pywikibot/page.py
===================================================================
--- branches/rewrite/pywikibot/page.py 2008-12-27 01:24:33 UTC (rev 6210)
+++ branches/rewrite/pywikibot/page.py 2008-12-27 15:20:35 UTC (rev 6211)
@@ -968,7 +968,7 @@
"""
if self._deletedRevs == None:
self.loadDeletedRevisions()
- if not self._deletedRevs.has_key(timestamp):
+ if timestamp not in self._deletedRevs:
#TODO: Throw an exception?
return None
self._deletedRevs[timestamp][4] = undelete
@@ -1839,7 +1839,7 @@
unicodeCodepoint = int(match.group('hex'), 16)
elif match.group('name'):
name = match.group('name')
- if htmlentitydefs.name2codepoint.has_key(name):
+ if name in htmlentitydefs.name2codepoint:
# We found a known HTML entity.
unicodeCodepoint = htmlentitydefs.name2codepoint[name]
result += text[:match.start()]
Modified: branches/rewrite/pywikibot/scripts/replace.py
===================================================================
--- branches/rewrite/pywikibot/scripts/replace.py 2008-12-27 01:24:33 UTC (rev 6210)
+++ branches/rewrite/pywikibot/scripts/replace.py 2008-12-27 15:20:35 UTC (rev 6211)
@@ -193,9 +193,9 @@
self.skipping = bool(xmlStart)
self.excsInside = []
- if self.exceptions.has_key('inside-tags'):
+ if "inside-tags" in self.exceptions:
self.excsInside += self.exceptions['inside-tags']
- if self.exceptions.has_key('inside'):
+ if "inside" in self.exceptions:
self.excsInside += self.exceptions['inside']
import xmlreader
self.site = pywikibot.getSite()
@@ -226,11 +226,11 @@
pass
def isTitleExcepted(self, title):
- if self.exceptions.has_key('title'):
+ if "title" in self.exceptions:
for exc in self.exceptions['title']:
if exc.search(title):
return True
- if self.exceptions.has_key('require-title'):
+ if "require-title" in self.exceptions:
for req in self.exceptions['require-title']:
if not req.search(title): # if not all requirements are met:
return True
@@ -238,7 +238,7 @@
return False
def isTextExcepted(self, text):
- if self.exceptions.has_key('text-contains'):
+ if "text-contains" in self.exceptions:
for exc in self.exceptions['text-contains']:
if exc.search(text):
return True
@@ -304,11 +304,11 @@
"""
Iff one of the exceptions applies for the given title, returns True.
"""
- if self.exceptions.has_key('title'):
+ if "title" in self.exceptions:
for exc in self.exceptions['title']:
if exc.search(title):
return True
- if self.exceptions.has_key('require-title'):
+ if "require-title" in self.exceptions:
for req in self.exceptions['require-title']:
if not req.search(title):
return True
@@ -319,7 +319,7 @@
Iff one of the exceptions applies for the given page contents,
returns True.
"""
- if self.exceptions.has_key('text-contains'):
+ if "text-contains" in self.exceptions:
for exc in self.exceptions['text-contains']:
if exc.search(original_text):
return True
@@ -332,9 +332,9 @@
"""
new_text = original_text
exceptions = []
- if self.exceptions.has_key('inside-tags'):
+ if "inside-tags" in self.exceptions:
exceptions += self.exceptions['inside-tags']
- if self.exceptions.has_key('inside'):
+ if "inside" in self.exceptions:
exceptions += self.exceptions['inside']
for old, new in self.replacements:
if self.sleep != None:
@@ -624,12 +624,12 @@
pywikibot.output(u'Available predefined fixes are: %s'
% fixes.fixes.keys())
return
- if fix.has_key('regex'):
+ if "regex" in fix:
regex = fix['regex']
- if fix.has_key('msg'):
- wikipedia.setAction(
- pywikibot.translate(pywikibot.getSite(), fix['msg']))
- if fix.has_key('exceptions'):
+ if "msg" in fix:
+ edit_summary = \
+ pywikibot.translate(pywikibot.getSite(), fix['msg'])
+ if "exceptions" in fix:
exceptions = fix['exceptions']
replacements = fix['replacements']
@@ -645,7 +645,7 @@
replacements[i] = oldR, new
for exceptionCategory in ['title', 'require-title', 'text-contains', 'inside']:
- if exceptions.has_key(exceptionCategory):
+ if exceptionCategory in exceptions:
patterns = exceptions[exceptionCategory]
if not regex:
patterns = [re.escape(pattern) for pattern in patterns]
Modified: branches/rewrite/pywikibot/site.py
===================================================================
--- branches/rewrite/pywikibot/site.py 2008-12-27 01:24:33 UTC (rev 6210)
+++ branches/rewrite/pywikibot/site.py 2008-12-27 15:20:35 UTC (rev 6211)
@@ -91,7 +91,7 @@
self.__family = fam
# if we got an outdated language code, use the new one instead.
- if self.__family.obsolete.has_key(self.__code):
+ if self.__code in self.__family.obsolete:
if self.__family.obsolete[self.__code] is not None:
self.__code = self.__family.obsolete[self.__code]
else:
@@ -940,7 +940,7 @@
"""
rest = self.page_restrictions(page)
- sysop_protected = rest.has_key('edit') and rest['edit'][0] == 'sysop'
+ sysop_protected = "edit" in rest and rest['edit'][0] == 'sysop'
try:
api.LoginManager(site=self, sysop=sysop_protected)
except NoUsername:
@@ -1349,7 +1349,7 @@
raise Error(
u"loadrevisions: Query on %s returned data on '%s'"
% (page, pagedata['title']))
- if pagedata.has_key('missing'):
+ if "missing" in pagedata:
raise NoPage(u'Page %s does not exist'
% page.title(asLink=True))
else:
Modified: branches/rewrite/pywikibot/textlib.py
===================================================================
--- branches/rewrite/pywikibot/textlib.py 2008-12-27 01:24:33 UTC (rev 6210)
+++ branches/rewrite/pywikibot/textlib.py 2008-12-27 15:20:35 UTC (rev 6211)
@@ -111,7 +111,7 @@
if isinstance(exc, str) or isinstance(exc, unicode):
# assume it's a reference to the exceptionRegexes dictionary
# defined above.
- if not exceptionRegexes.has_key(exc):
+ if exc not in exceptionRegexes:
raise ValueError("Unknown tag type: " + exc)
dontTouchRegexes.append(exceptionRegexes[exc])
else:
@@ -922,12 +922,12 @@
if hasattr(code,'lang'):
code = code.lang
- if xdict.has_key(code):
+ if code in xdict:
return xdict[code]
for alt in _altlang(code):
- if xdict.has_key(alt):
+ if alt in xdict:
return xdict[alt]
- if xdict.has_key('en'):
+ if "en" in xdict:
return xdict['en']
return xdict.values()[0]