Revision: 4315
Author:   russblau
Date:     2007-09-17 14:29:03 +0000 (Mon, 17 Sep 2007)

Log Message:
-----------
Convert more bots to use standardized data file paths

Modified Paths:
--------------
    trunk/pywikipedia/imagecopy.py
    trunk/pywikipedia/interwiki.py
    trunk/pywikipedia/makecat.py
    trunk/pywikipedia/solve_disambiguation.py
    trunk/pywikipedia/spellcheck.py
    trunk/pywikipedia/splitwarning.py
    trunk/pywikipedia/warnfile.py
    trunk/pywikipedia/weblinkchecker.py
    trunk/pywikipedia/wikipedia.py
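The pattern is the same in every file below: a path that was previously hard-coded
relative to the current working directory is now routed through
wikipedia.datafilepath(), which resolves it against config.base_dir and creates any
missing subdirectories. A minimal sketch of the new call pattern, using the
splitwarning.py filenames from the diff below (the surrounding lines are
illustrative only, not part of the commit):

    import codecs
    import wikipedia   # pywikipedia core module; provides datafilepath()

    # Before: the path was relative to whatever directory the bot ran from
    # logFile = codecs.open('logs/interwiki.log', 'r', 'utf-8')

    # After: the path is resolved against config.base_dir, and the logs/
    # subdirectory is created automatically if it does not exist yet
    fn = wikipedia.datafilepath('logs', 'interwiki.log')
    logFile = codecs.open(fn, 'r', 'utf-8')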
Modified: trunk/pywikipedia/imagecopy.py
===================================================================
--- trunk/pywikipedia/imagecopy.py	2007-09-17 10:51:24 UTC (rev 4314)
+++ trunk/pywikipedia/imagecopy.py	2007-09-17 14:29:03 UTC (rev 4315)
@@ -393,7 +393,7 @@
         imageP.put(imtxt+u'\n\n{{NowCommons}}', u'{{NowCommons}}')
 #-etiqueta ok skip view
 #texto
-archivo="Uploadbot.localskips.txt"
+archivo=wikipedia.datafilepath("Uploadbot.localskips.txt")
 try:
     open(archivo, 'r')
 except IOError:
Modified: trunk/pywikipedia/interwiki.py
===================================================================
--- trunk/pywikipedia/interwiki.py	2007-09-17 10:51:24 UTC (rev 4314)
+++ trunk/pywikipedia/interwiki.py	2007-09-17 14:29:03 UTC (rev 4315)
@@ -582,7 +582,8 @@
     def reportInterwikilessPage(self, page):
         wikipedia.output(u"NOTE: %s does not have any interwiki links" % self.originPage.aslink(True))
         if config.without_interwiki:
-            f = codecs.open('without_interwiki.txt', 'a', 'utf-8')
+            f = codecs.open(
+                wikipedia.datafilepath('without_interwiki.txt'), 'a', 'utf-8')
             f.write("# %s \n" % page.aslink())
             f.close()

@@ -728,7 +729,9 @@
         self.problemfound = True
         if globalvar.autonomous:
             try:
-                f = codecs.open('autonomous_problem.dat', 'a', 'utf-8')
+                f = codecs.open(
+                    wikipedia.datafilepath('autonomous_problem.dat'),
+                    'a', 'utf-8')
                 f.write("* %s {%s}" % (self.originPage.aslink(True), txt))
                 if config.interwiki_graph and config.interwiki_graph_url:
                     filename = interwiki_graph.getFilename(self.originPage, extension = config.interwiki_graph_formats[0])
Modified: trunk/pywikipedia/makecat.py
===================================================================
--- trunk/pywikipedia/makecat.py	2007-09-17 10:51:24 UTC (rev 4314)
+++ trunk/pywikipedia/makecat.py	2007-09-17 14:29:03 UTC (rev 4315)
@@ -220,7 +220,8 @@
     mysite = wikipedia.getSite()
     wikipedia.setAction(wikipedia.translate(mysite,msg) + ' ' + workingcatname)
     workingcat = catlib.Category(mysite,mysite.category_namespace()+':'+workingcatname)
-    filename = 'category/' + wikipedia.UnicodeToAsciiHtml(workingcatname) + '_exclude.txt'
+    filename = wikipedia.datafilepath('category',
+                   wikipedia.UnicodeToAsciiHtml(workingcatname) + '_exclude.txt')
     try:
         f = codecs.open(filename, 'r', encoding = mysite.encoding())
         for line in f.readlines():
Modified: trunk/pywikipedia/solve_disambiguation.py
===================================================================
--- trunk/pywikipedia/solve_disambiguation.py	2007-09-17 10:51:24 UTC (rev 4314)
+++ trunk/pywikipedia/solve_disambiguation.py	2007-09-17 14:29:03 UTC (rev 4315)
@@ -382,10 +382,11 @@
         self.enabled = enabled

         self.ignorelist = []
-        filename = 'disambiguations/' + self.disambPage.urlname() + '.txt'
+        filename = wikipedia.datafilepath('disambiguations',
+                       self.disambPage.urlname() + '.txt')
         try:
             # The file is stored in the disambiguation/ subdir. Create if necessary.
-            f = codecs.open(self.makepath(filename), 'r', 'utf-8')
+            f = codecs.open(filename, 'r', 'utf-8')
             for line in f.readlines():
                 # remove trailing newlines and carriage returns
                 while line[-1] in ['\n', '\r']:
@@ -403,36 +404,17 @@
     def ignore(self, refPage):
         if self.enabled:
             # Skip this occurence next time.
-            filename = 'disambiguations/' + self.disambPage.urlname() + '.txt'
+            filename = wikipedia.datafilepath('disambiguations',
+                           self.disambPage.urlname() + '.txt')
             try:
                 # Open file for appending. If none exists yet, create a new one.
                 # The file is stored in the disambiguation/ subdir. Create if necessary.
-                f = codecs.open(self.makepath(filename), 'a', 'utf-8')
+                f = codecs.open(filename, 'a', 'utf-8')
                 f.write(refPage.urlname() + '\n')
                 f.close()
             except IOError:
                 pass

-    def makepath(self, path):
-        """ creates missing directories for the given path and
-            returns a normalized absolute version of the path.
-
-        - if the given path already exists in the filesystem
-          the filesystem is not modified.
-
-        - otherwise makepath creates directories along the given path
-          using the dirname() of the path. You may append
-          a '/' to the path if you want it to be a directory path.
-
-        from holger@trillke.net 2002/03/18
-        """
-        from os import makedirs
-        from os.path import normpath,dirname,exists,abspath
-
-        dpath = normpath(dirname(path))
-        if not exists(dpath): makedirs(dpath)
-        return normpath(abspath(path))
-
 class DisambiguationRobot(object):
     ignore_contents = {
Modified: trunk/pywikipedia/spellcheck.py
===================================================================
--- trunk/pywikipedia/spellcheck.py	2007-09-17 10:51:24 UTC (rev 4314)
+++ trunk/pywikipedia/spellcheck.py	2007-09-17 14:29:03 UTC (rev 4315)
@@ -56,9 +56,9 @@

 __version__ = '$Id$'

-import re,sys
+import re, sys
 import wikipedia, pagegenerators
-import string,codecs
+import string, codecs

 msg={
     'en':u'Bot-aided spell checker',
@@ -75,25 +75,6 @@
     def __init__(self,text):
         self.style = text

-def makepath(path):
-    """ creates missing directories for the given path and
-        returns a normalized absolute version of the path.
-
-    - if the given path already exists in the filesystem
-      the filesystem is not modified.
-
-    - otherwise makepath creates directories along the given path
-      using the dirname() of the path. You may append
-      a '/' to the path if you want it to be a directory path.
-
-    from holger@trillke.net 2002/03/18
-    """
-    from os import makedirs
-    from os.path import normpath,dirname,exists,abspath
-
-    dpath = normpath(dirname(path))
-    if not exists(dpath): makedirs(dpath)
-    return normpath(abspath(path))

 def distance(a,b):
     # Calculates the Levenshtein distance between a and b.
@@ -458,10 +439,11 @@
     if not checklang:
         checklang = mysite.language()
     wikipedia.setAction(wikipedia.translate(mysite,msg))
-    filename = 'spelling/spelling-' + checklang + '.txt'
+    filename = wikipedia.datafilepath('spelling',
+                                      'spelling-' + checklang + '.txt')
     print "Getting wordlist"
     try:
-        f = codecs.open(makepath(filename), 'r', encoding = mysite.encoding())
+        f = codecs.open(filename, 'r', encoding = mysite.encoding())
         for line in f.readlines():
             # remove trailing newlines and carriage returns
             try:
@@ -541,14 +523,15 @@
                 title = wikipedia.input(u"Which page to check now? (enter to stop)")
 finally:
     wikipedia.stopme()
-filename = 'spelling/spelling-' + checklang + '.txt'
+filename = wikipedia.datafilepath('spelling',
+                                  'spelling-' + checklang + '.txt')
 if rebuild:
     list = knownwords.keys()
     list.sort()
-    f = codecs.open(makepath(filename), 'w', encoding = mysite.encoding())
+    f = codecs.open(filename, 'w', encoding = mysite.encoding())
 else:
     list = newwords
-    f = codecs.open(makepath(filename), 'a', encoding = mysite.encoding())
+    f = codecs.open(filename, 'a', encoding = mysite.encoding())
 for word in list:
     if Word(word).isCorrect():
         if word != uncap(word):
Modified: trunk/pywikipedia/splitwarning.py
===================================================================
--- trunk/pywikipedia/splitwarning.py	2007-09-17 10:51:24 UTC (rev 4314)
+++ trunk/pywikipedia/splitwarning.py	2007-09-17 14:29:03 UTC (rev 4315)
@@ -17,7 +17,8 @@
 count={}

 # TODO: Variable log filename
-logFile = codecs.open('logs/interwiki.log', 'r', 'utf-8')
+fn = wikipedia.datafilepath("logs", "interwiki.log")
+logFile = codecs.open(fn, 'r', 'utf-8')
 rWarning = re.compile('WARNING: (?P<family>.+?): \[\[(?P<code>.+?):.*')
 for line in logFile:
     m = rWarning.match(line)
@@ -26,7 +27,10 @@
         code = m.group('code')
         if code in wikipedia.getSite().languages():
             if not files.has_key(code):
-                files[code] = codecs.open('logs/warning-%s-%s.log' % (family, code), 'w', 'utf-8')
+                files[code] = codecs.open(
+                    wikipedia.datafilepath('logs',
+                        'warning-%s-%s.log' % (family, code)),
+                    'w', 'utf-8')
                 count[code] = 0
             files[code].write(line)
             count[code] += 1
Modified: trunk/pywikipedia/warnfile.py
===================================================================
--- trunk/pywikipedia/warnfile.py	2007-09-17 10:51:24 UTC (rev 4314)
+++ trunk/pywikipedia/warnfile.py	2007-09-17 14:29:03 UTC (rev 4315)
@@ -115,11 +115,15 @@
 def main():
     filename = None
     for arg in wikipedia.handleArgs():
-        filename = arg
+        if os.path.isabs(arg):
+            filename = arg
+        else:
+            filename = wikipedia.datafilepath("logs", arg)

     if not filename:
         mysite = wikipedia.getSite()
-        filename = 'logs/warning-%s-%s.log' % (mysite.family.name, mysite.lang)
+        filename = wikipedia.datafilepath('logs',
+            'warning-%s-%s.log' % (mysite.family.name, mysite.lang))
     reader = WarnfileReader(filename)
     bot = WarnfileRobot(reader)
     bot.run()
Modified: trunk/pywikipedia/weblinkchecker.py
===================================================================
--- trunk/pywikipedia/weblinkchecker.py	2007-09-17 10:51:24 UTC (rev 4314)
+++ trunk/pywikipedia/weblinkchecker.py	2007-09-17 14:29:03 UTC (rev 4315)
@@ -472,7 +472,9 @@
         self.reportThread = reportThread
         site = wikipedia.getSite()
         self.semaphore = threading.Semaphore()
-        self.datfilename = 'deadlinks/deadlinks-%s-%s.dat' % (site.family.name, site.lang)
+        self.datfilename = wikipedia.datafilepath('deadlinks',
+                               'deadlinks-%s-%s.dat'
+                               % (site.family.name, site.lang))
         # Count the number of logged links, so that we can insert captions
         # from time to time
         self.logCount = 0
@@ -498,7 +500,8 @@
         isoDate = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(date))
         errorReport += "** In [[%s]] on %s, %s\n" % (pageTitle, isoDate, error)
         wikipedia.output(u"** Logging link for deletion.")
-        txtfilename = 'deadlinks/results-%s-%s.txt' % (site.family.name, site.lang)
+        txtfilename = wikipedia.datafilepath('deadlinks',
+                          'results-%s-%s.txt' % (site.family.name, site.lang))
         txtfile = codecs.open(txtfilename, 'a', 'utf-8')
         self.logCount += 1
         if self.logCount % 30 == 0:
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py	2007-09-17 10:51:24 UTC (rev 4314)
+++ trunk/pywikipedia/wikipedia.py	2007-09-17 14:29:03 UTC (rev 4315)
@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
 """
 Library to get and put pages on a MediaWiki.

@@ -4444,8 +4444,9 @@
     base directory.

    Argument(s) are zero or more directory names, followed by a data
    file name.
+    Any directories in the path that do not already exist are created.
     """
-    return os.path.join(config.base_dir, *filename)
+    return makepath(os.path.join(config.base_dir, *filename))

 #########################
 # Interpret configuration
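Combining the wikipedia.py hunk above with the makepath() helper that the
solve_disambiguation.py and spellcheck.py diffs remove, the resolved behavior of
datafilepath() amounts to roughly the sketch below. Here base_dir merely stands in
for config.base_dir, and the example family/language values are illustrative only:

    import os

    base_dir = os.path.expanduser('~/.pywikipedia')   # stands in for config.base_dir

    def makepath(path):
        # Create missing directories along the path and return its normalized
        # absolute form (same logic as the helper removed above).
        dpath = os.path.normpath(os.path.dirname(path))
        if not os.path.exists(dpath):
            os.makedirs(dpath)
        return os.path.normpath(os.path.abspath(path))

    def datafilepath(*filename):
        # Join zero or more directory names and a file name onto the base
        # directory, creating any directories that do not exist yet.
        return makepath(os.path.join(base_dir, *filename))

    # Resolves to <base_dir>/deadlinks/deadlinks-wikipedia-en.dat and creates
    # the deadlinks/ subdirectory on first use.
    path = datafilepath('deadlinks', 'deadlinks-wikipedia-en.dat')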