Revision: 4512
Author:   russblau
Date:     2007-11-06 21:51:20 +0000 (Tue, 06 Nov 2007)
Log Message:
-----------
Moved functions that depend on the user's local configuration to the config.py module
Modified Paths:
--------------
    trunk/pywikipedia/casechecker.py
    trunk/pywikipedia/category.py
    trunk/pywikipedia/config.py
    trunk/pywikipedia/copyright.py
    trunk/pywikipedia/imagecopy.py
    trunk/pywikipedia/interwiki.py
    trunk/pywikipedia/login.py
    trunk/pywikipedia/makecat.py
    trunk/pywikipedia/solve_disambiguation.py
    trunk/pywikipedia/spellcheck.py
    trunk/pywikipedia/splitwarning.py
    trunk/pywikipedia/userinterfaces/terminal_interface.py
    trunk/pywikipedia/warnfile.py
    trunk/pywikipedia/watchlist.py
    trunk/pywikipedia/weblinkchecker.py
    trunk/pywikipedia/welcome.py
    trunk/pywikipedia/wikipedia.py
    trunk/pywikipedia/wikipediatools.py
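For bot authors, the practical effect of this commit is that the data-path helpers are now reached through the config module instead of being module-level functions of wikipedia.py. A minimal sketch of the new call pattern in Python 2 (the 'logs'/'interwiki.log' names are borrowed from splitwarning.py below; any data file works the same way):

    import wikipedia

    # Old spelling, removed in this revision:
    #     logfile = wikipedia.datafilepath('logs', 'interwiki.log')

    # New spelling: the helper now lives on the config module, which
    # wikipedia.py already imports as `config`.
    logfile = wikipedia.config.datafilepath('logs', 'interwiki.log')
    print wikipedia.config.shortpath(logfile)   # shown relative to config.base_dir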
Modified: trunk/pywikipedia/casechecker.py
===================================================================
--- trunk/pywikipedia/casechecker.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/casechecker.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -153,7 +153,7 @@
                 raise ValueError(u'Suspects must be the same size')

         if not os.path.isabs(self.wikilogfile):
-            self.wikilogfile = wikipedia.datafilepath(self.wikilogfile)
+            self.wikilogfile = wikipedia.config.datafilepath(self.wikilogfile)
         try:
             self.wikilog = codecs.open(self.wikilogfile, 'a', 'utf-8')
         except IOError:
Modified: trunk/pywikipedia/category.py
===================================================================
--- trunk/pywikipedia/category.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/category.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -163,10 +163,10 @@
         else:
             try:
                 if not os.path.isabs(filename):
-                    filename = wikipedia.datafilepath(filename)
+                    filename = wikipedia.config.datafilepath(filename)
                 f = bz2.BZ2File(filename, 'r')
                 wikipedia.output(u'Reading dump from %s'
-                                 % wikipedia.shortpath(filename))
+                                 % wikipedia.config.shortpath(filename))
                 databases = pickle.load(f)
                 f.close()
                 # keys are categories, values are 2-tuples with lists as entries.
@@ -230,9 +230,9 @@
        Saves the contents of the dictionaries superclassDB and catContentDB to disk.
        '''
        if not os.path.isabs(filename):
-           filename = wikipedia.datafilepath(filename)
+           filename = wikipedia.config.datafilepath(filename)
        wikipedia.output(u'Dumping to %s, please wait...'
-                        % wikipedia.shortpath(filename))
+                        % wikipedia.config.shortpath(filename))
        f = bz2.BZ2File(filename, 'w')
        databases = {
            'catContentDB': self.catContentDB,
@@ -686,7 +686,7 @@
         self.catTitle = catTitle
         self.catDB = catDB
         if not os.path.isabs(filename):
-            filename = wikipedia.datafilepath(filename)
+            filename = wikipedia.config.datafilepath(filename)
         self.filename = filename
         # TODO: make maxDepth changeable with a parameter or config file entry
         self.maxDepth = maxDepth
Modified: trunk/pywikipedia/config.py
===================================================================
--- trunk/pywikipedia/config.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/config.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -408,8 +408,6 @@
     else:
         print "WARNING: Skipped '%s': owned by someone else."%_filename

-del os, re
-
 # Test for obsoleted and/or unknown variables.
 for _key in globals().keys():
     if _key[0]=='_':
@@ -444,6 +442,45 @@

 # Save base_dir for use by other modules
 base_dir = _base_dir
+
+def makepath(path):
+    """Return a normalized absolute version of the path argument.
+
+    - if the given path already exists in the filesystem
+      the filesystem is not modified.
+
+    - otherwise makepath creates directories along the given path
+      using the dirname() of the path. You may append
+      a '/' to the path if you want it to be a directory path.
+
+    from holger@trillke.net 2002/03/18
+
+    """
+    from os import makedirs
+    from os.path import normpath, dirname, exists, abspath
+
+    dpath = normpath(dirname(path))
+    if not exists(dpath): makedirs(dpath)
+    return normpath(abspath(path))
+
+def datafilepath(*filename):
+    """Return an absolute path to a data file in a standard location.
+
+    Argument(s) are zero or more directory names, optionally followed by a
+    data file name. The return path is offset to config.base_dir. Any
+    directories in the path that do not already exist are created.
+
+    """
+    import os
+    return makepath(os.path.join(base_dir, *filename))
+
+def shortpath(path):
+    """Return a file path relative to config.base_dir."""
+    import os
+    if path.startswith(base_dir):
+        return path[len(base_dir) + len(os.path.sep) : ]
+    return path
+
 #
 # When called as main program, list all configuration variables
 #
@@ -468,3 +505,5 @@
         del __sys.modules[__name__].__dict__[__var]

 del __var, __sys
+del os, re
+
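The three helpers keep the behaviour they had in wikipedia.py; only their home changes, and they need nothing beyond os and config.base_dir. A short illustrative use of the new module-level functions (directory and file names here are made up for the example):

    import config

    # datafilepath() joins its arguments onto config.base_dir and relies on
    # makepath() to create any directories that do not exist yet.
    p = config.datafilepath('example-dir', 'example.dat')   # hypothetical names

    # shortpath() strips the base_dir prefix again, which is the form the
    # bots print for the user.
    print config.shortpath(p)   # 'example-dir/example.dat' on a POSIX system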
Modified: trunk/pywikipedia/copyright.py
===================================================================
--- trunk/pywikipedia/copyright.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/copyright.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -112,7 +112,7 @@
 error_color = 'lightred'

 appdir = "copyright"
-output_file = wikipedia.datafilepath(appdir, "output.txt")
+output_file = wikipedia.config.datafilepath(appdir, "output.txt")

 pages_for_exclusion_database = [
     ('it', 'Wikipedia:Sospette violazioni di copyright/Lista di esclusione', 'exclusion_list.txt'),
@@ -297,7 +297,7 @@

 def exclusion_file_list():
     for i in pages_for_exclusion_database:
-        path = wikipedia.datafilepath(appdir, i[0], i[2])
+        path = wikipedia.config.datafilepath(appdir, i[0], i[2])
         wikipedia.makepath(path)
         p = wikipedia.Page(wikipedia.getSite(i[0]), i[1])
         yield p, path
@@ -306,12 +306,14 @@
     for page, path in exclusion_file_list():
         try:
             if not os.path.exists(path):
-                print 'Creating file \'%s\' (%s)' % (wikipedia.shortpath(path), page.aslink())
+                print 'Creating file \'%s\' (%s)' % (
+                    wikipedia.config.shortpath(path), page.aslink())
                 force_update = True
             else:
                 file_age = time.time() - os.path.getmtime(path)
                 if file_age > 24 * 60 * 60:
-                    print 'Updating file \'%s\' (%s)' % (wikipedia.shortpath(path), page.aslink())
+                    print 'Updating file \'%s\' (%s)' % (
+                        wikipedia.config.shortpath(path), page.aslink())
                     force_update = True
         except OSError:
             raise
@@ -384,7 +386,7 @@
             result_list.append(entry)

     result_list += read_file(
-        wikipedia.datafilepath(appdir, 'exclusion_list.txt'),
+        wikipedia.config.datafilepath(appdir, 'exclusion_list.txt'),
         cut_comment = True, cut_newlines = True
     ).splitlines()

@@ -557,7 +559,7 @@
         print "** " + entry

 def exclusion_list_dump():
-    f = open(wikipedia.datafilepath(appdir, 'exclusion_list.dump'), 'w')
+    f = open(wikipedia.config.datafilepath(appdir, 'exclusion_list.dump'), 'w')
     f.write('\n'.join(excl_list))
     f.close()
     print "Exclusion list dump saved."
@@ -984,7 +986,7 @@
               % (title.replace(" ", "_").replace("\"", "%22"), id, "author")
               + output,
-              wikipedia.datafilepath(appdir, "ID_output.txt"))
+              wikipedia.config.datafilepath(appdir, "ID_output.txt"))


 class CheckRobot:
     def __init__(self, generator):
Modified: trunk/pywikipedia/imagecopy.py
===================================================================
--- trunk/pywikipedia/imagecopy.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/imagecopy.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -393,7 +393,7 @@
     imageP.put(imtxt+u'\n\n{{NowCommons}}', u'{{NowCommons}}')
 #-etiqueta ok skip view
 #texto
-archivo=wikipedia.datafilepath("Uploadbot.localskips.txt")
+archivo=wikipedia.config.datafilepath("Uploadbot.localskips.txt")
 try:
     open(archivo, 'r')
 except IOError:
Modified: trunk/pywikipedia/interwiki.py
===================================================================
--- trunk/pywikipedia/interwiki.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/interwiki.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -598,7 +598,7 @@
             wikipedia.output(u"NOTE: %s does not have any interwiki links" % self.originPage.aslink(True))
             if config.without_interwiki:
                 f = codecs.open(
-                    wikipedia.datafilepath('without_interwiki.txt'), 'a', 'utf-8')
+                    wikipedia.config.datafilepath('without_interwiki.txt'), 'a', 'utf-8')
                 f.write("# %s \n" % page.aslink())
                 f.close()

@@ -745,7 +745,7 @@
         if globalvar.autonomous:
             try:
                 f = codecs.open(
-                    wikipedia.datafilepath('autonomous_problem.dat'),
+                    wikipedia.config.datafilepath('autonomous_problem.dat'),
                     'a', 'utf-8')
                 f.write("* %s {%s}" % (self.originPage.aslink(True), txt))
                 if config.interwiki_graph and config.interwiki_graph_url:
@@ -1189,7 +1189,7 @@

     def dump(self):
         site = wikipedia.getSite()
-        dumpfn = wikipedia.datafilepath(
+        dumpfn = wikipedia.config.datafilepath(
             'interwiki-dumps',
             'interwikidump-%s-%s.txt' % (site.family.name, site.lang))
         f = codecs.open(dumpfn, 'w', 'utf-8')
@@ -1547,7 +1547,7 @@

     if optRestore or optContinue:
         site = wikipedia.getSite()
-        dumpFileName = wikipedia.datafilepath(
+        dumpFileName = wikipedia.config.datafilepath(
             'interwiki-dumps',
             u'interwikidump-%s-%s.txt' % (site.family.name, site.lang))
Modified: trunk/pywikipedia/login.py
===================================================================
--- trunk/pywikipedia/login.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/login.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -172,7 +172,7 @@
         The argument data is the raw data, as returned by getCookie().

        Returns nothing."""
-        filename = wikipedia.datafilepath('login-data',
+        filename = wikipedia.config.datafilepath('login-data',
                    '%s-%s-%s-login.data'
                    % (self.site.family.name, self.site.lang, self.username))
        f = open(filename, 'w')
Modified: trunk/pywikipedia/makecat.py
===================================================================
--- trunk/pywikipedia/makecat.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/makecat.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -220,7 +220,7 @@
     mysite = wikipedia.getSite()
     wikipedia.setAction(wikipedia.translate(mysite,msg) + ' ' + workingcatname)
     workingcat = catlib.Category(mysite,mysite.category_namespace()+':'+workingcatname)
-    filename = wikipedia.datafilepath('category',
+    filename = wikipedia.config.datafilepath('category',
                wikipedia.UnicodeToAsciiHtml(workingcatname) + '_exclude.txt')
     try:
         f = codecs.open(filename, 'r', encoding = mysite.encoding())
Modified: trunk/pywikipedia/solve_disambiguation.py
===================================================================
--- trunk/pywikipedia/solve_disambiguation.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/solve_disambiguation.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -379,7 +379,7 @@
         self.enabled = enabled

         self.ignorelist = []
-        filename = wikipedia.datafilepath('disambiguations',
+        filename = wikipedia.config.datafilepath('disambiguations',
                    self.disambPage.urlname() + '.txt')
         try:
             # The file is stored in the disambiguation/ subdir. Create if necessary.
@@ -401,7 +401,7 @@
     def ignore(self, refPage):
         if self.enabled:
             # Skip this occurence next time.
-            filename = wikipedia.datafilepath('disambiguations',
+            filename = wikipedia.config.datafilepath('disambiguations',
                        self.disambPage.urlname() + '.txt')
             try:
                 # Open file for appending. If none exists yet, create a new one.
Modified: trunk/pywikipedia/spellcheck.py
===================================================================
--- trunk/pywikipedia/spellcheck.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/spellcheck.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -439,7 +439,7 @@
         if not checklang:
             checklang = mysite.language()
         wikipedia.setAction(wikipedia.translate(mysite,msg))
-        filename = wikipedia.datafilepath('spelling',
+        filename = wikipedia.config.datafilepath('spelling',
                    'spelling-' + checklang + '.txt')
         print "Getting wordlist"
         try:
@@ -523,7 +523,7 @@
                 title = wikipedia.input(u"Which page to check now? (enter to stop)")
     finally:
         wikipedia.stopme()
-        filename = wikipedia.datafilepath('spelling',
+        filename = wikipedia.config.datafilepath('spelling',
                    'spelling-' + checklang + '.txt')
         if rebuild:
             list = knownwords.keys()
Modified: trunk/pywikipedia/splitwarning.py
===================================================================
--- trunk/pywikipedia/splitwarning.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/splitwarning.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
 """Splits a interwiki.log file into chunks of warnings separated by language"""
 #
 # (C) Rob W.W. Hooft, 2003
@@ -17,7 +17,7 @@
 count={}

 # TODO: Variable log filename
-fn = wikipedia.datafilepath("logs", "interwiki.log")
+fn = wikipedia.config.datafilepath("logs", "interwiki.log")
 logFile = codecs.open(fn, 'r', 'utf-8')
 rWarning = re.compile('WARNING: (?P<family>.+?): \[\[(?P<code>.+?):.*')
 for line in logFile:
@@ -28,7 +28,7 @@
         if code in wikipedia.getSite().languages():
             if not files.has_key(code):
                 files[code] = codecs.open(
-                    wikipedia.datafilepath('logs',
+                    wikipedia.config.datafilepath('logs',
                     'warning-%s-%s.log' % (family, code)),
                     'w', 'utf-8')
                 count[code] = 0
Modified: trunk/pywikipedia/userinterfaces/terminal_interface.py
===================================================================
--- trunk/pywikipedia/userinterfaces/terminal_interface.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/userinterfaces/terminal_interface.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -55,7 +55,7 @@
     std_out_handle = ctypes.windll.kernel32.GetStdHandle(-11)
     csbi = CONSOLE_SCREEN_BUFFER_INFO()
     ctypes.windll.kernel32.GetConsoleScreenBufferInfo(std_out_handle, ctypes.byref(csbi))
-    return csbi.wAttributes & 0x0007
+    return (csbi.wAttributes & 0x0007)

 # TODO: other colors:
 #0 = Black
@@ -86,7 +86,7 @@
 }

 windowsColors = {
-    'default': getDefaultTextColorInWindows(),
+    'default': 7,
     'lightblue': 9,
     'lightgreen': 10,
     'lightaqua': 11,
@@ -95,7 +95,6 @@
     'lightyellow': 14,
 }

-
 colorTagR = re.compile('\03{(?P<name>%s)}' % '|'.join(windowsColors.keys()))

 class UI:
Modified: trunk/pywikipedia/warnfile.py
===================================================================
--- trunk/pywikipedia/warnfile.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/warnfile.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -118,11 +118,11 @@
             if os.path.isabs(arg):
                 filename = arg
             else:
-                filename = wikipedia.datafilepath("logs", arg)
+                filename = wikipedia.config.datafilepath("logs", arg)

     if not filename:
         mysite = wikipedia.getSite()
-        filename = wikipedia.datafilepath('logs',
+        filename = wikipedia.config.datafilepath('logs',
                    'warning-%s-%s.log' % (mysite.family.name, mysite.lang))
     reader = WarnfileReader(filename)
     bot = WarnfileRobot(reader)
Modified: trunk/pywikipedia/watchlist.py
===================================================================
--- trunk/pywikipedia/watchlist.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/watchlist.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -34,7 +34,7 @@
         # Use cached copy if it exists.
         watchlist = cache[site]
     else:
-        fn = wikipedia.datafilepath('watchlists',
+        fn = wikipedia.config.datafilepath('watchlists',
                 'watchlist-%s-%s.dat' % (site.family.name, site.lang))
         try:
             # find out how old our saved dump is (in seconds)
@@ -72,14 +72,14 @@
         watchlist.append(pageName)
         # Save the watchlist to disk
         # The file is stored in the watchlists subdir. Create if necessary.
-        f = open(wikipedia.datafilepath('watchlists',
+        f = open(wikipedia.config.datafilepath('watchlists',
                 'watchlist-%s-%s.dat' % (site.family.name, site.lang)), 'w')
         pickle.dump(watchlist, f)
         f.close()

 def refresh_all():
     import dircache, time
-    filenames = dircache.listdir(wikipedia.datafilepath('watchlists'))
+    filenames = dircache.listdir(wikipedia.config.datafilepath('watchlists'))
     watchlist_filenameR = re.compile('watchlist-([a-z-:]+).dat')
     for filename in filenames:
         match = watchlist_filenameR.match(filename)
Modified: trunk/pywikipedia/weblinkchecker.py
===================================================================
--- trunk/pywikipedia/weblinkchecker.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/weblinkchecker.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -472,7 +472,7 @@

         self.reportThread = reportThread
         site = wikipedia.getSite()
         self.semaphore = threading.Semaphore()
-        self.datfilename = wikipedia.datafilepath('deadlinks',
+        self.datfilename = wikipedia.config.datafilepath('deadlinks',
                 'deadlinks-%s-%s.dat' % (site.family.name, site.lang))
         # Count the number of logged links, so that we can insert captions
@@ -500,7 +500,7 @@
             isoDate = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(date))
             errorReport += "** In [[%s]] on %s, %s\n" % (pageTitle, isoDate, error)
         wikipedia.output(u"** Logging link for deletion.")
-        txtfilename = wikipedia.datafilepath('deadlinks',
+        txtfilename = wikipedia.config.datafilepath('deadlinks',
                 'results-%s-%s.txt' % (site.family.name, site.lang))
         txtfile = codecs.open(txtfilename, 'a', 'utf-8')
         self.logCount += 1
Modified: trunk/pywikipedia/welcome.py
===================================================================
--- trunk/pywikipedia/welcome.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/welcome.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -449,10 +449,10 @@
             wikipedia.output(u'Error! - No fileName!')
             raise FilenameNotSet("No signature filename specified.")
         try:
-            f = codecs.open(wikipedia.datafilepath(fileSignName), 'r',
+            f = codecs.open(wikipedia.config.datafilepath(fileSignName), 'r',
                             encoding=config.console_encoding)
         except:
-            f = codecs.open(wikipedia.datafilepath(fileSignName), 'r',
+            f = codecs.open(wikipedia.config.datafilepath(fileSignName), 'r',
                             encoding='utf-8')
         signText = f.read()
         f.close()
@@ -629,7 +629,8 @@
         welcomer = u'{{subst:Benvenuto}} %s'

     welcomed_users = list()
-    if savedata == True and os.path.exists(wikipedia.datafilepath(filename)):
+    if savedata == True and os.path.exists(
+        wikipedia.config.datafilepath(filename)):
         f = file(filename)
         number_user = cPickle.load(f)
         yield number_user
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/wikipedia.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -55,9 +55,6 @@
     showDiff(oldtext, newtext): Prints the differences between oldtext
         and newtext on the screen

-    datafilepath: Return an absolute path to a data file in a standard
-        location.
-    shortpath: Return a relative form of the data file pathname.

 Wikitext manipulation functions: each of these takes a unicode string
 containing wiki text as its first argument, and returns a modified version
@@ -561,6 +558,10 @@
             previously.

         """
+        # TODO: is the description of nofollow_redirects accurate? I can't
+        # tell where nofollow_redirects is doing anything different than
+        # get_redirect!
+
         # NOTE: The following few NoPage exceptions could already be thrown at
         # the Page() constructor. They are raised here instead for convenience,
         # because all scripts are prepared for NoPage exceptions raised by
@@ -767,6 +768,9 @@
         """Return text of an old revision of this page; same options as get()."""
         # TODO: should probably check for bad pagename, NoPage, and other
         # exceptions that would prevent retrieving text, as get() does
+
+        # TODO: should this default to change_edit_time = False? If we're not
+        # getting the current version, why change the timestamps?
         return self._getEditPage(
             get_redirect=get_redirect, throttle=throttle, sysop=sysop,
             oldid=oldid,
@@ -1163,8 +1167,6 @@
                              change_edit_time = True, sysop = True)
             except NoPage:
                 pass
-
-
         # if posting to an Esperanto wiki, we must e.g. write Bordeauxx instead
         # of Bordeaux
         if self.site().lang == 'eo':
@@ -1451,6 +1453,7 @@
         """Return a list of Pages that this Page links to.

         Excludes interwiki and category links.
+
         """
         result = []
         try:
@@ -2577,7 +2580,7 @@
         self.setDelay(mindelay)

     def logfn(self):
-        return datafilepath('throttle.log')
+        return config.datafilepath('throttle.log')

     def checkMultiplicity(self):
         self.lock.acquire()
@@ -3382,7 +3385,7 @@
         fam = config.family
     try:
         # search for family module in the 'families' subdirectory
-        sys.path.append(datafilepath('families'))
+        sys.path.append(config.datafilepath('families'))
         exec "import %s_family as myfamily" % fam
     except ImportError:
         if fatal:
@@ -3669,7 +3672,7 @@
         else:
             tmp = '%s-%s-%s-login.data' % (
                   self.family.name, self.lang, username)
-            fn = datafilepath('login-data', tmp)
+            fn = config.datafilepath('login-data', tmp)
             if not os.path.exists(fn):
                 self._cookies = None
                 self.loginStatusKnown = True
@@ -5003,48 +5006,12 @@
             nonGlobalArgs.append(arg)
     return nonGlobalArgs

-def makepath(path):
-    """Return a normalized absolute version of the path argument.
-
-    - if the given path already exists in the filesystem
-      the filesystem is not modified.
-
-    - otherwise makepath creates directories along the given path
-      using the dirname() of the path. You may append
-      a '/' to the path if you want it to be a directory path.
-
-    from holger@trillke.net 2002/03/18
-
-    """
-    from os import makedirs
-    from os.path import normpath, dirname, exists, abspath
-
-    dpath = normpath(dirname(path))
-    if not exists(dpath): makedirs(dpath)
-    return normpath(abspath(path))
-
-def datafilepath(*filename):
-    """Return an absolute path to a data file in a standard location.
-
-    Argument(s) are zero or more directory names, optionally followed by a
-    data file name. The return path is offset to config.base_dir. Any
-    directories in the path that do not already exist are created.
-
-    """
-    return makepath(os.path.join(config.base_dir, *filename))
-
-def shortpath(path):
-    """Return a file path relative to config.base_dir."""
-    if path.startswith(config.base_dir):
-        return path[len(config.base_dir) + len(os.path.sep) : ]
-    return path
-
 #########################
 # Interpret configuration
 #########################

 # search for user interface module in the 'userinterfaces' subdirectory
-sys.path.append(datafilepath('userinterfaces'))
+sys.path.append(config.datafilepath('userinterfaces'))
 exec "import %s_interface as uiModule" % config.userinterface
 ui = uiModule.UI()
 verbose = 0
@@ -5263,7 +5230,7 @@
     if enabled:
         if not logname:
             logname = '%s.log' % calledModuleName()
-        logfn = datafilepath('logs', logname)
+        logfn = config.datafilepath('logs', logname)
         try:
             logfile = codecs.open(logfn, 'a', 'utf-8')
         except IOError:
@@ -5546,7 +5513,7 @@
 # Special opener in case we are using a site with authentication
 if config.authenticate:
     import urllib2, cookielib
-    COOKIEFILE = datafilepath('login-data', 'cookies.lwp')
+    COOKIEFILE = config.datafilepath('login-data', 'cookies.lwp')
     cj = cookielib.LWPCookieJar()
     if os.path.isfile(COOKIEFILE):
         cj.load(COOKIEFILE)
Modified: trunk/pywikipedia/wikipediatools.py
===================================================================
--- trunk/pywikipedia/wikipediatools.py	2007-11-06 10:49:56 UTC (rev 4511)
+++ trunk/pywikipedia/wikipediatools.py	2007-11-06 21:51:20 UTC (rev 4512)
@@ -2,14 +2,18 @@
 import os, sys


 def get_base_dir():
-    """ Determine the directory in which user-specific information is stored.
-    This is determined in the following order -
-    1. If the script was called with a -dir: argument, use the directory provided
-       in this argument
-    2. If the user has a PYWIKIBOT_DIR environment variable, use the value of it
-    3. If the script was started from a directory that contains a user-config.py
-       file, use this directory as the base
-    4. If all else fails, use the directory from which this module was loaded
+    """Return the directory in which user-specific information is stored.
+
+    This is determined in the following order -
+    1.  If the script was called with a -dir: argument, use the directory
+        provided in this argument
+    2.  If the user has a PYWIKIBOT_DIR environment variable, use the value
+        of it
+    3.  If the script was started from a directory that contains a
+        user-config.py file, use this directory as the base
+    4.  If all else fails, use the directory from which this module was
+        loaded.
+    """
     for arg in sys.argv[1:]:
         if arg.startswith("-dir:"):
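The reworded get_base_dir() docstring describes the same four-step search order as before. As a rough sketch of that precedence (not the literal function body, which lives in wikipediatools.py):

    import os, sys

    def base_dir_sketch():
        # 1. an explicit -dir: command-line argument wins
        for arg in sys.argv[1:]:
            if arg.startswith('-dir:'):
                return arg[len('-dir:'):]
        # 2. otherwise the PYWIKIBOT_DIR environment variable
        if 'PYWIKIBOT_DIR' in os.environ:
            return os.environ['PYWIKIBOT_DIR']
        # 3. otherwise the start directory, if it holds a user-config.py
        if os.path.exists('user-config.py'):
            return os.getcwd()
        # 4. otherwise the directory this module was loaded from
        return os.path.dirname(os.path.abspath(__file__))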