Revision: 5587
Author: russblau
Date: 2008-06-17 16:44:20 +0000 (Tue, 17 Jun 2008)
Log Message:
-----------
Better control over logging output.
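For example, a bot script can now enable verbose output for a single layer
without flooding the console with messages from the others (a minimal
sketch; assumes this rewrite branch is on the Python path):

    import pywikibot

    # Trace only the communication layer (http requests); the "data",
    # "wiki", and "bot" layers stay at the default INFO level.
    pywikibot.set_debug("comm")

    site = pywikibot.getSite("en", "wikipedia")
    page = pywikibot.Page(site, u"Sandbox")
    # DEBUG messages now appear for http traffic only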
Modified Paths:
--------------
branches/rewrite/pywikibot/__init__.py
branches/rewrite/pywikibot/comms/http.py
branches/rewrite/pywikibot/comms/threadedhttp.py
branches/rewrite/pywikibot/data/api.py
branches/rewrite/pywikibot/family.py
branches/rewrite/pywikibot/login.py
branches/rewrite/pywikibot/page.py
branches/rewrite/pywikibot/site.py
branches/rewrite/pywikibot/throttle.py
Modified: branches/rewrite/pywikibot/__init__.py
===================================================================
--- branches/rewrite/pywikibot/__init__.py 2008-06-16 17:52:16 UTC (rev 5586)
+++ branches/rewrite/pywikibot/__init__.py 2008-06-17 16:44:20 UTC (rev 5587)
@@ -10,10 +10,7 @@
__version__ = '$Id: $'
import logging
-logging.getLogger().setLevel(logging.DEBUG)
-
from exceptions import *
-
import config
_sites = {}
@@ -33,6 +30,8 @@
@type user: unicode
"""
+ logger = logging.getLogger("wiki")
+
if code == None:
code = default_code
if fam == None:
@@ -51,7 +50,7 @@
key = '%s:%s:%s' % (fam, code, user)
if not _sites.has_key(key):
_sites[key] = __Site(code=code, fam=fam, user=user)
- logging.debug("Instantiating Site object '%s'"
+ logger.debug("Instantiating Site object '%s'"
% _sites[key])
return _sites[key]
@@ -59,8 +58,9 @@
from page import Page, ImagePage, Category, Link
-# DEBUG
+# User interface functions (kept extremely simple for debugging)
+
def output(text):
print text
@@ -70,9 +70,36 @@
return getpass.getpass(prompt)
return raw_input(prompt)
+
+# Logger configuration
+
+logging.basicConfig()
+logging.getLogger().setLevel(logging.INFO)
+
+def set_debug(layer):
+ """Set the logger for specified layer to DEBUG level.
+
+ The framework has four layers by default (others can be added), each
+ designated by a string --
+
+ 1. "comm": the communication layer (http requests, etc.)
+ 2. "data": the raw data layer (API requests, XML dump parsing)
+ 3. "wiki": the wiki content representation layer (Page and Site objects)
+ 4. "bot": the application layer
+
+ This method sets the logger for any specified layer to the DEBUG level,
+ causing it to output extensive debugging information. If this method is
+ not called for a layer, the default logging setting is the INFO level.
+
+ This method does not check the 'layer' argument for validity.
+
+ """
+ logging.getLogger(layer).setLevel(logging.DEBUG)
+
+
# Throttle and thread handling
-threadpool = []
+threadpool = [] # add page-putting threads to this list as they are created
def stopme():
"""Drop this process from the throttle log, after pending threads finish.
@@ -81,9 +108,12 @@
at Python exit.
"""
+ logger = logging.getLogger("wiki")
+
+ logger.debug("stopme() called")
threadcount = sum(1 for thd in threadpool if thd.isAlive())
if threadcount:
- logging.info("Waiting for approximately %s threads to finish."
+ logger.info("Waiting for approximately %s threads to finish."
% threadcount)
for thd in threadpool:
if thd.isAlive():
@@ -91,10 +121,9 @@
# only need one drop() call because all throttles use the same global pid
try:
_sites[_sites.keys()[0]].throttle.drop()
- logging.info("Dropped throttle(s).")
+ logger.info("Dropped throttle(s).")
except IndexError:
pass
import atexit
atexit.register(stopme)
-
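The threadpool list and stopme() above cooperate so that background
page-saving threads finish before the process drops its throttle entry.
A minimal usage sketch (slow_put is a hypothetical worker, and Page.put()
is assumed to behave as in the old framework):

    import threading
    import pywikibot

    site = pywikibot.getSite("en", "wikipedia")
    page = pywikibot.Page(site, u"Sandbox")

    def slow_put(pg, text):
        # hypothetical background worker that saves a page
        pg.put(text)

    thd = threading.Thread(target=slow_put, args=(page, u"test text"))
    pywikibot.threadpool.append(thd)  # stopme() will join this if alive
    thd.start()
    # no explicit cleanup is needed: atexit runs stopme(), which waits
    # for live threads in threadpool and then drops the throttle entry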
Modified: branches/rewrite/pywikibot/comms/http.py
===================================================================
--- branches/rewrite/pywikibot/comms/http.py 2008-06-16 17:52:16 UTC (rev 5586)
+++ branches/rewrite/pywikibot/comms/http.py 2008-06-17 16:44:20 UTC (rev 5587)
@@ -31,7 +31,9 @@
import cookielib
import threadedhttp
+logger = logging.getLogger("comm")
+
# global variables
useragent = 'Pywikipediabot/2.0' # This should include some global version string
@@ -49,13 +51,13 @@
try:
cookie_jar.load()
except (IOError, cookielib.LoadError):
- logging.debug("Loading cookies failed.")
+ logger.debug("Loading cookies failed.")
else:
- logging.debug("Loaded cookies from file.")
+ logger.debug("Loaded cookies from file.")
# Build up HttpProcessors
-logging.info('Starting %i threads...' % numthreads)
+logger.info('Starting %i threads...' % numthreads)
for i in range(numthreads):
proc = threadedhttp.HttpProcessor(http_queue, cookie_jar, connection_pool)
proc.setDaemon(True)
@@ -66,10 +68,10 @@
def _flush():
for i in threads:
http_queue.put(None)
- logging.info('Waiting for threads to finish... ')
+ logger.info('Waiting for threads to finish... ')
for i in threads:
i.join()
- logging.debug('All threads finished.')
+ logger.debug('All threads finished.')
atexit.register(_flush)
# export cookie_jar to global namespace
@@ -99,6 +101,6 @@
raise request.data
if request.data[0].status != 200:
- logging.warning("Http response status %s" % request.data[0].status)
+ logger.warning("Http response status %s" % request.data[0].status)
return request.data[1]
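With the module-level "comm" logger in place, the synchronous wrapper
above can be traced selectively. A rough usage sketch (the relative-URI
form passed to request() is an assumption, not part of this change):

    import pywikibot
    from pywikibot.comms import http

    pywikibot.set_debug("comm")
    site = pywikibot.getSite("en", "wikipedia")
    # request() enqueues the call for the worker threads and blocks until
    # the response arrives; non-200 statuses are logged at WARNING level
    data = http.request(site, "/w/api.php?action=query&format=json")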
Modified: branches/rewrite/pywikibot/comms/threadedhttp.py
===================================================================
--- branches/rewrite/pywikibot/comms/threadedhttp.py 2008-06-16 17:52:16 UTC (rev 5586)
+++ branches/rewrite/pywikibot/comms/threadedhttp.py 2008-06-17 16:44:20 UTC (rev 5587)
@@ -30,11 +30,14 @@
import cookielib
import sys
+logger = logging.getLogger("comm")
+
+
# easy_install safeguarded dependencies
try:
import pkg_resources
except ImportError:
- logging.critical(
+ logger.critical(
"Error : You need the python module setuptools to use this module")
sys.exit(1)
pkg_resources.require("httplib2")
@@ -50,7 +53,7 @@
The pool drops excessive connections added.
"""
- logging.debug("Creating connection pool.")
+ logger.debug("Creating connection pool.")
self.connections = {}
self.lock = threading.Lock()
self.maxnum = maxnum
@@ -59,7 +62,7 @@
"""Destructor to close all connections in the pool."""
self.lock.acquire()
try:
- logging.debug("Closing connection pool (%s connections)"
+ logger.debug("Closing connection pool (%s connections)"
% len(self.connections))
for key in self.connections:
for connection in self.connections[key]:
@@ -81,7 +84,7 @@
try:
if identifier in self.connections:
if len(self.connections[identifier]) > 0:
- logging.debug("Retrieved connection from '%s' pool."
+ logger.debug("Retrieved connection from '%s' pool."
% identifier)
return self.connections[identifier].pop()
return None
@@ -101,7 +104,7 @@
self.connections[identifier] = []
if len(self.connections[identifier]) == self.maxnum:
- logging.debug('closing %s connection %r'
+ logger.debug('closing %s connection %r'
% (identifier, connection))
connection.close()
del connection
@@ -194,7 +197,7 @@
# Redirect hack: we want to regulate redirects
follow_redirects = self.follow_redirects
self.follow_redirects = False
- logging.debug('%r' % (
+ logger.debug('%r' % (
(uri.replace("%7C","|"),
method, body, headers, max_redirects, connection_type),))
try:
@@ -252,7 +255,7 @@
location)
if authority == None:
response['location'] = httplib2.urlparse.urljoin(uri, location)
- logging.debug('Relative redirect: changed [%s] to [%s]'
+ logger.debug('Relative redirect: changed [%s] to [%s]'
% (location, response['location']))
if response.status == 301 and method in ["GET", "HEAD"]:
response['-x-permanent-redirect-url'] = response['location']
@@ -318,11 +321,11 @@
def run(self):
# The Queue item is expected to be either an HttpRequest object
# or None (to shut down the thread)
- logging.debug('Thread started, waiting for requests.')
+ logger.debug('Thread started, waiting for requests.')
while (True):
item = self.queue.get()
if item is None:
- logging.debug('Shutting down thread.')
+ logger.debug('Shutting down thread.')
return
try:
item.data = self.http.request(*item.args, **item.kwargs)
Modified: branches/rewrite/pywikibot/data/api.py
===================================================================
--- branches/rewrite/pywikibot/data/api.py 2008-06-16 17:52:16 UTC (rev 5586)
+++ branches/rewrite/pywikibot/data/api.py 2008-06-17 16:44:20 UTC (rev 5587)
@@ -22,6 +22,8 @@
import pywikibot
from pywikibot import login
+logger = logging.getLogger("data")
+
lagpattern = re.compile(r"Waiting for [\d.]+: (?P<lag>\d+) seconds? lagged")
_modules = {} # cache for retrieved API parameter information
@@ -160,7 +162,7 @@
self.params[key] = self.params[key].encode(
self.site.encoding())
except Exception:
- logging.exception("key=%s, params=%s" % (key, self.params[key]))
+ logger.exception("key=%s, params=%s" % (key, self.params[key]))
params = urllib.urlencode(self.params)
while True:
# TODO catch http errors
@@ -181,7 +183,7 @@
uri = uri + "?" + params
rawdata = http.request(self.site, uri)
except Exception, e: #TODO: what exceptions can occur here?
- logging.warning(traceback.format_exc())
+ logger.warning(traceback.format_exc())
print uri, params
self.wait()
continue
@@ -192,7 +194,7 @@
except ValueError:
# if the result isn't valid JSON, there must be a server
# problem. Wait a few seconds and try again
- logging.warning(
+ logger.warning(
"Non-JSON response received from server %s; the server may be down."
% self.site)
print rawdata
@@ -222,7 +224,7 @@
if code == "maxlag":
lag = lagpattern.search(info)
if lag:
- logging.info(
+ logger.info(
"Pausing due to database lag: " + info)
self.site.throttle.lag(int(lag.group("lag")))
continue
@@ -240,7 +242,7 @@
self.max_retries -= 1
if self.max_retries < 0:
raise TimeoutError("Maximum retries attempted without success.")
- logging.warn("Waiting %s seconds before retrying." % self.retry_wait)
+ logger.warn("Waiting %s seconds before retrying." % self.retry_wait)
time.sleep(self.retry_wait)
# double the next wait, but do not exceed 120 seconds
self.retry_wait = min(120, self.retry_wait * 2)
@@ -326,7 +328,7 @@
else:
self.query_limit = int(param["max"])
self.prefix = _modules[mod]["prefix"]
- logging.debug("%s: Set query_limit to %i."
+ logger.debug("%s: Set query_limit to %i."
% (self.__class__.__name__, self.query_limit))
return
@@ -349,19 +351,19 @@
self.request[self.prefix+"limit"] = str(new_limit)
self.data = self.request.submit()
if not self.data or not isinstance(self.data, dict):
- logging.debug(
+ logger.debug(
"%s: stopped iteration because no dict retrieved from api."
% self.__class__.__name__)
return
if not ("query" in self.data
and self.resultkey in self.data["query"]):
- logging.debug(
+ logger.debug(
"%s: stopped iteration because 'query' and result keys not found in api response."
% self.__class__.__name__)
- logging.debug(self.data)
+ logger.debug(self.data)
return
pagedata = self.data["query"][self.resultkey]
- logging.debug("%s received %s; limit=%s"
+ logger.debug("%s received %s; limit=%s"
% (self.__class__.__name__, pagedata.keys(),
self.limit))
if isinstance(pagedata, dict):
@@ -549,7 +551,7 @@
if __name__ == "__main__":
from pywikibot import Site
- logging.getLogger().setLevel(logging.DEBUG)
+ logger.setLevel(logging.DEBUG)
mysite = Site("en", "wikipedia")
print "starting test...."
def _test():
Modified: branches/rewrite/pywikibot/family.py
===================================================================
--- branches/rewrite/pywikibot/family.py 2008-06-16 17:52:16 UTC (rev 5586)
+++ branches/rewrite/pywikibot/family.py 2008-06-17 16:44:20 UTC (rev 5587)
@@ -1,11 +1,13 @@
# -*- coding: utf-8 -*-
+
+__version__='$Id$'
+
import config
-
import logging
import re
import urllib
-__version__='$Id$'
+logger = logging.getLogger("wiki")
# Parent class for all wiki families
Modified: branches/rewrite/pywikibot/login.py
===================================================================
--- branches/rewrite/pywikibot/login.py 2008-06-16 17:52:16 UTC (rev 5586)
+++ branches/rewrite/pywikibot/login.py 2008-06-17 16:44:20 UTC (rev 5587)
@@ -51,6 +51,9 @@
import pywikibot
from pywikibot.exceptions import *
+logger = logging.getLogger("wiki")
+
+
# On some wikis you are only allowed to run a bot if there is a link to
# the bot's user page in a specific list.
botList = {
@@ -234,17 +237,17 @@
# self.password = self.password.encode(self.site.encoding())
- logging.info(u"Logging in to %s as %s" % (self.site, self.username))
+ logger.info(u"Logging in to %s as %s" % (self.site, self.username))
cookiedata = self.getCookie()
if cookiedata:
self.storecookiedata(cookiedata)
- logging.info(u"Should be logged in now")
+ logger.info(u"Should be logged in now")
# Show a warning according to the local bot policy
if not self.botAllowed():
- logging.error(u'*** Your username is not listed on [[%s]].\n*** Please make sure you are allowed to use the robot before actually using it!' % botList[self.site.family.name][self.site.code])
+ logger.error(u'*** Your username is not listed on [[%s]].\n*** Please make sure you are allowed to use the robot before actually using it!' % botList[self.site.family.name][self.site.code])
return True
else:
- logging.error(u"Login failed. Wrong password or CAPTCHA answer?")
+ logger.error(u"Login failed. Wrong password or CAPTCHA answer?")
if retry:
self.password = None
return self.login(retry = True)
@@ -283,7 +286,7 @@
for lang in namedict[familyName].iterkeys():
site = pywikibot.getSite(code=lang, fam=familyName)
if not forceLogin and site.loggedInAs(sysop = sysop) != None:
- logging.info(u'Already logged in on %s' % site)
+ logger.info(u'Already logged in on %s' % site)
else:
loginMan = LoginManager(password, sysop = sysop, site = site)
loginMan.login()
Modified: branches/rewrite/pywikibot/page.py
===================================================================
--- branches/rewrite/pywikibot/page.py 2008-06-16 17:52:16 UTC (rev 5586)
+++ branches/rewrite/pywikibot/page.py 2008-06-17 16:44:20 UTC (rev 5587)
@@ -20,6 +20,8 @@
import unicodedata
import urllib
+logger = logging.getLogger("wiki")
+
reNamespace = re.compile("^(.+?) *: *(.*)$")
@@ -66,10 +68,10 @@
"""
if insite is not None:
- logging.debug(
+ logger.debug(
"The 'insite' option in Page constructor is deprecated.")
if defaultNamespace is not None:
- logging.debug(
+ logger.debug(
"The 'defaultNamespace' option in Page constructor is deprecated.")
if isinstance(source, pywikibot.site.BaseSite):
self._site = source
@@ -167,7 +169,7 @@
if underscore or asUrl:
title = title.replace(u' ', u'_')
if savetitle:
- logging.debug(
+ logger.debug(
u"Page.title(savetitle=...) is deprecated.")
if savetitle or asUrl:
encodedTitle = title.encode(self.site().encoding())
@@ -200,7 +202,7 @@
"""
if underscore:
- logging.debug(
+ logger.debug(
u"Page.section(underscore=...) is deprecated.")
if self._section:
return self._section
@@ -287,11 +289,11 @@
"""
if throttle is not None:
- logging.debug("Page.get(throttle) option is deprecated.")
+ logger.debug("Page.get(throttle) option is deprecated.")
if nofollow_redirects is not None:
- logging.debug("Page.get(nofollow_redirects) option is deprecated.")
+ logger.debug("Page.get(nofollow_redirects) option is deprecated.")
if change_edit_time is not None:
- logging.debug("Page.get(change_edit_time) option is deprecated.")
+ logger.debug("Page.get(change_edit_time) option is deprecated.")
if force:
# When forcing, we retry the page no matter what. Old exceptions
# do not apply any more.
@@ -320,13 +322,13 @@
"""
if throttle is not None:
- logging.debug(
+ logger.debug(
"Page.getOldVersion(throttle) option is deprecated.")
if nofollow_redirects is not None:
- logging.debug(
+ logger.debug(
"Page.getOldVersion(nofollow_redirects) option is deprecated.")
if change_edit_time is not None:
- logging.debug(
+ logger.debug(
"Page.getOldVersion(change_edit_time) option is deprecated.")
if force or not oldid in self._revisions:
self.site().loadrevisions(self, getText=True, ids=oldid,
@@ -628,11 +630,11 @@
done = self.site().editpage(self, summary=comment, minor=minor,
watch=watch, unwatch=unwatch)
if not done:
- logging.warn("Page %s not saved" % self.title(asLink=True))
+ logger.warn("Page %s not saved" % self.title(asLink=True))
else:
- logging.info("Page %s saved" % self.title(asLink=True))
+ logger.info("Page %s saved" % self.title(asLink=True))
except pywikibot.Error, err:
- logging.exception("Error saving page %s" % self.title(asLink=True))
+ logger.exception("Error saving page %s" % self.title(asLink=True))
if callback:
callback(self, err)
@@ -706,10 +708,10 @@
"""
if followRedirects is not None:
- logging.debug(
+ logger.debug(
u"Page.imagelinks(followRedirects) option is deprecated.")
if loose is not None:
- logging.debug(
+ logger.debug(
u"Page.imagelinks(loose) option is deprecated.")
return self.site().pageimages(self)
@@ -744,7 +746,7 @@
# follow_redirects makes no sense here because category membership
# doesn't follow redirects
if nofollow_redirects is not None:
- logging.debug(
+ logger.debug(
u"Page.categories(nofollow_redirects) option is deprecated.")
return self.site().pagecategories(self, withSortKey=withSortKey)
@@ -832,10 +834,10 @@
"""
if throttle is not None:
- logging.debug(
+ logger.debug(
u"Page.move: throttle option is deprecated.")
if reason is None:
- logging.info(u'Moving %s to [[%s]].'
+ logger.info(u'Moving %s to [[%s]].'
% (self.title(asLink=True), newtitle))
reason = pywikibot.input(u'Please enter a reason for the move:')
# TODO: implement "safe" parameter
@@ -854,10 +856,10 @@
"""
if throttle is not None:
- logging.debug(
+ logger.debug(
u"Page.delete: throttle option is deprecated.")
if reason is None:
- logging.info(u'Deleting %s.' % (self.title(asLink=True)))
+ logger.info(u'Deleting %s.' % (self.title(asLink=True)))
reason = pywikibot.input(u'Please enter a reason for the deletion:')
answer = u'y'
if prompt and not hasattr(self.site(), '_noDeletePrompt'):
@@ -932,10 +934,10 @@
"""
if throttle is not None:
- logging.debug(
+ logger.debug(
u"Page.undelete: throttle option is deprecated.")
if comment is None:
- logging.info(u'Preparing to undelete %s.'
+ logger.info(u'Preparing to undelete %s.'
% (self.title(asLink=True)))
comment = pywikibot.input(
u'Please enter a reason for the undeletion:')
@@ -958,14 +960,14 @@
"""
if throttle is not None:
- logging.debug(
+ logger.debug(
u"Page.protect: throttle option is deprecated.")
if reason is None:
if unprotect:
un = u'un'
else:
un = u''
- logging.info(u'Preparing to %sprotect %s.'
+ logger.info(u'Preparing to %sprotect %s.'
% (un, self.title(asLink=True)))
reason = pywikibot.input(u'Please enter a reason for the action:')
if unprotect:
@@ -990,7 +992,7 @@
DEPRECATED: use Site.encoding() instead
"""
- logging.debug(u"Page.encoding() is deprecated; use Site.encoding().")
+ logger.debug(u"Page.encoding() is deprecated; use Site.encoding().")
return self.site().encoding()
def titleWithoutNamespace(self, underscore=False):
@@ -999,7 +1001,7 @@
DEPRECATED: use self.title(withNamespace=False) instead.
"""
- logging.debug(
+ logger.debug(
u"Page.titleWithoutNamespace() method is deprecated.")
return self.title(underscore=underscore, withNamespace=False,
withSection=False)
@@ -1010,7 +1012,7 @@
DEPRECATED: use self.title(withSection=False) instead.
"""
- logging.debug(
+ logger.debug(
u"Page.sectionFreeTitle() method is deprecated.")
return self.title(underscore=underscore, withSection=False)
@@ -1020,7 +1022,7 @@
DEPRECATED: use self.title(asLink=True) instead.
"""
- logging.debug(u"Page.aslink() method is deprecated.")
+ logger.debug(u"Page.aslink() method is deprecated.")
return self.title(asLink=True, forceInterwiki=forceInterwiki,
allowInterwiki=not noInterwiki, textlink=textlink)
@@ -1030,7 +1032,7 @@
DEPRECATED: use self.title(asUrl=True) instead.
"""
- logging.debug(u"Page.urlname() method is deprecated.")
+ logger.debug(u"Page.urlname() method is deprecated.")
return self.title(asUrl=True)
####### DISABLED METHODS (warnings provided) ######
@@ -1039,12 +1041,12 @@
def removeImage(self, image, put=False, summary=None, safe=True):
"""Old method to remove all instances of an image from page."""
- logging.warning(u"Page.removeImage() is no longer supported.")
+ logger.warning(u"Page.removeImage() is no longer supported.")
def replaceImage(self, image, replacement=None, put=False, summary=None,
safe=True):
"""Old method to replace all instances of an image with another."""
- logging.warning(u"Page.replaceImage() is no longer supported.")
+ logger.warning(u"Page.replaceImage() is no longer supported.")
class ImagePage(Page):
@@ -1107,7 +1109,7 @@
def getFileMd5Sum(self):
"""Return image file's MD5 checksum."""
- logging.debug(
+ logger.debug(
"ImagePage.getFileMd5Sum() is deprecated; use getFileSHA1Sum().")
# FIXME: MD5 might be performed on incomplete file due to server disconnection
# (see bug #1795683).
@@ -1159,7 +1161,7 @@
"""
if sortKey is not None:
- logging.debug(
+ logger.debug(
"The 'sortKey' option in Category constructor is deprecated.")
Page.__init__(self, source, title, 14)
if self.namespace() != 14:
@@ -1183,7 +1185,7 @@
"""
if forceInterwiki is not None \
or textlink is not None or noInterwiki is not None:
- logging.debug("All arguments to Category.aslink() are deprecated.")
+ logger.debug("All arguments to Category.aslink() are deprecated.")
if sortKey:
titleWithSortKey = '%s|%s' % (self.title(withSection=False),
self.sortKey)
@@ -1261,11 +1263,11 @@
catname = self.site().category_namespace() + ':' + catname
targetCat = Category(self.site(), catname)
if targetCat.exists():
- logging.warn('Target page %s already exists!'
+ logger.warn('Target page %s already exists!'
% targetCat.title())
return False
else:
- logging.info('Moving text from %s to %s.'
+ logger.info('Moving text from %s to %s.'
% (self.title(), targetCat.title()))
authors = ', '.join(self.contributingUsers())
creationSummary = pywikibot.translate(
@@ -1297,11 +1299,11 @@
catname = self.site().category_namespace() + ':' + catname
targetCat = Category(self.site(), catname)
if targetCat.exists():
- logging.warn('Target page %s already exists!'
+ logger.warn('Target page %s already exists!'
% targetCat.title())
return False
else:
- logging.info('Moving text from %s to %s.'
+ logger.info('Moving text from %s to %s.'
% (self.title(), targetCat.title()))
authors = ', '.join(self.contributingUsers())
creationSummary = pywikibot.translate(
@@ -1325,22 +1327,22 @@
#### DEPRECATED METHODS ####
def subcategoriesList(self, recurse=False):
"""DEPRECATED: Equivalent to list(self.subcategories(...))"""
- logging.debug("Category.subcategoriesList() method is deprecated.")
+ logger.debug("Category.subcategoriesList() method is deprecated.")
return sorted(list(set(self.subcategories(recurse))))
def articlesList(self, recurse=False):
"""DEPRECATED: equivalent to list(self.articles(...))"""
- logging.debug("Category.articlesList() method is deprecated.")
+ logger.debug("Category.articlesList() method is deprecated.")
return sorted(list(set(self.articles(recurse))))
def supercategories(self):
"""DEPRECATED: equivalent to self.categories()"""
- logging.debug("Category.supercategories() method is deprecated.")
+ logger.debug("Category.supercategories() method is deprecated.")
return self.categories()
def supercategoriesList(self):
"""DEPRECATED: equivalent to list(self.categories(...))"""
- logging.debug("Category.articlesList() method is deprecated.")
+ logger.debug("Category.articlesList() method is deprecated.")
return sorted(list(set(self.categories())))
Modified: branches/rewrite/pywikibot/site.py
===================================================================
--- branches/rewrite/pywikibot/site.py 2008-06-16 17:52:16 UTC (rev 5586)
+++ branches/rewrite/pywikibot/site.py 2008-06-17 16:44:20 UTC (rev 5587)
@@ -27,7 +27,9 @@
import threading
import urllib
+logger = logging.getLogger("wiki")
+
class PageInUse(pywikibot.Error):
"""Page cannot be reserved for writing due to existing lock."""
@@ -55,7 +57,7 @@
exec "import %s_family as myfamily" % fam
except ImportError:
if fatal:
- logging.exception(u"""\
+ logger.exception(u"""\
Error importing the %s family. This probably means the family
does not exist. Also check your configuration file."""
% fam)
@@ -384,7 +386,7 @@
DEPRECATED (use .user() method instead)
"""
- logging.debug("Site.loggedInAs() method is deprecated.")
+ logger.debug("Site.loggedInAs() method is deprecated.")
return self.logged_in(sysop) and self.user()
def login(self, sysop=False):
@@ -626,11 +628,11 @@
rvgen.request["titles"] = "|".join(cache.keys())
rvgen.request[u"rvprop"] = \
u"ids|flags|timestamp|user|comment|content"
- logging.info(u"Retrieving %s pages from %s."
+ logger.info(u"Retrieving %s pages from %s."
% (len(cache), self)
)
for pagedata in rvgen:
-# logging.debug("Preloading %s" % pagedata)
+# logger.debug("Preloading %s" % pagedata)
try:
if pagedata['title'] not in cache:
raise Error(
@@ -638,9 +640,9 @@
% pagedata['title']
)
except KeyError:
- logging.debug("No 'title' in %s" % pagedata)
- logging.debug("pageids=%s" % pageids)
- logging.debug("titles=%s" % cache.keys())
+ logger.debug("No 'title' in %s" % pagedata)
+ logger.debug("pageids=%s" % pageids)
+ logger.debug("titles=%s" % cache.keys())
continue
page = cache[pagedata['title']]
api.update_page(page, pagedata)
@@ -1019,9 +1021,9 @@
if not isinstance(namespace, int):
raise Error("allpages: only one namespace permitted.")
if throttle is not None:
- logging.debug("allpages: the 'throttle' parameter is deprecated.")
+ logger.debug("allpages: the 'throttle' parameter is deprecated.")
if includeRedirects is not None:
- logging.debug(
+ logger.debug(
"allpages: the 'includeRedirect' parameter is deprecated.")
if includeRedirects:
if includeRedirects == "only":
@@ -1126,7 +1128,7 @@
def categories(self, number=10, repeat=False):
"""Deprecated; retained for backwards-compatibility"""
- logging.debug(
+ logger.debug(
"Site.categories() method is deprecated; use .allcategories()")
if repeat:
limit = None
@@ -1216,12 +1218,12 @@
if starttime and endtime:
if reverse:
if starttime > endtime:
- logging.error(
+ logger.error(
"blocks: starttime must be before endtime with reverse=True")
return
else:
if endtime < starttime:
- logging.error(
+ logger.error(
"blocks: endtime must be before starttime with reverse=False")
return
bkgen = api.ListGenerator("blocks", site=self)
@@ -1417,7 +1419,7 @@
"""
if number is not None:
- logging.debug("search: number parameter is deprecated; use limit")
+ logger.debug("search: number parameter is deprecated; use limit")
limit = number
if not searchstring:
raise Error("search: searchstring cannot be empty")
@@ -1426,7 +1428,7 @@
srgen = PageGenerator("search", gsrsearch=searchstring, gsrwhat=where,
site=self)
if not namespaces:
- logging.warning("search: namespaces cannot be empty; using [0].")
+ logger.warning("search: namespaces cannot be empty; using [0].")
namespaces = [0]
if isinstance(namespaces, basestring):
srgen.request["gsrnamespace"] = namespaces
@@ -1732,11 +1734,11 @@
while True:
try:
result = req.submit()
- logging.debug("editpage response: %s" % result)
+ logger.debug("editpage response: %s" % result)
except api.APIError, err:
self.unlock_page(page)
if err.code.endswith("anon") and self.logged_in():
- logging.debug(
+ logger.debug(
"editpage: received '%s' even though bot is logged in" % err.code)
errdata = {
'site': self,
@@ -1752,7 +1754,7 @@
raise EditConflict(self._ep_errors[err.code] % errdata)
if err.code in self._ep_errors:
raise Error(self._ep_errors[err.code] % errdata)
- logging.debug("editpage: Unexpected error code '%s' received."
+ logger.debug("editpage: Unexpected error code '%s' received."
% err.code)
raise
assert ("edit" in result and "result" in result["edit"]), result
@@ -1781,21 +1783,21 @@
continue
else:
self.unlock_page(page)
- logging.error(
+ logger.error(
"editpage: unknown CAPTCHA response %s, page not saved"
% captcha)
return False
else:
self.unlock_page(page)
- logging.error("editpage: unknown failure reason %s"
+ logger.error("editpage: unknown failure reason %s"
% str(result))
return False
else:
self.unlock_page(page)
- logging.error(
+ logger.error(
"editpage: Unknown result code '%s' received; page not saved"
% result["edit"]["result"])
- logging.error(str(result))
+ logger.error(str(result))
return False
# catalog of move errors for use in error messages
@@ -1861,11 +1863,11 @@
req['noredirect'] = ""
try:
result = req.submit()
- logging.debug("movepage response: %s" % result)
+ logger.debug("movepage response: %s" % result)
except api.APIError, err:
self.unlock_page(page)
if err.code.endswith("anon") and self.logged_in():
- logging.debug(
+ logger.debug(
"movepage: received '%s' even though bot is logged in" % err.code)
errdata = {
'site': self,
@@ -1877,16 +1879,16 @@
}
if err.code in self._mv_errors:
raise Error(self._mv_errors[err.code] % errdata)
- logging.debug("movepage: Unexpected error code '%s' received."
+ logger.debug("movepage: Unexpected error code '%s' received."
% err.code)
raise
self.unlock_page(page)
if "move" not in result:
- logging.error("movepage: %s" % result)
+ logger.error("movepage: %s" % result)
raise Error("movepage: unexpected response")
# TODO: Check for talkmove-error messages
if "talkmove-error-code" in result["move"]:
- logging.warning(u"movepage: Talk page %s not moved"
+ logger.warning(u"movepage: Talk page %s not moved"
% (page.toggleTalkPage().title(asLink=True)))
return pywikibot.Page(page, newtitle)
Modified: branches/rewrite/pywikibot/throttle.py
===================================================================
--- branches/rewrite/pywikibot/throttle.py 2008-06-16 17:52:16 UTC (rev 5586)
+++ branches/rewrite/pywikibot/throttle.py 2008-06-17 16:44:20 UTC (rev 5587)
@@ -17,6 +17,8 @@
import threading
import time
+logger = logging.getLogger("wiki")
+
pid = False # global process identifier
# Don't check for other processes unless this is set
@@ -58,7 +60,7 @@
def checkMultiplicity(self):
global pid
self.lock.acquire()
- logging.debug("Checking multiplicity: pid = %s" % pid)
+ logger.debug("Checking multiplicity: pid = %s" % pid)
try:
processes = []
my_pid = 1
@@ -107,7 +109,7 @@
f.close()
self.process_multiplicity = count
if self.verbosedelay:
- logging.info(
+ logger.info(
u"Found %s processes running, including the current process."
% count)
finally:
@@ -216,7 +218,7 @@
self.next_multiplicity = math.log(1+requestsize)/math.log(2.0)
# Announce the delay if it exceeds a preset limit
if waittime > config.noisysleep:
- logging.info(u"Sleeping for %.1f seconds, %s"
+ logger.info(u"Sleeping for %.1f seconds, %s"
% (waittime,
time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime()))
@@ -246,7 +248,7 @@
wait = delay - (time.time() - started)
if wait > 0:
if wait > config.noisysleep:
- logging.warn(u"Sleeping for %.1f seconds, %s"
+ logger.warn(u"Sleeping for %.1f seconds, %s"
% (wait,
time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime()))