http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11394
Revision: 11394
Author: drtrigon
Date: 2013-04-19 23:20:57 +0000 (Fri, 19 Apr 2013)
Log Message:
-----------
improvement; follow-up to r11390 and r11393 more adaptation to rewrite
Modified Paths:
--------------
trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2013-04-19 21:28:02 UTC (rev 11393)
+++ trunk/pywikipedia/wikipedia.py 2013-04-19 23:20:57 UTC (rev 11394)
@@ -9185,9 +9185,10 @@
log.log(_level, text, extra=context, **kwargs)
+ # instead of logging handler for output to console (StreamHandler)
if _level <> INFO:
text = u'%s: %s' % (logging.getLevelName(_level), text)
- _outputOld(text)
+ _outputOld(text, decoder, newline, (_level == STDOUT), **kwargs)
def _outputOld(text, decoder=None, newline=True, toStdout=False, **kwargs):
"""Output a message to the user via the userinterface.
@@ -9223,9 +9224,6 @@
text = unicode(text, 'iso8859-1')
if newline:
text += u'\n'
- caller = inspect.getouterframes(inspect.currentframe())[1][3]
- if not (caller == 'logoutput'):
- logoutput(text)
if input_lock.locked():
cache_output(text, toStdout = toStdout)
else:
@@ -9241,8 +9239,35 @@
(args, kwargs) = output_cache.pop(0)
ui.output(*args, **kwargs)
-output = _outputOld
+def output(text, decoder=None, newline=True, toStdout=False, **kwargs):
+ """Output a message to the user via the userinterface.
+ Works like print, but uses the encoding used by the user's console
+ (console_encoding in the configuration file) instead of ASCII.
+
+ If decoder is None, text should be a unicode string. Otherwise it
+ should be encoded in the given encoding.
+
+ If newline is True, a linebreak will be added after printing the text.
+
+ If toStdout is True, the text will be sent to standard output,
+ so that it can be piped to another process. All other text will
+ be sent to stderr. See: http://en.wikipedia.org/wiki/Pipeline_%28Unix%29
+
+ text can contain special sequences to create colored output. These
+ consist of the escape character \03 and the color name in curly braces,
+ e. g. \03{lightpurple}. \03{default} resets the color.
+
+ Other keyword arguments are passed unchanged to the logger; so far, the
+ only argument that is useful is "exc_info=True", which causes the
+ log message to include an exception traceback.
+
+ """
+    if toStdout: # maintained for backwards-compatibility only
+ logoutput(text, decoder, newline, STDOUT, **kwargs)
+ else:
+ logoutput(text, decoder, newline, INFO, **kwargs)
+
def stdout(text, decoder=None, newline=True, **kwargs):
"""Output script results to the user via the userinterface."""
logoutput(text, decoder, newline, STDOUT, **kwargs)
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11393
Revision: 11393
Author: drtrigon
Date: 2013-04-19 21:28:02 +0000 (Fri, 19 Apr 2013)
Log Message:
-----------
improvement; follow-up to r11390 in order to use proper output/logging functions
Modified Paths:
--------------
trunk/pywikipedia/blockpageschecker.py
trunk/pywikipedia/blockreview.py
trunk/pywikipedia/category.py
trunk/pywikipedia/catimages.py
trunk/pywikipedia/catlib.py
trunk/pywikipedia/censure.py
trunk/pywikipedia/checkimages.py
trunk/pywikipedia/featured.py
trunk/pywikipedia/fixing_redirects.py
trunk/pywikipedia/followlive.py
trunk/pywikipedia/imagecopy_self.py
trunk/pywikipedia/imagerecat.py
trunk/pywikipedia/interwiki.py
trunk/pywikipedia/login.py
trunk/pywikipedia/lonelypages.py
trunk/pywikipedia/movepages.py
trunk/pywikipedia/patrol.py
trunk/pywikipedia/pywikibot/comms/http.py
trunk/pywikipedia/rciw.py
trunk/pywikipedia/redirect.py
trunk/pywikipedia/reflinks.py
trunk/pywikipedia/replace.py
trunk/pywikipedia/selflink.py
trunk/pywikipedia/subster.py
trunk/pywikipedia/subster_irc.py
trunk/pywikipedia/sum_disc.py
trunk/pywikipedia/table2wiki.py
trunk/pywikipedia/upload.py
trunk/pywikipedia/us-states.py
trunk/pywikipedia/welcome.py
trunk/pywikipedia/wikipedia.py
trunk/pywikipedia/xmlreader.py
Modified: trunk/pywikipedia/blockpageschecker.py
===================================================================
--- trunk/pywikipedia/blockpageschecker.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/blockpageschecker.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -354,7 +354,7 @@
if changes == 0:
# We tried to fix edit-protection templates, but it did not work.
- pywikibot.output('Warning : No edit-protection template could be found')
+ pywikibot.warning('No edit-protection template could be found')
if moveBlockCheck and changes > -1:
# checking move protection now
@@ -402,7 +402,7 @@
if changes == 0:
# We tried to fix move-protection templates, but it did not work.
- pywikibot.output('Warning : No move-protection template could be found')
+ pywikibot.warning('No move-protection template could be found')
if oldtext != text:
Modified: trunk/pywikipedia/blockreview.py
===================================================================
--- trunk/pywikipedia/blockreview.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/blockreview.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -82,7 +82,7 @@
self.unblock_tpl[self.site.lang],
defaultNamespace=10)
except KeyError:
- pywikibot.output(u'ERROR: Language "%s" not supported by this bot.'
+ pywikibot.error(u'Language "%s" not supported by this bot.'
% self.site.lang)
else:
for page in genPage.getReferences(follow_redirects=False,
Modified: trunk/pywikipedia/category.py
===================================================================
--- trunk/pywikipedia/category.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/category.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -302,9 +302,9 @@
% page.title(asLink=True))
except pywikibot.IsRedirectPage, arg:
redirTarget = pywikibot.Page(self.site, arg.args[0])
- pywikibot.output(u"WARNING: Page %s is a redirect to %s; skipping."
- % (page.title(asLink=True),
- redirTarget.title(asLink=True)))
+ pywikibot.warning(u"Page %s is a redirect to %s; skipping."
+ % (page.title(asLink=True),
+ redirTarget.title(asLink=True)))
else:
return text
return None
Modified: trunk/pywikipedia/catimages.py
===================================================================
--- trunk/pywikipedia/catimages.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/catimages.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -34,8 +34,6 @@
X-untagged[:#] Use daniel's tool as generator:
X http://toolserver.org/~daniel/WikiSense/UntaggedImages.php
-
-
"""
#
@@ -214,10 +212,10 @@
# how small and how many features are detected as faces (or eyes)
scale = max([1., np.average(np.array(img.shape)[0:2]/500.)])
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_detect_Faces_CV]')
+ pywikibot.warning(u'unknown file type [_detect_Faces_CV]')
return
except AttributeError:
- pywikibot.output(u'WARNING: unknown file type [_detect_Faces_CV]')
+ pywikibot.warning(u'unknown file type [_detect_Faces_CV]')
return
#detectAndDraw( image, cascade, nestedCascade, scale );
@@ -419,10 +417,10 @@
scale = max([1., np.average(np.array(img.shape)[0:2]/400.)])
#scale = max([1., np.average(np.array(img.shape)[0:2]/300.)])
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_detect_People_CV]')
+ pywikibot.warning(u'unknown file type [_detect_People_CV]')
return
except AttributeError:
- pywikibot.output(u'WARNING: unknown file type [_detect_People_CV]')
+ pywikibot.warning(u'unknown file type [_detect_People_CV]')
return
# similar to face detection
@@ -529,10 +527,10 @@
# how small and how many features are detected
scale = max([1., np.average(np.array(img.shape)[0:2]/500.)])
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_detect_Geometry_CV]')
+ pywikibot.warning(u'unknown file type [_detect_Geometry_CV]')
return self._buffer_Geometry
except AttributeError:
- pywikibot.output(u'WARNING: unknown file type [_detect_Geometry_CV]')
+ pywikibot.warning(u'unknown file type [_detect_Geometry_CV]')
return self._buffer_Geometry
# similar to face or people detection
@@ -750,7 +748,7 @@
(l, t) = (0, 0)
i = im
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_detect_SegmentColors_JSEGnPIL]')
+ pywikibot.warning(u'unknown file type [_detect_SegmentColors_JSEGnPIL]')
return
result = []
@@ -764,7 +762,7 @@
##(pic, scale) = self._util_detect_ColorSegments_JSEG(pic) # (final split)
#hist = self._util_get_ColorSegmentsHist_PIL(i, pic, scale) #
except TypeError:
- pywikibot.output(u'WARNING: unknown file type [_detect_SegmentColors_JSEGnPIL]')
+ pywikibot.warning(u'unknown file type [_detect_SegmentColors_JSEGnPIL]')
return
i = 0
# (may be do an additional region merge according to same color names...)
@@ -805,7 +803,7 @@
i = Image.open(self.image_path_JPEG)
h = i.histogram()
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_detect_AverageColor_PILnCV]')
+ pywikibot.warning(u'unknown file type [_detect_AverageColor_PILnCV]')
return
result = self._util_average_Color_colormath(h)
@@ -863,7 +861,7 @@
try:
i = Image.open(self.image_path)
except IOError:
- pywikibot.output(u'WARNING: unknown (image) file type [_detect_Properties_PIL]')
+ pywikibot.warning(u'unknown (image) file type [_detect_Properties_PIL]')
return
# http://mail.python.org/pipermail/image-sig/1999-May/000740.html
@@ -906,7 +904,7 @@
# result = {}
# # DO NOT use ImageMagick (identify) instead of PIL to get these info !!
else:
- pywikibot.output(u'WARNING: unknown (generic) file type [_detect_Properties_PIL]')
+ pywikibot.warning(u'unknown (generic) file type [_detect_Properties_PIL]')
return
result['Dimensions'] = self.image_size
@@ -1001,7 +999,7 @@
try:
smallImg = im.resize( tuple(np.int_(np.array(im.size)/scale)), Image.ANTIALIAS )
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_util_detect_ColorSegments_JSEG]')
+ pywikibot.warning(u'unknown file type [_util_detect_ColorSegments_JSEG]')
return
#im.thumbnail(size, Image.ANTIALIAS) # size is 640x480
@@ -1070,7 +1068,7 @@
try:
smallImg = im.resize( tuple(np.int_(np.array(im.size)/scale)), Image.ANTIALIAS )
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_util_get_ColorSegmentsHist_PIL]')
+ pywikibot.warning(u'unknown file type [_util_get_ColorSegmentsHist_PIL]')
return
imgsize = float(smallImg.size[0]*smallImg.size[1])
@@ -1190,10 +1188,10 @@
# how small and how many features are detected
scale = max([1., np.average(np.array(img.shape)[0:2]/maxdim)])
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_detect_Trained_CV]')
+ pywikibot.warning(u'unknown file type [_detect_Trained_CV]')
return
except AttributeError:
- pywikibot.output(u'WARNING: unknown file type [_detect_Trained_CV]')
+ pywikibot.warning(u'unknown file type [_detect_Trained_CV]')
return
# similar to face detection
@@ -1358,10 +1356,10 @@
# pdfinterp.process_pdf(rsrcmgr, device, fp, set(), maxpages=0, password='',
# caching=True, check_extractable=False)
#except AssertionError:
- # pywikibot.output(u'WARNING: pdfminer missed, may be corrupt [_detect_EmbeddedText_poppler]')
+ # pywikibot.warning(u'pdfminer missed, may be corrupt [_detect_EmbeddedText_poppler]')
# return
#except TypeError:
- # pywikibot.output(u'WARNING: pdfminer missed, may be corrupt [_detect_EmbeddedText_poppler]')
+ # pywikibot.warning(u'pdfminer missed, may be corrupt [_detect_EmbeddedText_poppler]')
# return
#fp.close()
#device.close()
@@ -1417,7 +1415,7 @@
scale = max([1., np.average(np.array(img.size)/200.)])
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_recognize_OpticalCodes_dmtxNzbar]')
+ pywikibot.warning(u'unknown file type [_recognize_OpticalCodes_dmtxNzbar]')
return
smallImg = img.resize( (int(img.size[0]/scale), int(img.size[1]/scale)) )
@@ -1454,7 +1452,7 @@
img = Image.open(self.image_path_JPEG).convert('L')
width, height = img.size
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_recognize_OpticalCodes_dmtxNzbar]')
+ pywikibot.warning(u'unknown file type [_recognize_OpticalCodes_dmtxNzbar]')
return
scanner = zbar.ImageScanner()
@@ -1504,10 +1502,10 @@
scale = max([1., np.average(np.array(im.shape)[0:2]/1000.)])
except IOError:
- pywikibot.output(u'WARNING: unknown file type [_detect_Chessboard_CV]')
+ pywikibot.warning(u'unknown file type [_detect_Chessboard_CV]')
return
except AttributeError:
- pywikibot.output(u'WARNING: unknown file type [_detect_Chessboard_CV]')
+ pywikibot.warning(u'unknown file type [_detect_Chessboard_CV]')
return
smallImg = np.empty( (cv.Round(im.shape[1]/scale), cv.Round(im.shape[0]/scale)), dtype=np.uint8 )
@@ -1522,7 +1520,7 @@
#found_all, corners = cv.FindChessboardCorners( im, chessboard_dim )
found_all, corners = cv2.findChessboardCorners( im, chessboard_dim )
except cv2.error, e:
- pywikibot.output(u'%s' % e)
+ pywikibot.error(u'%s' % e)
#cv2.drawChessboardCorners( im, chessboard_dim, corners, found_all )
##cv2.imshow("win", im)
@@ -1806,7 +1804,7 @@
try:
(width, height) = (int(float(width)+0.5), int(float(height)+0.5))
except ValueError:
- pywikibot.output(u'WARNING: %s contains incompatible unit(s), skipped' % ((width, height),))
+ pywikibot.warning(u'%s contains incompatible unit(s), skipped' % ((width, height),))
return
else:
(width, height) = self.image_size
@@ -1921,8 +1919,8 @@
available = [item in res for item in ['FacesDetected', 'ValidAFPoints']]
unknown = ['face' in item.lower() for item in res.keys()]
if make and (True in (available+unknown)):
- pywikibot.output(u"WARNING: skipped '%s' since not supported (yet) [_detect_Faces_EXIF]" % make)
- pywikibot.output(u"WARNING: FacesDetected: %s - ValidAFPoints: %s" % tuple(available))
+ pywikibot.warning(u"skipped '%s' since not supported (yet) [_detect_Faces_EXIF]" % make)
+ pywikibot.warning(u"FacesDetected: %s - ValidAFPoints: %s" % tuple(available))
# finally, rotate face coordinates if image was rotated
if wasRotated:
@@ -2209,7 +2207,7 @@
# Load important components
if (yaafe.loadComponentLibrary('yaafe-io')!=0):
- pywikibot.output(u'WARNING: cannot load yaafe-io component library !') # ! needed, else it will crash !
+ pywikibot.warning(u'cannot load yaafe-io component library !') # ! needed, else it will crash !
# Build a DataFlow object using FeaturePlan
fp = yaafe.FeaturePlan(sample_rate=44100, normalize=0.98, resample=False)
@@ -2764,7 +2762,7 @@
#self.image_size = (None, None)
mime = mimetypes.guess_all_extensions('%s/%s' % tuple(self.image_mime[0:2]))
if self.image_fileext.lower() not in mime:
- pywikibot.output(u'WARNING: File extension does not match MIME type! File extension should be %s.' % mime)
+ pywikibot.warning(u'File extension does not match MIME type! File extension should be %s.' % mime)
# SVG: rasterize the SVG to bitmap (MAY BE GET FROM WIKI BY DOWNLOAD?...)
# (Mediawiki uses librsvg too: http://commons.wikimedia.org/wiki/SVG#SVGs_in_MediaWiki)
@@ -3481,12 +3479,12 @@
Bot.downloadImage()
except IOError, err:
# skip if download not possible
- pywikibot.output(u"WARNING: %s, skipped..." % err)
+ pywikibot.warning(u"%s, skipped..." % err)
continue
except Exception, err:
# skip on any unexpected error, but report it
- pywikibot.output(u"ERROR: %s" % err)
- pywikibot.output(u"ERROR: was not able to process page %s !!!\n" %\
+ pywikibot.error(u"%s" % err)
+ pywikibot.error(u"was not able to process page %s !!!\n" %\
image.title(asLink=True))
continue
resultCheck = Bot.checkStep()
@@ -3496,8 +3494,8 @@
if ret:
outresult.append( ret )
except AttributeError:
- pywikibot.output(u"ERROR: was not able to process page %s !!!\n" %\
- image.title(asLink=True))
+ pywikibot.error(u"was not able to process page %s !!!\n" %\
+ image.title(asLink=True))
limit += -1
if not tagged:
posfile = open(os.path.join(scriptdir, 'cache/catimages_start'), "w")
@@ -3548,13 +3546,13 @@
Bot.downloadImage()
except IOError, err:
# skip if download not possible
- pywikibot.output(u"WARNING: %s, skipped..." % err)
+ pywikibot.warning(u"%s, skipped..." % err)
continue
except Exception, err:
# skip on any unexpected error, but report it
- pywikibot.output(u"ERROR: %s" % err)
- pywikibot.output(u"ERROR: was not able to process page %s !!!\n" %\
- image.title(asLink=True))
+ pywikibot.error(u"%s" % err)
+ pywikibot.error(u"was not able to process page %s !!!\n" %\
+ image.title(asLink=True))
continue
# gather all features (information) related to current image
Modified: trunk/pywikipedia/catlib.py
===================================================================
--- trunk/pywikipedia/catlib.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/catlib.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -655,8 +655,8 @@
newCatList.append(cat)
if not changesMade:
- wikipedia.output(u'ERROR: %s is not in category %s!'
- % (article.title(asLink=True), oldCat.title()))
+ wikipedia.error(u'%s is not in category %s!'
+ % (article.title(asLink=True), oldCat.title()))
else:
text = article.get(get_redirect=True)
try:
Modified: trunk/pywikipedia/censure.py
===================================================================
--- trunk/pywikipedia/censure.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/censure.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -33,13 +33,13 @@
site = pywikibot.getSite()
if not (site.language() + '.' + site.family.name) in badWordList or not (site.language() + '.' + site.family.name) in logPages:
- pywikibot.output('Error: your language isn\'t supported, see the source code for further details')
+ pywikibot.error('your language isn\'t supported, see the source code for further details')
sys.exit(1)
ownWordPage = pywikibot.Page(site, badWordList[site.language() + '.' + site.family.name])
try:
ownWordList = ownWordPage.get(get_redirect = True)
except pywikibot.NoPage:
- pywikibot.output('Error: the page containing the bad word list of your language doesn\'t exist')
+ pywikibot.error('the page containing the bad word list of your language doesn\'t exist')
sys.exit(1)
ownWordList = ownWordList.split('\n')
del ownWordList[0]
@@ -109,7 +109,7 @@
pywikibot.output(u'%s doesn\'t match any of the bad word list' %title)
def main():
- pywikibot.output('Warning: this script should not be run manually/directly, but automatically by maintainer.py')
+ pywikibot.warning('this script should not be run manually/directly, but automatically by maintainer.py')
if len(sys.argv) == 1:
pywikibot.output("Usage: censure.py <article title>")
sys.exit(1)
Modified: trunk/pywikipedia/checkimages.py
===================================================================
--- trunk/pywikipedia/checkimages.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/checkimages.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -1600,7 +1600,7 @@
elif imagechanges.lower() == 'true':
imagestatus = True
else:
- pywikibot.output(u"Error! Imagechanges set wrongly!")
+ pywikibot.error(u"Imagechanges set wrongly!")
self.settingsData = None
break
summary = tupla[5]
Modified: trunk/pywikipedia/featured.py
===================================================================
--- trunk/pywikipedia/featured.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/featured.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -322,8 +322,8 @@
try:
method = info[site.lang][0]
except KeyError:
- pywikibot.output(
- u'Error: language %s doesn\'t has %s category source.'
+ pywikibot.error(
+            u'language %s doesn\'t have %s category source.'
% (site.lang, pType))
return
name = info[site.lang][1]
Modified: trunk/pywikipedia/fixing_redirects.py
===================================================================
--- trunk/pywikipedia/fixing_redirects.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/fixing_redirects.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -187,7 +187,7 @@
try:
page.put(text, comment)
except (pywikibot.Error):
- pywikibot.output('Error: unable to put %s' % page)
+ pywikibot.error('unable to put %s' % page)
def main():
featured = False
Modified: trunk/pywikipedia/followlive.py
===================================================================
--- trunk/pywikipedia/followlive.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/followlive.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -456,14 +456,14 @@
choices = answer[1:].split(',')
except ValueError:
# User entered wrong value
- pywikibot.output(u'ERROR: "%s" is not valid' % answer)
+ pywikibot.error(u'"%s" is not valid' % answer)
continue
else:
try:
choices = answer.split(',')
except ValueError:
# User entered wrong value
- pywikibot.output(u'ERROR: "%s" is not valid' % answer)
+ pywikibot.error(u'"%s" is not valid' % answer)
continue
#test input
for choice in choices:
@@ -474,7 +474,7 @@
else:
answered = x in range(1, len(questionlist)+1)
if not answered:
- pywikibot.output(u'ERROR: "%s" is not valid' % answer)
+ pywikibot.error(u'"%s" is not valid' % answer)
continue
summary = u''
for choice in choices:
@@ -489,8 +489,8 @@
pywikibot.output(u'appending %s...' % questionlist[answer])
self.content += '\n' + questionlist[answer]
else:
- pywikibot.output(
- u'ERROR: "pos" should be "top" or "bottom" for template '
+ pywikibot.error(
+ u'"pos" should be "top" or "bottom" for template '
u'%s. Contact a developer.' % questionlist[answer])
sys.exit("Exiting")
summary += tpl['msg']+' '
@@ -537,8 +537,8 @@
if __name__ == "__main__":
try:
for arg in pywikibot.handleArgs():
- pywikibot.output(
- u'Warning: argument "%s" not understood; ignoring.' % arg)
+ pywikibot.warning(
+ u'argument "%s" not understood; ignoring.' % arg)
bot = CleaningBot()
bot.run()
except:
Modified: trunk/pywikipedia/imagecopy_self.py
===================================================================
--- trunk/pywikipedia/imagecopy_self.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/imagecopy_self.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -913,10 +913,10 @@
def main(args):
- pywikibot.output(u'WARNING: This is an experimental bot')
- pywikibot.output(u'WARNING: It will only work on self published work images')
- pywikibot.output(u'WARNING: This bot is still full of bugs')
- pywikibot.output(u'WARNING: Use at your own risk!')
+    pywikibot.warning(u'This is an experimental bot')
+ pywikibot.warning(u'It will only work on self published work images')
+ pywikibot.warning(u'This bot is still full of bugs')
+ pywikibot.warning(u'Use at your own risk!')
generator = None;
autonomous = False
Modified: trunk/pywikipedia/imagerecat.py
===================================================================
--- trunk/pywikipedia/imagerecat.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/imagerecat.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -230,7 +230,7 @@
elif addresspart.tag in invalidParts:
pywikibot.output(u'Dropping %s, %s' % (addresspart.tag, addresspart.text))
else:
- pywikibot.output(u'WARNING %s, %s is not in addressparts lists' % (addresspart.tag, addresspart.text))
+ pywikibot.warning(u'%s, %s is not in addressparts lists' % (addresspart.tag, addresspart.text))
#print result
return result
Modified: trunk/pywikipedia/interwiki.py
===================================================================
--- trunk/pywikipedia/interwiki.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/interwiki.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -1267,8 +1267,8 @@
dictName, year = page.autoFormat()
if dictName is not None:
if self.originPage:
- pywikibot.output(
- u'WARNING: %s:%s relates to %s:%s, which is an '
+ pywikibot.warning(
+ u'%s:%s relates to %s:%s, which is an '
u'auto entry %s(%s)'
% (self.originPage.site.language(), self.originPage,
page.site.language(), page, dictName, year))
@@ -1925,8 +1925,8 @@
rmPage.site.lang in ['hak', 'hi', 'cdo', 'sa'] and \
pywikibot.unicode_error: # work-arround for bug #3081100 (do not remove affected pages)
new[rmsite] = rmPage
- pywikibot.output(
- u"WARNING: %s is either deleted or has a mismatching disambiguation state."
+ pywikibot.warning(
+ u"%s is either deleted or has a mismatching disambiguation state."
% rmPage)
# Re-Check what needs to get done
mods, mcomment, adding, removing, modifying = compareLanguages(old,
@@ -1977,8 +1977,8 @@
if pywikibot.unicode_error:
for x in removing:
if x.lang in ['hi', 'cdo']:
- pywikibot.output(
-u'\03{lightred}WARNING: This may be false positive due to unicode bug #3081100\03{default}')
+ pywikibot.warning(
+u'\03{lightred}This may be false positive due to unicode bug #3081100\03{default}')
break
ask = True
if globalvar.force or globalvar.cleanup:
@@ -2029,21 +2029,21 @@
pywikibot.output(u'Page %s is locked. Skipping.' % page)
raise SaveError(u'Locked')
except pywikibot.EditConflict:
- pywikibot.output(
- u'ERROR putting page: An edit conflict occurred. Giving up.')
+ pywikibot.error(
+ u'putting page: An edit conflict occurred. Giving up.')
raise SaveError(u'Edit conflict')
except (pywikibot.SpamfilterError), error:
- pywikibot.output(
- u'ERROR putting page: %s blacklisted by spamfilter. Giving up.'
+ pywikibot.error(
+ u'putting page: %s blacklisted by spamfilter. Giving up.'
% (error.url,))
raise SaveError(u'Spam filter')
except (pywikibot.PageNotSaved), error:
- pywikibot.output(u'ERROR putting page: %s' % (error.args,))
+ pywikibot.error(u'putting page: %s' % (error.args,))
raise SaveError(u'PageNotSaved')
except (socket.error, IOError), error:
if timeout>3600:
raise
- pywikibot.output(u'ERROR putting page: %s' % (error.args,))
+ pywikibot.error(u'putting page: %s' % (error.args,))
pywikibot.output(u'Sleeping %i seconds before trying again.'
% (timeout,))
timeout *= 2
@@ -2051,7 +2051,7 @@
except pywikibot.ServerError:
if timeout > 3600:
raise
- pywikibot.output(u'ERROR putting page: ServerError.')
+ pywikibot.error(u'putting page: ServerError.')
pywikibot.output(u'Sleeping %i seconds before trying again.'
% (timeout,))
timeout *= 2
@@ -2088,8 +2088,8 @@
try:
linkedPages = set(page.interwiki())
except pywikibot.NoPage:
- pywikibot.output(
- u"WARNING: Page %s does no longer exist?!" % page)
+ pywikibot.warning(
+ u"Page %s does no longer exist?!" % page)
break
# To speed things up, create a dictionary which maps sites
# to pages. This assumes that there is only one interwiki
@@ -2101,14 +2101,14 @@
if expectedPage != page:
try:
linkedPage = linkedPagesDict[expectedPage.site]
- pywikibot.output(
- u"WARNING: %s: %s does not link to %s but to %s"
+ pywikibot.warning(
+ u"%s: %s does not link to %s but to %s"
% (page.site.family.name,
page, expectedPage, linkedPage))
except KeyError:
if not expectedPage.site.is_data_repository():
- pywikibot.output(
- u"WARNING: %s: %s does not link to %s"
+ pywikibot.warning(
+ u"%s: %s does not link to %s"
% (page.site.family.name,
page, expectedPage))
# Check for superfluous links
@@ -2117,12 +2117,12 @@
# Check whether there is an alternative page on that language.
# In this case, it was already reported above.
if linkedPage.site not in expectedSites:
- pywikibot.output(
- u"WARNING: %s: %s links to incorrect %s"
+ pywikibot.warning(
+ u"%s: %s links to incorrect %s"
% (page.site.family.name,
page, linkedPage))
except (socket.error, IOError):
- pywikibot.output(u'ERROR: could not report backlinks')
+ pywikibot.error(u'could not report backlinks')
class InterwikiBot(object):
@@ -2282,8 +2282,8 @@
self.generateMore(globalvar.maxquerysize - mycount)
except pywikibot.ServerError:
# Could not extract allpages special page?
- pywikibot.output(
- u'ERROR: could not retrieve more pages. Will try again in %d seconds'
+ pywikibot.error(
+ u'could not retrieve more pages. Will try again in %d seconds'
% timeout)
time.sleep(timeout)
timeout *= 2
Modified: trunk/pywikipedia/login.py
===================================================================
--- trunk/pywikipedia/login.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/login.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -391,7 +391,7 @@
return
if pywikibot.verbose > 1:
- pywikibot.output(u"WARNING: Using -v -v on login.py might leak private data. When sharing, please double check your password is not readable and log out your bots session.")
+ pywikibot.warning(u"Using -v -v on login.py might leak private data. When sharing, please double check your password is not readable and log out your bots session.")
verbose = True # only use this verbose when running from login.py
if logall:
if sysop:
Modified: trunk/pywikipedia/lonelypages.py
===================================================================
--- trunk/pywikipedia/lonelypages.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/lonelypages.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -208,7 +208,7 @@
for j in refs:
if j == None:
# We have to find out why the function returns that value
- pywikibot.output(u'Error: 1 --> Skip page')
+ pywikibot.error(u'1 --> Skip page')
continue
refsList.append(j)
# This isn't possible with a generator
@@ -218,7 +218,7 @@
# Never understood how a list can turn in "None", but it happened :-S
elif refsList == None:
# We have to find out why the function returns that value
- pywikibot.output(u'Error: 2 --> Skip page')
+ pywikibot.error(u'2 --> Skip page')
continue
else:
# Ok, no refs, no redirect... let's check if there's already the template
Modified: trunk/pywikipedia/movepages.py
===================================================================
--- trunk/pywikipedia/movepages.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/movepages.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -238,8 +238,8 @@
else:
oldName1 = page.title()
if oldName1:
- pywikibot.output(
- u'WARNING: file %s contains odd number of links' % filename)
+ pywikibot.warning(
+ u'file %s contains odd number of links' % filename)
elif arg == '-noredirect':
noredirect = True
elif arg == '-always':
@@ -248,14 +248,14 @@
skipredirects = True
elif arg.startswith('-from:'):
if oldName:
- pywikibot.output(u'WARNING: -from:%s without -to:' % oldName)
+ pywikibot.warning(u'-from:%s without -to:' % oldName)
oldName = arg[len('-from:'):]
elif arg.startswith('-to:'):
if oldName:
fromToPairs.append([oldName, arg[len('-to:'):]])
oldName = None
else:
- pywikibot.output(u'WARNING: %s without -from' % arg)
+ pywikibot.warning(u'%s without -from' % arg)
elif arg.startswith('-prefix'):
if len(arg) == len('-prefix'):
prefix = pywikibot.input(u'Enter the prefix:')
@@ -270,7 +270,7 @@
genFactory.handleArg(arg)
if oldName:
- pywikibot.output(u'WARNING: -from:%s without -to:' % oldName)
+ pywikibot.warning(u'-from:%s without -to:' % oldName)
for pair in fromToPairs:
page = pywikibot.Page(pywikibot.getSite(), pair[0])
bot = MovePagesBot(None, prefix, noredirect, always, skipredirects,
Modified: trunk/pywikipedia/patrol.py
===================================================================
--- trunk/pywikipedia/patrol.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/patrol.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -102,7 +102,7 @@
# cascade if there isnt a whitelist to fallback on
if not self.whitelist:
raise
- pywikibot.output(u'Error: ' + e)
+ pywikibot.error(u'%s' % e)
def add_to_tuples(self, tuples, user, page):
if pywikibot.verbose:
Modified: trunk/pywikipedia/pywikibot/comms/http.py
===================================================================
--- trunk/pywikipedia/pywikibot/comms/http.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/pywikibot/comms/http.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -121,9 +121,9 @@
# read & info can raise socket.error
headers = f.info()
if (int(headers.get('content-length', '-1')) > 1E7):
- pywikibot.output(u'WARNING: Target is of huge size (>10MB) is '
- u'that correct? Downloading will take some '
- u'time, please be patient.')
+ pywikibot.warning(u'Target is of huge size (>10MB) is '
+ u'that correct? Downloading will take some '
+ u'time, please be patient.')
text = f.read()
break
except KeyboardInterrupt:
@@ -143,8 +143,8 @@
retry_attempt += 1
if retry_attempt > config.maxretries:
raise MaxTriesExceededError()
- pywikibot.output(
- u"WARNING: Could not open '%s'.Maybe the server or\n "
+ pywikibot.warning(
+            u"Could not open '%s'. Maybe the server or\n "
u"your connection is down. Retrying in %i minutes..."
% (url, retry_idle_time))
time.sleep(retry_idle_time * 60)
@@ -164,8 +164,8 @@
retry_attempt += 1
if retry_attempt > config.maxretries:
raise MaxTriesExceededError()
- pywikibot.output(
- u"WARNING: Could not open '%s'. Maybe the server or\n your "
+ pywikibot.warning(
+ u"Could not open '%s'. Maybe the server or\n your "
u"connection is down. Retrying in %i minutes..."
% (url, retry_idle_time))
time.sleep(retry_idle_time * 60)
@@ -198,8 +198,8 @@
# We need to split it to get a value
content_length = int(headers.get('content-length', '0').split(',')[0])
if content_length != len(text) and 'content-length' in headers:
- pywikibot.output(
- u'Warning! len(text) does not match content-length: %s != %s'
+ pywikibot.warning(
+ u'len(text) does not match content-length: %s != %s'
% (len(text), content_length))
return request(site, uri, retry, sysop, data, compress, no_hostname,
cookie_only, back_response)
@@ -213,7 +213,7 @@
charset = m.group(1)
else:
if pywikibot.verbose:
- pywikibot.output(u"WARNING: No character set found.")
+ pywikibot.warning(u"No character set found.")
# UTF-8 as default
charset = 'utf-8'
# Check if this is the charset we expected
@@ -223,9 +223,9 @@
if (not back_response) or verbose:
pywikibot.output(u'%s' %e)
if no_hostname:
- pywikibot.output(u'ERROR: Invalid charset found on %s.' % uri)
+ pywikibot.error(u'Invalid charset found on %s.' % uri)
else:
- pywikibot.output(u'ERROR: Invalid charset found on %s://%s%s.'
+ pywikibot.error(u'Invalid charset found on %s://%s%s.'
% (site.protocol(), site.hostname(), uri))
# Convert HTML to Unicode
try:
@@ -234,10 +234,10 @@
if (not back_response) or verbose:
pywikibot.output(u'%s' %e)
if no_hostname:
- pywikibot.output(u'ERROR: Invalid characters found on %s, '
- u'replaced by \\ufffd.' % uri)
+ pywikibot.error(u'Invalid characters found on %s, '
+ u'replaced by \\ufffd.' % uri)
else:
- pywikibot.output(u'ERROR: Invalid characters found on %s://%s%s, '
+ pywikibot.error(u'Invalid characters found on %s://%s%s, '
u'replaced by \\ufffd.'
% (site.protocol(), site.hostname(), uri))
# We use error='replace' in case of bad encoding.
Modified: trunk/pywikipedia/rciw.py
===================================================================
--- trunk/pywikipedia/rciw.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/rciw.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -70,7 +70,7 @@
self.queue.put_nowait(page)
def main():
- pywikibot.output('Warning: this script can not be run manually/directly, but automatically by maintainer.py')
+ pywikibot.warning('this script can not be run manually/directly, but automatically by maintainer.py')
if __name__ == "__main__":
main()
Modified: trunk/pywikipedia/redirect.py
===================================================================
--- trunk/pywikipedia/redirect.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/redirect.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -559,17 +559,17 @@
else:
pass # target found
except pywikibot.SectionError:
- pywikibot.output(
- u'Warning: Redirect target section %s doesn\'t exist.'
+ pywikibot.warning(
+ u'Redirect target section %s doesn\'t exist.'
% newRedir.title(asLink=True))
except pywikibot.BadTitle, e:
# str(e) is in the format 'BadTitle: [[Foo]]'
- pywikibot.output(
- u'Warning: Redirect target %s is not a valid page title.'
+ pywikibot.warning(
+ u'Redirect target %s is not a valid page title.'
% str(e)[10:])
#sometimes this error occures. Invalid Title starting with a '#'
except pywikibot.InvalidTitle, err:
- pywikibot.output(u'Warning: %s' % err)
+ pywikibot.warning(u'%s' % err)
break
except pywikibot.NoPage:
if len(redirList) == 1:
@@ -583,8 +583,8 @@
% newRedir.title(asLink=True))
break # skip if automatic
else:
- pywikibot.output(
- u"Warning: Redirect target %s doesn't exist."
+ pywikibot.warning(
+ u"Redirect target %s doesn't exist."
% newRedir.title(asLink=True))
except pywikibot.ServerError:
pywikibot.output(u'Skipping due to server error: '
@@ -602,8 +602,8 @@
u"Skipping toolbar example: Redirect source is potentially vandalized.")
break
if targetPage.site != self.site:
- pywikibot.output(
- u'Warning: redirect target (%s) is on a different site.'
+ pywikibot.warning(
+ u'redirect target (%s) is on a different site.'
% targetPage.title(asLink=True))
if self.always:
break # skip if automatic
@@ -612,8 +612,8 @@
% (targetPage.site.lang,
targetPage.sectionFreeTitle())
) > 0:
- pywikibot.output(
- u'Warning: Redirect target %s forms a redirect loop.'
+ pywikibot.warning(
+ u'Redirect target %s forms a redirect loop.'
% targetPage.title(asLink=True))
break ### doesn't work. edits twice!
## try:
Modified: trunk/pywikipedia/reflinks.py
===================================================================
--- trunk/pywikipedia/reflinks.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/reflinks.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -464,7 +464,7 @@
u'Cannot change %s because of blacklist entry %s'
% (page.title(), e.url))
except pywikibot.PageNotSaved, error:
- pywikibot.output(u'Error putting page: %s' % (error.args,))
+ pywikibot.error(u'putting page: %s' % (error.args,))
except pywikibot.LockedPage:
pywikibot.output(u'Skipping %s (locked page)'
% (page.title(),))
Modified: trunk/pywikipedia/replace.py
===================================================================
--- trunk/pywikipedia/replace.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/replace.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -549,8 +549,8 @@
u'Cannot change %s because of blacklist entry %s'
% (page.title(), e.url))
except pywikibot.PageNotSaved, error:
- pywikibot.output(u'Error putting page: %s'
- % (error.args,))
+ pywikibot.error(u'putting page: %s'
+ % (error.args,))
except pywikibot.LockedPage:
pywikibot.output(u'Skipping %s (locked page)'
% (page.title(),))
Modified: trunk/pywikipedia/selflink.py
===================================================================
--- trunk/pywikipedia/selflink.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/selflink.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -132,7 +132,7 @@
try:
linkedPage = pywikibot.Page(page.site(), match.group('title'))
except pywikibot.InvalidTitle, err:
- pywikibot.output(u'Warning: %s' % err)
+ pywikibot.warning(u'%s' % err)
return text, False
# Check whether the link found is to the current page itself.
Modified: trunk/pywikipedia/subster.py
===================================================================
--- trunk/pywikipedia/subster.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/subster.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -172,7 +172,7 @@
pywikibot.output(u'Setting process TimeZone (TZ): %s' % str(time.tzname)) # ('CET', 'CEST')
else:
# e.g. windows doesn't have that attribute
- pywikibot.output(u'WARNING: This operating system has NO SUPPORT for setting TimeZone by code! Before running this script, please set the TimeZone manually to one approriate for use with the Wikipedia language and region you intend to.')
+ pywikibot.warning(u'This operating system has NO SUPPORT for setting TimeZone by code! Before running this script, please set the TimeZone manually to one approriate for use with the Wikipedia language and region you intend to.')
# init constants
self._bot_config = bot_config
Modified: trunk/pywikipedia/subster_irc.py
===================================================================
--- trunk/pywikipedia/subster_irc.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/subster_irc.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -130,7 +130,7 @@
try:
thread.start_new_thread( main_subster, (self.refs[page_title], params) )
except:
- pywikibot.output(u"WARNING: unable to start thread")
+ pywikibot.warning(u"unable to start thread")
# Define a function for the thread
def main_subster(page, params=None):
Modified: trunk/pywikipedia/sum_disc.py
===================================================================
--- trunk/pywikipedia/sum_disc.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/sum_disc.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -387,7 +387,7 @@
# warnings/exceptions are printed to log, could be get by panel.py from there!
# (separate and explicit warning handling not used anymore)
#for warning in self._global_warn: # output all warnings to log (what about a special wiki page?)
- # pywikibot.output( "%s: %s" % warning )
+ # pywikibot.warning( "%s: %s" % warning )
def compressHistory(self, users = []):
"""Read history, and re-write new history without any duplicates.
Modified: trunk/pywikipedia/table2wiki.py
===================================================================
--- trunk/pywikipedia/table2wiki.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/table2wiki.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -466,7 +466,7 @@
try:
text = page.get()
except pywikibot.NoPage:
- pywikibot.output(u"ERROR: couldn't find %s" % page.title())
+ pywikibot.error(u"couldn't find %s" % page.title())
return False
except pywikibot.IsRedirectPage:
pywikibot.output(u'Skipping redirect %s' % page.title())
@@ -476,8 +476,8 @@
# Check if there are any marked tags left
markedTableTagR = re.compile("<##table##|</##table##>", re.IGNORECASE)
if markedTableTagR.search(newText):
- pywikibot.output(
- u'ERROR: not all marked table start or end tags processed!')
+ pywikibot.error(
+ u'not all marked table start or end tags processed!')
return
if convertedTables == 0:
Modified: trunk/pywikipedia/upload.py
===================================================================
--- trunk/pywikipedia/upload.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/upload.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -167,8 +167,8 @@
dt += 60
else:
if pywikibot.verbose:
- pywikibot.output(
-u"WARNING: No check length to retrieved data is possible.")
+ pywikibot.warning(
+u"No check length to retrieved data is possible.")
else:
# Opening local files with MyURLopener would be possible, but we
# don't do it because it only accepts ASCII characters in the
Modified: trunk/pywikipedia/us-states.py
===================================================================
--- trunk/pywikipedia/us-states.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/us-states.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -93,8 +93,8 @@
elif arg == '-force':
force = True
else:
- pywikibot.output(
- u'Warning: argument "%s" not understood; ignoring.' % arg)
+ pywikibot.warning(
+ u'argument "%s" not understood; ignoring.' % arg)
mysite = pywikibot.getSite()
for p in mysite.allpages(start = start):
@@ -111,12 +111,12 @@
u"Not creating %s - redirect already exists."
% goal)
else:
- pywikibot.output(
- u"WARNING!!! %s already exists but redirects elsewhere!"
+ pywikibot.warning(
+ u"%s already exists but redirects elsewhere!"
% goal)
except pywikibot.IsNotRedirectPage:
- pywikibot.output(
- u"WARNING!!! Page %s already exists and is not a redirect. Please check page!"
+ pywikibot.warning(
+ u"Page %s already exists and is not a redirect. Please check page!"
% goal)
except pywikibot.NoPage:
change=''
Modified: trunk/pywikipedia/welcome.py
===================================================================
--- trunk/pywikipedia/welcome.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/welcome.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -499,7 +499,7 @@
pywikibot.output(u"The whitelist's page doesn't exist!")
else:
showStatus(4)
- pywikibot.output(u"WARNING: The whitelist hasn't been setted!")
+ pywikibot.warning(u"The whitelist hasn't been setted!")
# Join the whitelist words.
self._whitelist = list_white + whitelist_default
@@ -730,7 +730,7 @@
f = codecs.open(pywikibot.config.datafilepath(globalvar.signFileName), 'r',
encoding='utf-8')
except IOError:
- pywikibot.output(u'Error! - No fileName!')
+ pywikibot.error(u'No fileName!')
raise FilenameNotSet("No signature filename specified.")
signText = f.read()
@@ -971,7 +971,7 @@
# file where is stored the random signature index
filename = pywikibot.config.datafilepath('welcome-%s-%s.data' % (pywikibot.default_family, pywikibot.default_code))
if globalvar.offset and globalvar.timeoffset:
- pywikibot.output('WARING: both -offset and -timeoffset were provided, ignoring -offset')
+ pywikibot.warning('both -offset and -timeoffset were provided, ignoring -offset')
globalvar.offset = 0
bot = WelcomeBot()
try:
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/wikipedia.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -9225,7 +9225,7 @@
text += u'\n'
caller = inspect.getouterframes(inspect.currentframe())[1][3]
if not (caller == 'logoutput'):
- _logOld(text)
+ logoutput(text)
if input_lock.locked():
cache_output(text, toStdout = toStdout)
else:
@@ -9241,21 +9241,6 @@
(args, kwargs) = output_cache.pop(0)
ui.output(*args, **kwargs)
-def _logOld(text):
- """Write the given text to the logfile."""
- if logger:
- # remove all color markup
- plaintext = colorTagR.sub('', text)
- # save the text in a logfile (will be written in utf-8)
- for line in plaintext.splitlines():
- type = line.split(':')
- func = 'info'
- if len(type) > 1:
- func = type[0].strip().lower()
- if func not in ['debug', 'warning', 'error', 'critical']:
- func = 'info'
- getattr(logger, func)(line.rstrip())
-
output = _outputOld
def stdout(text, decoder=None, newline=True, **kwargs):
Modified: trunk/pywikipedia/xmlreader.py
===================================================================
--- trunk/pywikipedia/xmlreader.py 2013-04-19 15:37:30 UTC (rev 11392)
+++ trunk/pywikipedia/xmlreader.py 2013-04-19 21:28:02 UTC (rev 11393)
@@ -289,8 +289,8 @@
"""Return a generator that will yield XmlEntry objects"""
print 'Reading XML dump...'
if not 'iterparse' in globals():
- pywikibot.output(
-u'''WARNING: cElementTree not found. Using slower fallback solution.
+ pywikibot.warning(
+u'''cElementTree not found. Using slower fallback solution.
Consider installing the python-celementtree package.''')
return self.regex_parse()
else:
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11390
Revision: 11390
Author: drtrigon
Date: 2013-04-19 13:33:09 +0000 (Fri, 19 Apr 2013)
Log Message:
-----------
improvement; adopted output and logging to rewrite further
new feature; introduced 'critical' output/logging function (from rewrite)
Modified Paths:
--------------
trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2013-04-19 13:20:22 UTC (rev 11389)
+++ trunk/pywikipedia/wikipedia.py 2013-04-19 13:33:09 UTC (rev 11390)
@@ -126,7 +126,7 @@
import os, sys
import httplib, socket, urllib, urllib2, cookielib
-import traceback
+import traceback, inspect
import time, threading, Queue
import math
import re, codecs, difflib, locale
@@ -164,7 +164,12 @@
except ValueError:
WIDEBUILD = False
+from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL
+STDOUT = 16
+VERBOSE = 18
+INPUT = 25
+
# Format string for the default user agent.
USER_AGENT_FORMAT = '{script}/r{version[rev]} Pywikipediabot/1.0'
@@ -733,7 +738,7 @@
m = re.search("=+[ ']*%s[ ']*=+" % re.escape(hn),
self._contents)
if verbose and not m:
- output(u"WARNING: Section does not exist: %s" % self)
+ warning(u"Section does not exist: %s" % self)
# Store any exceptions for later reference
except NoPage:
self._getexception = NoPage
@@ -931,7 +936,7 @@
else:
output( unicode(text) )
# We assume that the server is down. Wait some time, then try again.
- output( u"WARNING: No text area found on %s%s. Maybe the server is down. Retrying in %i minutes..." % (self.site().hostname(), path, retry_idle_time) )
+ warning( u"No text area found on %s%s. Maybe the server is down. Retrying in %i minutes..." % (self.site().hostname(), path, retry_idle_time) )
time.sleep(retry_idle_time * 60)
# Next time wait longer, but not longer than half an hour
retry_idle_time *= 2
@@ -944,7 +949,7 @@
m = re.search('var wgRestrictionEdit = \\["(\w+)"\\]', text)
if m:
if verbose:
- output(u"DBG> page is locked for group %s" % m.group(1))
+ debug(u"page is locked for group %s" % m.group(1))
self.editRestriction = m.group(1);
else:
self.editRestriction = ''
@@ -1070,16 +1075,16 @@
u'prop' : u'sections',
}
- pywikibot.get_throttle()
- pywikibot.output(u"Reading section info from %s via API..." % self.title(asLink=True))
+ get_throttle()
+ output(u"Reading section info from %s via API..." % self.title(asLink=True))
result = query.GetData(params, self.site())
# JIRA: DRTRIGON-90; catch and convert error (convert it such that the whole page gets processed later)
try:
r = result[u'parse'][u'sections']
except KeyError: # sequence of sometimes occuring "KeyError: u'parse'"
- pywikibot.output(u'WARNING: Query result (gS): %r' % result)
- raise pywikibot.Error('Problem occured during data retrieval for sections in %s!' % self.title(asLink=True))
+ warning(u'Query result (gS): %r' % result)
+ raise Error('Problem occured during data retrieval for sections in %s!' % self.title(asLink=True))
#debug_data = str(r) + '\n'
debug_data = str(result) + '\n'
@@ -1117,7 +1122,7 @@
item[u'wikiline'] = None
r[i] = item
break
- except pywikibot.Error:
+ except Error:
pos = None
if (pos == None):
raise # re-raise
@@ -1180,8 +1185,8 @@
u'rvsection' : section[u'index'],
}
- pywikibot.get_throttle()
- pywikibot.output(u" Reading section %s from %s via API..." % (section[u'index'], self.title(asLink=True)))
+ get_throttle()
+ output(u" Reading section %s from %s via API..." % (section[u'index'], self.title(asLink=True)))
result = query.GetData(params, self.site())
# JIRA: DRTRIGON-90; catch and convert error (convert it such that the whole page gets processed later)
@@ -1189,8 +1194,8 @@
r = result[u'query'][u'pages'].values()[0]
pl = r[u'revisions'][0][u'*'].splitlines()
except KeyError: # sequence of sometimes occuring "KeyError: u'parse'"
- pywikibot.output(u'WARNING: Query result (gSBO): %r' % result)
- raise pywikibot.Error('Problem occured during data retrieval for sections in %s!' % self.title(asLink=True))
+ warning(u'Query result (gSBO): %r' % result)
+ raise Error('Problem occured during data retrieval for sections in %s!' % self.title(asLink=True))
if pl:
possible_headers = [ (pl[0], pl[0]) ]
@@ -1213,8 +1218,8 @@
section[u'wikiline_bo'] = self._contents.find(section[u'wikiline'], pos)
if section[u'wikiline_bo'] < 0: # nothing found, report/raise error !
#page._getexception = ...
- raise pywikibot.Error('Problem occured during attempt to retrieve and resolve sections in %s!' % self.title(asLink=True))
- #pywikibot.output(...)
+ raise Error('Problem occured during attempt to retrieve and resolve sections in %s!' % self.title(asLink=True))
+ #output(...)
# (or create a own error, e.g. look into interwiki.py)
def permalink(self, oldid=None):
@@ -1438,8 +1443,8 @@
if template in catredirs:
# Get target (first template argument)
if not args:
- pywikibot.output(u'Warning: redirect target for %s is missing'
- % self.title(asLink=True))
+ warning(u'redirect target for %s is missing'
+ % self.title(asLink=True))
self._catredirect = False
else:
self._catredirect = self.site().namespace(14) + ":" + args[0]
@@ -1817,11 +1822,11 @@
for link in reflist("li", recursive=False):
title = link.a.string
if title is None:
- output(u"DBG> invalid <li> item in Whatlinkshere: %s" % link)
+ debug(u"invalid <li> item in Whatlinkshere: %s" % link)
try:
p = Page(self.site(), title)
except InvalidTitle:
- output(u"DBG> Whatlinkshere:%s contains invalid link to %s"
+ debug(u"Whatlinkshere:%s contains invalid link to %s"
% (self.title(), title))
continue
isredirect, istemplate = False, False
@@ -2561,7 +2566,7 @@
if response.code != 302 and data.strip() != u"":
# Something went wrong, and we don't know what. Show the
# HTML code that hopefully includes some error message.
- output(u"ERROR: Unexpected response from wiki server.")
+ error(u"Unexpected response from wiki server.")
output(u" %s (%s) " % (response.code, response.msg))
output(data)
# Unexpected responses should raise an error and not pass,
@@ -4492,7 +4497,7 @@
params['token'] = self.site().getToken(sysop = sysop)
if botflag:
params['bot'] = 1
- output(u"Remving claim from %s" % self.title())
+ output(u"Removing claim from %s" % self.title())
data = query.GetData(params, self.site(), sysop=sysop)
if 'error' in data:
raise RuntimeError("API query error: %s" % data)
@@ -5057,8 +5062,8 @@
s = ''.join(traceback.format_exception(*sys.exc_info()))
if not isinstance(s, unicode):
s = s.decode('utf-8')
- output(u'%s\nDBG> got network error in _GetAll.run. ' \
- 'Sleeping for %d seconds...' % (s, self.sleeptime))
+ debug(u'%s\nDBG> got network error in _GetAll.run. ' \
+ 'Sleeping for %d seconds...' % (s, self.sleeptime))
self.sleep()
else:
if 'error' in data:
@@ -5080,8 +5085,8 @@
s = ''.join(traceback.format_exception(*sys.exc_info()))
if not isinstance(s, unicode):
s = s.decode('utf-8')
- output(u'%s\nDBG> got network error in _GetAll.run. ' \
- 'Sleeping for %d seconds...' % (s, self.sleeptime))
+ debug(u'%s\nDBG> got network error in _GetAll.run. ' \
+ 'Sleeping for %d seconds...' % (s, self.sleeptime))
self.sleep()
else:
if "<title>Wiki does not exist</title>" in data:
@@ -5173,7 +5178,7 @@
if not m:
try:
page2._getexception
- output(u"WARNING: Section not found: %s" % page2)
+ warning(u"Section not found: %s" % page2)
except AttributeError:
# There is no exception yet
page2._getexception = SectionError
@@ -5198,8 +5203,8 @@
if version != self.site.version() and \
versionnumber(self.site.lang,
version=version) != versionnumber(self.site.lang):
- output(u'WARNING: Family file %s contains version number %s, but it should be %s'
- % (self.site.family.name, self.site.version(), version))
+ warning(u'Family file %s contains version number %s, but it should be %s'
+ % (self.site.family.name, self.site.version(), version))
# Verify case
if self.site.nocapitalize:
@@ -5207,7 +5212,7 @@
else:
case = 'first-letter'
if case != header.case.strip():
- output(u'WARNING: Family file %s contains case %s, but it should be %s' % (self.site.family.name, case, header.case.strip()))
+ warning(u'Family file %s contains case %s, but it should be %s' % (self.site.family.name, case, header.case.strip()))
# Verify namespaces
lang = self.site.lang
@@ -5230,13 +5235,13 @@
flag = u"is '%s', but should be removed (default value '%s')" % (ns, nshdr)
else:
flag = u"is '%s', but should be '%s'" % (ns, nshdr)
- output(u"WARNING: Outdated family file %s: namespace['%s'][%i] %s" % (self.site.family.name, lang, id, flag))
+ warning(u"Outdated family file %s: namespace['%s'][%i] %s" % (self.site.family.name, lang, id, flag))
#self.site.family.namespaces[id][lang] = nshdr
else:
- output(u"WARNING: Missing namespace in family file %s: namespace['%s'][%i] (it is set to '%s')" % (self.site.family.name, lang, id, nshdr))
+ warning(u"Missing namespace in family file %s: namespace['%s'][%i] (it is set to '%s')" % (self.site.family.name, lang, id, nshdr))
for id in self.site.family.namespaces:
if self.site.family.isDefinedNSLanguage(id, lang) and id not in header.namespaces:
- output(u"WARNING: Family file %s includes namespace['%s'][%i], but it should be removed (namespace doesn't exist in the site)" % (self.site.family.name, lang, id))
+ warning(u"Family file %s includes namespace['%s'][%i], but it should be removed (namespace doesn't exist in the site)" % (self.site.family.name, lang, id))
def getData(self, curonly=True):
address = self.site.export_address()
@@ -5246,7 +5251,7 @@
pagenames = [encodeEsperantoX(pagetitle) for pagetitle in pagenames]
pagenames = u'\r\n'.join(pagenames)
if type(pagenames) is not unicode:
- output(u'Warning: xmlreader.WikipediaXMLHandler.getData() got non-unicode page names. Please report this.')
+ warning(u'xmlreader.WikipediaXMLHandler.getData() got non-unicode page names. Please report this.')
output(str(pagenames))
# convert Unicode string to the encoding used on that wiki
pagenames = pagenames.encode(self.site.encoding())
@@ -5341,8 +5346,8 @@
if not m:
try:
page2._getexception
- output(u"WARNING: Section not found: %s"
- % page2)
+ warning(u"Section not found: %s"
+ % page2)
except AttributeError:
# There is no exception yet
page2._getexception = SectionError
@@ -5365,8 +5370,8 @@
if version != self.site.version() and \
versionnumber(self.site.lang,
version=version) != versionnumber(self.site.lang):
- output(u'WARNING: Family file %s contains version number %s, but it should be %s'
- % (self.site.family.name, self.site.version(), version))
+ warning(u'Family file %s contains version number %s, but it should be %s'
+ % (self.site.family.name, self.site.version(), version))
# Verify case
if self.site.nocapitalize:
@@ -5374,7 +5379,7 @@
else:
case = 'first-letter'
if case != header['general']['case'].strip():
- output(u'WARNING: Family file %s contains case %s, but it should be %s' % (self.site.family.name, case, header.case.strip()))
+ warning(u'Family file %s contains case %s, but it should be %s' % (self.site.family.name, case, header.case.strip()))
# Verify namespaces
lang = self.site.lang
@@ -5398,13 +5403,13 @@
flag = u"is '%s', but should be removed (default value '%s')" % (ns, nshdr)
else:
flag = u"is '%s', but should be '%s'" % (ns, nshdr)
- output(u"WARNING: Outdated family file %s: namespace['%s'][%i] %s" % (self.site.family.name, lang, id, flag))
+ warning(u"Outdated family file %s: namespace['%s'][%i] %s" % (self.site.family.name, lang, id, flag))
#self.site.family.namespaces[id][lang] = nshdr
else:
- output(u"WARNING: Missing namespace in family file %s: namespace['%s'][%i] (it is set to '%s')" % (self.site.family.name, lang, id, nshdr))
+ warning(u"Missing namespace in family file %s: namespace['%s'][%i] (it is set to '%s')" % (self.site.family.name, lang, id, nshdr))
for id in self.site.family.namespaces:
if self.site.family.isDefinedNSLanguage(id, lang) and u'%i' % id not in header['namespaces']:
- output(u"WARNING: Family file %s includes namespace['%s'][%i], but it should be removed (namespace doesn't exist in the site)" % (self.site.family.name, lang, id ) )
+ warning(u"Family file %s includes namespace['%s'][%i], but it should be removed (namespace doesn't exist in the site)" % (self.site.family.name, lang, id ) )
def getDataApi(self):
pagenames = [page.sectionFreeTitle() for page in self.pages]
@@ -5977,8 +5982,8 @@
"""
## # DEPRECATED warning. Should be uncommented if scripts are actualized
-## pywikibot.output('Page.site() method is DEPRECATED, '
-## 'use Page.site instead.')
+## output('Page.site() method is DEPRECATED, '
+## 'use Page.site instead.')
return self
@property
@@ -6414,8 +6419,8 @@
if ('action' in predata) and pywikibot.simulate and \
(predata['action'] in pywikibot.config.actions_to_block) and \
(address not in [self.export_address()]):
- pywikibot.output(u'\03{lightyellow}SIMULATION: %s action blocked.\03{default}'%\
- predata['action'])
+ output(u'\03{lightyellow}SIMULATION: %s action blocked.\03{default}'%\
+ predata['action'])
import StringIO
f_dummy = StringIO.StringIO()
f_dummy.__dict__.update({u'code': 0, u'msg': u''})
@@ -6487,8 +6492,8 @@
retry_attempt += 1
if retry_attempt > config.maxretries:
raise MaxTriesExceededError()
- output(u"WARNING: Could not open '%s'.\nMaybe the server is down. Retrying in %i minutes..."
- % (url, retry_idle_time))
+ warning(u"Could not open '%s'.\nMaybe the server is down. Retrying in %i minutes..."
+ % (url, retry_idle_time))
time.sleep(retry_idle_time * 60)
# Next time wait longer, but not longer than half an hour
retry_idle_time *= 2
@@ -6509,7 +6514,7 @@
retry_attempt += 1
if retry_attempt > config.maxretries:
raise MaxTriesExceededError()
- output(u"WARNING: Could not open '%s'. Maybe the server or\n your connection is down. Retrying in %i minutes..."
+ warning(u"Could not open '%s'. Maybe the server or\n your connection is down. Retrying in %i minutes..."
% (url, retry_idle_time))
time.sleep(retry_idle_time * 60)
retry_idle_time *= 2
@@ -6540,9 +6545,8 @@
# We need to split it to get a value
content_length = int(headers.get('content-length', '0').split(',')[0])
if content_length != len(text) and 'content-length' in headers:
- output(
- u'Warning! len(text) does not match content-length: %s != %s'
- % (len(text), content_length))
+ warning(u'len(text) does not match content-length: %s != %s'
+ % (len(text), content_length))
return self.postData(address, data, contentType, sysop, compress,
cookies)
@@ -6555,7 +6559,7 @@
charset = m.group(1)
else:
if verbose:
- output(u"WARNING: No character set found.")
+ warning(u"No character set found.")
# UTF-8 as default
charset = 'utf-8'
# Check if this is the charset we expected
@@ -6566,8 +6570,8 @@
except UnicodeDecodeError, e:
if verbose:
output(u'%s' %e)
- output(u'ERROR: Invalid characters found on %s://%s%s, replaced by \\ufffd.'
- % (self.protocol(), self.hostname(), address))
+ error(u'Invalid characters found on %s://%s%s, replaced by \\ufffd.'
+ % (self.protocol(), self.hostname(), address))
# We use error='replace' in case of bad encoding.
text = unicode(text, charset, errors = 'replace')
@@ -6624,7 +6628,7 @@
account = 'Your sysop account'
else:
account = 'Your account'
- output(u'\nWARNING: %s on %s is blocked by %s.\nReason: %s\nEditing using this account will stop the run.\n'
+ warning(u'\n%s on %s is blocked by %s.\nReason: %s\nEditing using this account will stop the run.\n'
% (account, self, text['blockedby'], text['blockreason']))
self._isBlocked[index] = 'blockedby' in text
@@ -6673,9 +6677,9 @@
if sysop:
output(u'Note: Your sysop account on %s does not have a bot flag. Its edits will be visible in the recent changes.' % self)
else:
- output(u'WARNING: Your account on %s does not have a bot flag. Its edits will be visible in the recent changes and it may get blocked.' % self)
+ warning(u'Your account on %s does not have a bot flag. Its edits will be visible in the recent changes and it may get blocked.' % self)
if sysop and 'sysop' not in self._rights[index]:
- output(u'WARNING: Your sysop account on %s does not seem to have sysop rights. You may not be able to perform any sysop-restricted actions using it.' % self)
+ warning(u'Your sysop account on %s does not seem to have sysop rights. You may not be able to perform any sysop-restricted actions using it.' % self)
else:
# 'groups' is not exists, set default rights
self._rights[index] = []
@@ -6711,10 +6715,10 @@
self._token[index] = data['edittoken']
self._userData[index] = True
else:
- output(u'WARNING: Token not found on %s. You will not be able to edit any page.' % self)
+ warning(u'Token not found on %s. You will not be able to edit any page.' % self)
else:
if not self._isBlocked[index]:
- output(u'WARNING: Token not found on %s. You will not be able to edit any page.' % self)
+ warning(u'Token not found on %s. You will not be able to edit any page.' % self)
def _getUserDataOld(self, text, sysop = False, force = True):
"""
@@ -6740,7 +6744,7 @@
account = 'Your sysop account'
else:
account = 'Your account'
- output(u'WARNING: %s on %s is blocked. Editing using this account will stop the run.' % (account, self))
+ warning(u'%s on %s is blocked. Editing using this account will stop the run.' % (account, self))
self._isBlocked[index] = blocked
# Check for new messages
@@ -6808,9 +6812,9 @@
if sysop:
output(u'Note: Your sysop account on %s does not have a bot flag. Its edits will be visible in the recent changes.' % self)
else:
- output(u'WARNING: Your account on %s does not have a bot flag. Its edits will be visible in the recent changes and it may get blocked.' % self)
+ warning(u'Your account on %s does not have a bot flag. Its edits will be visible in the recent changes and it may get blocked.' % self)
if sysop and 'sysop' not in self._rights[index]:
- output(u'WARNING: Your sysop account on %s does not seem to have sysop rights. You may not be able to perform any sysop-restricted actions using it.' % self)
+ warning(u'Your sysop account on %s does not seem to have sysop rights. You may not be able to perform any sysop-restricted actions using it.' % self)
else:
# We don't have wgUserGroups, and can't check the rights
self._rights[index] = []
@@ -6845,7 +6849,7 @@
# there is a textarea and the tab "view source" is not shown
if u'<textarea' in text and u'<li id="ca-viewsource"' not in text and not self._isBlocked[index]:
# Token not found
- output(u'WARNING: Token not found on %s. You will not be able to edit any page.' % self)
+ warning(u'Token not found on %s. You will not be able to edit any page.' % self)
def siteinfo(self, key = 'general', force = False, dump = False):
"""Get Mediawiki Site informations by API
@@ -6993,7 +6997,7 @@
# No messages could be added.
# We assume that the server is down.
# Wait some time, then try again.
- output(u'WARNING: No messages found in Special:Allmessages. Maybe the server is down. Retrying in %i minutes...' % retry_idle_time)
+ warning(u'No messages found in Special:Allmessages. Maybe the server is down. Retrying in %i minutes...' % retry_idle_time)
time.sleep(retry_idle_time * 60)
# Next time wait longer, but not longer than half an hour
retry_attempt += 1
@@ -7172,9 +7176,9 @@
if start:
params['lestart'] = start
if offset and offset > 0:
- output(u'WARNING: offset parameter %s ignored,\n'
- u' start parameter is set to %s'
- % (offset, start))
+ warning(u'offset parameter %s ignored,\n'
+ u' start parameter is set to %s'
+ % (offset, start))
# offset in hours from now
elif offset and offset > 0:
start = Timestamp.utcnow() - datetime.timedelta(0, offset*3600)
@@ -7803,7 +7807,7 @@
get_throttle()
data = query.GetData(params, self)
if verbose:
- output('DEBUG: allpages>>> data.keys() %s' % data.keys())
+ debug('allpages>>> data.keys() %s' % data.keys())
if 'warnings' in data:
warning = data['warnings']['allpages']['*']
raise RuntimeError("API query warning: %s" % warning)
@@ -9089,7 +9093,7 @@
logger = logging.getLogger() # root logger
if logger.handlers: # init just once (if re-called)
- logger = logging.getLogger('pywikibot')
+ logger = logging.getLogger('pywiki')
return
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
@@ -9114,10 +9118,10 @@
if os.path.exists(logfn) and (ver == int('0206')):
t = os.stat(logfn).st_mtime
fh.rolloverAt = fh.computeRollover(t)
- fh.setLevel(logging.DEBUG if debug else logging.INFO)
+ fh.setLevel(DEBUG if debug else INFO)
# create console handler with a higher log level
ch = logging.StreamHandler()
- ch.setLevel(logging.INFO)
+ ch.setLevel(INFO)
# create formatter and add it to the handlers (using LogRecord attributes)
formatter = logging.Formatter(
fmt='%(asctime)s %(name)18s: %(levelname)-8s %(message)s',
@@ -9131,7 +9135,7 @@
logger.addHandler(fh) # output to logfile
#logger.addHandler(ch) # output to terminal/shell console
- logger = logging.getLogger('pywikibot')
+ logger = logging.getLogger('pywiki')
else:
# disable the log file
logger = None
@@ -9143,26 +9147,49 @@
colorTagR = re.compile('\03{.*?}', re.UNICODE)
-def log(text):
- """Write the given text to the logfile."""
- if logger:
- # remove all color markup
- plaintext = colorTagR.sub('', text)
- # save the text in a logfile (will be written in utf-8)
- for line in plaintext.splitlines():
- type = line.split(':')
- func = 'info'
- if len(type) > 1:
- func = type[0].strip().lower()
- if func not in ['debug', 'warning', 'error', 'critical', 'info']:
- func = 'info'
- getattr(logger, func)(line.rstrip())
-
output_lock = threading.Lock()
input_lock = threading.Lock()
output_cache = []
-def output(text, decoder=None, newline=True, toStdout=False, **kwargs):
+def logoutput(text, decoder=None, newline=True, _level=INFO, _logger="",
+ **kwargs):
+ """Format output and send to the logging module.
+
+ Backend function used by all the user-output convenience functions.
+
+ """
+ if _logger:
+ log = logging.getLogger("pywiki." + _logger)
+ else:
+ log = logging.getLogger("pywiki")
+
+ # make sure logging system has been initialized
+ if not logger:
+ setLogfileStatus(True)
+
+ context = {}
+
+ if decoder:
+ text = unicode(text, decoder)
+ elif not isinstance(text, unicode):
+ if not isinstance(text, str):
+ # looks like text is a non-text object.
+ # Maybe it has a __unicode__ builtin ?
+ # (allows to print Page, Site...)
+ text = unicode(text)
+ else:
+ try:
+ text = unicode(text, 'utf-8')
+ except UnicodeDecodeError:
+ text = unicode(text, 'iso8859-1')
+
+ log.log(_level, text, extra=context, **kwargs)
+
+ if _level <> INFO:
+ text = u'%s: %s' % (logging.getLevelName(_level), text)
+ _outputOld(text)
+
+def _outputOld(text, decoder=None, newline=True, toStdout=False, **kwargs):
"""Output a message to the user via the userinterface.
Works like print, but uses the encoding used by the user's console
@@ -9196,7 +9223,9 @@
text = unicode(text, 'iso8859-1')
if newline:
text += u'\n'
- log(text)
+ caller = inspect.getouterframes(inspect.currentframe())[1][3]
+ if not (caller == 'logoutput'):
+ _logOld(text)
if input_lock.locked():
cache_output(text, toStdout = toStdout)
else:
@@ -9212,6 +9241,48 @@
(args, kwargs) = output_cache.pop(0)
ui.output(*args, **kwargs)
+def _logOld(text):
+ """Write the given text to the logfile."""
+ if logger:
+ # remove all color markup
+ plaintext = colorTagR.sub('', text)
+ # save the text in a logfile (will be written in utf-8)
+ for line in plaintext.splitlines():
+ type = line.split(':')
+ func = 'info'
+ if len(type) > 1:
+ func = type[0].strip().lower()
+ if func not in ['debug', 'warning', 'error', 'critical']:
+ func = 'info'
+ getattr(logger, func)(line.rstrip())
+
+output = _outputOld
+
+def stdout(text, decoder=None, newline=True, **kwargs):
+ """Output script results to the user via the userinterface."""
+ logoutput(text, decoder, newline, STDOUT, **kwargs)
+
+def warning(text, decoder=None, newline=True, **kwargs):
+ """Output a warning message to the user via the userinterface."""
+ logoutput(text, decoder, newline, WARNING, **kwargs)
+
+def error(text, decoder=None, newline=True, **kwargs):
+ """Output an error message to the user via the userinterface."""
+ logoutput(text, decoder, newline, ERROR, **kwargs)
+
+def log(text, decoder=None, newline=True, **kwargs):
+ """Output a record to the log file."""
+ logoutput(text, decoder, newline, VERBOSE, **kwargs)
+
+def critical(text, decoder=None, newline=True, **kwargs):
+ """Output a debug record to the log file."""
+ logoutput(text, decoder, newline, CRITICAL, **kwargs)
+
+def debug(text, layer, decoder=None, newline=True, **kwargs):
+ """Output a debug record to the log file."""
+ logoutput(text, decoder, newline, DEBUG, layer, **kwargs)
+
+
# User input functions
def input(question, password = False):
@@ -9371,7 +9442,7 @@
except UnicodeDecodeError:
f.write(data)
f.close()
- output( u'ERROR: %s caused error %s. Dump %s created.' % (name,error,filename) )
+ error( u'%s caused error %s. Dump %s created.' % (name,error,filename) )
get_throttle = Throttle()
put_throttle = Throttle(write=True)
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11387
Revision: 11387
Author: drtrigon
Date: 2013-04-18 23:10:12 +0000 (Thu, 18 Apr 2013)
Log Message:
-----------
follow-up to r11386; removed unneeded variable
Modified Paths:
--------------
branches/rewrite/pywikibot/bot.py
Modified: branches/rewrite/pywikibot/bot.py
===================================================================
--- branches/rewrite/pywikibot/bot.py 2013-04-18 22:51:02 UTC (rev 11386)
+++ branches/rewrite/pywikibot/bot.py 2013-04-18 23:10:12 UTC (rev 11387)
@@ -224,12 +224,12 @@
debuglogger.setLevel(DEBUG)
debuglogger.addHandler(file_handler)
- writelogheader(root_logger)
+ writelogheader()
_handlers_initialized = True
-def writelogheader(logger):
+def writelogheader():
"""
Save additional version, system and status info to the logfile in use,
so that the user can look it up later to track errors or report bugs.