http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11393
Revision: 11393 Author: drtrigon Date: 2013-04-19 21:28:02 +0000 (Fri, 19 Apr 2013) Log Message: ----------- improvement; follow-up to r11390 in order to use proper output/logging functions
Modified Paths: -------------- trunk/pywikipedia/blockpageschecker.py trunk/pywikipedia/blockreview.py trunk/pywikipedia/category.py trunk/pywikipedia/catimages.py trunk/pywikipedia/catlib.py trunk/pywikipedia/censure.py trunk/pywikipedia/checkimages.py trunk/pywikipedia/featured.py trunk/pywikipedia/fixing_redirects.py trunk/pywikipedia/followlive.py trunk/pywikipedia/imagecopy_self.py trunk/pywikipedia/imagerecat.py trunk/pywikipedia/interwiki.py trunk/pywikipedia/login.py trunk/pywikipedia/lonelypages.py trunk/pywikipedia/movepages.py trunk/pywikipedia/patrol.py trunk/pywikipedia/pywikibot/comms/http.py trunk/pywikipedia/rciw.py trunk/pywikipedia/redirect.py trunk/pywikipedia/reflinks.py trunk/pywikipedia/replace.py trunk/pywikipedia/selflink.py trunk/pywikipedia/subster.py trunk/pywikipedia/subster_irc.py trunk/pywikipedia/sum_disc.py trunk/pywikipedia/table2wiki.py trunk/pywikipedia/upload.py trunk/pywikipedia/us-states.py trunk/pywikipedia/welcome.py trunk/pywikipedia/wikipedia.py trunk/pywikipedia/xmlreader.py
Modified: trunk/pywikipedia/blockpageschecker.py =================================================================== --- trunk/pywikipedia/blockpageschecker.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/blockpageschecker.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -354,7 +354,7 @@
if changes == 0: # We tried to fix edit-protection templates, but it did not work. - pywikibot.output('Warning : No edit-protection template could be found') + pywikibot.warning('No edit-protection template could be found')
if moveBlockCheck and changes > -1: # checking move protection now @@ -402,7 +402,7 @@
if changes == 0: # We tried to fix move-protection templates, but it did not work. - pywikibot.output('Warning : No move-protection template could be found') + pywikibot.warning('No move-protection template could be found')
if oldtext != text:
Modified: trunk/pywikipedia/blockreview.py =================================================================== --- trunk/pywikipedia/blockreview.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/blockreview.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -82,7 +82,7 @@ self.unblock_tpl[self.site.lang], defaultNamespace=10) except KeyError: - pywikibot.output(u'ERROR: Language "%s" not supported by this bot.' + pywikibot.error(u'Language "%s" not supported by this bot.' % self.site.lang) else: for page in genPage.getReferences(follow_redirects=False,
Modified: trunk/pywikipedia/category.py =================================================================== --- trunk/pywikipedia/category.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/category.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -302,9 +302,9 @@ % page.title(asLink=True)) except pywikibot.IsRedirectPage, arg: redirTarget = pywikibot.Page(self.site, arg.args[0]) - pywikibot.output(u"WARNING: Page %s is a redirect to %s; skipping." - % (page.title(asLink=True), - redirTarget.title(asLink=True))) + pywikibot.warning(u"Page %s is a redirect to %s; skipping." + % (page.title(asLink=True), + redirTarget.title(asLink=True))) else: return text return None
Modified: trunk/pywikipedia/catimages.py =================================================================== --- trunk/pywikipedia/catimages.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/catimages.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -34,8 +34,6 @@
X-untagged[:#] Use daniel's tool as generator: X http://toolserver.org/~daniel/WikiSense/UntaggedImages.php - - """
# @@ -214,10 +212,10 @@ # how small and how many features are detected as faces (or eyes) scale = max([1., np.average(np.array(img.shape)[0:2]/500.)]) except IOError: - pywikibot.output(u'WARNING: unknown file type [_detect_Faces_CV]') + pywikibot.warning(u'unknown file type [_detect_Faces_CV]') return except AttributeError: - pywikibot.output(u'WARNING: unknown file type [_detect_Faces_CV]') + pywikibot.warning(u'unknown file type [_detect_Faces_CV]') return
#detectAndDraw( image, cascade, nestedCascade, scale ); @@ -419,10 +417,10 @@ scale = max([1., np.average(np.array(img.shape)[0:2]/400.)]) #scale = max([1., np.average(np.array(img.shape)[0:2]/300.)]) except IOError: - pywikibot.output(u'WARNING: unknown file type [_detect_People_CV]') + pywikibot.warning(u'unknown file type [_detect_People_CV]') return except AttributeError: - pywikibot.output(u'WARNING: unknown file type [_detect_People_CV]') + pywikibot.warning(u'unknown file type [_detect_People_CV]') return
# similar to face detection @@ -529,10 +527,10 @@ # how small and how many features are detected scale = max([1., np.average(np.array(img.shape)[0:2]/500.)]) except IOError: - pywikibot.output(u'WARNING: unknown file type [_detect_Geometry_CV]') + pywikibot.warning(u'unknown file type [_detect_Geometry_CV]') return self._buffer_Geometry except AttributeError: - pywikibot.output(u'WARNING: unknown file type [_detect_Geometry_CV]') + pywikibot.warning(u'unknown file type [_detect_Geometry_CV]') return self._buffer_Geometry
# similar to face or people detection @@ -750,7 +748,7 @@ (l, t) = (0, 0) i = im except IOError: - pywikibot.output(u'WARNING: unknown file type [_detect_SegmentColors_JSEGnPIL]') + pywikibot.warning(u'unknown file type [_detect_SegmentColors_JSEGnPIL]') return
result = [] @@ -764,7 +762,7 @@ ##(pic, scale) = self._util_detect_ColorSegments_JSEG(pic) # (final split) #hist = self._util_get_ColorSegmentsHist_PIL(i, pic, scale) # except TypeError: - pywikibot.output(u'WARNING: unknown file type [_detect_SegmentColors_JSEGnPIL]') + pywikibot.warning(u'unknown file type [_detect_SegmentColors_JSEGnPIL]') return i = 0 # (may be do an additional region merge according to same color names...) @@ -805,7 +803,7 @@ i = Image.open(self.image_path_JPEG) h = i.histogram() except IOError: - pywikibot.output(u'WARNING: unknown file type [_detect_AverageColor_PILnCV]') + pywikibot.warning(u'unknown file type [_detect_AverageColor_PILnCV]') return
result = self._util_average_Color_colormath(h) @@ -863,7 +861,7 @@ try: i = Image.open(self.image_path) except IOError: - pywikibot.output(u'WARNING: unknown (image) file type [_detect_Properties_PIL]') + pywikibot.warning(u'unknown (image) file type [_detect_Properties_PIL]') return
# http://mail.python.org/pipermail/image-sig/1999-May/000740.html @@ -906,7 +904,7 @@ # result = {} # # DO NOT use ImageMagick (identify) instead of PIL to get these info !! else: - pywikibot.output(u'WARNING: unknown (generic) file type [_detect_Properties_PIL]') + pywikibot.warning(u'unknown (generic) file type [_detect_Properties_PIL]') return
result['Dimensions'] = self.image_size @@ -1001,7 +999,7 @@ try: smallImg = im.resize( tuple(np.int_(np.array(im.size)/scale)), Image.ANTIALIAS ) except IOError: - pywikibot.output(u'WARNING: unknown file type [_util_detect_ColorSegments_JSEG]') + pywikibot.warning(u'unknown file type [_util_detect_ColorSegments_JSEG]') return
#im.thumbnail(size, Image.ANTIALIAS) # size is 640x480 @@ -1070,7 +1068,7 @@ try: smallImg = im.resize( tuple(np.int_(np.array(im.size)/scale)), Image.ANTIALIAS ) except IOError: - pywikibot.output(u'WARNING: unknown file type [_util_get_ColorSegmentsHist_PIL]') + pywikibot.warning(u'unknown file type [_util_get_ColorSegmentsHist_PIL]') return
imgsize = float(smallImg.size[0]*smallImg.size[1]) @@ -1190,10 +1188,10 @@ # how small and how many features are detected scale = max([1., np.average(np.array(img.shape)[0:2]/maxdim)]) except IOError: - pywikibot.output(u'WARNING: unknown file type [_detect_Trained_CV]') + pywikibot.warning(u'unknown file type [_detect_Trained_CV]') return except AttributeError: - pywikibot.output(u'WARNING: unknown file type [_detect_Trained_CV]') + pywikibot.warning(u'unknown file type [_detect_Trained_CV]') return
# similar to face detection @@ -1358,10 +1356,10 @@ # pdfinterp.process_pdf(rsrcmgr, device, fp, set(), maxpages=0, password='', # caching=True, check_extractable=False) #except AssertionError: - # pywikibot.output(u'WARNING: pdfminer missed, may be corrupt [_detect_EmbeddedText_poppler]') + # pywikibot.warning(u'pdfminer missed, may be corrupt [_detect_EmbeddedText_poppler]') # return #except TypeError: - # pywikibot.output(u'WARNING: pdfminer missed, may be corrupt [_detect_EmbeddedText_poppler]') + # pywikibot.warning(u'pdfminer missed, may be corrupt [_detect_EmbeddedText_poppler]') # return #fp.close() #device.close() @@ -1417,7 +1415,7 @@
scale = max([1., np.average(np.array(img.size)/200.)]) except IOError: - pywikibot.output(u'WARNING: unknown file type [_recognize_OpticalCodes_dmtxNzbar]') + pywikibot.warning(u'unknown file type [_recognize_OpticalCodes_dmtxNzbar]') return
smallImg = img.resize( (int(img.size[0]/scale), int(img.size[1]/scale)) ) @@ -1454,7 +1452,7 @@ img = Image.open(self.image_path_JPEG).convert('L') width, height = img.size except IOError: - pywikibot.output(u'WARNING: unknown file type [_recognize_OpticalCodes_dmtxNzbar]') + pywikibot.warning(u'unknown file type [_recognize_OpticalCodes_dmtxNzbar]') return
scanner = zbar.ImageScanner() @@ -1504,10 +1502,10 @@
scale = max([1., np.average(np.array(im.shape)[0:2]/1000.)]) except IOError: - pywikibot.output(u'WARNING: unknown file type [_detect_Chessboard_CV]') + pywikibot.warning(u'unknown file type [_detect_Chessboard_CV]') return except AttributeError: - pywikibot.output(u'WARNING: unknown file type [_detect_Chessboard_CV]') + pywikibot.warning(u'unknown file type [_detect_Chessboard_CV]') return
smallImg = np.empty( (cv.Round(im.shape[1]/scale), cv.Round(im.shape[0]/scale)), dtype=np.uint8 ) @@ -1522,7 +1520,7 @@ #found_all, corners = cv.FindChessboardCorners( im, chessboard_dim ) found_all, corners = cv2.findChessboardCorners( im, chessboard_dim ) except cv2.error, e: - pywikibot.output(u'%s' % e) + pywikibot.error(u'%s' % e)
#cv2.drawChessboardCorners( im, chessboard_dim, corners, found_all ) ##cv2.imshow("win", im) @@ -1806,7 +1804,7 @@ try: (width, height) = (int(float(width)+0.5), int(float(height)+0.5)) except ValueError: - pywikibot.output(u'WARNING: %s contains incompatible unit(s), skipped' % ((width, height),)) + pywikibot.warning(u'%s contains incompatible unit(s), skipped' % ((width, height),)) return else: (width, height) = self.image_size @@ -1921,8 +1919,8 @@ available = [item in res for item in ['FacesDetected', 'ValidAFPoints']] unknown = ['face' in item.lower() for item in res.keys()] if make and (True in (available+unknown)): - pywikibot.output(u"WARNING: skipped '%s' since not supported (yet) [_detect_Faces_EXIF]" % make) - pywikibot.output(u"WARNING: FacesDetected: %s - ValidAFPoints: %s" % tuple(available)) + pywikibot.warning(u"skipped '%s' since not supported (yet) [_detect_Faces_EXIF]" % make) + pywikibot.warning(u"FacesDetected: %s - ValidAFPoints: %s" % tuple(available))
# finally, rotate face coordinates if image was rotated if wasRotated: @@ -2209,7 +2207,7 @@
# Load important components if (yaafe.loadComponentLibrary('yaafe-io')!=0): - pywikibot.output(u'WARNING: cannot load yaafe-io component library !') # ! needed, else it will crash ! + pywikibot.warning(u'cannot load yaafe-io component library !') # ! needed, else it will crash !
# Build a DataFlow object using FeaturePlan fp = yaafe.FeaturePlan(sample_rate=44100, normalize=0.98, resample=False) @@ -2764,7 +2762,7 @@ #self.image_size = (None, None) mime = mimetypes.guess_all_extensions('%s/%s' % tuple(self.image_mime[0:2])) if self.image_fileext.lower() not in mime: - pywikibot.output(u'WARNING: File extension does not match MIME type! File extension should be %s.' % mime) + pywikibot.warning(u'File extension does not match MIME type! File extension should be %s.' % mime)
# SVG: rasterize the SVG to bitmap (MAY BE GET FROM WIKI BY DOWNLOAD?...) # (Mediawiki uses librsvg too: http://commons.wikimedia.org/wiki/SVG#SVGs_in_MediaWiki) @@ -3481,12 +3479,12 @@ Bot.downloadImage() except IOError, err: # skip if download not possible - pywikibot.output(u"WARNING: %s, skipped..." % err) + pywikibot.warning(u"%s, skipped..." % err) continue except Exception, err: # skip on any unexpected error, but report it - pywikibot.output(u"ERROR: %s" % err) - pywikibot.output(u"ERROR: was not able to process page %s !!!\n" %\ + pywikibot.error(u"%s" % err) + pywikibot.error(u"was not able to process page %s !!!\n" %\ image.title(asLink=True)) continue resultCheck = Bot.checkStep() @@ -3496,8 +3494,8 @@ if ret: outresult.append( ret ) except AttributeError: - pywikibot.output(u"ERROR: was not able to process page %s !!!\n" %\ - image.title(asLink=True)) + pywikibot.error(u"was not able to process page %s !!!\n" %\ + image.title(asLink=True)) limit += -1 if not tagged: posfile = open(os.path.join(scriptdir, 'cache/catimages_start'), "w") @@ -3548,13 +3546,13 @@ Bot.downloadImage() except IOError, err: # skip if download not possible - pywikibot.output(u"WARNING: %s, skipped..." % err) + pywikibot.warning(u"%s, skipped..." % err) continue except Exception, err: # skip on any unexpected error, but report it - pywikibot.output(u"ERROR: %s" % err) - pywikibot.output(u"ERROR: was not able to process page %s !!!\n" %\ - image.title(asLink=True)) + pywikibot.error(u"%s" % err) + pywikibot.error(u"was not able to process page %s !!!\n" %\ + image.title(asLink=True)) continue
# gather all features (information) related to current image
Modified: trunk/pywikipedia/catlib.py =================================================================== --- trunk/pywikipedia/catlib.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/catlib.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -655,8 +655,8 @@ newCatList.append(cat)
if not changesMade: - wikipedia.output(u'ERROR: %s is not in category %s!' - % (article.title(asLink=True), oldCat.title())) + wikipedia.error(u'%s is not in category %s!' + % (article.title(asLink=True), oldCat.title())) else: text = article.get(get_redirect=True) try:
Modified: trunk/pywikipedia/censure.py =================================================================== --- trunk/pywikipedia/censure.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/censure.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -33,13 +33,13 @@
site = pywikibot.getSite() if not (site.language() + '.' + site.family.name) in badWordList or not (site.language() + '.' + site.family.name) in logPages: - pywikibot.output('Error: your language isn\'t supported, see the source code for further details') + pywikibot.error('your language isn\'t supported, see the source code for further details') sys.exit(1) ownWordPage = pywikibot.Page(site, badWordList[site.language() + '.' + site.family.name]) try: ownWordList = ownWordPage.get(get_redirect = True) except pywikibot.NoPage: - pywikibot.output('Error: the page containing the bad word list of your language doesn\'t exist') + pywikibot.error('the page containing the bad word list of your language doesn\'t exist') sys.exit(1) ownWordList = ownWordList.split('\n') del ownWordList[0] @@ -109,7 +109,7 @@ pywikibot.output(u'%s doesn\'t match any of the bad word list' %title)
def main(): - pywikibot.output('Warning: this script should not be run manually/directly, but automatically by maintainer.py') + pywikibot.warning('this script should not be run manually/directly, but automatically by maintainer.py') if len(sys.argv) == 1: pywikibot.output("Usage: censure.py <article title>") sys.exit(1)
Modified: trunk/pywikipedia/checkimages.py =================================================================== --- trunk/pywikipedia/checkimages.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/checkimages.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -1600,7 +1600,7 @@ elif imagechanges.lower() == 'true': imagestatus = True else: - pywikibot.output(u"Error! Imagechanges set wrongly!") + pywikibot.error(u"Imagechanges set wrongly!") self.settingsData = None break summary = tupla[5]
Modified: trunk/pywikipedia/featured.py =================================================================== --- trunk/pywikipedia/featured.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/featured.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -322,8 +322,8 @@ try: method = info[site.lang][0] except KeyError: - pywikibot.output( - u'Error: language %s doesn\'t has %s category source.' + pywikibot.error( + u'language %s doesn\'t has %s category source.' % (site.lang, pType)) return name = info[site.lang][1]
Modified: trunk/pywikipedia/fixing_redirects.py =================================================================== --- trunk/pywikipedia/fixing_redirects.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/fixing_redirects.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -187,7 +187,7 @@ try: page.put(text, comment) except (pywikibot.Error): - pywikibot.output('Error: unable to put %s' % page) + pywikibot.error('unable to put %s' % page)
def main(): featured = False
Modified: trunk/pywikipedia/followlive.py =================================================================== --- trunk/pywikipedia/followlive.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/followlive.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -456,14 +456,14 @@ choices = answer[1:].split(',') except ValueError: # User entered wrong value - pywikibot.output(u'ERROR: "%s" is not valid' % answer) + pywikibot.error(u'"%s" is not valid' % answer) continue else: try: choices = answer.split(',') except ValueError: # User entered wrong value - pywikibot.output(u'ERROR: "%s" is not valid' % answer) + pywikibot.error(u'"%s" is not valid' % answer) continue #test input for choice in choices: @@ -474,7 +474,7 @@ else: answered = x in range(1, len(questionlist)+1) if not answered: - pywikibot.output(u'ERROR: "%s" is not valid' % answer) + pywikibot.error(u'"%s" is not valid' % answer) continue summary = u'' for choice in choices: @@ -489,8 +489,8 @@ pywikibot.output(u'appending %s...' % questionlist[answer]) self.content += '\n' + questionlist[answer] else: - pywikibot.output( - u'ERROR: "pos" should be "top" or "bottom" for template ' + pywikibot.error( + u'"pos" should be "top" or "bottom" for template ' u'%s. Contact a developer.' % questionlist[answer]) sys.exit("Exiting") summary += tpl['msg']+' ' @@ -537,8 +537,8 @@ if __name__ == "__main__": try: for arg in pywikibot.handleArgs(): - pywikibot.output( - u'Warning: argument "%s" not understood; ignoring.' % arg) + pywikibot.warning( + u'argument "%s" not understood; ignoring.' % arg) bot = CleaningBot() bot.run() except:
Modified: trunk/pywikipedia/imagecopy_self.py =================================================================== --- trunk/pywikipedia/imagecopy_self.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/imagecopy_self.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -913,10 +913,10 @@
def main(args): - pywikibot.output(u'WARNING: This is an experimental bot') - pywikibot.output(u'WARNING: It will only work on self published work images') - pywikibot.output(u'WARNING: This bot is still full of bugs') - pywikibot.output(u'WARNING: Use at your own risk!') + pywikibot.warning(u'This is an experimental bot') + pywikibot.warning(u'It will only work on self published work images') + pywikibot.warning(u'This bot is still full of bugs') + pywikibot.warning(u'Use at your own risk!')
generator = None; autonomous = False
Modified: trunk/pywikipedia/imagerecat.py =================================================================== --- trunk/pywikipedia/imagerecat.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/imagerecat.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -230,7 +230,7 @@ elif addresspart.tag in invalidParts: pywikibot.output(u'Dropping %s, %s' % (addresspart.tag, addresspart.text)) else: - pywikibot.output(u'WARNING %s, %s is not in addressparts lists' % (addresspart.tag, addresspart.text)) + pywikibot.warning(u'%s, %s is not in addressparts lists' % (addresspart.tag, addresspart.text)) #print result return result
Modified: trunk/pywikipedia/interwiki.py =================================================================== --- trunk/pywikipedia/interwiki.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/interwiki.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -1267,8 +1267,8 @@ dictName, year = page.autoFormat() if dictName is not None: if self.originPage: - pywikibot.output( - u'WARNING: %s:%s relates to %s:%s, which is an ' + pywikibot.warning( + u'%s:%s relates to %s:%s, which is an ' u'auto entry %s(%s)' % (self.originPage.site.language(), self.originPage, page.site.language(), page, dictName, year)) @@ -1925,8 +1925,8 @@ rmPage.site.lang in ['hak', 'hi', 'cdo', 'sa'] and \ pywikibot.unicode_error: # work-arround for bug #3081100 (do not remove affected pages) new[rmsite] = rmPage - pywikibot.output( - u"WARNING: %s is either deleted or has a mismatching disambiguation state." + pywikibot.warning( + u"%s is either deleted or has a mismatching disambiguation state." % rmPage) # Re-Check what needs to get done mods, mcomment, adding, removing, modifying = compareLanguages(old, @@ -1977,8 +1977,8 @@ if pywikibot.unicode_error: for x in removing: if x.lang in ['hi', 'cdo']: - pywikibot.output( -u'\03{lightred}WARNING: This may be false positive due to unicode bug #3081100\03{default}') + pywikibot.warning( +u'\03{lightred}This may be false positive due to unicode bug #3081100\03{default}') break ask = True if globalvar.force or globalvar.cleanup: @@ -2029,21 +2029,21 @@ pywikibot.output(u'Page %s is locked. Skipping.' % page) raise SaveError(u'Locked') except pywikibot.EditConflict: - pywikibot.output( - u'ERROR putting page: An edit conflict occurred. Giving up.') + pywikibot.error( + u'putting page: An edit conflict occurred. Giving up.') raise SaveError(u'Edit conflict') except (pywikibot.SpamfilterError), error: - pywikibot.output( - u'ERROR putting page: %s blacklisted by spamfilter. Giving up.' + pywikibot.error( + u'putting page: %s blacklisted by spamfilter. 
Giving up.' % (error.url,)) raise SaveError(u'Spam filter') except (pywikibot.PageNotSaved), error: - pywikibot.output(u'ERROR putting page: %s' % (error.args,)) + pywikibot.error(u'putting page: %s' % (error.args,)) raise SaveError(u'PageNotSaved') except (socket.error, IOError), error: if timeout>3600: raise - pywikibot.output(u'ERROR putting page: %s' % (error.args,)) + pywikibot.error(u'putting page: %s' % (error.args,)) pywikibot.output(u'Sleeping %i seconds before trying again.' % (timeout,)) timeout *= 2 @@ -2051,7 +2051,7 @@ except pywikibot.ServerError: if timeout > 3600: raise - pywikibot.output(u'ERROR putting page: ServerError.') + pywikibot.error(u'putting page: ServerError.') pywikibot.output(u'Sleeping %i seconds before trying again.' % (timeout,)) timeout *= 2 @@ -2088,8 +2088,8 @@ try: linkedPages = set(page.interwiki()) except pywikibot.NoPage: - pywikibot.output( - u"WARNING: Page %s does no longer exist?!" % page) + pywikibot.warning( + u"Page %s does no longer exist?!" % page) break # To speed things up, create a dictionary which maps sites # to pages. This assumes that there is only one interwiki @@ -2101,14 +2101,14 @@ if expectedPage != page: try: linkedPage = linkedPagesDict[expectedPage.site] - pywikibot.output( - u"WARNING: %s: %s does not link to %s but to %s" + pywikibot.warning( + u"%s: %s does not link to %s but to %s" % (page.site.family.name, page, expectedPage, linkedPage)) except KeyError: if not expectedPage.site.is_data_repository(): - pywikibot.output( - u"WARNING: %s: %s does not link to %s" + pywikibot.warning( + u"%s: %s does not link to %s" % (page.site.family.name, page, expectedPage)) # Check for superfluous links @@ -2117,12 +2117,12 @@ # Check whether there is an alternative page on that language. # In this case, it was already reported above. 
if linkedPage.site not in expectedSites: - pywikibot.output( - u"WARNING: %s: %s links to incorrect %s" + pywikibot.warning( + u"%s: %s links to incorrect %s" % (page.site.family.name, page, linkedPage)) except (socket.error, IOError): - pywikibot.output(u'ERROR: could not report backlinks') + pywikibot.error(u'could not report backlinks')
class InterwikiBot(object): @@ -2282,8 +2282,8 @@ self.generateMore(globalvar.maxquerysize - mycount) except pywikibot.ServerError: # Could not extract allpages special page? - pywikibot.output( - u'ERROR: could not retrieve more pages. Will try again in %d seconds' + pywikibot.error( + u'could not retrieve more pages. Will try again in %d seconds' % timeout) time.sleep(timeout) timeout *= 2
Modified: trunk/pywikipedia/login.py =================================================================== --- trunk/pywikipedia/login.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/login.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -391,7 +391,7 @@ return
if pywikibot.verbose > 1: - pywikibot.output(u"WARNING: Using -v -v on login.py might leak private data. When sharing, please double check your password is not readable and log out your bots session.") + pywikibot.warning(u"Using -v -v on login.py might leak private data. When sharing, please double check your password is not readable and log out your bots session.") verbose = True # only use this verbose when running from login.py if logall: if sysop:
Modified: trunk/pywikipedia/lonelypages.py =================================================================== --- trunk/pywikipedia/lonelypages.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/lonelypages.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -208,7 +208,7 @@ for j in refs: if j == None: # We have to find out why the function returns that value - pywikibot.output(u'Error: 1 --> Skip page') + pywikibot.error(u'1 --> Skip page') continue refsList.append(j) # This isn't possible with a generator @@ -218,7 +218,7 @@ # Never understood how a list can turn in "None", but it happened :-S elif refsList == None: # We have to find out why the function returns that value - pywikibot.output(u'Error: 2 --> Skip page') + pywikibot.error(u'2 --> Skip page') continue else: # Ok, no refs, no redirect... let's check if there's already the template
Modified: trunk/pywikipedia/movepages.py =================================================================== --- trunk/pywikipedia/movepages.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/movepages.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -238,8 +238,8 @@ else: oldName1 = page.title() if oldName1: - pywikibot.output( - u'WARNING: file %s contains odd number of links' % filename) + pywikibot.warning( + u'file %s contains odd number of links' % filename) elif arg == '-noredirect': noredirect = True elif arg == '-always': @@ -248,14 +248,14 @@ skipredirects = True elif arg.startswith('-from:'): if oldName: - pywikibot.output(u'WARNING: -from:%s without -to:' % oldName) + pywikibot.warning(u'-from:%s without -to:' % oldName) oldName = arg[len('-from:'):] elif arg.startswith('-to:'): if oldName: fromToPairs.append([oldName, arg[len('-to:'):]]) oldName = None else: - pywikibot.output(u'WARNING: %s without -from' % arg) + pywikibot.warning(u'%s without -from' % arg) elif arg.startswith('-prefix'): if len(arg) == len('-prefix'): prefix = pywikibot.input(u'Enter the prefix:') @@ -270,7 +270,7 @@ genFactory.handleArg(arg)
if oldName: - pywikibot.output(u'WARNING: -from:%s without -to:' % oldName) + pywikibot.warning(u'-from:%s without -to:' % oldName) for pair in fromToPairs: page = pywikibot.Page(pywikibot.getSite(), pair[0]) bot = MovePagesBot(None, prefix, noredirect, always, skipredirects,
Modified: trunk/pywikipedia/patrol.py =================================================================== --- trunk/pywikipedia/patrol.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/patrol.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -102,7 +102,7 @@ # cascade if there isnt a whitelist to fallback on if not self.whitelist: raise - pywikibot.output(u'Error: ' + e) + pywikibot.error(u'%s' % e)
def add_to_tuples(self, tuples, user, page): if pywikibot.verbose:
Modified: trunk/pywikipedia/pywikibot/comms/http.py =================================================================== --- trunk/pywikipedia/pywikibot/comms/http.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/pywikibot/comms/http.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -121,9 +121,9 @@ # read & info can raise socket.error headers = f.info() if (int(headers.get('content-length', '-1')) > 1E7): - pywikibot.output(u'WARNING: Target is of huge size (>10MB) is ' - u'that correct? Downloading will take some ' - u'time, please be patient.') + pywikibot.warning(u'Target is of huge size (>10MB) is ' + u'that correct? Downloading will take some ' + u'time, please be patient.') text = f.read() break except KeyboardInterrupt: @@ -143,8 +143,8 @@ retry_attempt += 1 if retry_attempt > config.maxretries: raise MaxTriesExceededError() - pywikibot.output( - u"WARNING: Could not open '%s'.Maybe the server or\n " + pywikibot.warning( + u"Could not open '%s'.Maybe the server or\n " u"your connection is down. Retrying in %i minutes..." % (url, retry_idle_time)) time.sleep(retry_idle_time * 60) @@ -164,8 +164,8 @@ retry_attempt += 1 if retry_attempt > config.maxretries: raise MaxTriesExceededError() - pywikibot.output( - u"WARNING: Could not open '%s'. Maybe the server or\n your " + pywikibot.warning( + u"Could not open '%s'. Maybe the server or\n your " u"connection is down. Retrying in %i minutes..." % (url, retry_idle_time)) time.sleep(retry_idle_time * 60) @@ -198,8 +198,8 @@ # We need to split it to get a value content_length = int(headers.get('content-length', '0').split(',')[0]) if content_length != len(text) and 'content-length' in headers: - pywikibot.output( - u'Warning! 
len(text) does not match content-length: %s != %s' + pywikibot.warning( + u'len(text) does not match content-length: %s != %s' % (len(text), content_length)) return request(site, uri, retry, sysop, data, compress, no_hostname, cookie_only, back_response) @@ -213,7 +213,7 @@ charset = m.group(1) else: if pywikibot.verbose: - pywikibot.output(u"WARNING: No character set found.") + pywikibot.warning(u"No character set found.") # UTF-8 as default charset = 'utf-8' # Check if this is the charset we expected @@ -223,9 +223,9 @@ if (not back_response) or verbose: pywikibot.output(u'%s' %e) if no_hostname: - pywikibot.output(u'ERROR: Invalid charset found on %s.' % uri) + pywikibot.error(u'Invalid charset found on %s.' % uri) else: - pywikibot.output(u'ERROR: Invalid charset found on %s://%s%s.' + pywikibot.error(u'Invalid charset found on %s://%s%s.' % (site.protocol(), site.hostname(), uri)) # Convert HTML to Unicode try: @@ -234,10 +234,10 @@ if (not back_response) or verbose: pywikibot.output(u'%s' %e) if no_hostname: - pywikibot.output(u'ERROR: Invalid characters found on %s, ' - u'replaced by \ufffd.' % uri) + pywikibot.error(u'Invalid characters found on %s, ' + u'replaced by \ufffd.' % uri) else: - pywikibot.output(u'ERROR: Invalid characters found on %s://%s%s, ' + pywikibot.error(u'Invalid characters found on %s://%s%s, ' u'replaced by \ufffd.' % (site.protocol(), site.hostname(), uri)) # We use error='replace' in case of bad encoding.
Modified: trunk/pywikipedia/rciw.py =================================================================== --- trunk/pywikipedia/rciw.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/rciw.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -70,7 +70,7 @@ self.queue.put_nowait(page)
def main(): - pywikibot.output('Warning: this script can not be run manually/directly, but automatically by maintainer.py') + pywikibot.warning('this script can not be run manually/directly, but automatically by maintainer.py')
if __name__ == "__main__": main()
Modified: trunk/pywikipedia/redirect.py =================================================================== --- trunk/pywikipedia/redirect.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/redirect.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -559,17 +559,17 @@ else: pass # target found except pywikibot.SectionError: - pywikibot.output( - u'Warning: Redirect target section %s doesn\'t exist.' + pywikibot.warning( + u'Redirect target section %s doesn\'t exist.' % newRedir.title(asLink=True)) except pywikibot.BadTitle, e: # str(e) is in the format 'BadTitle: [[Foo]]' - pywikibot.output( - u'Warning: Redirect target %s is not a valid page title.' + pywikibot.warning( + u'Redirect target %s is not a valid page title.' % str(e)[10:]) #sometimes this error occures. Invalid Title starting with a '#' except pywikibot.InvalidTitle, err: - pywikibot.output(u'Warning: %s' % err) + pywikibot.warning(u'%s' % err) break except pywikibot.NoPage: if len(redirList) == 1: @@ -583,8 +583,8 @@ % newRedir.title(asLink=True)) break # skip if automatic else: - pywikibot.output( - u"Warning: Redirect target %s doesn't exist." + pywikibot.warning( + u"Redirect target %s doesn't exist." % newRedir.title(asLink=True)) except pywikibot.ServerError: pywikibot.output(u'Skipping due to server error: ' @@ -602,8 +602,8 @@ u"Skipping toolbar example: Redirect source is potentially vandalized.") break if targetPage.site != self.site: - pywikibot.output( - u'Warning: redirect target (%s) is on a different site.' + pywikibot.warning( + u'redirect target (%s) is on a different site.' % targetPage.title(asLink=True)) if self.always: break # skip if automatic @@ -612,8 +612,8 @@ % (targetPage.site.lang, targetPage.sectionFreeTitle()) ) > 0: - pywikibot.output( - u'Warning: Redirect target %s forms a redirect loop.' + pywikibot.warning( + u'Redirect target %s forms a redirect loop.' % targetPage.title(asLink=True)) break ### doesn't work. edits twice! ## try:
Modified: trunk/pywikipedia/reflinks.py =================================================================== --- trunk/pywikipedia/reflinks.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/reflinks.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -464,7 +464,7 @@ u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url)) except pywikibot.PageNotSaved, error: - pywikibot.output(u'Error putting page: %s' % (error.args,)) + pywikibot.error(u'putting page: %s' % (error.args,)) except pywikibot.LockedPage: pywikibot.output(u'Skipping %s (locked page)' % (page.title(),))
Modified: trunk/pywikipedia/replace.py =================================================================== --- trunk/pywikipedia/replace.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/replace.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -549,8 +549,8 @@ u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url)) except pywikibot.PageNotSaved, error: - pywikibot.output(u'Error putting page: %s' - % (error.args,)) + pywikibot.error(u'putting page: %s' + % (error.args,)) except pywikibot.LockedPage: pywikibot.output(u'Skipping %s (locked page)' % (page.title(),))
Modified: trunk/pywikipedia/selflink.py =================================================================== --- trunk/pywikipedia/selflink.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/selflink.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -132,7 +132,7 @@ try: linkedPage = pywikibot.Page(page.site(), match.group('title')) except pywikibot.InvalidTitle, err: - pywikibot.output(u'Warning: %s' % err) + pywikibot.warning(u'%s' % err) return text, False
# Check whether the link found is to the current page itself.
Modified: trunk/pywikipedia/subster.py =================================================================== --- trunk/pywikipedia/subster.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/subster.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -172,7 +172,7 @@ pywikibot.output(u'Setting process TimeZone (TZ): %s' % str(time.tzname)) # ('CET', 'CEST') else: # e.g. windows doesn't have that attribute - pywikibot.output(u'WARNING: This operating system has NO SUPPORT for setting TimeZone by code! Before running this script, please set the TimeZone manually to one approriate for use with the Wikipedia language and region you intend to.') + pywikibot.warning(u'This operating system has NO SUPPORT for setting TimeZone by code! Before running this script, please set the TimeZone manually to one approriate for use with the Wikipedia language and region you intend to.')
# init constants self._bot_config = bot_config
Modified: trunk/pywikipedia/subster_irc.py =================================================================== --- trunk/pywikipedia/subster_irc.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/subster_irc.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -130,7 +130,7 @@ try: thread.start_new_thread( main_subster, (self.refs[page_title], params) ) except: - pywikibot.output(u"WARNING: unable to start thread") + pywikibot.warning(u"unable to start thread")
# Define a function for the thread def main_subster(page, params=None):
Modified: trunk/pywikipedia/sum_disc.py =================================================================== --- trunk/pywikipedia/sum_disc.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/sum_disc.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -387,7 +387,7 @@ # warnings/exceptions are printed to log, could be get by panel.py from there! # (separate and explicit warning handling not used anymore) #for warning in self._global_warn: # output all warnings to log (what about a special wiki page?) - # pywikibot.output( "%s: %s" % warning ) + # pywikibot.warning( "%s: %s" % warning )
def compressHistory(self, users = []): """Read history, and re-write new history without any duplicates.
Modified: trunk/pywikipedia/table2wiki.py =================================================================== --- trunk/pywikipedia/table2wiki.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/table2wiki.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -466,7 +466,7 @@ try: text = page.get() except pywikibot.NoPage: - pywikibot.output(u"ERROR: couldn't find %s" % page.title()) + pywikibot.error(u"couldn't find %s" % page.title()) return False except pywikibot.IsRedirectPage: pywikibot.output(u'Skipping redirect %s' % page.title()) @@ -476,8 +476,8 @@ # Check if there are any marked tags left markedTableTagR = re.compile("<##table##|</##table##>", re.IGNORECASE) if markedTableTagR.search(newText): - pywikibot.output( - u'ERROR: not all marked table start or end tags processed!') + pywikibot.error( + u'not all marked table start or end tags processed!') return
if convertedTables == 0:
Modified: trunk/pywikipedia/upload.py =================================================================== --- trunk/pywikipedia/upload.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/upload.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -167,8 +167,8 @@ dt += 60 else: if pywikibot.verbose: - pywikibot.output( -u"WARNING: No check length to retrieved data is possible.") + pywikibot.warning( +u"No check length to retrieved data is possible.") else: # Opening local files with MyURLopener would be possible, but we # don't do it because it only accepts ASCII characters in the
Modified: trunk/pywikipedia/us-states.py =================================================================== --- trunk/pywikipedia/us-states.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/us-states.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -93,8 +93,8 @@ elif arg == '-force': force = True else: - pywikibot.output( - u'Warning: argument "%s" not understood; ignoring.' % arg) + pywikibot.warning( + u'argument "%s" not understood; ignoring.' % arg)
mysite = pywikibot.getSite() for p in mysite.allpages(start = start): @@ -111,12 +111,12 @@ u"Not creating %s - redirect already exists." % goal) else: - pywikibot.output( - u"WARNING!!! %s already exists but redirects elsewhere!" + pywikibot.warning( + u"%s already exists but redirects elsewhere!" % goal) except pywikibot.IsNotRedirectPage: - pywikibot.output( - u"WARNING!!! Page %s already exists and is not a redirect. Please check page!" + pywikibot.warning( + u"Page %s already exists and is not a redirect. Please check page!" % goal) except pywikibot.NoPage: change=''
Modified: trunk/pywikipedia/welcome.py =================================================================== --- trunk/pywikipedia/welcome.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/welcome.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -499,7 +499,7 @@ pywikibot.output(u"The whitelist's page doesn't exist!") else: showStatus(4) - pywikibot.output(u"WARNING: The whitelist hasn't been setted!") + pywikibot.warning(u"The whitelist hasn't been setted!")
# Join the whitelist words. self._whitelist = list_white + whitelist_default @@ -730,7 +730,7 @@ f = codecs.open(pywikibot.config.datafilepath(globalvar.signFileName), 'r', encoding='utf-8') except IOError: - pywikibot.output(u'Error! - No fileName!') + pywikibot.error(u'No fileName!') raise FilenameNotSet("No signature filename specified.")
signText = f.read() @@ -971,7 +971,7 @@ # file where is stored the random signature index filename = pywikibot.config.datafilepath('welcome-%s-%s.data' % (pywikibot.default_family, pywikibot.default_code)) if globalvar.offset and globalvar.timeoffset: - pywikibot.output('WARING: both -offset and -timeoffset were provided, ignoring -offset') + pywikibot.warning('both -offset and -timeoffset were provided, ignoring -offset') globalvar.offset = 0 bot = WelcomeBot() try:
Modified: trunk/pywikipedia/wikipedia.py =================================================================== --- trunk/pywikipedia/wikipedia.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/wikipedia.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -9225,7 +9225,7 @@ text += u'\n' caller = inspect.getouterframes(inspect.currentframe())[1][3] if not (caller == 'logoutput'): - _logOld(text) + logoutput(text) if input_lock.locked(): cache_output(text, toStdout = toStdout) else: @@ -9241,21 +9241,6 @@ (args, kwargs) = output_cache.pop(0) ui.output(*args, **kwargs)
-def _logOld(text): - """Write the given text to the logfile.""" - if logger: - # remove all color markup - plaintext = colorTagR.sub('', text) - # save the text in a logfile (will be written in utf-8) - for line in plaintext.splitlines(): - type = line.split(':') - func = 'info' - if len(type) > 1: - func = type[0].strip().lower() - if func not in ['debug', 'warning', 'error', 'critical']: - func = 'info' - getattr(logger, func)(line.rstrip()) - output = _outputOld
def stdout(text, decoder=None, newline=True, **kwargs):
Modified: trunk/pywikipedia/xmlreader.py =================================================================== --- trunk/pywikipedia/xmlreader.py 2013-04-19 15:37:30 UTC (rev 11392) +++ trunk/pywikipedia/xmlreader.py 2013-04-19 21:28:02 UTC (rev 11393) @@ -289,8 +289,8 @@ """Return a generator that will yield XmlEntry objects""" print 'Reading XML dump...' if not 'iterparse' in globals(): - pywikibot.output( -u'''WARNING: cElementTree not found. Using slower fallback solution. + pywikibot.warning( +u'''cElementTree not found. Using slower fallback solution. Consider installing the python-celementtree package.''') return self.regex_parse() else: