jenkins-bot merged this change.

View Change

Approvals: Xqt: Looks good to me, approved; jenkins-bot: Verified
refactor: remove unused local variable assignments

Change-Id: I6931b0abd6e635412c26dbb947a7b78f9d973c05
---
M pywikibot/botirc.py
M pywikibot/i18n.py
M pywikibot/site.py
M pywikibot/specialbots.py
M scripts/category.py
M scripts/cfd.py
M scripts/commonscat.py
M scripts/data_ingestion.py
M scripts/imagecopy_self.py
M scripts/imagerecat.py
M scripts/imagetransfer.py
M scripts/interwiki.py
M scripts/listpages.py
M scripts/match_images.py
M scripts/nowcommons.py
M scripts/patrol.py
M scripts/protect.py
M scripts/redirect.py
M scripts/reflinks.py
M scripts/touch.py
M tests/djvu_tests.py
M tests/flow_thanks_tests.py
M tests/generate_family_files_tests.py
M tests/site_tests.py
M tests/thanks_tests.py
M tests/xmlreader_tests.py
26 files changed, 18 insertions(+), 56 deletions(-)

diff --git a/pywikibot/botirc.py b/pywikibot/botirc.py
index 3286341..c7abf71 100644
--- a/pywikibot/botirc.py
+++ b/pywikibot/botirc.py
@@ -104,7 +104,7 @@
entry = self.api_found.findall(text)
page = pywikibot.Page(self.site, name)
try:
- text = page.get()
+ page.get()
except pywikibot.NoPage:
return
except pywikibot.IsRedirectPage:
diff --git a/pywikibot/i18n.py b/pywikibot/i18n.py
index d138c13..607553b 100644
--- a/pywikibot/i18n.py
+++ b/pywikibot/i18n.py
@@ -435,20 +435,19 @@
if twtitle in _cache[lang]:
return _cache[lang][twtitle]
message_bundle = twtitle.split('-')[0]
- trans_text = None
filename = '%s/%s.json' % (message_bundle, lang)
try:
trans_text = pkgutil.get_data(
_messages_package_name, filename).decode('utf-8')
except (OSError, IOError): # file open can cause several exceptions
_cache[lang][twtitle] = None
- return
+ return None
transdict = json.loads(trans_text)
_cache[lang].update(transdict)
try:
return transdict[twtitle]
except KeyError:
- return
+ return None


def _extract_plural(code, message, parameters):
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 53928d7..22bcc27 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -2632,8 +2632,6 @@

for nsdata in self.siteinfo.get('namespaces', cache=False).values():
ns = nsdata.pop('id')
- custom_name = None
- canonical_name = None
if ns == 0:
canonical_name = nsdata.pop('*')
custom_name = canonical_name
diff --git a/pywikibot/specialbots.py b/pywikibot/specialbots.py
index 8c5448b..2bfbed1 100644
--- a/pywikibot/specialbots.py
+++ b/pywikibot/specialbots.py
@@ -3,7 +3,7 @@
"""Library containing special bots."""
#
# (C) Rob W.W. Hooft, Andre Engels 2003-2004
-# (C) Pywikibot team, 2003-2018
+# (C) Pywikibot team, 2003-2019
#
# Distributed under the terms of the MIT license.
#
@@ -422,7 +422,6 @@

pywikibot.output('Uploading file to {0}...'.format(site))

- success = False
ignore_warnings = self.ignoreWarning is True or self._handle_warnings
if ('://' in file_url
and 'upload_by_url' not in site.userinfo['rights']):
@@ -441,10 +440,8 @@
% site)
else:
pywikibot.error('Upload error: ', exc_info=True)
- return None
except Exception:
pywikibot.error('Upload error: ', exc_info=True)
- return None
else:
if success:
# No warning, upload complete.
@@ -453,7 +450,7 @@
return filename # data['filename']
else:
pywikibot.output('Upload aborted.')
- return None
+ return None

def run(self):
"""Run bot."""
diff --git a/scripts/category.py b/scripts/category.py
index 9be9060..e07d71e 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -1365,9 +1365,6 @@
local_args = pywikibot.handle_args(args)
gen_factory = pagegenerators.GeneratorFactory()

- # The generator gives the pages that should be worked upon.
- gen = None
-
# When this is True then the custom edit summary given for removing
# categories from articles will also be used as the deletion reason.
# Otherwise it will generate deletion specific comments.
@@ -1444,7 +1441,6 @@
else:
gen_factory.handleArg(arg)

- cat_db = None
bot = None

cat_db = CategoryDatabase(rebuild=rebuild)
diff --git a/scripts/cfd.py b/scripts/cfd.py
index ff5d6e4..f232d25 100755
--- a/scripts/cfd.py
+++ b/scripts/cfd.py
@@ -107,18 +107,14 @@
day = 'None'
mode = 'None'
summary = ''
- action_summary = ''
robot = None

m = ReCheck()
for line in page.text.split('\n'):
- if nobots.search(line):
- # NO BOTS!!!
- pass
- elif example.search(line):
- # Example line
- pass
- elif speedymode.search(line):
+ if nobots.search(line) or example.search(line):
+ # NO BOTS or example line
+ continue
+ if speedymode.search(line):
mode = 'Speedy'
day = 'None'
elif movemode.search(line):
diff --git a/scripts/commonscat.py b/scripts/commonscat.py
index a8cf10b..53cc9d0 100755
--- a/scripts/commonscat.py
+++ b/scripts/commonscat.py
@@ -400,8 +400,6 @@
primaryCommonscat, commonscatAlternatives = i18n.translate(
wikipediaPage.site.code, commonscatTemplates,
fallback=i18n.DEFAULT_FALLBACK)
- commonscatTemplate = ''
- commonscatTarget = ''
commonscatLinktext = ''
commonscatNote = ''
# See if commonscat is present
diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py
index ce846a0..ed8977f 100755
--- a/scripts/data_ingestion.py
+++ b/scripts/data_ingestion.py
@@ -63,7 +63,7 @@
urlparse(URL)[2])[1]
self.metadata['_ext'] = ext = filename.split('.')[-1]
if ext == filename:
- self.metadata['_ext'] = ext = None
+ self.metadata['_ext'] = None
self.contents = None

if not site:
diff --git a/scripts/imagecopy_self.py b/scripts/imagecopy_self.py
index 57394c8..afe7ae2 100644
--- a/scripts/imagecopy_self.py
+++ b/scripts/imagecopy_self.py
@@ -431,9 +431,6 @@
# "strange" fields

description = ''
- source = ''
- date = ''
- author = ''
permission = ''
other_versions = ''
contents = {}
diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py
index 6e66b08..04e3fd0 100755
--- a/scripts/imagerecat.py
+++ b/scripts/imagerecat.py
@@ -287,7 +287,7 @@
result = []
lang = ''
project = ''
- article = ''
+ articles = ''
usageRe = re.compile(
r'^(?P<lang>([\w-]+))\.(?P<project>([\w]+))\.org:(?P<articles>\s(.*))')
matches = usageRe.search(use)
@@ -454,7 +454,6 @@
@param args: command line arguments
@type args: str
"""
- generator = None
onlyFilter = False
onlyUncat = False

diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py
index eca4a5d..5968112 100755
--- a/scripts/imagetransfer.py
+++ b/scripts/imagetransfer.py
@@ -29,7 +29,7 @@
"""
#
# (C) Andre Engels, 2004
-# (C) Pywikibot team, 2004-2018
+# (C) Pywikibot team, 2004-2019
#
# Distributed under the terms of the MIT license.
#
@@ -167,11 +167,9 @@
if sourceSite.family == self.targetSite.family:
description += '\n\n{0}'.format(sourceImagePage)
except pywikibot.NoPage:
- description = ''
pywikibot.output(
'Image does not exist or description page is empty.')
except pywikibot.IsRedirectPage:
- description = ''
pywikibot.output('Image description page is redirect.')
else:
bot = UploadRobot(url=url, description=description,
@@ -291,8 +289,6 @@
@param args: command line arguments
@type args: str
"""
- gen = None
-
interwiki = False
keep_name = False
targetLang = None
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 3a1d7f0..ff40822 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1176,7 +1176,6 @@
# Only once!
self.hintsAsked = True
if self.conf.untranslated:
- newhint = None
t = self.conf.showtextlink
if t:
pywikibot.output(self.originPage.get()[:t])
diff --git a/scripts/listpages.py b/scripts/listpages.py
index 3328ff5..f13a861 100755
--- a/scripts/listpages.py
+++ b/scripts/listpages.py
@@ -177,7 +177,6 @@
@param args: command line arguments
@type args: str
"""
- gen = None
notitle = False
fmt = '1'
outputlang = None
diff --git a/scripts/match_images.py b/scripts/match_images.py
index 442e944..08981a7 100755
--- a/scripts/match_images.py
+++ b/scripts/match_images.py
@@ -24,7 +24,7 @@
"""
#
# (c) Multichill, 2009
-# (c) Pywikibot team, 2009-2018
+# (c) Pywikibot team, 2009-2019
#
# Distributed under the terms of the MIT license.
#
@@ -54,7 +54,6 @@
imageB = get_image_from_image_page(imagePageB)

(imA_width, imA_height) = imageA.size
- (imB_width, imB_height) = imageB.size

imageB = imageB.resize((imA_width, imA_height))

@@ -113,7 +112,6 @@

def get_image_from_image_page(imagePage):
"""Get the image object to work based on an imagePage object."""
- imageBuffer = None
imageURL = imagePage.fileUrl()
imageURLopener = http.fetch(imageURL)
imageBuffer = io.BytesIO(imageURLopener.raw[:])
@@ -147,8 +145,6 @@
images = []
other_family = ''
other_lang = ''
- imagePageA = None
- imagePageB = None

# Read commandline parameters.
local_args = pywikibot.handle_args(args)
diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py
index 1bf112f..1b4b25f 100755
--- a/scripts/nowcommons.py
+++ b/scripts/nowcommons.py
@@ -229,7 +229,6 @@

def findFilenameOnCommons(self, localImagePage):
"""Find filename on Commons."""
- filenameOnCommons = None
for templateName, params in localImagePage.templatesWithParams():
if templateName in self.nc_templates:
if params == []:
diff --git a/scripts/patrol.py b/scripts/patrol.py
index 200a7d4..9e35343 100755
--- a/scripts/patrol.py
+++ b/scripts/patrol.py
@@ -425,7 +425,6 @@
# that are also used by other scripts and that determine on which pages
# to work on.
usercontribs = None
- gen = None
recentchanges = False
newpages = False
repeat = False
diff --git a/scripts/protect.py b/scripts/protect.py
index 986e35d..7623166 100755
--- a/scripts/protect.py
+++ b/scripts/protect.py
@@ -163,7 +163,6 @@
"""
options = {}
message_properties = {}
- generator = None
protections = {}
default_level = 'sysop'
default_summaries = {
diff --git a/scripts/redirect.py b/scripts/redirect.py
index a5cc986..54a2985 100755
--- a/scripts/redirect.py
+++ b/scripts/redirect.py
@@ -203,7 +203,6 @@
def get_redirect_pages_via_api(self):
"""Yield Pages that are redirects."""
for ns in self.namespaces:
- done = False
gen = self.site.allpages(start=self.api_start,
namespace=ns,
filterredir=True)
diff --git a/scripts/reflinks.py b/scripts/reflinks.py
index 1963ff5..a2616ce 100755
--- a/scripts/reflinks.py
+++ b/scripts/reflinks.py
@@ -540,7 +540,6 @@
continue

ref = RefLink(link, match.group('name'), site=self.site)
- f = None

try:
f = comms.http.fetch(
diff --git a/scripts/touch.py b/scripts/touch.py
index e949bea..95cad9c 100755
--- a/scripts/touch.py
+++ b/scripts/touch.py
@@ -100,7 +100,6 @@
@param args: command line arguments
@type args: str
"""
- gen = None
options = {}

# Process global args and prepare generator args parser
diff --git a/tests/djvu_tests.py b/tests/djvu_tests.py
index b2e375f..a00374d 100644
--- a/tests/djvu_tests.py
+++ b/tests/djvu_tests.py
@@ -41,7 +41,7 @@
dp = subprocess.Popen(['djvudump'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
- stdoutdata, stderrdata = dp.communicate()
+ dp.communicate()
except OSError:
raise unittest.SkipTest('djvulibre library not installed.')

diff --git a/tests/flow_thanks_tests.py b/tests/flow_thanks_tests.py
index 1a37feb6..c0b2f4e 100644
--- a/tests/flow_thanks_tests.py
+++ b/tests/flow_thanks_tests.py
@@ -33,7 +33,6 @@

def test_thank_post(self):
"""Test thanks for Flow posts."""
- found_log = False
site = self.get_site()
topic = Topic(site, self._topic_title)
for post in reversed(topic.replies()):
diff --git a/tests/generate_family_files_tests.py b/tests/generate_family_files_tests.py
index 210f254..1c54bb8 100644
--- a/tests/generate_family_files_tests.py
+++ b/tests/generate_family_files_tests.py
@@ -23,7 +23,7 @@
def getapis(self):
"""Only load additional two additional wikis randomly."""
save = self.langs
- self.langs = [choice(save) for i in range(2)]
+ self.langs = [choice(save), choice(save)]
self.prefixes = [item['prefix'] for item in self.langs]
super(FamilyTestGenerator, self).getapis()
self.langs = save
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 91204f2..30e1887 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -2111,7 +2111,7 @@

try:
# no such rcid, revid or too old revid
- result = list(mysite.patrol(**params))
+ list(mysite.patrol(**params))
except api.APIError as error:
if error.code == 'badtoken':
self.skipTest(error)
diff --git a/tests/thanks_tests.py b/tests/thanks_tests.py
index 051a1a0..7612cb3 100644
--- a/tests/thanks_tests.py
+++ b/tests/thanks_tests.py
@@ -32,7 +32,6 @@
there must make edits made before reruns of this test.
Please see https://phabricator.wikimedia.org/T137836.
"""
- found_log = False
site = self.get_site()
data = site.recentchanges(total=20)
for rev in data:
diff --git a/tests/xmlreader_tests.py b/tests/xmlreader_tests.py
index b3c6198..190c810 100644
--- a/tests/xmlreader_tests.py
+++ b/tests/xmlreader_tests.py
@@ -54,7 +54,7 @@

def test_XmlDumpRedirect(self):
"""Test XmlDump correctly parsing whether a page is a redirect."""
- pages = self._get_entries('article-pyrus.xml', allrevisions=True)
+ self._get_entries('article-pyrus.xml', allrevisions=True)
pages = [r for r in
xmlreader.XmlDump(
join_xml_data_path('article-pyrus.xml')).parse()]
@@ -75,7 +75,7 @@
previous = self._compare(previous, '.xml', all_revisions)
previous = self._compare(previous, '-utf16.xml', all_revisions)
previous = self._compare(previous, '.xml.bz2', all_revisions)
- previous = self._compare(previous, '-utf16.xml.bz2', all_revisions)
+ self._compare(previous, '-utf16.xml.bz2', all_revisions)

def test_XmlDump_compare_all(self):
"""Compare the different XML files using all revisions."""

To view, visit change 517810. To unsubscribe, or for help writing mail filters, visit settings.

Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I6931b0abd6e635412c26dbb947a7b78f9d973c05
Gerrit-Change-Number: 517810
Gerrit-PatchSet: 4
Gerrit-Owner: Dalba <dalba.wiki@gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki@gmail.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot (75)
Gerrit-CC: Mpaa <mpaa.wiki@gmail.com>