Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/773991 )
Change subject: [bugfix] Do not iterate over sys.modules
......................................................................
[bugfix] Do not iterate over sys.modules
See:
- https://docs.python.org/3/library/sys.html#sys.modules
- https://githubhot.com/repo/flexmock/flexmock/issues/123
Bug: T304785
Change-Id: I10c35b0ec9f63a1399be2de5cc446c305d7da39c
---
M pywikibot/bot.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Ammarpad: Looks good to me, but someone else must approve
Xqt: Verified; Looks good to me, approved
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index e9106ba..5d2c9f1 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -507,7 +507,7 @@
# imported modules
log('MODULES:')
- for module in sys.modules.values():
+ for module in sys.modules.copy().values():
filename = version.get_module_filename(module)
if not filename:
continue
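Background on the one-line fix: the linked Python docs note that sys.modules can change while it is being iterated (imports may be triggered from another thread or lazily inside the loop body), and recommend working on a copy; iterating the live dict can raise "RuntimeError: dictionary changed size during iteration". A minimal standalone sketch of the pattern adopted above — the helper name log_module_files is hypothetical, the real loop lives in pywikibot/bot.py:

import sys

def log_module_files() -> None:
    """Print the source file behind every imported module.

    Iterating a snapshot (sys.modules.copy()) stays safe even if other
    threads import modules concurrently; iterating the live dict can
    raise 'RuntimeError: dictionary changed size during iteration'.
    """
    for module in sys.modules.copy().values():
        filename = getattr(module, '__file__', None)
        if filename:
            print(filename)

log_module_files()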
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/773991
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I10c35b0ec9f63a1399be2de5cc446c305d7da39c
Gerrit-Change-Number: 773991
Gerrit-PatchSet: 4
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: Ammarpad <ammarpad@yahoo.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/773944 )
Change subject: [IMPR] Decrease nested flow statements in CheckImagesBot.check_image_duplicated
......................................................................
[IMPR] Decrease nested flow statements in CheckImagesBot.check_image_duplicated
Change-Id: I99caf80b91e1a7355e833e33fc0d4b6b0237564f
---
M scripts/checkimages.py
1 file changed, 121 insertions(+), 125 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 4ec829b..26216e5 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -904,147 +904,143 @@
hash_found = image_page.latest_file_info.sha1
duplicates = list(self.site.allimages(sha1=hash_found))
- if not duplicates:
- return False # Image deleted, no hash found. Skip the image.
+ # If empty, image is deleted, no hash found. Skip the image.
+ # Otherwise ok, let's continue the checking phase
+ if len(duplicates) <= 1:
+ return bool(duplicates)
- if len(duplicates) > 1:
- xdict = {'en':
- '%(name)s has {{PLURAL:count'
- '|a duplicate! Reporting it'
- '|%(count)s duplicates! Reporting them}}...'}
- pywikibot.output(i18n.translate('en', xdict,
- {'name': self.image_name,
- 'count': len(duplicates) - 1}))
- if dup_text and dup_regex:
- time_image_list = []
+ xdict = {'en':
+ '%(name)s has {{PLURAL:count'
+ '|a duplicate! Reporting it'
+ '|%(count)s duplicates! Reporting them}}...'}
+ pywikibot.output(i18n.translate('en', xdict,
+ {'name': self.image_name,
+ 'count': len(duplicates) - 1}))
+ if dup_text and dup_regex:
+ time_image_list = []
- for dup_page in duplicates:
- if (dup_page.title(as_url=True) != self.image.title(
- as_url=True)
- or self.timestamp is None):
- try:
- self.timestamp = (
- dup_page.latest_file_info.timestamp)
- except PageRelatedError:
- continue
- data = self.timestamp.timetuple()
- data_seconds = time.mktime(data)
- time_image_list.append([data_seconds, dup_page])
- older_image_page = self.important_image(time_image_list)
- older_page_text = older_image_page.text
- # And if the images are more than two?
- string = ''
- images_to_tag_list = []
-
- for dup_page in duplicates:
- if dup_page == older_image_page:
- # the most used or oldest image
- # not report also this as duplicate
- continue
+ for dup_page in duplicates:
+ if dup_page.title(as_url=True) != self.image.title(
+ as_url=True) or self.timestamp is None:
try:
- dup_page_text = dup_page.text
- except NoPageError:
+ self.timestamp = (dup_page.latest_file_info.timestamp)
+ except PageRelatedError:
continue
+ data = self.timestamp.timetuple()
+ data_seconds = time.mktime(data)
+ time_image_list.append([data_seconds, dup_page])
+ older_image_page = self.important_image(time_image_list)
+ older_page_text = older_image_page.text
+ # And if the images are more than two?
+ string = ''
+ images_to_tag_list = []
- if not (re.findall(dup_regex, dup_page_text)
- or re.findall(dup_regex, older_page_text)):
- pywikibot.output(
- '{} is a duplicate and has to be tagged...'
- .format(dup_page))
- images_to_tag_list.append(dup_page.title())
- string += '* {}\n'.format(
- dup_page.title(as_link=True, textlink=True))
- else:
- pywikibot.output(
- "Already put the dupe-template in the files's page"
- " or in the dupe's page. Skip.")
- return False # Ok - Let's continue the checking phase
+ for dup_page in duplicates:
+ if dup_page == older_image_page:
+ # the most used or oldest image
+ # not report also this as duplicate
+ continue
+ try:
+ dup_page_text = dup_page.text
+ except NoPageError:
+ continue
- # true if the image are not to be tagged as dupes
- only_report = False
-
- # put only one image or the whole list according to the request
- if '__images__' in dup_text:
- text_for_the_report = dup_text.replace(
- '__images__',
- '\n{}* {}\n'.format(
- string,
- older_image_page.title(
- as_link=True, textlink=True)))
+ if not (re.findall(dup_regex, dup_page_text)
+ or re.findall(dup_regex, older_page_text)):
+ pywikibot.output(
+ '{} is a duplicate and has to be tagged...'
+ .format(dup_page))
+ images_to_tag_list.append(dup_page.title())
+ string += '* {}\n'.format(dup_page.title(as_link=True,
+ textlink=True))
else:
- text_for_the_report = dup_text.replace(
- '__image__',
- older_image_page.title(as_link=True, textlink=True))
+ pywikibot.output(
+ "Already put the dupe-template in the files's page"
+ " or in the dupe's page. Skip.")
+ return False # Ok - Let's continue the checking phase
- # Two iteration: report the "problem" to the user only once
- # (the last)
- if len(images_to_tag_list) > 1:
- for image_to_tag in images_to_tag_list[:-1]:
- fp = pywikibot.FilePage(self.site, image_to_tag)
- already_reported_in_past = fp.revision_count(self.bots)
- # if you want only one edit, the edit found should be
- # more than 0 -> num - 1
- if already_reported_in_past > duplicates_rollback - 1:
- only_report = True
- break
- # Delete the image in the list where we're write on
- image = self.image_namespace + image_to_tag
- text_for_the_report = re.sub(
- r'\n\*\[\[:{}\]\]'.format(re.escape(image)),
- '', text_for_the_report)
- self.report(text_for_the_report, image_to_tag,
- comm_image=dup_comment_image, unver=True)
+ # true if the image are not to be tagged as dupes
+ only_report = False
- if images_to_tag_list and not only_report:
- fp = pywikibot.FilePage(self.site, images_to_tag_list[-1])
+ # put only one image or the whole list according to the request
+ if '__images__' in dup_text:
+ text_for_the_report = dup_text.replace(
+ '__images__',
+ '\n{}* {}\n'.format(string,
+ older_image_page.title(as_link=True,
+ textlink=True)))
+ else:
+ text_for_the_report = dup_text.replace(
+ '__image__',
+ older_image_page.title(as_link=True, textlink=True))
+
+ # Two iteration: report the "problem" to the user only once
+ # (the last)
+ if len(images_to_tag_list) > 1:
+ for image_to_tag in images_to_tag_list[:-1]:
+ fp = pywikibot.FilePage(self.site, image_to_tag)
already_reported_in_past = fp.revision_count(self.bots)
- image_title = re.escape(self.image.title(as_url=True))
- from_regex = (r'\n\*\[\[:{}{}\]\]'
- .format(self.image_namespace, image_title))
- # Delete the image in the list where we're write on
- text_for_the_report = re.sub(from_regex, '',
- text_for_the_report)
- # if you want only one edit, the edit found should be more
- # than 0 -> num - 1
- if already_reported_in_past > duplicates_rollback - 1 or \
- not dup_talk_text:
+ # if you want only one edit, the edit found should be
+ # more than 0 -> num - 1
+ if already_reported_in_past > duplicates_rollback - 1:
only_report = True
- else:
- self.report(
- text_for_the_report, images_to_tag_list[-1],
- dup_talk_text
- % (older_image_page.title(with_ns=True),
- string),
- dup_talk_head, comm_talk=dup_comment_talk,
- comm_image=dup_comment_image, unver=True)
+ break
+ # Delete the image in the list where we're write on
+ image = self.image_namespace + image_to_tag
+ text_for_the_report = re.sub(
+ r'\n\*\[\[:{}\]\]'.format(re.escape(image)),
+ '', text_for_the_report)
+ self.report(text_for_the_report, image_to_tag,
+ comm_image=dup_comment_image, unver=True)
- if self.duplicates_report or only_report:
- if only_report:
- repme = ((self.list_entry + 'has the following duplicates '
- "('''forced mode'''):")
- % self.image.title(as_url=True))
+ if images_to_tag_list and not only_report:
+ fp = pywikibot.FilePage(self.site, images_to_tag_list[-1])
+ already_reported_in_past = fp.revision_count(self.bots)
+ image_title = re.escape(self.image.title(as_url=True))
+ from_regex = (r'\n\*\[\[:{}{}\]\]'
+ .format(self.image_namespace, image_title))
+ # Delete the image in the list where we're write on
+ text_for_the_report = re.sub(from_regex, '',
+ text_for_the_report)
+ # if you want only one edit, the edit found should be more
+ # than 0 -> num - 1
+ if already_reported_in_past > duplicates_rollback - 1 \
+ or not dup_talk_text:
+ only_report = True
else:
- repme = (
- (self.list_entry + 'has the following duplicates:')
- % self.image.title(as_url=True))
+ self.report(
+ text_for_the_report, images_to_tag_list[-1],
+ dup_talk_text % (older_image_page.title(with_ns=True),
+ string),
+ dup_talk_head, comm_talk=dup_comment_talk,
+ comm_image=dup_comment_image, unver=True)
- for dup_page in duplicates:
- if (dup_page.title(as_url=True)
- == self.image.title(as_url=True)):
- # the image itself, not report also this as duplicate
- continue
- repme += '\n** [[:{}{}]]'.format(
- self.image_namespace, dup_page.title(as_url=True))
+ if self.duplicates_report or only_report:
+ if only_report:
+ repme = ((self.list_entry + 'has the following duplicates '
+ "('''forced mode'''):")
+ % self.image.title(as_url=True))
+ else:
+ repme = ((self.list_entry + 'has the following duplicates:')
+ % self.image.title(as_url=True))
- result = self.report_image(self.image_name, self.rep_page,
- self.com, repme, addings=False)
- if not result:
- return True # If Errors, exit (but continue the check)
+ for dup_page in duplicates:
+ if dup_page.title(as_url=True) \
+ == self.image.title(as_url=True):
+ # the image itself, not report also this as duplicate
+ continue
+ repme += '\n** [[:{}{}]]'.format(self.image_namespace,
+ dup_page.title(as_url=True))
- if older_image_page.title() != self.image_name:
- # The image is a duplicate, it will be deleted. So skip the
- # check-part, useless
- return False
+ result = self.report_image(self.image_name, self.rep_page,
+ self.com, repme, addings=False)
+ if not result:
+ return True # If Errors, exit (but continue the check)
+
+ if older_image_page.title() != self.image_name:
+ # The image is a duplicate, it will be deleted. So skip the
+ # check-part, useless
+ return False
return True # Ok - No problem. Let's continue the checking phase
def report_image(self, image_to_report, rep_page=None, com=None,
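The shape of the refactor, as a standalone sketch: the old code wrapped the entire reporting phase in "if len(duplicates) > 1:", nesting everything one level deep; the new code folds the two trivial cases into a single early return, so the tagging and reporting logic runs one indentation level shallower. Apart from site.allimages(sha1=...) and latest_file_info.sha1, which the diff itself uses, the names below are placeholders:

def check_image_duplicated_sketch(site, image_page):
    """Guard-clause skeleton of CheckImagesBot.check_image_duplicated."""
    hash_found = image_page.latest_file_info.sha1
    duplicates = list(site.allimages(sha1=hash_found))

    # One early return covers both trivial cases:
    #   []  -> the file was deleted, no hash found: skip it (False)
    #   [x] -> only the image itself, nothing to report (True)
    if len(duplicates) <= 1:
        return bool(duplicates)

    # ... tagging/reporting phase, no longer nested inside a branch ...
    return True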
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/773944
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I99caf80b91e1a7355e833e33fc0d4b6b0237564f
Gerrit-Change-Number: 773944
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki@aol.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/773920 )
Change subject: [cleanup] Deprecate win32_unicode
......................................................................
[cleanup] Deprecate win32_unicode
Change-Id: I6dcbe9d216e088008799d7f87ef67f9f71eb6f24
---
M ROADMAP.rst
M pywikibot/userinterfaces/win32_unicode.py
2 files changed, 6 insertions(+), 2 deletions(-)
Approvals:
Xqt: Verified; Looks good to me, approved
diff --git a/ROADMAP.rst b/ROADMAP.rst
index ca68e8c..d19c3bf 100644
--- a/ROADMAP.rst
+++ b/ROADMAP.rst
@@ -7,6 +7,7 @@
Deprecations
^^^^^^^^^^^^
+* 7.1.0: win32_unicode.py will be removed with Pywikibot 8
* 7.1.0: Unused `get_redirect` parameter of Page.getOldVersion() will be removed
* 7.1.0: APISite._simple_request() will be removed in favour of APISite.simple_request()
* 7.0.0: The i18n identifier 'cosmetic_changes-append' will be removed in favour of 'pywikibot-cosmetic-changes'
@@ -30,4 +31,3 @@
* 5.5.0: APISite.redirectRegex() is deprecated in favour of APISite.redirect_regex()
* 4.0.0: Revision.parent_id is deprecated in favour of Revision.parentid
* 4.0.0: Revision.content_model is deprecated in favour of Revision.contentmodel
-
diff --git a/pywikibot/userinterfaces/win32_unicode.py b/pywikibot/userinterfaces/win32_unicode.py
index 75008a4..a135878 100644
--- a/pywikibot/userinterfaces/win32_unicode.py
+++ b/pywikibot/userinterfaces/win32_unicode.py
@@ -1,4 +1,8 @@
-"""Stdout, stderr and argv support for unicode."""
+"""Unicode support for stdout, stderr and argv with Python 3.5.
+
+.. deprecated:: 7.1
+ will be removed with Pywikibot 8 when Python 3.5 support is dropped.
+"""
#
# (C) Pywikibot team, 2012-2022
#
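The ".. deprecated::" marker added to the docstring is the standard Sphinx directive; it renders as a versioned deprecation admonition in the generated API docs. A generic sketch of the pattern (old_helper and new_helper are hypothetical names, not Pywikibot API):

def old_helper():
    """Do the work the pre-7.1 way.

    .. deprecated:: 7.1
       Will be removed with Pywikibot 8; use :func:`new_helper` instead.
    """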
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/773920
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I6dcbe9d216e088008799d7f87ef67f9f71eb6f24
Gerrit-Change-Number: 773920
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged