jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/770133 )
Change subject: [IMPR] Make _get_parsed_page a public method ......................................................................
[IMPR] Make _get_parsed_page a public method
- make _get_parsed_page a public method - no deprecation warning is thrown due to the private method status - add force parameter to clear the cache - raise KeyError instead of AssertionError in APISite.get_parsed_page - use get_parsed_page with force parameter in ProofreadPage
Change-Id: Icf3b5f22d3f98fe811b5e782a84301bc77894649
---
M pywikibot/page/_basepage.py
M pywikibot/proofreadpage.py
M pywikibot/site/_apisite.py
3 files changed, 27 insertions(+), 17 deletions(-)
Approvals:
  Matěj Suchánek: Looks good to me, but someone else must approve
  Xqt: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/pywikibot/page/_basepage.py b/pywikibot/page/_basepage.py index 05bc27b..d4a47e4 100644 --- a/pywikibot/page/_basepage.py +++ b/pywikibot/page/_basepage.py @@ -618,10 +618,20 @@ self.site.loadpageinfo(self, preload=True) return self._preloadedtext
- def _get_parsed_page(self): - """Retrieve parsed text (via action=parse) and cache it.""" - # Get (cached) parsed text. - if not hasattr(self, '_parsed_text'): + def get_parsed_page(self, force: bool = False) -> str: + """Retrieve parsed text (via action=parse) and cache it. + + .. versionchanged:: 7.1 + `force` parameter was added; + `_get_parsed_page` becomes a public method + + :param force: force updating from the live site + + .. seealso:: + :meth:`APISite.get_parsed_page() + <pywikibot.site._apisite.APISite.get_parsed_page>` + """ + if not hasattr(self, '_parsed_text') or force: self._parsed_text = self.site.get_parsed_page(self) return self._parsed_text
diff --git a/pywikibot/proofreadpage.py b/pywikibot/proofreadpage.py index 895566a..4c03176 100644 --- a/pywikibot/proofreadpage.py +++ b/pywikibot/proofreadpage.py @@ -906,11 +906,7 @@ self._pages_from_label = {} # type: PagesFromLabelType self._labels_from_page_number = {} # type: Dict[int, str] self._labels_from_page = {} # type: Dict[pywikibot.page.Page, str] - if hasattr(self, '_parsed_text'): - del self._parsed_text - - self._parsed_text = self._get_parsed_page() - self._soup = _bs4_soup(self._parsed_text) # type: ignore + self._soup = _bs4_soup(self.get_parsed_page(True)) # type: ignore # Do not search for "new" here, to avoid to skip purging if links # to non-existing pages are present. attrs = {'class': re.compile('prp-pagequality')} @@ -932,9 +928,7 @@ attrs = {'class': re.compile('prp-pagequality|new')} if not found: self.purge() - del self._parsed_text - self._parsed_text = self._get_parsed_page() - self._soup = _bs4_soup(self._parsed_text) # type: ignore + self._soup = _bs4_soup(self.get_parsed_page(True)) # type: ignore if not self._soup.find_all('a', attrs=attrs): raise ValueError( 'Missing class="qualityN prp-pagequality-N" or ' diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py index 118fed7..2db07ad 100644 --- a/pywikibot/site/_apisite.py +++ b/pywikibot/site/_apisite.py @@ -1441,16 +1441,22 @@ return user_tokens
# TODO: expand support to other parameters of action=parse? - def get_parsed_page(self, page): + def get_parsed_page(self, page: 'pywikibot.Page') -> str: """Retrieve parsed text of the page using action=parse.
- :see: https://www.mediawiki.org/wiki/API:Parse + .. versionchanged:: 7.1 + raises KeyError instead of AssertionError + + .. seealso:: + - https://www.mediawiki.org/wiki/API:Parse + - :meth:`pywikibot.page.BasePage.get_parsed_page`. """ req = self.simple_request(action='parse', page=page) data = req.submit() - assert 'parse' in data, "API parse response lacks 'parse' key" - assert 'text' in data['parse'], "API parse response lacks 'text' key" - parsed_text = data['parse']['text']['*'] + try: + parsed_text = data['parse']['text']['*'] + except KeyError as e: + raise KeyError('API parse response lacks {} key'.format(e)) return parsed_text
def getcategoryinfo(self, category) -> None:
pywikibot-commits@lists.wikimedia.org