John Vandenberg has uploaded a new change for review. https://gerrit.wikimedia.org/r/216523
Change subject: APISite.editpage should not reload the Page
......................................................................

APISite.editpage should not reload the Page

APISite.editpage reloads the revisions after a successful save in order
to ensure that the Page is up to date, as the actual text of the page
may differ from the submitted text due to merges performed by the server
to resolve edit conflicts.

APISite and api achieved this by modifying private variables of BasePage.
By exposing a public setter and deleter for latest_revision_id, APISite
and api can instead use this property to inform BasePage when its caches
are invalid, so that it can clear them and subsequent fetches retrieve
the actual text of the saved revision when it is needed.

Normalise WikibasePage to use the attribute _revid instead of lastrevid,
deprecating the latter.

Also add the missing text deleter to ProofreadPage.

Change-Id: I55a49d115a8f207216f24f9acbfa64d87da88578
---
M pywikibot/data/api.py
M pywikibot/page.py
M pywikibot/proofreadpage.py
M pywikibot/site.py
M tests/aspects.py
M tests/pagegenerators_tests.py
M tests/site_tests.py
7 files changed, 130 insertions(+), 46 deletions(-)


git pull ssh://gerrit.wikimedia.org:29418/pywikibot/core refs/changes/23/216523/1
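Below is a minimal sketch of the setter/deleter cache-invalidation pattern
this change applies, for reviewers who want the idea in isolation. It is
illustrative only: CachedPage and _fetch_revid are invented stand-ins, not
code from this patch.

    class CachedPage(object):

        """Sketch of a page whose revision id guards dependent caches."""

        @property
        def latest_revision_id(self):
            # Load lazily on first access.
            if not hasattr(self, '_revid'):
                self._revid = self._fetch_revid()  # hypothetical loader
            return self._revid

        @latest_revision_id.deleter
        def latest_revision_id(self):
            # Invalidate: drop everything tied to the previous revision,
            # so the next access fetches fresh data.
            for attr in ('_revid', '_text'):
                if hasattr(self, attr):
                    delattr(self, attr)

        @latest_revision_id.setter
        def latest_revision_id(self, value):
            # Clear stale caches first, then record the new revision id.
            del self.latest_revision_id
            self._revid = value

        def _fetch_revid(self):
            return 42  # stand-in for an API request

    # After a save, a caller assigns the new id, which clears stale caches:
    #     page.latest_revision_id = new_revid
    # To force a reload on the next access instead:
    #     del page.latest_revision_id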
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 3e82636..88237fd 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -2617,9 +2617,8 @@
             page._revisions[revision.revid] = revision
 
         if 'lastrevid' in pagedict:
-            page._revid = pagedict['lastrevid']
-            if page._revid in page._revisions:
-                page._text = page._revisions[page._revid].text
+            page.latest_revision_id = pagedict['lastrevid']
+            del page.text
 
         if 'imageinfo' in pagedict:
             assert(isinstance(page, pywikibot.FilePage))
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 16a69d0..ae41402 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -346,13 +346,7 @@
         """
         if force:
-            # When forcing, we retry the page no matter what:
-            # * Old exceptions do not apply any more
-            # * Deleting _revid to force reload
-            # * Deleting _redirtarget, that info is now obsolete.
-            for attr in ['_redirtarget', '_getexception', '_revid']:
-                if hasattr(self, attr):
-                    delattr(self, attr)
+            del self.latest_revision_id
         try:
             self._getInternals(sysop)
         except pywikibot.IsRedirectPage:
@@ -430,6 +424,34 @@
         self.revisions(self)
         return self._revid
 
+    @latest_revision_id.deleter
+    def latest_revision_id(self):
+        """Remove the latest revision id set for this Page.
+
+        All internal cached values specifically for the latest revision
+        of this page are cleared.
+
+        The following cached values are not cleared:
+        - text property
+        - page properties, and page coordinates
+        - lastNonBotUser
+        - isDisambig and isCategoryRedirect status
+        - langlinks, templates and deleted revisions
+        """
+        # The cached data no longer applies to the latest revision:
+        # * Old exceptions do not apply any more
+        # * Deleting _revid to force reload
+        # * Deleting _redirtarget, that info is now obsolete.
+        for attr in ['_redirtarget', '_getexception', '_revid']:
+            if hasattr(self, attr):
+                delattr(self, attr)
+
+    @latest_revision_id.setter
+    def latest_revision_id(self, value):
+        """Set the latest revision for this Page."""
+        del self.latest_revision_id
+        self._revid = value
+
     @deprecated('latest_revision_id')
     def latestRevision(self):
         """Return the current revision id for this page."""
@@ -479,6 +501,8 @@
         """Delete the current (edited) wikitext."""
         if hasattr(self, "_text"):
             del self._text
+        if hasattr(self, '_expanded_text'):
+            del self._expanded_text
 
     def preloadText(self):
         """The text returned by EditFormPreloadText.
@@ -3183,7 +3207,7 @@
             self._content = data[item_index]
 
         if 'lastrevid' in self._content:
-            self.lastrevid = self._content['lastrevid']
+            self.latest_revision_id = self._content['lastrevid']
         else:
             if lazy_loading_id:
                 p = Page(self._site, self._title)
@@ -3287,15 +3311,38 @@
         return self.id
 
     @property
+    @deprecated('latest_revision_id')
+    def lastrevid(self):
+        return self.latest_revision_id
+
+    @lastrevid.setter
+    @deprecated('latest_revision_id')
+    def lastrevid(self, value):
+        self.latest_revision_id = value
+
+    @lastrevid.deleter
+    @deprecated('latest_revision_id')
+    def lastrevid(self):
+        del self.latest_revision_id
+
+    @property
     def latest_revision_id(self):
         """
         Get the revision identifier for the most recent revision of the entity.
 
         @return: long
         """
-        if not hasattr(self, 'lastrevid'):
+        if not hasattr(self, '_revid'):
             self.get()
-        return self.lastrevid
+        return self._revid
+
+    @latest_revision_id.setter
+    def latest_revision_id(self, value):
+        self._revid = value
+
+    @latest_revision_id.deleter
+    def latest_revision_id(self):
+        del self._revid
 
     @staticmethod
     def _normalizeLanguages(data):
@@ -3366,8 +3413,8 @@
         @param data: Data to be saved
         @type data: dict, or None to save the current content of the entity.
         """
-        if hasattr(self, 'lastrevid'):
-            baserevid = self.lastrevid
+        if hasattr(self, '_revid'):
+            baserevid = self.latest_revision_id
         else:
             baserevid = None
 
@@ -3378,7 +3425,7 @@
         updates = self.repo.editEntity(self._defined_by(singular=True), data,
                                        baserevid=baserevid, **kwargs)
-        self.lastrevid = updates['entity']['lastrevid']
+        self.latest_revision_id = updates['entity']['lastrevid']
 
         lazy_loading_id = not hasattr(self, 'id') and hasattr(self, '_site')
         if lazy_loading_id or self.id == '-1':
@@ -4195,7 +4242,7 @@
         source = defaultdict(list)
         for claim in claims:
             claim.hash = data['reference']['hash']
-            self.on_item.lastrevid = data['pageinfo']['lastrevid']
+            self.on_item.latest_revision_id = data['pageinfo']['lastrevid']
             source[claim.getID()].append(claim)
         self.sources.append(source)
 
@@ -4229,7 +4276,7 @@
         """
         data = self.repo.editQualifier(self, qualifier, **kwargs)
         qualifier.isQualifier = True
-        self.on_item.lastrevid = data['pageinfo']['lastrevid']
+        self.on_item.latest_revision_id = data['pageinfo']['lastrevid']
         if qualifier.getID() in self.qualifiers:
             self.qualifiers[qualifier.getID()].append(qualifier)
         else:
diff --git a/pywikibot/proofreadpage.py b/pywikibot/proofreadpage.py
index 4b0f1d3..2acb362 100644
--- a/pywikibot/proofreadpage.py
+++ b/pywikibot/proofreadpage.py
@@ -235,6 +235,11 @@
         if not self._text:
             self._create_empty_page()
 
+    @text.deleter
+    def text(self):
+        if hasattr(self, '_text'):
+            del self._text
+
     def _decompose_page(self):
         """Split Proofread Page text in header, body and footer.
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 6cddb9f..95960b6 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -4295,10 +4295,10 @@
                 pywikibot.log(u"Page [[%s]] saved without any changes."
                               % page.title())
                 return True
-            page._revid = result["edit"]["newrevid"]
+            page.latest_revision_id = result["edit"]["newrevid"]
             # see https://www.mediawiki.org/wiki/API:Wikimania_2006_API_discussion#Notes
             # not safe to assume that saved text is the same as sent
-            self.loadrevisions(page, getText=True)
+            del page.text
             return True
         elif result["edit"]["result"] == "Failure":
             if "captcha" in result["edit"]:
@@ -5767,7 +5767,7 @@
             item.claims[claim.getID()].append(claim)
         else:
             item.claims[claim.getID()] = [claim]
-        item.lastrevid = data['pageinfo']['lastrevid']
+        item.latest_revision_id = data['pageinfo']['lastrevid']
 
     @must_be(group='user')
     def changeClaimTarget(self, claim, snaktype='value', bot=True, **kwargs):
@@ -5796,7 +5796,7 @@
         if snaktype == 'value':
             params['value'] = json.dumps(claim._formatValue())
 
-        params['baserevid'] = claim.on_item.lastrevid
+        params['baserevid'] = claim.on_item.latest_revision_id
         req = api.Request(site=self, **params)
         data = req.submit()
         return data
@@ -5817,7 +5817,7 @@
         params = {'action': 'wbsetclaim',
                   'claim': json.dumps(claim.toJSON()),
                   'token': self.tokens['edit'],
-                  'baserevid': claim.on_item.lastrevid,
+                  'baserevid': claim.on_item.latest_revision_id,
                   }
         if 'bot' not in kwargs or kwargs['bot']:
             params['bot'] = True
@@ -5846,7 +5846,7 @@
                       statement=claim.snak,
                       )
         if claim.on_item:  # I think this wouldn't be false, but let's be safe
-            params['baserevid'] = claim.on_item.lastrevid
+            params['baserevid'] = claim.on_item.latest_revision_id
        if bot:
             params['bot'] = 1
         params['token'] = self.tokens['edit']
@@ -5898,7 +5898,7 @@
                       claim=claim.snak,
                       )
         if claim.on_item:  # I think this wouldn't be false, but let's be safe
-            params['baserevid'] = claim.on_item.lastrevid
+            params['baserevid'] = claim.on_item.latest_revision_id
         if bot:
             params['bot'] = 1
         if (not new and
diff --git a/tests/aspects.py b/tests/aspects.py
index 73c89ca..5b67113 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -895,12 +895,12 @@
         # Create an instance method named the same as the class method
         self.get_site = lambda name=None: self.__class__.get_site(name)
 
-    def get_mainpage(self, site=None):
+    def get_mainpage(self, site=None, force=False):
         """Create a Page object for the site's main page."""
         if not site:
             site = self.get_site()
 
-        if hasattr(self, '_mainpage'):
+        if hasattr(self, '_mainpage') and not force:
             # For multi-site test classes, or site is specified as a param,
             # the cached mainpage object may not be the desired site.
             if self._mainpage.site == site:
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index 967b95e..d845b84 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -351,8 +351,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
         self.assertEqual(len(links), count)
@@ -366,8 +366,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
         self.assertEqual(len(links), count)
diff --git a/tests/site_tests.py b/tests/site_tests.py
index d9bd295..9436cae 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -1667,19 +1667,50 @@
         self.mysite = self.get_site()
         self.mainpage = pywikibot.Page(pywikibot.Link("Main Page", self.mysite))
 
+    def test_page_text(self):
+        """Test site.loadrevisions() with Page.text."""
+        page = self.get_mainpage(force=True)
+        self.site.loadrevisions(page, total=1)
+        self.assertFalse(hasattr(page, "_text"))
+        self.assertTrue(hasattr(page, "_revid"))
+        self.assertTrue(hasattr(page, "_revisions"))
+        self.assertEqual(len(page._revisions), 1)
+        self.assertIn(page._revid, page._revisions)
+        self.assertIsNone(page._revisions[page._revid].text)
+        self.assertIsNone(page._latest_cached_revision())
+
+        self.site.loadrevisions(page, total=1, getText=True)
+        self.assertFalse(hasattr(page, "_text"))
+        self.assertIsNotNone(page._latest_cached_revision())
+        # To verify that calling .text doesn't call loadrevisions again
+        # (causes a pickling error):
+        # loadrevisions = self.site.loadrevisions
+        # try:
+        #     self.site.loadrevisions = None
+        #     self.assertIsNotNone(page.text)
+        # finally:
+        #     self.site.loadrevisions = loadrevisions
+        self.assertIsNotNone(page.text)
+        self.assertTrue(hasattr(page, "_text"))
+
     def testLoadRevisions_basic(self):
         """Test the site.loadrevisions() method."""
         self.mysite.loadrevisions(self.mainpage, total=15)
-        self.assertTrue(hasattr(self.mainpage, "_revid"))
-        self.assertTrue(hasattr(self.mainpage, "_revisions"))
-        self.assertIn(self.mainpage._revid, self.mainpage._revisions)
+        self.assertFalse(hasattr(self.mainpage, "_text"))
         self.assertEqual(len(self.mainpage._revisions), 15)
-        self.assertEqual(self.mainpage._text, None)
+        self.assertIn(self.mainpage._revid, self.mainpage._revisions)
+        self.assertIsNone(self.mainpage._revisions[self.mainpage._revid].text)
+        self.assertIsNotNone(self.mainpage.text)
 
     def testLoadRevisions_getText(self):
         """Test the site.loadrevisions() method with getText=True."""
         self.mysite.loadrevisions(self.mainpage, getText=True, total=5)
-        self.assertGreater(len(self.mainpage._text), 0)
+        self.assertFalse(hasattr(self.mainpage, "_text"))
+        self.assertIn(self.mainpage._revid, self.mainpage._revisions)
+        self.assertIsNotNone(self.mainpage._revisions[self.mainpage._revid].text)
+        self.assertGreater(len(self.mainpage._revisions[self.mainpage._revid].text), 0)
+        self.assertIsNotNone(self.mainpage.text)
 
     def testLoadRevisions_revids(self):
         """Test the site.loadrevisions() method, listing based on revid."""
@@ -1893,8 +1924,10 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
+                self.assertTrue(hasattr(page, "_revid"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIn(page._revid, page._revisions)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
                 if count >= 5:
@@ -1916,8 +1949,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
                 if count >= 5:
@@ -1933,8 +1966,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
                 if count >= 6:
@@ -1959,8 +1992,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
         self.assertEqual(count, link_count)
@@ -1984,8 +2017,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
         self.assertEqual(count, link_count)
@@ -2012,8 +2045,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
                 if count > 5:
@@ -2041,8 +2074,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
                 if count > 5:
@@ -2082,8 +2115,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 self.assertTrue(hasattr(page, '_langlinks'))
                 count += 1
@@ -2104,8 +2137,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 count += 1
 
@@ -2124,8 +2157,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 self.assertTrue(hasattr(page, '_langlinks'))
                 count += 1
@@ -2144,8 +2177,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 self.assertTrue(hasattr(page, '_templates'))
                 count += 1
@@ -2164,8 +2197,8 @@
             self.assertIsInstance(page, pywikibot.Page)
             self.assertIsInstance(page.exists(), bool)
             if page.exists():
-                self.assertTrue(hasattr(page, "_text"))
                 self.assertEqual(len(page._revisions), 1)
+                self.assertIsNotNone(page._revisions[page._revid].text)
                 self.assertFalse(hasattr(page, '_pageprops'))
                 self.assertTrue(hasattr(page, '_templates'))
                 self.assertTrue(hasattr(page, '_langlinks'))

-- 
To view, visit https://gerrit.wikimedia.org/r/216523
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I55a49d115a8f207216f24f9acbfa64d87da88578
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jay...@gmail.com>

_______________________________________________
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits