Brian Wolff has uploaded a new change for review. https://gerrit.wikimedia.org/r/59359
Change subject: RefreshLinks on imagelinks on new uploads to kill tracking cat. ...................................................................... RefreshLinks on imagelinks on new uploads to kill tracking cat. When someone uploads a new file, we should refresh all the pages that use that image, so that [[Category:Pages with broken file links]] gets removed. Future fixme would be to run this again on file deletes, but that's less critical as they will eventually get added on next reparse and people don't expect that to happen instantly like when fixing the issue. Ditto for someone creating a new file redirect. p.s. Not 100% sure whether the job deduplication is needed. It can still trigger recursive links update, but the base job is an imagelinks update, which likely won't have duplicates, so I'm not sure if the deduplication is actually doing anything useful. Bug: 46749 Change-Id: I7889598eee54a1c07c64f750b8499e922ba59704 --- M includes/filerepo/file/LocalFile.php 1 file changed, 33 insertions(+), 0 deletions(-) git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core refs/changes/59/59359/1 diff --git a/includes/filerepo/file/LocalFile.php b/includes/filerepo/file/LocalFile.php index b481e83..912abae 100644 --- a/includes/filerepo/file/LocalFile.php +++ b/includes/filerepo/file/LocalFile.php @@ -1352,11 +1352,17 @@ # Invalidate cache for all pages using this file $update = new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' ); $update->doUpdate(); + if ( !$reupload ) { + $this->queueRefreshLinks( $this->getTitle() ); + } # Invalidate cache for all pages that redirects on this page $redirs = $this->getTitle()->getRedirectsHere(); foreach ( $redirs as $redir ) { + if ( !$reupload ) { + $this->queueRefreshLinks( $redir ); + } $update = new HTMLCacheUpdate( $redir, 'imagelinks' ); $update->doUpdate(); } @@ -1365,6 +1371,33 @@ return true; } + + /** + * Get rid of "Broken file links" on pages where it no longer applies. 
+ * + * Only need to do on a new upload, not a reupload. + * @param $title Title What page to refresh imagelinks back links for. + */ + private function queueRefreshLinks( Title $title ) { + wfProfileIn( __METHOD__ ); + if ( $title->getBacklinkCache()->hasLinks( 'imagelinks' ) ) { + $job = new RefreshLinksJob2( + $title, + array( + 'table' => 'imagelinks', + ) + Job::newRootJobParams( // "overall" refresh links job info + "refreshlinks:imagelinks:{$title->getPrefixedText()}" + ) + ); + JobQueueGroup::singleton()->push( $job ); + // Not sure if deduplication really needed here + JobQueueGroup::singleton()->deduplicateRootJob( $job ); + } + wfProfileOut( __METHOD__ ); + } + + + /** * Move or copy a file to its public location. If a file exists at the * destination, move it to an archive. Returns a FileRepoStatus object with -- To view, visit https://gerrit.wikimedia.org/r/59359 To unsubscribe, visit https://gerrit.wikimedia.org/r/settings Gerrit-MessageType: newchange Gerrit-Change-Id: I7889598eee54a1c07c64f750b8499e922ba59704 Gerrit-PatchSet: 1 Gerrit-Project: mediawiki/core Gerrit-Branch: master Gerrit-Owner: Brian Wolff <bawolff...@gmail.com> _______________________________________________ MediaWiki-commits mailing list MediaWiki-commits@lists.wikimedia.org https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits