Hashar has uploaded a new change for review. ( https://gerrit.wikimedia.org/r/356119 )

Change subject: Add WAN Cache to SiteStats::jobs
......................................................................

Add WAN Cache to SiteStats::jobs

The method hits the jobrunner backend to find out how many jobs are
enqueued in each of the job queues. It is publicly available via the
MediaWiki API request:
    /w/api.php?action=query&meta=siteinfo&siprop=statistics
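
For illustration, the same figure can be fetched from within MediaWiki
through the action API machinery. A minimal sketch (not part of this
patch; it merely mirrors what such a bot request does):

    $api = new ApiMain( new FauxRequest( [
        'action' => 'query',
        'meta' => 'siteinfo',
        'siprop' => 'statistics',
    ] ) );
    $api->execute();
    // The 'jobs' field in the statistics block is backed by SiteStats::jobs()
    $stats = $api->getResult()->getResultData( [ 'query', 'statistics' ] );
    $jobCount = $stats['jobs'];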

That request is often used by bots, among others when querying recent
changes, and fast bots thus cause useless queries against the jobrunner
backend.

Wrap SiteStats::jobs() with a WAN cache under key SiteStats:jobscount.
That is similar to SiteStats::numberingroup().
Set TTL to one minute, which should still give fresh enough results for
public uses.
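
A rough sketch of the intended behaviour (illustration only; the actual
wiring is in the diff below, and the test exercises exactly this):

    SiteStats::jobs(); // queues counted, result cached for TTL_MINUTE
    SiteStats::jobs(); // within the minute: served from the WAN cache
    // Forcing a recount requires dropping both the WAN key and the
    // in-process copy kept via the 'pcTTL' option:
    $cache = MediaWikiServices::getInstance()->getMainWANObjectCache();
    $cache->delete( $cache->makeKey( 'SiteStats', 'jobscount' ) );
    $cache->clearProcessCache();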

Cover that behavior with a test.

While writing the test I noticed that MediaWikiTestCase generates a few
jobs due to the creation of the UTPage page:

* HTMLCacheUpdateJob to refresh backlinks (e.g. history pages)
* RecentChangesUpdateJob, which is enqueued randomly

Pass EDIT_SUPPRESS_RC to doEditContent() to prevent the first, and
blindly delete entries from the recentChangesUpdate job queue for the
second.

Change-Id: I95a272d0691d779bfee9e7a671cbab66a113dfa1
---
M includes/SiteStats.php
M tests/phpunit/MediaWikiTestCase.php
A tests/phpunit/includes/SiteStatsTest.php
3 files changed, 54 insertions(+), 8 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core refs/changes/19/356119/1

diff --git a/includes/SiteStats.php b/includes/SiteStats.php
index 86a4f63..065c075 100644
--- a/includes/SiteStats.php
+++ b/includes/SiteStats.php
@@ -213,16 +213,24 @@
        }
 
        /**
+        * Total number of jobs across all job queues.
         * @return int
         */
        static function jobs() {
-               if ( !isset( self::$jobs ) ) {
-                       try{
-                               self::$jobs = array_sum( JobQueueGroup::singleton()->getQueueSizes() );
-                       } catch ( JobQueueError $e ) {
-                               self::$jobs = 0;
-                       }
-               }
+               $cache = MediaWikiServices::getInstance()->getMainWANObjectCache();
+               self::$jobs = $cache->getWithSetCallback(
+                       $cache->makeKey( 'SiteStats', 'jobscount' ),
+                       $cache::TTL_MINUTE,
+                       function ( $oldValue, &$ttl, array &$setOpts ) {
+                               try {
+                                       $jobs = array_sum( JobQueueGroup::singleton()->getQueueSizes() );
+                               } catch ( JobQueueError $e ) {
+                                       $jobs = 0;
+                               }
+                               return $jobs;
+                       },
+                       [ 'pcTTL' => $cache::TTL_PROC_LONG ]
+               );
                return self::$jobs;
        }
 
diff --git a/tests/phpunit/MediaWikiTestCase.php b/tests/phpunit/MediaWikiTestCase.php
index df3d568..0e920da 100644
--- a/tests/phpunit/MediaWikiTestCase.php
+++ b/tests/phpunit/MediaWikiTestCase.php
@@ -1073,10 +1073,15 @@
                        $page->doEditContent(
                                new WikitextContent( 'UTContent' ),
                                'UTPageSummary',
-                               EDIT_NEW,
+                               EDIT_NEW | EDIT_SUPPRESS_RC,
                                false,
                                $user
                        );
+                       // An edit always attempts to purge backlinks such as
+                       // history pages. That is unnecessary here.
+                       JobQueueGroup::singleton()->get( 'htmlCacheUpdate' )->delete();
+                       // WikiPage::doEditUpdates() randomly enqueues RC purges
+                       JobQueueGroup::singleton()->get( 'recentChangesUpdate' )->delete();
 
                        // doEditContent() probably started the session via
                        // User::loadFromSession(). Close it now.
diff --git a/tests/phpunit/includes/SiteStatsTest.php b/tests/phpunit/includes/SiteStatsTest.php
new file mode 100644
index 0000000..365f699
--- /dev/null
+++ b/tests/phpunit/includes/SiteStatsTest.php
@@ -0,0 +1,33 @@
+<?php
+
+class SiteStatsTest extends MediaWikiTestCase {
+
+       function testJobsCountGetCached() {
+               $this->setService( 'MainWANObjectCache',
+                       new WANObjectCache( [ 'cache' => new HashBagOStuff() ] ) );
+               $cache = \MediaWiki\MediaWikiServices::getInstance()->getMainWANObjectCache();
+               $jobq = JobQueueGroup::singleton();
+
+               $jobq->push( new NullJob( Title::newMainPage(), [] ) );
+               $this->assertEquals( 1, SiteStats::jobs(),
+                       'A single job enqueued bumps jobscount stat to 1' );
+
+               $jobq->push( new NullJob( Title::newMainPage(), [] ) );
+               $this->assertEquals( 1, SiteStats::jobs(),
+                       'SiteStats::jobs() count does not reflect addition ' .
+                       'of a second job (cached)'
+               );
+
+               $jobq->get( 'null' )->delete(); // clear the job queue
+               $this->assertEquals( 0, $jobq->get( 'null' )->getSize(),
+                       'Job queue for NullJob has been cleaned' );
+
+               $cache->delete( $cache->makeKey( 'SiteStats', 'jobscount' ) );
+               $this->assertEquals( 1, SiteStats::jobs(),
+                       'jobs count is kept in process cache' );
+
+               $cache->clearProcessCache();
+               $this->assertEquals( 0, SiteStats::jobs() );
+       }
+
+}

-- 
To view, visit https://gerrit.wikimedia.org/r/356119
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I95a272d0691d779bfee9e7a671cbab66a113dfa1
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Hashar <[email protected]>
