jenkins-bot has submitted this change and it was merged.

Change subject: Prevent duplicate tests in round-trip testing
......................................................................


Prevent duplicate tests in round-trip testing

In some cases the same page was sent to different clients when the
clients couldn't finish a whole batch within the cutoff time. Filtering
out pages that have already been fetched prevents this.
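
The dedup itself is just a filter on page ids; a minimal sketch (the
fetchedPages array and per-page id field are from the patch below, the
standalone wrapper is illustrative only):

    // Return only the pages whose ids have not been handed out yet.
    function filterNewPages( fetchedPages, pages ) {
        return pages.filter( function ( p ) {
            return fetchedPages.every( function ( f ) {
                return f.id !== p.id;
            } );
        } );
    }

    // filterNewPages( [ { id: 1 } ], [ { id: 1 }, { id: 2 } ] )
    //   => [ { id: 2 } ]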

Also, when a new commit is started, clear the retry count for pages
that went over the maximum number of tries. Previously this wasn't
done, so crashers stayed marked as having been tested too many times
and were never retested.
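
For illustration, the new flow boils down to the sketch below. All
names are taken from the patch; the INSERT IGNORE behaviour of
dbInsertCommit is an assumption of mine, inferred from the
affectedRows check:

    // If dbInsertCommit is an INSERT IGNORE, affectedRows is 0 when
    // the commit row already exists, so the reset only runs once per
    // new commit.
    var trans = db.startTransaction();
    trans.query( dbInsertCommit, [ commitHash, new Date() ], function ( err, result ) {
        if ( !err && result.affectedRows > 0 ) {
            // New commit: zero out claim_num_tries for pages that hit
            // maxTries under previous commits, so they get retested.
            trans.query( dbUpdateCrashersClearTries, [ commitHash, maxTries ] );
        }
    } ).commit();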

Change-Id: I4622de7e6a073c529790b31bafb63fa64a9b489e
---
M js/tests/server/server.js
1 file changed, 28 insertions(+), 8 deletions(-)

Approvals:
  Subramanya Sastry: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/js/tests/server/server.js b/js/tests/server/server.js
index 82fc237..6653df5 100755
--- a/js/tests/server/server.js
+++ b/js/tests/server/server.js
@@ -202,6 +202,11 @@
        'claim_hash = ?, claim_timestamp = NULL, claim_num_tries = 0 ' +
     'WHERE id = ?';
 
+var dbUpdateCrashersClearTries =
+       'UPDATE pages ' +
+       'SET claim_num_tries = 0 ' +
+       'WHERE claim_hash != ? AND claim_num_tries >= ?';
+
 var dbStatsQuery =
        'SELECT ' +
        '(select hash from commits order by timestamp desc limit 1) as maxhash, ' +
@@ -460,7 +465,7 @@
                        for ( var i = 0; i < rows.length; i++ ) {
                                var row = rows[i];
                                pageIds.push( row.id );
-                               pages.push( { prefix: row.prefix, title: row.title } );
+                               pages.push( { id: row.id, prefix: row.prefix, title: row.title } );
                        }
 
                        trans.query( dbUpdatePageClaims, [ commitHash, new Date(), pageIds ],
@@ -481,12 +486,20 @@
        req.connection.setTimeout(300 * 1000);
        res.setHeader( 'Content-Type', 'text/plain; charset=UTF-8' );
 
-       // Keep record of the commit, ignore if already there.
-       db.query( dbInsertCommit, [ commitHash, new Date() ], function ( err ) {
-               if ( err ) {
-                       console.error( "Error inserting commit " + commitHash );
-               }
-       });
+       // Keep record of the commit, ignore if already there. We use a transaction
+       // to make sure we don't start fetching pages until we've done this.
+       if ( commitHash !== lastFetchedCommit ) {
+               var trans = db.startTransaction();
+               trans.query( dbInsertCommit, [ commitHash, new Date() ], function ( err, commitInsertResult ) {
+                       if ( err ) {
+                               console.error( "Error inserting commit " + commitHash );
+                       } else if ( commitInsertResult.affectedRows > 0 ) {
+                               // If this is a new commit, we need to clear the number of times a
+                               // crasher page has been sent out so that each title gets retested
+                               trans.query( dbUpdateCrashersClearTries, [ commitHash, maxTries ] );
+                       }
+               } ).commit();
+       }
 
        var fetchCb = function ( err, pages ) {
                if ( err ) {
@@ -495,9 +508,16 @@
                }
 
                if ( pages ) {
+                       // Get the pages that aren't already fetched, to guard against the
+                       // case of clients not finishing the whole batch in the cutoff time
+                       var newPages = pages.filter( function( p ) {
+                               return fetchedPages.every( function ( f ) {
+                                       return f.id !== p.id;
+                               } );
+                       } );
                        // Append the new pages to the already fetched ones, in case there's
                        // a parallel request.
-                       fetchedPages = fetchedPages.concat( pages );
+                       fetchedPages = fetchedPages.concat( newPages );
                }
                if ( fetchedPages.length === 0 ) {
                        res.send( 'No available titles that fit the constraints.', 404 );

-- 
To view, visit https://gerrit.wikimedia.org/r/98092
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I4622de7e6a073c529790b31bafb63fa64a9b489e
Gerrit-PatchSet: 3
Gerrit-Project: mediawiki/extensions/Parsoid
Gerrit-Branch: master
Gerrit-Owner: Marcoil <marc...@wikimedia.org>
Gerrit-Reviewer: GWicke <gwi...@wikimedia.org>
Gerrit-Reviewer: Marcoil <marc...@wikimedia.org>
Gerrit-Reviewer: Subramanya Sastry <ssas...@wikimedia.org>
Gerrit-Reviewer: jenkins-bot
