jenkins-bot has submitted this change and it was merged.

Change subject: Add --force option to fetch-parserTests.txt.js to update file and hashes.
......................................................................


Add --force option to fetch-parserTests.txt.js to update file and hashes.

Change-Id: If4362711e4d17e9dad8b3fbc7ac3b17966fa5aa0
---
M js/tests/fetch-parserTests.txt.js
M js/tests/parserTests.js
2 files changed, 83 insertions(+), 17 deletions(-)
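
(Usage sketch, for reference: the invocations below assume the script is run
from js/tests/ and that the 'optimist' module it uses for option parsing is
installed; they are illustrative, not part of the change itself.

    ./fetch-parserTests.txt.js           # warn and refetch if the local file
                                         # does not match expectedSHA1
    ./fetch-parserTests.txt.js --force   # fetch the newest upstream copy and
                                         # rewrite expectedSHA1/latestCommit
                                         # in the script itself
)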

Approvals:
  GWicke: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/js/tests/fetch-parserTests.txt.js b/js/tests/fetch-parserTests.txt.js
old mode 100644
new mode 100755
index 8e98c72..251e42b
--- a/js/tests/fetch-parserTests.txt.js
+++ b/js/tests/fetch-parserTests.txt.js
@@ -1,9 +1,14 @@
+#!/usr/bin/env node
 /**
  * Fetch new parserTests.txt from upstream mediawiki/core.
  */
 
 // UPDATE THESE when upstream mediawiki/core includes new parsoid-relevant tests
 // This ensures that our whitelist is in sync.
+//
+// ==> Use "./fetch-parserTests.txt.js --force" to download latest parserTests
+//     and update these hashes automatically.
+//
 // You can use 'sha1sum -b tests/parser/parserTests.txt' to compute this value:
 var expectedSHA1 = "f1af8c010dd69906e27036d787fbdc36f1067c55";
 // git log --pretty=oneline -1 tests/parser/parserTests.txt
@@ -14,24 +19,33 @@
        https = require('https'),
        crypto = require('crypto');
 
-var existsSync = fs.existsSync || path.existsSync; // node 0.6 compat
-
-var url = {
+var downloadUrl = {
        host: 'gerrit.wikimedia.org',
        path: '/r/gitweb?p=mediawiki/core.git;a=blob_plain;hb=HEAD;f=tests/parser/parserTests.txt'
+};
+var historyUrl = {
+       host: downloadUrl.host,
+       path: '/r/gitweb?p=mediawiki/core.git;a=history;hb=HEAD;f=tests/parser/parserTests.txt'
 };
 var target_name = __dirname+"/parserTests.txt";
 
 var computeSHA1 = function(target_name) {
+       var existsSync = fs.existsSync || path.existsSync; // node 0.6 compat
+       if (!existsSync(target_name)) {
+               return "<file not present>";
+       }
        var contents = fs.readFileSync(target_name);
        return crypto.createHash('sha1').update(contents).digest('hex').
                toLowerCase();
 };
 
-var fetch = function(url, target_name, gitCommit) {
+var fetch = function(url, target_name, gitCommit, cb) {
        console.log('Fetching parserTests.txt from mediawiki/core');
        if (gitCommit) {
-               url.path = url.path.replace(/;hb=[^;]+;/, ';hb='+gitCommit+';');
+               url = {
+                       host: url.host,
+                       path: url.path.replace(/;hb=[^;]+;/, ';hb='+gitCommit+';')
+               };
        }
        https.get(url, function(result) {
                var out = fs.createWriteStream(target_name);
@@ -41,7 +55,9 @@
                result.on('end', function() {
                        if (out) {
                                out.end();
-                               if (expectedSHA1 !== computeSHA1(target_name)) {
+                               if (cb) {
+                                       return cb();
+                               } else if (expectedSHA1 !== computeSHA1(target_name)) {
                                        console.warn('Parsoid expected sha1sum', expectedSHA1,
                                                                 'but got', computeSHA1(target_name));
                                }
@@ -52,23 +68,73 @@
        });
 };
 
+var isUpToDate = function() {
+       return (expectedSHA1 === computeSHA1(target_name));
+};
+
 var checkAndUpdate = function() {
-       if (existsSync(target_name) &&
-               expectedSHA1 === computeSHA1(target_name)) {
-               return; // a-ok!
+       if (!isUpToDate()) {
+               fetch(downloadUrl, target_name, latestCommit);
        }
-       fetch(url, target_name, latestCommit);
+};
+
+var forceUpdate = function() {
+       console.log('Fetching parserTests.txt history from mediawiki/core');
+       var findMostRecentCommit, downloadCommit, updateHashes;
+
+       // fetch the history page
+       https.get(historyUrl, function(result) {
+               var html = '';
+               result.setEncoding('utf8');
+               result.on('data', function(data) { html += data; });
+               result.on('end', function() {
+                       findMostRecentCommit(html);
+               });
+       }).on('error', function(err) {
+               console.error(err);
+       });
+
+       // now look for the most recent commit
+       findMostRecentCommit = function(html) {
+               // remove everything before <table class="history">
+               html = html.replace(/^[^]*<table\s[^>]*class="history"[^>]*>/, '');
+               // now find the first link to this file with a specific hash
+               var m = /[?;]a=blob;f=tests\/parser\/parserTests.txt;hb=([0-9a-f]+)/.
+                       exec(html);
+               var gitCommit = m ? m[1] : "HEAD";
+               downloadCommit(gitCommit);
+       };
+
+       // download latest file
+       downloadCommit = function(gitCommit) {
+               fetch(downloadUrl, target_name, gitCommit, function() {
+                       updateHashes(gitCommit, computeSHA1(target_name));
+               });
+       };
+
+       // now rewrite this file!
+       updateHashes = function(gitCommit, fileHash) {
+               var contents = fs.
+                       readFileSync(__filename, 'utf8').
+                       replace(/^var expectedSHA1 = "[0-9a-f]*";/m,
+                                       "var expectedSHA1 = \""+fileHash+"\";").
+                       replace(/^var latestCommit = "[0-9a-f]*";/m,
+                                       "var latestCommit = \""+gitCommit+"\";");
+               fs.writeFileSync(__filename, contents, 'utf8');
+               console.log('Updated fetch-parserTests.txt.js');
+       };
 };
 
 if (typeof module === 'object' && require.main !== module) {
        module.exports = {
                checkAndUpdate: checkAndUpdate,
-               expectedSHA1: expectedSHA1,
-               computeSHA1: function() {
-                       return existsSync(target_name) ? computeSHA1(target_name) :
-                               "<file not present>";
-               }
+               isUpToDate: isUpToDate
        };
 } else {
-       checkAndUpdate();
+       var argv = require('optimist').argv;
+       if (argv.force) {
+               forceUpdate();
+       } else {
+               checkAndUpdate();
+       }
 }
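
(Note on the forceUpdate() flow above: it scrapes the gitweb history page for
the newest commit touching parserTests.txt, downloads that blob, and then
rewrites this script's own source so that the two constants near the top
become, roughly:

    var expectedSHA1 = "<sha1 of the freshly downloaded parserTests.txt>";
    var latestCommit = "<newest mediawiki/core commit hash for that file>";

The placeholder values shown here are illustrative; the real ones are filled
in from computeSHA1() and the parsed history page.)
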
diff --git a/js/tests/parserTests.js b/js/tests/parserTests.js
index b893689..e1bfa3b 100755
--- a/js/tests/parserTests.js
+++ b/js/tests/parserTests.js
@@ -294,7 +294,7 @@
 ParserTests.prototype.getTests = function ( argv ) {
        // double check that test file is up-to-date with upstream
        var fetcher = require(__dirname+"/fetch-parserTests.txt.js");
-       if (fetcher.expectedSHA1 !== fetcher.computeSHA1()) {
+       if (!fetcher.isUpToDate()) {
                console.warn("WARNING: parserTests.txt not up-to-date with 
upstream.");
        }
 

-- 
To view, visit https://gerrit.wikimedia.org/r/58442
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: If4362711e4d17e9dad8b3fbc7ac3b17966fa5aa0
Gerrit-PatchSet: 10
Gerrit-Project: mediawiki/extensions/Parsoid
Gerrit-Branch: master
Gerrit-Owner: Cscott <wikime...@cscott.net>
Gerrit-Reviewer: Cscott <wikime...@cscott.net>
Gerrit-Reviewer: GWicke <gwi...@wikimedia.org>
Gerrit-Reviewer: Subramanya Sastry <ssas...@wikimedia.org>
Gerrit-Reviewer: jenkins-bot
