jenkins-bot has submitted this change and it was merged.
Change subject: Fix selser in the v2 API
......................................................................
Fix selser in the v2 API
* It was setting env.page.dom as the document, whereas throughout the
codebase that's usually the body. This produced an unnecessary diff.
* To this point, some of the tests for the v2 API are merely
documenting the expectations, and not enforcing anything. Here we add
a test to ensure selser is used.
* Turns on useSelser in the API test parsoidServer.
* Renames some variables in the selectiveSerializer to reflect that
we're working with a body, not a doc.
* Clarified that a new `data-parsoid` should not be submitted. It can
only be found in the `original` field and is applied to both new and
old html.
Bug: T89411
Change-Id: I91d688d7b3942b89637b15ed2ae1a54dab22bede
---
M api/localsettings.js.example
M api/routes.js
M lib/mediawiki.SelectiveSerializer.js
M tests/apiServer.js
M tests/mocha/api.js
A tests/mocha/apitest.localsettings.js
A tests/rttest.localsettings.js
D tests/test.localsettings.js
8 files changed, 243 insertions(+), 97 deletions(-)
Approvals:
Subramanya Sastry: Looks good to me, approved
jenkins-bot: Verified
diff --git a/api/localsettings.js.example b/api/localsettings.js.example
index 57fc72c..5818dae 100644
--- a/api/localsettings.js.example
+++ b/api/localsettings.js.example
@@ -5,6 +5,7 @@
*
* Also see the file server.js for more information.
*/
+"use strict";
exports.setup = function( parsoidConfig ) {
// The URL of your MediaWiki API endpoint. Optionally, you can also pass
diff --git a/api/routes.js b/api/routes.js
index 9d08249..e5f82b3 100644
--- a/api/routes.js
+++ b/api/routes.js
@@ -278,12 +278,14 @@
var doc = DU.parseHTML( html.replace(/\r/g, '') ),
Serializer = parsoidConfig.useSelser ?
SelectiveSerializer : WikitextSerializer,
serializer = new Serializer({ env: env, oldid:
env.page.id });
- if ( v2 && v2["data-parsoid"] ) {
- DU.applyDataParsoid( doc, v2["data-parsoid"].body );
+ if ( v2 && v2.original && v2.original["data-parsoid"] ) {
+ DU.applyDataParsoid( doc,
v2.original["data-parsoid"].body );
}
if ( v2 && v2.original && v2.original.html ) {
- env.page.dom = DU.parseHTML( v2.original.html.body );
- DU.applyDataParsoid( env.page.dom,
v2.original["data-parsoid"].body );
+ env.page.dom = DU.parseHTML( v2.original.html.body
).body;
+ if ( v2.original["data-parsoid"] ) {
+ DU.applyDataParsoid(
env.page.dom.ownerDocument, v2.original["data-parsoid"].body );
+ }
}
return Promise.promisify( serializer.serializeDOM, false,
serializer )(
doc.body, function( chunk ) { out.push( chunk ); },
false
diff --git a/lib/mediawiki.SelectiveSerializer.js
b/lib/mediawiki.SelectiveSerializer.js
index 7128d63..d7da44c 100644
--- a/lib/mediawiki.SelectiveSerializer.js
+++ b/lib/mediawiki.SelectiveSerializer.js
@@ -19,13 +19,12 @@
* @class
* @constructor
*
- * If one of options.env.page.name or options.oldtext is set, we use the
selective serialization
- * method, only reporting the serialized wikitext for parts of the page that
changed. Else, we
- * fall back to serializing the whole DOM.
+ * If we have the page source (this.env.page.src), we use the selective
+ * serialization method, only reporting the serialized wikitext for parts of
+ * the page that changed. Else, we fall back to serializing the whole DOM.
*
* @param options {Object} Options for the serializer.
* @param options.env {MWParserEnvironment}
- * @param options.oldtext {string} The old text of the document, if any
* @param options.oldid {string} The revision ID you want to compare to
(defaults to latest revision)
*/
var SelectiveSerializer = function ( options ) {
@@ -49,38 +48,38 @@
* Run the DOM serialization on a node.
*
* @param {Error} err
- * @param {Node} doc
+ * @param {Node} body
* @param {Function} cb Callback that is called for each chunk.
* @param {string} cb.res The wikitext of the chunk we've just serialized.
* @param {Function} finalcb The callback for when we've finished serializing
the DOM.
*/
-SSP.doSerializeDOM = function( err, doc, cb, finalcb ) {
+SSP.doSerializeDOM = function( err, body, cb, finalcb ) {
var self = this;
if ( err || (!this.env.page.dom && !this.env.page.domdiff) ||
!this.env.page.src) {
// If there's no old source, fall back to non-selective
serialization.
- this.wts.serializeDOM( doc, cb, false, finalcb );
+ this.wts.serializeDOM( body, cb, false, finalcb );
} else {
// Use provided diff-marked DOM (used during testing)
// or generate one (used in production)
- var diff = this.env.page.domdiff || new DOMDiff(this.env).diff(
doc );
+ var diff = this.env.page.domdiff || new DOMDiff( this.env
).diff( body );
- if ( ! diff.isEmpty ) {
+ if ( !diff.isEmpty ) {
- doc = diff.dom;
+ body = diff.dom;
// Add the serializer info
- // new DiffToSelserConverter(this.env, doc).convert();
+ // new DiffToSelserConverter(this.env, body).convert();
if ( this.trace || ( this.env.conf.parsoid.dumpFlags &&
this.env.conf.parsoid.dumpFlags.indexOf(
'dom:post-dom-diff' ) !== -1) )
{
console.log( '----- DOM after running DOMDiff
-----' );
- console.log( doc.outerHTML );
+ console.log( body.outerHTML );
}
// Call the WikitextSerializer to do our bidding
- this.wts.serializeDOM( doc, cb, true, finalcb );
+ this.wts.serializeDOM( body, cb, true, finalcb );
} else {
// Nothing was modified, just re-use the original source
cb( this.env.page.src );
@@ -95,7 +94,7 @@
*
* Parse the wikitext source of the page for DOM-diffing purposes.
*
- * @param {Node} doc The node for which we're getting the source.
+ * @param {Node} body The node for which we're getting the source.
* @param {Function} cb A callback to call after each chunk is serialized.
* @param {string} cb.res The result of the chunk serialization.
* @param {Function} finalcb The callback for after we've serialized the
entire document.
@@ -103,7 +102,7 @@
* @param {string} src The wikitext source of the document (optionally
* including page metadata)
*/
-SSP.parseOriginalSource = function ( doc, cb, finalcb, err, src ) {
+SSP.parseOriginalSource = function( body, cb, finalcb, err, src ) {
var self = this,
parserPipelineFactory = new ParserPipelineFactory( this.env ),
parserPipeline = parserPipelineFactory.getPipeline(
'text/x-mediawiki/full' );
@@ -113,14 +112,11 @@
// Parse the wikitext src to the original DOM, and pass that on to
// doSerializeDOM
- parserPipeline.once( 'document', function ( origDoc ) {
- var body = DU.parseHTML( DU.serializeNode(origDoc) ).body;
- self.env.page.dom = body;
- //console.log('calling doSerializeDOM');
- //console.log(body.outerHTML);
- self.doSerializeDOM(null, doc, cb, finalcb);
+ parserPipeline.once( 'document', function( origDoc ) {
+ self.env.page.dom = DU.parseHTML( DU.serializeNode( origDoc )
).body;
+ self.doSerializeDOM( null, body, cb, finalcb );
} );
- parserPipeline.processToplevelDoc(this.env.page.src);
+ parserPipeline.processToplevelDoc( this.env.page.src );
};
@@ -130,19 +126,19 @@
* The main serializer handler. Calls detectDOMChanges and prepares and calls
* WikitextSerializer.serializeDOM if changes were found.
*
- * @param {Node} doc The document to serialize.
+ * @param {Node} body The document to serialize.
* @param {Function} cb A callback for any serialized chunks, called whenever
we get a chunk of wikitext.
* @param {string} cb.res The chunk of wikitext just serialized.
* @param {Function} finalcb The callback fired on completion of the
serialization.
*/
-SSP.serializeDOM = function ( doc, cb, dummy, finalcb ) {
+SSP.serializeDOM = function( body, cb, dummy, finalcb ) {
// dummy preserves the wt serializer interface
var self = this;
if ( this.env.page.dom || this.env.page.domdiff ) {
- this.doSerializeDOM(null, doc, cb, finalcb);
+ this.doSerializeDOM( null, body, cb, finalcb );
} else if ( this.env.page.src ) {
// Have the src, only parse the src to the dom
- this.parseOriginalSource( doc, cb, finalcb, null,
this.env.page.src );
+ this.parseOriginalSource( body, cb, finalcb, null,
this.env.page.src );
} else if (this.env.page.id && this.env.page.id !== '0') {
// Start by getting the old text of this page
if (this.env.conf.parsoid.parsoidCacheURI) {
@@ -166,22 +162,23 @@
// And the original
dom. results[1] is an array
// with the html and
the content type. Ignore the
// content type here.
- self.env.page.dom =
DU.parseHTML(results[1][0]).body;
+ self.env.page.dom =
DU.parseHTML( results[1][0] ).body;
}
// Selective serialization if
there was no error, full
// serialization if there was
one.
- self.doSerializeDOM(null, doc,
cb, finalcb);
+ self.doSerializeDOM( null,
body, cb, finalcb );
}
);
} else {
- Util.getPageSrc( this.env, this.env.page.name,
- this.env.page.id || null,
- this.parseOriginalSource.bind(this,
doc, cb, finalcb) );
+ Util.getPageSrc(
+ this.env, this.env.page.name, this.env.page.id
|| null,
+ this.parseOriginalSource.bind( this, body, cb,
finalcb )
+ );
}
} else {
- this.doSerializeDOM(null, doc, cb, finalcb);
+ this.doSerializeDOM( null, body, cb, finalcb );
}
};
diff --git a/tests/apiServer.js b/tests/apiServer.js
index a1c7747..3f223c7 100644
--- a/tests/apiServer.js
+++ b/tests/apiServer.js
@@ -125,7 +125,7 @@
filePath: "/../api/server.js",
serverArgv: [
'--num-workers', '1',
- '--config', path.resolve( __dirname, './test.localsettings.js' )
+ '--config', path.resolve( __dirname,
'./rttest.localsettings.js' )
],
serverEnv: {}
};
diff --git a/tests/mocha/api.js b/tests/mocha/api.js
index a559f7d..28bf57a 100644
--- a/tests/mocha/api.js
+++ b/tests/mocha/api.js
@@ -6,6 +6,7 @@
request = require('supertest'),
domino = require('domino'),
url = require('url'),
+ path = require('path'),
should = require('chai').should();
describe('Parsoid API', function() {
@@ -13,7 +14,13 @@
before(function() {
var p = apiServer.startMockAPIServer({}).then(function( ret ) {
mockHost = url.parse( ret.url ).host;
- return apiServer.startParsoidServer({ mockUrl: ret.url
});
+ return apiServer.startParsoidServer({
+ mockUrl: ret.url,
+ serverArgv: [
+ '--num-workers', '1',
+ '--config', path.resolve( __dirname,
'./apitest.localsettings.js' )
+ ]
+ });
}).then(function( ret ) {
api = ret.url;
});
@@ -290,35 +297,6 @@
.end(done);
});
- it('should accept html as a pagebundle', function(done)
{
- request(api)
- .post('v2/' + mockHost + '/wt/')
- .send({
- html: {
- headers: {
- 'content-type':
'text/html;profile=mediawiki.org/specs/html/1.0.0'
- },
- body: "<!DOCTYPE html>\n<html
prefix=\"dc: http://purl.org/dc/terms/ mw: http://mediawiki.org/rdf/\"
about=\"http://localhost/index.php/Special:Redirect/revision/1\"><head
prefix=\"mwr: http://localhost/index.php/Special:Redirect/\"><meta
property=\"mw:articleNamespace\" content=\"0\"/><link rel=\"dc:replaces\"
resource=\"mwr:revision/0\"/><meta property=\"dc:modified\"
content=\"2014-09-12T22:46:59.000Z\"/><meta about=\"mwr:user/0\"
property=\"dc:title\" content=\"MediaWiki default\"/><link
rel=\"dc:contributor\" resource=\"mwr:user/0\"/><meta
property=\"mw:revisionSHA1\"
content=\"8e0aa2f2a7829587801db67d0424d9b447e09867\"/><meta
property=\"dc:description\" content=\"\"/><meta property=\"mw:parsoidVersion\"
content=\"0\"/><link rel=\"dc:isVersionOf\"
href=\"http://localhost/index.php/Main_Page\"/><title>Main_Page</title><base
href=\"http://localhost/index.php/\"/><link rel=\"stylesheet\"
href=\"//localhost/load.php?modules=mediawiki.legacy.commonPrint,shared|mediawiki.skinning.elements|mediawiki.skinning.content|mediawiki.skinning.interface|skins.vector.styles|site|mediawiki.skinning.content.parsoid&only=styles&debug=true&skin=vector\"/></head><body
id=\"mwAA\" lang=\"en\" class=\"mw-content-ltr sitedir-ltr ltr mw-body
mw-body-content mediawiki\" dir=\"ltr\"><p id=\"mwAQ\"><strong
id=\"mwAg\">MediaWiki has been successfully installed.</strong></p>\n\n<p
id=\"mwAw\">Consult the <a rel=\"mw:ExtLink\"
href=\"//meta.wikimedia.org/wiki/Help:Contents\" id=\"mwBA\">User's Guide</a>
for information on using the wiki software.</p>\n\n<h2 id=\"mwBQ\"> Getting
started </h2>\n<ul id=\"mwBg\"><li id=\"mwBw\"> <a rel=\"mw:ExtLink\"
href=\"//www.mediawiki.org/wiki/Special:MyLanguage/Manual:Configuration_settings\"
id=\"mwCA\">Configuration settings list</a></li>\n<li id=\"mwCQ\"> <a
rel=\"mw:ExtLink\"
href=\"//www.mediawiki.org/wiki/Special:MyLanguage/Manual:FAQ\"
id=\"mwCg\">MediaWiki FAQ</a></li>\n<li id=\"mwCw\"> <a rel=\"mw:ExtLink\"
href=\"https://lists.wikimedia.org/mailman/listinfo/mediawiki-announce\"
id=\"mwDA\">MediaWiki release mailing list</a></li>\n<li id=\"mwDQ\"> <a
rel=\"mw:ExtLink\"
href=\"//www.mediawiki.org/wiki/Special:MyLanguage/Localisation#Translation_resources\"
id=\"mwDg\">Localise MediaWiki for your language</a></li></ul></body></html>",
- },
- "data-parsoid": {
- headers: {
- 'content-type':
'application/json;profile=mediawiki.org/specs/data-parsoid/0.0.1'
- },
- body: {
- "counter":14,
- "ids": {
-
"mwAA":{"dsr":[0,592,0,0]},"mwAQ":{"dsr":[0,59,0,0]},"mwAg":{"stx":"html","dsr":[0,59,8,9]},"mwAw":{"dsr":[61,171,0,0]},"mwBA":{"targetOff":114,"contentOffsets":[114,126],"dsr":[73,127,41,1]},"mwBQ":{"dsr":[173,194,2,2]},"mwBg":{"dsr":[195,592,0,0]},"mwBw":{"dsr":[195,300,1,0]},"mwCA":{"targetOff":272,"contentOffsets":[272,299],"dsr":[197,300,75,1]},"mwCQ":{"dsr":[301,373,1,0]},"mwCg":{"targetOff":359,"contentOffsets":[359,372],"dsr":[303,373,56,1]},"mwCw":{"dsr":[374,472,1,0]},"mwDA":{"targetOff":441,"contentOffsets":[441,471],"dsr":[376,472,65,1]},"mwDQ":{"dsr":[473,592,1,0]},"mwDg":{"targetOff":555,"contentOffsets":[555,591],"dsr":[475,592,80,1]
}
- }
- }
- }
- })
- .expect(200)
- .expect(function(res) {
-
res.body.should.have.property("wikitext");
- })
- .end(done);
- });
-
it('should allow a title in the url', function(done) {
request(api)
.post('v2/' + mockHost + '/wt/Main_Page')
@@ -430,6 +408,78 @@
.end(done);
});
+ // The following two tests should both serialize as:
+ // "<div>Selser test"
+ // However, we're deliberately setting the original
wikitext in
+ // the first to garbage so that when selser doesn't
detect any
+ // difference between the new and old html, it'll just
reuse that
+ // string and we have a reliable way of determining
that selser
+ // was used.
+
+ it('should use selser', function(done) {
+ // New and old html are identical, which should
produce no diffs
+ // and reuse the original wikitext.
+ request(api)
+ .post('v2/' + mockHost + '/wt/')
+ .send({
+ html: "<html><body id=\"mwAA\"><div
id=\"mwBB\">Selser test</div></body></html>",
+ original: {
+ title: "Doesnotexist",
+ wikitext: {
+ body: "This is just
some junk. See the comment above."
+ },
+ html: {
+ body: "<html><body
id=\"mwAA\"><div id=\"mwBB\">Selser test</div></body></html>",
+ },
+ "data-parsoid": {
+ body: {
+ "ids": {
+ mwAA:
{},
+ mwBB: {
"autoInsertedEnd": true, "stx": "html" }
+ }
+ }
+ }
+ }
+ })
+ .expect(200)
+ .expect(function(res) {
+
res.body.should.have.property("wikitext");
+
res.body.wikitext.body.should.equal("This is just some junk. See the comment
above.");
+ })
+ .end(done);
+ });
+
+ it('should fallback to non-selective serialization',
function(done) {
+ // Without the original wikitext and an
unavailable
+ // TemplateFetch for the source (Doesnotexist
will 404),
+ // it should fallback to non-selective
serialization.
+ request(api)
+ .post('v2/' + mockHost + '/wt/')
+ .send({
+ html: "<html><body id=\"mwAA\"><div
id=\"mwBB\">Selser test</div></body></html>",
+ original: {
+ title: "Doesnotexist",
+ html: {
+ body: "<html><body
id=\"mwAA\"><div id=\"mwBB\">Selser test</div></body></html>",
+ },
+ "data-parsoid": {
+ body: {
+ "ids": {
+ mwAA:
{},
+ mwBB: {
"autoInsertedEnd": true, "stx": "html" }
+ }
+ }
+ }
+ }
+ })
+ .expect(200)
+ .expect(function(res) {
+
res.body.should.have.property("wikitext");
+
res.body.wikitext.body.should.equal("<div>Selser test");
+ })
+ .end(done);
+ });
+
}); // end html2wt
});
diff --git a/tests/mocha/apitest.localsettings.js
b/tests/mocha/apitest.localsettings.js
new file mode 100644
index 0000000..d7694dc
--- /dev/null
+++ b/tests/mocha/apitest.localsettings.js
@@ -0,0 +1,60 @@
+/*
+ * This is a sample configuration file.
+ *
+ * Copy this file to localsettings.js and edit that file to fit your needs.
+ *
+ * Also see the file server.js for more information.
+ */
+"use strict";
+
+exports.setup = function( parsoidConfig ) {
+ // The URL of your MediaWiki API endpoint. Optionally, you can also pass
+ // in a proxy specific to this prefix (overrides defaultAPIProxyURI), or
+ // null to disable proxying for this end point.
+ //parsoidConfig.setInterwiki( 'localhost', 'http://localhost/w/api.php'
);
+ if ( process.env.PARSOID_MOCKAPI_URL ) {
+ parsoidConfig.setInterwiki( 'localhost',
process.env.PARSOID_MOCKAPI_URL );
+ }
+
+ // We pre-define wikipedias as 'enwiki', 'dewiki' etc. Similarly
+ // for other projects: 'enwiktionary', 'enwikiquote', 'enwikibooks',
+ // 'enwikivoyage' etc. (default true)
+ //parsoidConfig.loadWMF = false;
+
+ // A default proxy to connect to the API endpoints. Default: undefined
+ // (no proxying). Overridden by per-wiki proxy config in setInterwiki.
+ //parsoidConfig.defaultAPIProxyURI = 'http://proxy.example.org:8080';
+
+ // Enable debug mode (prints extra debugging messages)
+ //parsoidConfig.debug = true;
+
+ // Use the PHP preprocessor to expand templates via the MW API (default
true)
+ //parsoidConfig.usePHPPreProcessor = false;
+
+ // Use selective serialization (default false)
+ parsoidConfig.useSelser = true;
+
+ // Allow cross-domain requests to the API (default '*')
+ // Sets Access-Control-Allow-Origin header
+ // disable:
+ //parsoidConfig.allowCORS = false;
+ // restrict:
+ //parsoidConfig.allowCORS = 'some.domain.org';
+
+ // Allow override of port/interface:
+ //parsoidConfig.serverPort = 8000;
+ //parsoidConfig.serverInterface = '127.0.0.1';
+
+ // The URL of your LintBridge API endpoint
+ //parsoidConfig.linterAPI = 'http://lintbridge.wmflabs.org/add';
+
+ // Require SSL certificates to be valid (default true)
+ // Set to false when using self-signed SSL certificates
+ //parsoidConfig.strictSSL = false;
+
+ // Use a different server for CSS style modules.
+ // Set to true to use bits.wikimedia.org, or to a string with the URI.
+ // Leaving it undefined (the default) will use the same URI as the MW
API,
+ // changing api.php for load.php.
+ //parsoidConfig.modulesLoadURI = true;
+};
diff --git a/tests/rttest.localsettings.js b/tests/rttest.localsettings.js
new file mode 100644
index 0000000..8b0cec9
--- /dev/null
+++ b/tests/rttest.localsettings.js
@@ -0,0 +1,66 @@
+/*
+ * This is a sample configuration file.
+ *
+ * Copy this file to localsettings.js and edit that file to fit your needs.
+ *
+ * Also see the file server.js for more information.
+ */
+"use strict";
+
+exports.setup = function( parsoidConfig ) {
+ // The URL of your MediaWiki API endpoint. Optionally, you can also pass
+ // in a proxy specific to this prefix (overrides defaultAPIProxyURI), or
+ // null to disable proxying for this end point.
+ //parsoidConfig.setInterwiki( 'localhost', 'http://localhost/w/api.php'
);
+ if ( process.env.PARSOID_MOCKAPI_URL ) {
+ parsoidConfig.setInterwiki( 'localhost',
process.env.PARSOID_MOCKAPI_URL );
+ }
+
+ // We pre-define wikipedias as 'enwiki', 'dewiki' etc. Similarly
+ // for other projects: 'enwiktionary', 'enwikiquote', 'enwikibooks',
+ // 'enwikivoyage' etc. (default true)
+ //parsoidConfig.loadWMF = false;
+
+ // A default proxy to connect to the API endpoints. Default: undefined
+ // (no proxying). Overridden by per-wiki proxy config in setInterwiki.
+ //parsoidConfig.defaultAPIProxyURI = 'http://proxy.example.org:8080';
+
+ // Enable debug mode (prints extra debugging messages)
+ //parsoidConfig.debug = true;
+
+ // Use the PHP preprocessor to expand templates via the MW API (default
true)
+ //parsoidConfig.usePHPPreProcessor = false;
+
+ // Use selective serialization (default false)
+ //parsoidConfig.useSelser = true;
+
+ // Allow cross-domain requests to the API (default '*')
+ // Sets Access-Control-Allow-Origin header
+ // disable:
+ //parsoidConfig.allowCORS = false;
+ // restrict:
+ //parsoidConfig.allowCORS = 'some.domain.org';
+
+ // Allow override of port/interface:
+ //parsoidConfig.serverPort = 8000;
+ //parsoidConfig.serverInterface = '127.0.0.1';
+
+ // The URL of your LintBridge API endpoint
+ //parsoidConfig.linterAPI = 'http://lintbridge.wmflabs.org/add';
+
+ // Require SSL certificates to be valid (default true)
+ // Set to false when using self-signed SSL certificates
+ //parsoidConfig.strictSSL = false;
+
+ // Use a different server for CSS style modules.
+ // Set to true to use bits.wikimedia.org, or to a string with the URI.
+ // Leaving it undefined (the default) will use the same URI as the MW
API,
+ // changing api.php for load.php.
+ //parsoidConfig.modulesLoadURI = true;
+
+ // Set rtTestMode to true for round-trip testing
+ parsoidConfig.rtTestMode = false;
+
+ // Fetch the wikitext for a page before doing html2wt
+ parsoidConfig.fetchWT = true;
+};
\ No newline at end of file
diff --git a/tests/test.localsettings.js b/tests/test.localsettings.js
deleted file mode 100644
index 6b9e3af..0000000
--- a/tests/test.localsettings.js
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * This is a sample configuration file.
- *
- * Copy this file to localsettings.js and edit that file to fit your needs.
- *
- * Also see the file ParserService.js for more information.
- */
-"use strict";
-
-exports.setup = function( parsoidConfig ) {
- // The URL here is supposed to be your MediaWiki installation root
- if (process.env.PARSOID_MOCKAPI_URL) {
- parsoidConfig.setInterwiki( 'localhost',
process.env.PARSOID_MOCKAPI_URL );
- }
-
- // Use the PHP preprocessor to expand templates via the MW API (default
true)
- //parsoidConfig.usePHPPreProcessor = false;
-
- // Use selective serialization (default false)
- //parsoidConfig.useSelser = true;
-
- // allow cross-domain requests to the API (default disallowed)
- //parsoidConfig.allowCORS = '*';
-
- // Set rtTestMode to true for round-trip testing
- parsoidConfig.rtTestMode = true;
-
- // Fetch the wikitext for a page before doing html2wt
- parsoidConfig.fetchWT = true;
-};
--
To view, visit https://gerrit.wikimedia.org/r/190410
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I91d688d7b3942b89637b15ed2ae1a54dab22bede
Gerrit-PatchSet: 6
Gerrit-Project: mediawiki/services/parsoid
Gerrit-Branch: master
Gerrit-Owner: Arlolra <[email protected]>
Gerrit-Reviewer: Arlolra <[email protected]>
Gerrit-Reviewer: Cscott <[email protected]>
Gerrit-Reviewer: GWicke <[email protected]>
Gerrit-Reviewer: Marcoil <[email protected]>
Gerrit-Reviewer: Subramanya Sastry <[email protected]>
Gerrit-Reviewer: jenkins-bot <>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits