Xqt has uploaded a new change for review.
https://gerrit.wikimedia.org/r/282352
Change subject: Remove old code dealing with starsList
......................................................................
Remove old code dealing with starsList
- stars have been replaced with Wikidata's sitelinks and the code is no longer
needed. Remove these code parts from add_text.py and cosmetic_changes.py (but
keep featured.py unchanged, which is in the archive folder now).
- put code parts into textlib which can be used with version history contents
- remove add_text_tests which only tests stars list
- change the test_standardizePageFooter test in cosmetic_changes_tests
- add several tests to textlib_tests
Bug: T123150
Change-Id: I06ca86805693f8ce57e78c34b2ee5ace2659a3ba
---
M pywikibot/cosmetic_changes.py
M pywikibot/textlib.py
M scripts/add_text.py
D tests/add_text_tests.py
M tests/cosmetic_changes_tests.py
5 files changed, 107 insertions(+), 155 deletions(-)
git pull ssh://gerrit.wikimedia.org:29418/pywikibot/core
refs/changes/52/282352/1
diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index 6afa7b0..72b74d2 100755
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -49,7 +49,7 @@
cosmetic_changes_deny_script += ['your_script_name_1',
'your_script_name_2']
"""
#
-# (C) xqt, 2009-2015
+# (C) xqt, 2009-2016
# (C) Pywikibot team, 2006-2016
#
# Distributed under the terms of the MIT license.
@@ -296,51 +296,18 @@
"""
Standardize page footer.
- Makes sure that interwiki links, categories and star templates are
- put to the correct position and into the right order. This combines the
- old instances standardizeInterwiki and standardizeCategories
+ Makes sure that interwiki links and categories are put to the correct
+ position and into the right order. This combines the old instances
+ standardizeInterwiki and standardizeCategories.
The page footer has the following section in that sequence:
1. categories
2. ## TODO: template beyond categories ##
3. additional information depending on local site policy
- 4. stars templates for featured and good articles
- 5. interwiki links
+ 4. interwiki links
"""
- # TODO: T123150
- starsList = [
- u'bueno',
- u'bom interwiki',
- u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
- u'destacado', u'destaca[tu]',
- u'enllaç[ _]ad',
- u'enllaz[ _]ad',
- u'leam[ _]vdc',
- u'legătură[ _]a[bcf]',
- u'liamm[ _]pub',
- u'lien[ _]adq',
- u'lien[ _]ba',
- u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
- u'liên[ _]kết[ _]chọn[ _]lọc',
- u'ligam[ _]adq',
- u'ligazón[ _]a[bd]',
- u'ligoelstara',
- u'ligoleginda',
- u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]', u'link[
_]km',
- u'link[ _]sm', u'linkfa',
- u'na[ _]lotura',
- u'nasc[ _]ar',
- u'tengill[ _][úg]g',
- u'ua',
- u'yüm yg',
- u'רא',
- u'وصلة مقالة جيدة',
- u'وصلة مقالة مختارة',
- ]
-
categories = None
interwikiLinks = None
- allstars = []
# Pywikibot is no longer allowed to touch categories on the
# German Wikipedia. See
@@ -367,15 +334,6 @@
# Removing the interwiki
text = textlib.removeLanguageLinks(text, site=self.site)
- # Removing the stars' issue
- starstext = textlib.removeDisabledParts(text)
- for star in starsList:
- regex = re.compile(r'(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
- % star, re.I)
- found = regex.findall(starstext)
- if found != []:
- text = regex.sub('', text)
- allstars += found
# Adding categories
if categories:
@@ -390,13 +348,6 @@
# categories.insert(0, name)
text = textlib.replaceCategoryLinks(text, categories,
site=self.site)
- # Adding stars templates
- if allstars:
- text = text.strip() + self.site.family.interwiki_text_separator
- allstars.sort()
- for element in allstars:
- text += '%s%s' % (element.strip(), config.line_separator)
- pywikibot.log(u'%s' % element.strip())
# Adding the interwiki
if interwikiLinks:
text = textlib.replaceLanguageLinks(text, interwikiLinks,
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index ec7b508..0627fa2 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -7,7 +7,7 @@
"""
#
-# (C) Pywikibot team, 2008-2015
+# (C) Pywikibot team, 2008-2016
#
# Distributed under the terms of the MIT license.
#
@@ -1665,6 +1665,104 @@
return u'{{%s\n%s}}' % (template, text)
+# ---------------------------------
+# functions dealing with stars list
+# ---------------------------------
+
+# Regex fragments (case-insensitive) matching the localized "star" link
+# templates ({{Link FA}}-like markers for featured/good articles) that
+# historically sat next to the interwiki links in a page footer.
+# NOTE(review): superseded by Wikidata sitelinks (T123150); kept only so
+# old revisions / version-history contents can still be parsed.
+starsList = [
+    'bueno',
+    'bom interwiki',
+    'cyswllt[ _]erthygl[ _]ddethol', 'dolen[ _]ed',
+    'destacado', 'destaca[tu]',
+    'enllaç[ _]ad',
+    'enllaz[ _]ad',
+    'leam[ _]vdc',
+    'legătură[ _]a[bcf]',
+    'liamm[ _]pub',
+    'lien[ _]adq',
+    'lien[ _]ba',
+    'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
+    'liên[ _]kết[ _]chọn[ _]lọc',
+    'ligam[ _]adq',
+    'ligazón[ _]a[bd]',
+    'ligoelstara',
+    'ligoleginda',
+    'link[ _][afgu]a', 'link[ _]adq', 'link[ _]f[lm]', 'link[ _]km',
+    'link[ _]sm', 'linkfa',
+    'na[ _]lotura',
+    'nasc[ _]ar',
+    'tengill[ _][úg]g',
+    'ua',
+    'yüm yg',
+    'רא',
+    'وصلة مقالة جيدة',
+    'وصلة مقالة مختارة',
+]
+
+def get_stars(text):
+ """
+ Extract stars templates from wikitext.
+
+ @param text: a wiki text
+ @type text: str
+ @return: list of stars templates
+ @rtype: list
+ """
+ allstars = []
+ starstext = removeDisabledParts(text)
+ for star in starsList:
+ regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
+ % star, re.I)
+ found = regex.findall(starstext)
+ if found:
+ allstars += found
+ return allstars
+
+
+def remove_stars(text, stars_list):
+ """
+ Remove stars templates from text.
+
+ @param text: a wiki text
+ @type text: str
+ @param start_list: list of stars templates previously found in text
+ @return: modified text
+ @rtype: str
+ """
+ for star in stars_list:
+ text = text.replace(star, '')
+ return text
+
+def append_stars(text, stars_list, site=None):
+ """
+ Remove stars templates from text.
+
+ @param text: a wiki text
+ @type text: str
+ @param stars_list: list of stars templates previously found in text
+ @type stars_list: list
+ @param site: a site where the given text is used.
+ interwiki_text_separator is used when a site object is given.
+ Otherwise line_separator is used twice to separate stars list.
+ @type site: BaseSite
+ @return: modified text
+ @rtype: str
+ """
+ LS = (config.line_separator * 2
+ if not site else site.family.interwiki_text_separator)
+ text = text.strip() + LS
+ stars = stars_list[:]
+ stars.sort()
+ for element in stars:
+ text += element.strip() + config.line_separator
+ return text
+
+def standardize_stars(text):
+ """Makes sure that star templates are in the right order."""
+ allstars = get_stars(text)
+ text = remove_stars(text, allstars)
+ return append_stars(text, allstars)
+
# --------------------------
# Page parsing functionality
# --------------------------
diff --git a/scripts/add_text.py b/scripts/add_text.py
index a5b03f1..db4f009 100755
--- a/scripts/add_text.py
+++ b/scripts/add_text.py
@@ -80,37 +80,6 @@
}
-starsList = [
- u'bueno',
- u'bom interwiki',
- u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
- u'destacado', u'destaca[tu]',
- u'enllaç[ _]ad',
- u'enllaz[ _]ad',
- u'leam[ _]vdc',
- u'legătură[ _]a[bcf]',
- u'liamm[ _]pub',
- u'lien[ _]adq',
- u'lien[ _]ba',
- u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
- u'liên[ _]kết[ _]chọn[ _]lọc',
- u'ligam[ _]adq',
- u'ligazón[ _]a[bd]',
- u'ligoelstara',
- u'ligoleginda',
- u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]', u'link[ _]km',
- u'link[ _]sm', u'linkfa',
- u'na[ _]lotura',
- u'nasc[ _]ar',
- u'tengill[ _][úg]g',
- u'ua',
- u'yüm yg',
- u'רא',
- u'وصلة مقالة جيدة',
- u'وصلة مقالة مختارة',
-]
-
-
def add_text(page, addText, summary=None, regexSkip=None,
regexSkipUrl=None, always=False, up=False, putText=True,
oldTextGiven=None, reorderEnabled=True, create=False):
@@ -188,22 +157,6 @@
newtext = textlib.replaceCategoryLinks(newtext,
categoriesInside, site,
True)
- # Dealing the stars' issue
- # TODO: T123150
- allstars = []
- starstext = textlib.removeDisabledParts(text)
- for star in starsList:
- regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
- % star, re.I)
- found = regex.findall(starstext)
- if found != []:
- newtext = regex.sub('', newtext)
- allstars += found
- if allstars != []:
- newtext = newtext.strip() + config.line_separator * 2
- allstars.sort()
- for element in allstars:
- newtext += '%s%s' % (element.strip(), config.LS)
# Adding the interwiki
newtext = textlib.replaceLanguageLinks(newtext, interwikiInside,
site)
diff --git a/tests/add_text_tests.py b/tests/add_text_tests.py
deleted file mode 100644
index 9c90ef6..0000000
--- a/tests/add_text_tests.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Test add_text bot module."""
-#
-# (C) Pywikibot team, 2016
-#
-# Distributed under the terms of the MIT license.
-#
-from __future__ import absolute_import, unicode_literals
-
-__version__ = '$Id$'
-
-import pywikibot
-
-from scripts.add_text import add_text
-
-from tests.aspects import unittest, TestCase
-
-
-class TestStarList(TestCase):
-
- """Test starlist."""
-
- family = 'wikipedia'
- code = 'en'
-
- dry = True
-
- def test_basic(self):
- """Test adding text before {{linkfa}} without parameters."""
- page = pywikibot.Page(self.site, 'foo')
- (text, newtext, always) = add_text(
- page, 'bar', putText=False,
- oldTextGiven='foo\n{{linkfa}}')
- self.assertEqual(
- 'foo\n{{linkfa}}\nbar',
- newtext)
-
- def test_with_params(self):
- """Test adding text before {{linkfa|...}}."""
- page = pywikibot.Page(self.site, 'foo')
- (text, newtext, always) = add_text(
- page, 'bar', putText=False,
- oldTextGiven='foo\n{{linkfa|...}}')
- self.assertEqual(
- 'foo\nbar\n\n{{linkfa|...}}\n',
- newtext)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tests/cosmetic_changes_tests.py b/tests/cosmetic_changes_tests.py
index 80a0595..73ee156 100644
--- a/tests/cosmetic_changes_tests.py
+++ b/tests/cosmetic_changes_tests.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Test cosmetic_changes module."""
#
-# (C) Pywikibot team, 2015
+# (C) Pywikibot team, 2015-2016
#
# Distributed under the terms of the MIT license.
#
@@ -44,9 +44,9 @@
def test_standardizePageFooter(self):
"""Test standardizePageFooter method."""
- self.assertEqual('Foo\n{{link fa}}\n\n[[Category:Foo]]',
+ self.assertEqual('Foo\n{{any template}}\n\n[[Category:Foo]]',
self.cct.standardizePageFooter(
- 'Foo [[category:foo]] {{link fa}}'))
+ 'Foo\n[[category:foo]]\n{{any template}}'))
def test_resolveHtmlEntities(self):
"""Test resolveHtmlEntities method."""
--
To view, visit https://gerrit.wikimedia.org/r/282352
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: newchange
Gerrit-Change-Id: I06ca86805693f8ce57e78c34b2ee5ace2659a3ba
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <[email protected]>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits