[MediaWiki-commits] [Gerrit] mediawiki/core[master]: mw.widgets.DateInputWidget: Fix unexpected MMMMM or ddddd da...

2017-10-24 Thread Liangent (Code Review)
Liangent has uploaded a new change for review. ( 
https://gerrit.wikimedia.org/r/386305 )

Change subject: mw.widgets.DateInputWidget: Fix unexpected MMMMM or ddddd date
format
..

mw.widgets.DateInputWidget: Fix unexpected MMMMM or ddddd date format

In some locale data in moment.js such as zh, the date format already
uses MMMM and dddd. The original attempt to expand MMM to MMMM and ddd
to dddd inadvertently expands MMMM to MMMMM and dddd to ddddd, which is
then interpreted as MMMM or dddd followed by an unexpected single M or d.
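
The same ordering trick, sketched in Python for illustration (str.replace
behaves like the JS replace for these single-token formats; the format
string below is made up, not actual moment.js locale data):

    fmt = "YYYY MMMM DD dddd"   # a display format that already uses long tokens

    # Naive expansion corrupts tokens that are already long:
    bad = fmt.replace("MMM", "MMMM").replace("ddd", "dddd")
    print(bad)    # YYYY MMMMM DD ddddd  <- unexpected 5-letter tokens

    # The fix shortens any existing long token first, then expands, so both
    # MMM and MMMM normalize to MMMM (and likewise ddd/dddd):
    good = fmt.replace("MMMM", "MMM").replace("MMM", "MMMM")
    good = good.replace("dddd", "ddd").replace("ddd", "dddd")
    print(good)   # YYYY MMMM DD dddd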

Change-Id: I2634dfbaaf9615a13dce7b8f4ba3c3bea6863a91
---
M resources/src/mediawiki.widgets/mw.widgets.DateInputWidget.js
1 file changed, 2 insertions(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/05/386305/1

diff --git a/resources/src/mediawiki.widgets/mw.widgets.DateInputWidget.js 
b/resources/src/mediawiki.widgets/mw.widgets.DateInputWidget.js
index f10c93d..0e7fc30 100644
--- a/resources/src/mediawiki.widgets/mw.widgets.DateInputWidget.js
+++ b/resources/src/mediawiki.widgets/mw.widgets.DateInputWidget.js
@@ -465,7 +465,8 @@
format = llll.replace( lll.replace( ll, '' ), '' );
 
if ( this.longDisplayFormat ) {
-				format = format.replace( 'MMM', 'MMMM' ).replace( 'ddd', 'dddd' );
+				format = format.replace( 'MMMM', 'MMM' ).replace( 'MMM', 'MMMM' );
+				format = format.replace( 'dddd', 'ddd' ).replace( 'ddd', 'dddd' );
}
 
return format;

-- 
To view, visit https://gerrit.wikimedia.org/r/386305
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I2634dfbaaf9615a13dce7b8f4ba3c3bea6863a91
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[Wikidata-bugs] [Maniphest] [Created] T123621: #mw-newentity-form1 sometimes has incorrect action attribute

2016-01-14 Thread liangent
liangent created this task.
liangent added a subscriber: liangent.
liangent added a project: MediaWiki-extensions-WikibaseRepository.
Herald added subscribers: StudiesWorld, Aklapper.
Herald added a project: Wikidata.

TASK DESCRIPTION
  I got this somehow. Note that the Special: prefix in action= is missing.
  ```
  
  ```
  
  This was achieved by trying to create an item with language=zh-cn, 
label=User:... (which is rejected by "Links to userpages" on Wikidata).

TASK DETAIL
  https://phabricator.wikimedia.org/T123621

EMAIL PREFERENCES
  https://phabricator.wikimedia.org/settings/panel/emailpreferences/

To: liangent
Cc: Aklapper, liangent, StudiesWorld, Wikidata-bugs, aude, Mbch331



___
Wikidata-bugs mailing list
Wikidata-bugs@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-bugs


[MediaWiki-commits] [Gerrit] Chinese Conversion Table Update 2015-9 - change (mediawiki/core)

2016-01-13 Thread Liangent (Code Review)
Liangent has submitted this change and it was merged.

Change subject: Chinese Conversion Table Update 2015-9
..


Chinese Conversion Table Update 2015-9

Update the Chinese conversion table routinely to fix bugs reported from
https://zh.wikipedia.org/wiki/Wikipedia:%E5%AD%97%E8%AF%8D%E8%BD%AC%E6%8D%A2/%E4%BF%AE%E5%A4%8D%E8%AF%B7%E6%B1%82
 .

It contains only data changes and only affects Chinese-language wiki projects.

Change-Id: Icb47cf7d30a9bf09d55af9e96e34b9b5c6d6c9cf
---
M includes/ZhConversion.php
M maintenance/language/zhtable/simp2trad.manual
M maintenance/language/zhtable/toCN.manual
M maintenance/language/zhtable/toHK.manual
M maintenance/language/zhtable/toTW.manual
M maintenance/language/zhtable/toTrad.manual
M maintenance/language/zhtable/tradphrases.manual
M maintenance/language/zhtable/tradphrases_exclude.manual
8 files changed, 101 insertions(+), 28 deletions(-)

Approvals:
  Liuxinyu970226: Looks good to me, but someone else must approve
  Liangent: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/includes/ZhConversion.php b/includes/ZhConversion.php
index 9e7d12c..6c768ff 100644
--- a/includes/ZhConversion.php
+++ b/includes/ZhConversion.php
@@ -294,6 +294,7 @@
 '卖' => '賣',
 '卢' => '盧',
 '卤' => '鹵',
+'卧' => '臥',
 '卫' => '衛',
 '却' => '卻',
 '厂' => '廠',
@@ -3424,6 +3425,7 @@
 '干股' => '乾股',
 '干肥' => '乾肥',
 '干脆' => '乾脆',
+'干脆面' => '乾脆麵',
 '干花' => '乾花',
 '干刍' => '乾芻',
 '干苔' => '乾苔',
@@ -3564,6 +3566,7 @@
 '于慧' => '于慧',
 '于成龍' => '于成龍',
 '于成龙' => '于成龍',
+'于承惠' => '于承惠',
 '于振' => '于振',
 '于振武' => '于振武',
 '于敏' => '于敏',
@@ -3644,6 +3647,7 @@
 '于赠' => '于贈',
 '于越' => '于越',
 '于軍' => '于軍',
+'于逸堯' => '于逸堯',
 '于道泉' => '于道泉',
 '于远伟' => '于遠偉',
 '于遠偉' => '于遠偉',
@@ -3829,6 +3833,8 @@
 '信托' => '信託',
 '修杰楷' => '修杰楷',
 '修杰麟' => '修杰麟',
+'修筑前' => '修築前',
+'修筑后' => '修築後',
 '修胡刀' => '修鬍刀',
 '俯冲' => '俯衝',
 '个月里' => '個月裡',
@@ -4112,7 +4118,6 @@
 '削面' => '削麵',
 '克剥' => '剋剝',
 '克扣' => '剋扣',
-'克星' => '剋星',
 '克期' => '剋期',
 '克死' => '剋死',
 '克薄' => '剋薄',
@@ -4235,6 +4240,7 @@
 '去山里' => '去山裡',
 '参数只' => '參數只',
 '参数里' => '參數裡',
+'反反复复' => '反反覆覆',
 '反应制得' => '反應製得',
 '反朴' => '反樸',
 '反冲' => '反衝',
@@ -4736,14 +4742,10 @@
 '委托书' => '委託書',
 '奸夫' => '姦夫',
 '奸妇' => '姦婦',
-'奸宄' => '姦宄',
 '奸情' => '姦情',
 '奸杀' => '姦殺',
 '奸污' => '姦污',
 '奸淫' => '姦淫',
-'奸猾' => '姦猾',
-'奸细' => '姦細',
-'奸邪' => '姦邪',
 '威棱' => '威稜',
 '婢仆' => '婢僕',
 '嫁祸于' => '嫁禍於',
@@ -4758,6 +4760,7 @@
 '子里' => '子裡',
 '子里甲' => '子里甲',
 '字汇' => '字彙',
+'字母后' => '字母後',
 '字码表' => '字碼表',
 '字里行间' => '字裡行間',
 '存折' => '存摺',
@@ -5056,8 +5059,11 @@
 '廢后' => '廢后',
 '广征' => '廣徵',
 '广舍' => '廣捨',
+'广播里' => '廣播裡',
 '延历' => '延曆',
 '建于' => '建於',
+'建筑前' => '建築前',
+'建筑后' => '建築後',
 '弄干' => '弄乾',
 '弄丑' => '弄醜',
 '弄脏胸' => '弄髒胸',
@@ -5131,6 +5137,7 @@
 '影后' => '影后',
 '影相吊' => '影相弔',
 '役于' => '役於',
+'往复式' => '往復式',
 '往日无仇' => '往日無讎',
 '往里' => '往裡',
 '待复' => '待覆',
@@ -5405,7 +5412,6 @@
 '欲令智昏' => '慾令智昏',
 '欲壑难填' => '慾壑難填',
 '欲念' => '慾念',
-'欲望' => '慾望',
 '欲海' => '慾海',
 '欲火' => '慾火',
 '欲障' => '慾障',
@@ -5472,6 +5478,7 @@
 '手表面' => '手表面',
 '手里剑' => '手裏劍',
 '手里' => '手裡',
+'手游' => '手遊',
 '手表' => '手錶',
 '手链' => '手鍊',
 '手松' => '手鬆',
@@ -6227,6 +6234,7 @@
 '水来汤里去' => '水來湯裡去',
 '水准' => '水準',
 '水无怜奈' => '水無怜奈',
+'水表示' => '水表示',
 '水表面' => '水表面',
 '水里' => '水裡',
 '水里商工' => '水里商工',
@@ -6499,6 +6507,7 @@
 '沈丹客运' => '瀋丹客運',
 '沈丹线' => '瀋丹線',
 '沈丹铁路' => '瀋丹鐵路',
+'沈丹高' => '瀋丹高',
 '沈北' => '瀋北',
 '沈吉' => '瀋吉',
 '沈大线' => '瀋大線',
@@ -6714,7 +6723,6 @@
 '发松' => '發鬆',
 '发面' => '發麵',
 '白干儿' => '白乾兒',
-'白子里' => '白子里',
 '白术' => '白朮',
 '白朴' => '白樸',
 '白净面皮' => '白淨面皮',
@@ -6735,6 +6743,7 @@
 '百只足夠' => '百只足夠',
 '百周后' => '百周後',
 '百天后' => '百天後',
+'百子里' => '百子里',
 '百年' => '百年',
 '百拙千丑' => '百拙千醜',
 '百科里' => '百科裡',
@@ -6970,6 +6979,7 @@
 '窗明几净' => '窗明几淨',
 '窗帘' => '窗簾',
 '窝里' => '窩裡',
+'窝里斗' => '窩裡鬥',
 '穷于' => '窮於',
 '穷追不舍' => '窮追不捨',
 '穷发' => '窮髮',
@@ -7280,6 +7290,7 @@
 '聚药雄蕊' => '聚葯雄蕊',
 '闻风后' => '聞風後',
 '联系' => '聯繫',
+'声母后' => '聲母後',
 '听于' => '聽於',
 '肉干' => '肉乾',
 '肉欲' => '肉慾',
@@ -7970,11 +7981,13 @@
 '警报钟' => '警報鐘',
 '警示钟' => '警示鐘',
 '警钟' => '警鐘',
+'译制' => '譯製',
 '译注' => '譯註',
 '护发' => '護髮',
 '变征' => '變徵',
 '变丑' => '變醜',
 '仇隙' => '讎隙',
+'赞一个' => '讚一個',
 '赞不绝口' => '讚不絕口',
 '赞佩' => '讚佩',
 '赞呗' => '讚唄',
@@ -8593,7 +8606,6 @@
 '防御' => '防禦',
 '防范' => '防範',
 '防锈' => '防鏽',
-'防台' => '防颱',
 '阻于' => '阻於',
 '阿里' => '阿里',
 '附于' => '附於',
@@ -8683,6 +8695,7 @@
 '电码表' => '電碼表',
 '电冲' => '電衝',
 '电视台风' => '電視台風',
+'电视里' => '電視裡',
 '电表' => '電錶',
 '电钟' => '電鐘',
 '震栗' => '震慄

[MediaWiki-commits] [Gerrit] zhtable makefile: Remove unused variables, fix almost all pe... - change (mediawiki/core)

2015-10-17 Thread Liangent (Code Review)
Liangent has submitted this change and it was merged.

Change subject: zhtable makefile: Remove unused variables, fix almost all pep8 
errors
..


zhtable makefile: Remove unused variables, fix almost all pep8 errors

Change-Id: I7ab1ffeeb2a5592fbe143f27801c11248c167093
---
M maintenance/language/zhtable/Makefile.py
1 file changed, 235 insertions(+), 207 deletions(-)

Approvals:
  Hashar: Looks good to me, but someone else must approve
  Liangent: Looks good to me, approved



diff --git a/maintenance/language/zhtable/Makefile.py 
b/maintenance/language/zhtable/Makefile.py
index 4ab57d4..5924c66 100755
--- a/maintenance/language/zhtable/Makefile.py
+++ b/maintenance/language/zhtable/Makefile.py
@@ -1,9 +1,13 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 # @author Philip
-import tarfile as tf
-import zipfile as zf
-import os, re, shutil, sys, platform
+import os
+import platform
+import re
+import shutil
+import sys
+import tarfile
+import zipfile
 
 pyversion = platform.python_version()
 islinux = platform.system().lower() == 'linux'
@@ -18,16 +22,18 @@
 if i < 0x10000:
 return _unichr(i)
 else:
-return _unichr( 0xD7C0 + ( i>>10 ) ) + _unichr( 0xDC00 + ( i & 0x3FF ) )
+return _unichr(0xD7C0 + (i >> 10)) + _unichr(0xDC00 + (i & 0x3FF))
 elif pyversion[:2] == '3.':
 import urllib.request as urllib_request
 unichr = chr
 
-def unichr2( *args ):
-return [unichr( int( i.split('<')[0][2:], 16 ) ) for i in args]
 
-def unichr3( *args ):
-return [unichr( int( i[2:7], 16 ) ) for i in args if i[2:7]]
+def unichr2(*args):
+return [unichr(int(i.split('<')[0][2:], 16)) for i in args]
+
+
+def unichr3(*args):
+return [unichr(int(i[2:7], 16)) for i in args if i[2:7]]
 
 # DEFINE
 UNIHAN_VER = '6.3.0'
@@ -37,189 +43,201 @@
 LIBTABE_VER = '0.2.3'
 # END OF DEFINE
 
-def download( url, dest ):
-if os.path.isfile( dest ):
-print( 'File %s is up to date.' % dest )
+
+def download(url, dest):
+if os.path.isfile(dest):
+print('File %s is up to date.' % dest)
 return
 global islinux
 if islinux:
-# we use wget instead urlretrieve under Linux, 
+# we use wget instead urlretrieve under Linux,
 # because wget could display details like download progress
-os.system( 'wget %s -O %s' % ( url, dest ) )
+os.system('wget %s -O %s' % (url, dest))
 else:
-print( 'Downloading from [%s] ...' % url )
-urllib_request.urlretrieve( url, dest )
-print( 'Download complete.\n' )
+print('Downloading from [%s] ...' % url)
+urllib_request.urlretrieve(url, dest)
+print('Download complete.\n')
 return
 
-def uncompress( fp, member, encoding = 'U8' ):
-name = member.rsplit( '/', 1 )[-1]
-print( 'Extracting %s ...' % name )
-fp.extract( member )
-shutil.move( member, name )
+
+def uncompress(fp, member, encoding='U8'):
+name = member.rsplit('/', 1)[-1]
+print('Extracting %s ...' % name)
+fp.extract(member)
+shutil.move(member, name)
 if '/' in member:
-shutil.rmtree( member.split( '/', 1 )[0] )
+shutil.rmtree(member.split('/', 1)[0])
 if pyversion[:1] in ['2']:
-fc = open( name, 'rb', encoding, 'ignore' )
+fc = open(name, 'rb', encoding, 'ignore')
 else:
-fc = open( name, 'r', encoding = encoding, errors = 'ignore' )
+fc = open(name, 'r', encoding=encoding, errors='ignore')
 return fc
 
 unzip = lambda path, member, encoding = 'U8': \
-uncompress( zf.ZipFile( path ), member, encoding )
+uncompress(zipfile.ZipFile(path), member, encoding)
 
 untargz = lambda path, member, encoding = 'U8': \
-uncompress( tf.open( path, 'r:gz' ), member, encoding )
+uncompress(tarfile.open(path, 'r:gz'), member, encoding)
 
-def parserCore( fp, pos, beginmark = None, endmark = None ):
+
+def parserCore(fp, pos, beginmark=None, endmark=None):
 if beginmark and endmark:
 start = False
-else: start = True
+else:
+start = True
 mlist = set()
 for line in fp:
-if beginmark and line.startswith( beginmark ):
+if beginmark and line.startswith(beginmark):
 start = True
 continue
-elif endmark and line.startswith( endmark ):
+elif endmark and line.startswith(endmark):
 break
-if start and not line.startswith( '#' ):
+if start and not line.startswith('#'):
 elems = line.split()
-if len( elems ) < 2:
+if len(elems) < 2:
 continue
-elif len( elems[0] ) > 1 and \
-len( elems[pos] ) > 1: # words only
-mlist.add( elems[pos] )
+elif len(elems[0]) > 1 and len(elems[pos]) > 1:  # words only
+  

[MediaWiki-commits] [Gerrit] Fix wrong translation of special page name in zh-hans - change (mediawiki...ApiFeatureUsage)

2015-10-11 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/245201

Change subject: Fix wrong translation of special page name in zh-hans
..

Fix wrong translation of special page name in zh-hans

Change-Id: I1cf7e86dbfd437c03a93a001dc525b52d57b24d2
---
M ApiFeatureUsage.alias.php
1 file changed, 2 insertions(+), 2 deletions(-)


  git pull 
ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/ApiFeatureUsage 
refs/changes/01/245201/1

diff --git a/ApiFeatureUsage.alias.php b/ApiFeatureUsage.alias.php
index d912cc8..47a49a0 100644
--- a/ApiFeatureUsage.alias.php
+++ b/ApiFeatureUsage.alias.php
@@ -56,5 +56,5 @@
 
 /** Simplified Chinese (中文(简体)‎) */
 $specialPageAliases['zh-hans'] = array(
-   'ApiFeatureUsage' => array( 'Api功能用法' ),
-);
\ No newline at end of file
+   'ApiFeatureUsage' => array( 'Api功能使用率' ),
+);

-- 
To view, visit https://gerrit.wikimedia.org/r/245201
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I1cf7e86dbfd437c03a93a001dc525b52d57b24d2
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/extensions/ApiFeatureUsage
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Clean up zh-tw.json - change (mediawiki/core)

2015-10-10 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/244892

Change subject: Clean up zh-tw.json
..

Clean up zh-tw.json

Change-Id: I8930136a2feabdd5ce229e31927678f01bc53fab
---
M languages/i18n/zh-tw.json
1 file changed, 4 insertions(+), 542 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/92/244892/1

diff --git a/languages/i18n/zh-tw.json b/languages/i18n/zh-tw.json
index bb5e9ab..3e25eec 100644
--- a/languages/i18n/zh-tw.json
+++ b/languages/i18n/zh-tw.json
@@ -14,552 +14,14 @@
 "Urhixidur",
 "Wong128hk",
 "Zerng07",
-"לערי ריינהארט"
+"לערי ריינהארט",
+"LNDDYL",
+"Carrotkit"
 ]
 },
-"tog-underline": "連結標注底線",
-"tog-justify": "段落對齊",
-"tog-hideminor": "近期變動中隱藏細微修改",
-"tog-usenewrc": "使用強化的近期變動 (需要JavaScript)",
-"tog-numberheadings": "自動編號標題",
-"tog-showtoolbar": "顯示編輯工具欄 (需要JavaScript)",
-"tog-watchcreations": "將我建立的頁面加進我的監視列表",
-"tog-watchdefault": "將我更改的頁面添加到我的監視列表中",
-"tog-minordefault": "預設將編輯設定為細微修改",
-"tog-enotifwatchlistpages": "當我監視的頁面改變時發電子郵件給我",
-"tog-enotifusertalkpages": "當我的對話頁有更動時發電子郵件通知我",
-"tog-shownumberswatching": "顯示監視數目",
-"tog-uselivepreview": "使用即時預覽 (JavaScript) (試驗中)",
-"tog-watchlisthideminor": "監視列表中隱藏細微修改",
-"tog-ccmeonemails": "當我寄電子郵件給其他使用者時,也寄一份複本到我的信箱。",
-"saturday": "星期六",
-"sun": "日",
-"mon": "一",
-"tue": "二",
-"wed": "三",
-"thu": "四",
-"fri": "五",
-"january": "一月",
-"february": "二月",
-"march": "三月",
-"april": "四月",
-"may_long": "五月",
-"june": "六月",
-"july": "七月",
-"august": "八月",
-"september": "九月",
-"october": "十月",
-"november": "十一月",
-"december": "十二月",
-"january-gen": "一月",
-"february-gen": "二月",
-"march-gen": "三月",
-"april-gen": "四月",
-"may-gen": "五月",
-"june-gen": "六月",
-"july-gen": "七月",
-"august-gen": "八月",
-"september-gen": "九月",
-"october-gen": "十月",
-"november-gen": "十一月",
-"december-gen": "十二月",
-"jan": "1月",
-"feb": "2月",
-"mar": "3月",
-"apr": "4月",
-"may": "5月",
-"jun": "6月",
-"jul": "7月",
-"aug": "8月",
-"sep": "9月",
-"oct": "10月",
-"nov": "11月",
-"dec": "12月",
-"subcategories": "子分類",
-"qbfind": "尋找",
-"vector-action-protect": "保護",
-"vector-view-create": "建立",
-"vector-view-view": "閱讀",
-"help": "使用說明",
-"search": "搜尋",
-"searchbutton": "搜尋",
-"history": "修訂記錄",
-"history_short": "歷史",
-"edit": "編輯",
-"create": "建立",
-"delete": "刪除",
-"protect_change": "更改",
-"postcomment": "新段落",
-"toolbox": "工具箱",
-"userpage": "檢視使用者頁面",
 "projectpage": "檢視計畫頁面",
-"lastmodifiedat": "本頁最後更動時間在 $1 $2。",
-"jumptosearch": "搜尋",
-"aboutsite": "關於 {{SITENAME}}",
 "copyright": "本站的文字內容除另有聲明外,全部以 $1 條款授權使用。",
-"disclaimers": "免責聲明",
-"edithelp": "編輯說明",
-"mainpage": "首頁",
-"portal": "社群入口",
-"portal-url": "Project:社群入口",
-"badaccess-groups": "您剛才的請求只有{{PLURAL:$

[MediaWiki-commits] [Gerrit] Fix accidental variable overriding in manualWordsTable - change (mediawiki/core)

2015-09-30 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/242584

Change subject: Fix accidental variable overriding in manualWordsTable
..

Fix accidental variable overriding in manualWordsTable

It doesn't make sense to take a table as a parameter and then ignore it
completely, so this doesn't look like intentional behavior; regardless,
the edited code is more logical.
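
For reference, a condensed Python sketch of the patched function
(translate() is the helper already defined in Makefile.py; this is a
summary, not the exact patch):

    def manualWordsTable(path, conv_table, reconv_table):
        fp = open(path, 'r', encoding='U8')
        out_table = {}  # fresh dict: the reconv_table argument stays intact
        wordlist = list(set(line.split('#')[0].strip() for line in fp))
        wordlist.sort(key=lambda w: (len(w), w), reverse=True)
        for word in wordlist:
            new_word = translate(word, conv_table)    # forward conversion
            rcv_word = translate(word, reconv_table)  # round-trip check
            if word != rcv_word:                      # lossy word: pin both forms
                out_table[word] = word
                out_table[new_word] = word
        return out_table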

Change-Id: I98bc841e982d328e268309c06fefd3d3ca9f6cd7
---
M maintenance/language/zhtable/Makefile.py
1 file changed, 4 insertions(+), 4 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/84/242584/1

diff --git a/maintenance/language/zhtable/Makefile.py 
b/maintenance/language/zhtable/Makefile.py
index 4ab57d4..9d64a89 100755
--- a/maintenance/language/zhtable/Makefile.py
+++ b/maintenance/language/zhtable/Makefile.py
@@ -229,7 +229,7 @@
 
 def manualWordsTable( path, conv_table, reconv_table ):
 fp = open( path, 'r', encoding = 'U8' )
-reconv_table = {}
+out_table = {}
 wordlist = [line.split( '#' )[0].strip() for line in fp]
 wordlist = list( set( wordlist ) )
 wordlist.sort( key = lambda w: ( len(w), w ), reverse = True )
@@ -238,9 +238,9 @@
 new_word = translate( word, conv_table )
 rcv_word = translate( word, reconv_table )
 if word != rcv_word:
-reconv_table[word] = word
-reconv_table[new_word] = word
-return reconv_table
+out_table[word] = word
+out_table[new_word] = word
+return out_table
 
 def defaultWordsTable( src_wordlist, src_tomany, char_conv_table, 
char_reconv_table ):
 wordlist = list( src_wordlist )

-- 
To view, visit https://gerrit.wikimedia.org/r/242584
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I98bc841e982d328e268309c06fefd3d3ca9f6cd7
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Drop zh-tw message "saveprefs" - change (mediawiki/core)

2015-09-16 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/239003

Change subject: Drop zh-tw message "saveprefs"
..

Drop zh-tw message "saveprefs"

Change-Id: I4b0da9f17e77a9a0d7af7522d7cb59d8891246f7
---
M languages/i18n/zh-tw.json
1 file changed, 0 insertions(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/03/239003/1

diff --git a/languages/i18n/zh-tw.json b/languages/i18n/zh-tw.json
index 81e0605..06b358b 100644
--- a/languages/i18n/zh-tw.json
+++ b/languages/i18n/zh-tw.json
@@ -245,7 +245,6 @@
 "prefs-personal": "使用者資料",
 "prefs-rc": "近期變動",
 "prefs-watchlist-days": "監視列表中顯示記錄的最長天數:",
-"saveprefs": "保存偏好設定",
 "resetprefs": "重設參數",
 "searchresultshead": "搜尋結果設定",
 "recentchangesdays": "近期變動中的顯示日數:",

-- 
To view, visit https://gerrit.wikimedia.org/r/239003
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I4b0da9f17e77a9a0d7af7522d7cb59d8891246f7
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Drop message whatlinkshere-title/zh-tw - change (mediawiki/core)

2015-09-03 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/235737

Change subject: Drop message whatlinkshere-title/zh-tw
..

Drop message whatlinkshere-title/zh-tw

This message contains incorrect wording as reported by a user. Remove it
to let it fall back to the zh-hant message.

Change-Id: Ieefb3a0649d5907eb29c91bf021550f573b5bf4f
---
M languages/i18n/zh-tw.json
1 file changed, 0 insertions(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/37/235737/1

diff --git a/languages/i18n/zh-tw.json b/languages/i18n/zh-tw.json
index 81e0605..8e5704e 100644
--- a/languages/i18n/zh-tw.json
+++ b/languages/i18n/zh-tw.json
@@ -414,7 +414,6 @@
 "sp-contributions-blocklog": "封鎖記錄",
 "sp-contributions-userrights": "使用者權限管理",
 "sp-contributions-username": "IP位址或使用者名稱:",
-"whatlinkshere-title": "鏈接到$1的頁面",
 "blockip": "封鎖使用者",
 "ipadressorusername": "IP地址或使用者名:",
 "ipbreason-dropdown": "*一般的封鎖理由\n** 屢次增加不實資料\n** 刪除頁面內容\n** 外部連結廣告\n** 
在頁面中增加無意義文字\n** 無禮的行為、攻擊/騷擾別人\n** 濫用多個帳號\n** 不能接受的使用者名",

-- 
To view, visit https://gerrit.wikimedia.org/r/235737
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ieefb3a0649d5907eb29c91bf021550f573b5bf4f
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[Wikidata-bugs] [Maniphest] [Changed Subscribers] T109038: [Bug] Users are unable to login on wikidata.org until they clear their cookies

2015-08-31 Thread liangent
liangent added a subscriber: liangent.

TASK DETAIL
  https://phabricator.wikimedia.org/T109038

EMAIL PREFERENCES
  https://phabricator.wikimedia.org/settings/panel/emailpreferences/

To: JanZerebecki, liangent
Cc: liangent, Keegan, Sjoerddebruin, Ankry, JanZerebecki, gerritbot, Addshore, 
Daniel_Mietchen, BBlack, thiemowmde, Magnus, Akoopal, Ortjens, Krenair, 
Billinghurst, Mbch331, Aklapper, aude, hoo, Anomie, Lydia_Pintscher, csteipp, 
Legoktm, Wikidata-bugs, Snowolf, Malyacko



___
Wikidata-bugs mailing list
Wikidata-bugs@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-bugs


Re: [Wikitech-l] +2 in mediawiki/* for Liangent

2015-07-11 Thread Liangent
Thanks everyone!
-Liangent

On Sun, Jul 12, 2015 at 9:13 AM, Amir E. Aharoni
amir.ahar...@mail.huji.ac.il wrote:
 About time! Congratulations!
 On 12 July 2015 at 03:43, Legoktm legoktm.wikipe...@gmail.com wrote:

 Hi,

 Per [1], I gave Liangent +2 powers in mediawiki/* repos. Congratulations!

 [1] https://phabricator.wikimedia.org/T98386

 -- Legoktm

 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

[Wikidata-bugs] [Maniphest] [Changed Subscribers] T102888: Recent changes pages doesn't have anymore the interlingual wikilinks on the sidebar on some projects

2015-06-27 Thread liangent
liangent added a subscriber: liangent.

TASK DETAIL
  https://phabricator.wikimedia.org/T102888

EMAIL PREFERENCES
  https://phabricator.wikimedia.org/settings/panel/emailpreferences/

To: liangent
Cc: liangent, XXN, Bene, aude, Mbch331, hoo, Lydia_Pintscher, Ricordisamoa, 
Alex_brollo, Accurimbono, Candalua, Andyrom75, Aklapper, Wikidata-bugs, 
Malyacko, P.Copp



___
Wikidata-bugs mailing list
Wikidata-bugs@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-bugs


Re: [Wikitech-l] (off) Baidu Baike current dump

2015-06-17 Thread Liangent
I don't think there is or will be one, besides crawling web pages,
considering Baidu's past activities.

Note that content on Baidu Baike is usually not free and is in an
unknown / messy copyright status.

-Liangent

On Wed, Jun 17, 2015 at 8:20 PM, Farkas, Illes f...@elte.hu wrote:
 Hello,

 Does anyone know a method for downloading the current dump of Baidu Baike?
 Suggestions for alternative mailing lists would also be great.

 Thanks,
 Illes
 http://goo.gl/trcz4
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

[Wikidata-bugs] [Maniphest] [Commented On] T98286: Deploy usage tracking and arbitrary access to zhwiki

2015-05-07 Thread liangent
liangent added a comment.

Although I said that I was looking forward to it and there would be good use 
cases on zhwiki, I don't have time to work on it right now. Since it's 
mentioned in the announcement that it will be available globally at some time 
in June, I guess I can just wait for it.


TASK DETAIL
  https://phabricator.wikimedia.org/T98286

EMAIL PREFERENCES
  https://phabricator.wikimedia.org/settings/panel/emailpreferences/

To: liangent
Cc: Ricordisamoa, Aklapper, aude, liangent, Bugreporter, Wikidata-bugs, gpaumier



___
Wikidata-bugs mailing list
Wikidata-bugs@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-bugs


[MediaWiki-commits] [Gerrit] Prevent unexpected }- in converter output - change (mediawiki/core)

2015-04-23 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/206079

Change subject: Prevent unexpected }- in converter output
..

Prevent unexpected }- in converter output

Previously for input -{<span title="-{X}-">A</span>}-, the converter
sees -{<span title="-&#123;X}-">A</span>}-, so <span title="-&#123;X
becomes the content in the first block, and a stray }- is left to output.

Now, the converter sees -{<span title="-&#123;X&#125;-">A</span>}- with
this change. In further processing, the span tag may be parsed and have
its title attrib converted. For cases where the content is not processed
further (eg. R = raw flag), -{X}- is left as is in the attrib, which
is not so ideal, but at least it's better than the original extra }-
outside the whole tag.
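
A minimal sketch of the armoring idea in Python (not the MediaWiki code):
once both braces inside attribute values become numeric entities, the
converter can no longer pair a leftover }- in an attribute with an
earlier -{ in the surrounding text:

    ARMOR = {'{': '&#123;', '}': '&#125;'}

    def armor_attr(value):
        return ''.join(ARMOR.get(ch, ch) for ch in value)

    print(armor_attr('-{X}-'))  # -&#123;X&#125;- : no unpaired }- leaks out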

Change-Id: Idbaaf53f914f362e5b8cc9fad02a524f8d591bb7
---
M includes/Sanitizer.php
M tests/parser/parserTests.txt
2 files changed, 23 insertions(+), 0 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/79/206079/1

diff --git a/includes/Sanitizer.php b/includes/Sanitizer.php
index 96193a7..713cfd4 100644
--- a/includes/Sanitizer.php
+++ b/includes/Sanitizer.php
@@ -1051,6 +1051,7 @@
 			'>'    => '&gt;',   // we've received invalid input
 			'"'    => '&quot;', // which should have been escaped.
 			'{'    => '&#123;',
+			'}'    => '&#125;', // prevent unpaired language conversion syntax
 			'['    => '&#91;',
 			"''"   => '&#39;&#39;',
 			'ISBN' => '&#73;SBN',
diff --git a/tests/parser/parserTests.txt b/tests/parser/parserTests.txt
index e965352..43097e9 100644
--- a/tests/parser/parserTests.txt
+++ b/tests/parser/parserTests.txt
@@ -18305,6 +18305,28 @@
 !! end
 
 !! test
+HTML markups with conversion syntax in attribs, nested in other conversion 
blocks
+!! options
+language=zh variant=zh-cn
+!! wikitext
+-{zh;zh-hans;zh-hant|<span title="-{X}-">A</span>}-
+!! html
+<p><span title="X">A</span>
+</p>
+!! end
+
+!! test
+HTML markups with conversion syntax in attribs, nested in other conversion 
blocks (not working yet)
+!! options
+language=zh variant=zh-cn disabled
+!! wikitext
+-{<span title="-{X}-">A</span>}-
+!! html
+<p><span title="X">A</span>
+</p>
+!! end
+
+!! test
 Proper conversion of text in external links
 !! options
 language=sr variant=sr-ec

-- 
To view, visit https://gerrit.wikimedia.org/r/206079
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Idbaaf53f914f362e5b8cc9fad02a524f8d591bb7
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Remove zh-tw message editing - change (mediawiki/core)

2015-04-12 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/203786

Change subject: Remove zh-tw message editing
..

Remove zh-tw message editing

The current string is incorrect (written in zh-hans); removing it to
let it fall back to zh-hant. Since zh-tw exporting has been disabled
on translatewiki.net, it's not possible to fix it there.

Change-Id: I3cc7bbb2065ecae535986525367d26dfc82c49b1
---
M languages/i18n/zh-tw.json
1 file changed, 0 insertions(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/86/203786/1

diff --git a/languages/i18n/zh-tw.json b/languages/i18n/zh-tw.json
index 9abb137..74a4542 100644
--- a/languages/i18n/zh-tw.json
+++ b/languages/i18n/zh-tw.json
@@ -190,7 +190,6 @@
     "session_fail_preview": "'''很抱歉!由於部份資料遺失,我們無法處理您的編輯。'''\n請再試一次。\n如果仍然失敗,請[[Special:UserLogout|登出]]後重新登入。",
     "session_fail_preview_html": "'''很抱歉!部份資料已遺失,我們無法處理您的編輯。''如果這個編輯過程沒有問題,請再試一次。如果仍然有問題,請登出後再重新登入一次。'''",
     "token_suffix_mismatch": "'''由於您使用者端中的編輯信符毀損了一些標點符號字元,為防止編輯的文字損壞,您的編輯已經被拒絕。\n這種情況通常出現於使用含有很多臭蟲、以網路為主的匿名代理服務的時候。'''",
-    "editing": "正在编辑 $1",
     "editingcomment": "正在編輯$1(新段落)",
     "storedversion": "已保存版本",
     "nonunicodebrowser": "'''警告: 您的瀏覽器不相容Unicode編碼。這裡有一個工作區將使您能安全地編輯頁面: 非ASCII字元將以十六進製編碼模式出現在編輯框中。'''",

-- 
To view, visit https://gerrit.wikimedia.org/r/203786
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I3cc7bbb2065ecae535986525367d26dfc82c49b1
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Wikitech-l] Too many connections...

2015-04-08 Thread Liangent
I haven't seen this myself, but there're some reports about the same
problem in the village pump of zhwiki.

-Liangent

On Wed, Apr 8, 2015 at 9:00 PM, florian.schmidt.wel...@t-online.de
florian.schmidt.wel...@t-online.de wrote:
 Nope, it's working for me.

 Maybe a temp problem, have you asked on #wikimedia-operations?

 Kind regards / Best
 Florian Schmidt
 -Original Message-
 Subject: [Wikitech-l] Too many connections...
 Date: Wed, 08 Apr 2015 14:29:57 +0200
 From: Ilario Valdelli valde...@gmail.com
 To: wikitech-l@lists.wikimedia.org

 Sorry, are you experiencing the same problem than me accessing to the
 Italian Wikipedia?

 Sorry! This site is experiencing technical difficulties.

 Try waiting a few minutes and reloading.

 (Cannot access the database: Too many connections (10.64.32.30))


 --
 Ilario Valdelli
 Wikimedia CH
 Verein zur Förderung Freien Wissens
 Association pour l’avancement des connaissances libre
 Associazione per il sostegno alla conoscenza libera
 Switzerland - 8008 Zürich
 Wikipedia: Ilario https://meta.wikimedia.org/wiki/User:Ilario
 Tel: +41764821371
 http://www.wikimedia.ch
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l



 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

[MediaWiki-commits] [Gerrit] Output converted namespace name in nstab when nstab-* is not... - change (mediawiki/core)

2015-04-03 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/201887

Change subject: Output converted namespace name in nstab when nstab-* is not 
defined
..

Output converted namespace name in nstab when nstab-* is not defined

In languages without a converter implemented, FakeConverter just invokes
Language::getFormattedNsText(), which is the function used originally.
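
A small Python sketch of the delegation this relies on (names shortened
for illustration): FakeConverter's convertNamespace() simply forwards to
the plain formatter, so languages without a real converter keep the
original behavior at the new call site:

    class Language:
        def get_formatted_ns_text(self, ns):
            return {2: 'User', 4: 'Project'}.get(ns, '')

    class FakeConverter:
        def __init__(self, lang):
            self.lang = lang

        def convert_namespace(self, ns):
            # No conversion rules available: fall back unchanged.
            return self.lang.get_formatted_ns_text(ns)

    print(FakeConverter(Language()).convert_namespace(2))  # User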

Change-Id: Ieb34a65776cf69bcd67f1ed8402eb3d38ece0f0c
---
M includes/skins/SkinTemplate.php
1 file changed, 1 insertion(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/87/201887/1

diff --git a/includes/skins/SkinTemplate.php b/includes/skins/SkinTemplate.php
index b0390e9..61aad92 100644
--- a/includes/skins/SkinTemplate.php
+++ b/includes/skins/SkinTemplate.php
@@ -717,7 +717,7 @@
 			$text = $msg->text();
 		} else {
 			global $wgContLang;
-			$text = $wgContLang->getFormattedNsText(
+			$text = $wgContLang->getConverter()->convertNamespace(
 				MWNamespace::getSubject( $title->getNamespace() ) );
 		}
 

-- 
To view, visit https://gerrit.wikimedia.org/r/201887
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ieb34a65776cf69bcd67f1ed8402eb3d38ece0f0c
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Fix I1bb0315d: only use tab as separator in *.manual - change (mediawiki/core)

2015-03-19 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/197863

Change subject: Fix I1bb0315d: only use tab as separator in *.manual
..

Fix I1bb0315d: only use tab as separator in *.manual

This fixes incorrect parsing of phrases with spaces in them.

Change-Id: Ifb6b33b937a7b82218172e3fdd937bda4ea4cc6c
---
M includes/ZhConversion.php
M maintenance/language/zhtable/Makefile.py
M maintenance/language/zhtable/toCN.manual
M maintenance/language/zhtable/toHK.manual
M maintenance/language/zhtable/toTW.manual
M maintenance/language/zhtable/toTrad.manual
M maintenance/language/zhtable/trad2simp.manual
7 files changed, 397 insertions(+), 397 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/63/197863/1

diff --git a/includes/ZhConversion.php b/includes/ZhConversion.php
index 4be2751..833e8a7 100644
--- a/includes/ZhConversion.php
+++ b/includes/ZhConversion.php
@@ -3002,10 +3002,10 @@
 '뛢' => '鸋',
 '뛶' => '鶒',
 '뛸' => '鶗',
-'0出現' => '0出現',
 '0出现' => '0出現',
-'0出線' => '0出線',
+'0出現' => '0出現',
 '0出线' => '0出線',
+'0出線' => '0出線',
 '0只支持' => '0只支持',
 '0只支援' => '0只支援',
 '0周后' => '0周後',
@@ -3213,8 +3213,8 @@
 '不干涉' => '不干涉',
 '不干牠' => '不干牠',
 '不干犯' => '不干犯',
-'不干預' => '不干預',
 '不干预' => '不干預',
+'不干預' => '不干預',
 '不干' => '不幹',
 '不吊' => '不弔',
 '不卷' => '不捲',
@@ -3228,12 +3228,13 @@
 '不负所托' => '不負所托',
 '不通吊庆' => '不通弔慶',
 '不丑' => '不醜',
-'不采聲' => '不采聲',
 '不采声' => '不采聲',
+'不采聲' => '不采聲',
 '不锈钢' => '不鏽鋼',
 '不食干腊' => '不食乾腊',
 '不斗' => '不鬥',
 '丑三' => '丑三',
+'丑婆子' => '丑婆子',
 '丑年' => '丑年',
 '丑日' => '丑日',
 '丑旦' => '丑旦',
@@ -3288,8 +3289,8 @@
 '丰神' => '丰神',
 '丰茸' => '丰茸',
 '丰采' => '丰采',
-'丰韻' => '丰韻',
 '丰韵' => '丰韻',
+'丰韻' => '丰韻',
 '主仆' => '主僕',
 '主干' => '主幹',
 '主钟差' => '主鐘差',
@@ -3322,11 +3323,9 @@
 '九只' => '九隻',
 '九余' => '九餘',
 '也斗了胆' => '也斗了膽',
-'干上' => '乾上',
 '干干' => '乾乾',
 '干干儿的' => '乾乾兒的',
 '干干净净' => '乾乾淨淨',
-'干了' => '乾了',
 '干井' => '乾井',
 '干个够' => '乾個夠',
 '干儿' => '乾兒',
@@ -3364,7 +3363,6 @@
 '干巴' => '乾巴',
 '干式' => '乾式',
 '干弟' => '乾弟',
-'干得' => '乾得',
 '干急' => '乾急',
 '干性' => '乾性',
 '干打雷' => '乾打雷',
@@ -3507,8 +3505,8 @@
 '于仲文' => '于仲文',
 '于佳卉' => '于佳卉',
 '于来山' => '于來山',
-'于偉國' => '于偉國',
 '于伟国' => '于偉國',
+'于偉國' => '于偉國',
 '于光新' => '于光新',
 '于光遠' => '于光遠',
 '于光远' => '于光遠',
@@ -3524,20 +3522,20 @@
 '于化虎' => '于化虎',
 '于占元' => '于占元',
 '于友泽' => '于友澤',
-'于台烟' => '于台煙',
 '于台煙' => '于台煙',
+'于台烟' => '于台煙',
 '于右任' => '于右任',
 '于吉' => '于吉',
 '于和伟' => '于和偉',
 '于品海' => '于品海',
-'于國楨' => '于國楨',
 '于国桢' => '于國楨',
+'于國楨' => '于國楨',
 '于国治' => '于國治',
 '于國治' => '于國治',
-'于堅' => '于堅',
 '于坚' => '于堅',
-'于大宝' => '于大寶',
+'于堅' => '于堅',
 '于大寶' => '于大寶',
+'于大宝' => '于大寶',
 '于天仁' => '于天仁',
 '于天龙' => '于天龍',
 '于奇库杜克' => '于奇庫杜克',
@@ -3547,32 +3545,32 @@
 '于娟' => '于娟',
 '于子千' => '于子千',
 '于孔兼' => '于孔兼',
-'于学忠' => '于學忠',
 '于學忠' => '于學忠',
+'于学忠' => '于學忠',
 '于家堡' => '于家堡',
 '于寘' => '于寘',
 '于宝轩' => '于寶軒',
-'于小伟' => '于小偉',
 '于小偉' => '于小偉',
+'于小伟' => '于小偉',
 '于小彤' => '于小彤',
 '于小惠' => '于小惠',
 '于少保' => '于少保',
 '于山' => '于山',
-'于山國' => '于山國',
 '于山国' => '于山國',
-'于帅' => '于帥',
+'于山國' => '于山國',
 '于帥' => '于帥',
+'于帅' => '于帥',
 '于幼軍' => '于幼軍',
 '于幼军' => '于幼軍',
 '于康震' => '于康震',
-'于广洲' => '于廣洲',
 '于廣洲' => '于廣洲',
+'于广洲' => '于廣洲',
 '于式枚' => '于式枚',
-'于从濂' => '于從濂',
 '于從濂' => '于從濂',
+'于从濂' => '于從濂',
 '于德海' => '于德海',
-'于志寧' => '于志寧',
 '于志宁' => '于志寧',
+'于志寧' => '于志寧',
 '于忠肃集' => '于忠肅集',
 '于思' => '于思',
 '于慎行' => '于慎行',
@@ -3585,26 +3583,26 @@
 '于敏中' => '于敏中',
 '于斌' => '于斌',
 '于斯塔德' => '于斯塔德',
-'于斯納爾斯貝里' => '于斯納爾斯貝里',
 '于斯纳尔斯贝里' => '于斯納爾斯貝里',
-'于斯達爾' => '于斯達爾',
+'于斯納爾斯貝里' => '于斯納爾斯貝里',
 '于斯达尔' => '于斯達爾',
-'于明濤' => '于明濤',
+'于斯達爾' => '于斯達爾',
 '于明涛' => '于明濤',
+'于明濤' => '于明濤',
 '于是之' => '于是之',
 '于晨楠' => '于晨楠',
 '于晴' => '于晴',
-'于会泳' => '于會泳',
 '于會泳' => '于會泳',
-'于根偉' => '于根偉',
+'于会泳' => '于會泳',
 '于根伟' => '于根偉',
+'于根偉' => '于根偉',
 '于格' => '于格',
-'于枫' => '于楓',
 '于楓' => '于楓',
+'于枫' => '于楓',
 '于荣光' => '于榮光',
 '于樂' => '于樂',
-'于樹潔' => '于樹潔',
 '于树洁' => '于樹潔',
+'于樹潔' => '于樹潔',
 '于欣' => '于欣',
 '于欣源' => '于欣源',
 '于正昇' => '于正昇',
@@ -3615,25 +3613,25 @@
 '于江震' => '于江震',
 '于波' => '于波',
 '于洋' => '于洋',
-'于洪區' => '于洪區',
 '于洪区' => '于洪區',
+'于洪區' => '于洪區',
 '于浩威' => '于浩威',
 '于海' => '于海',
 '于海洋' => '于海洋',
-'于湘蘭' => '于湘蘭',
 '于湘兰' => '于湘蘭',
+'于湘蘭' => '于湘蘭',
 '于漢超' => '于漢超',
 '于汉超' => '于漢超',
 '于澄' => '于澄',
-'于澤爾' => '于澤爾',
 '于泽尔' => '于澤爾',
-'于涛' => '于濤',
+'于澤爾' => '于澤爾',
 '于濤' => '于濤',
+'于涛' => '于濤',
 '于熙珍' => '于熙珍',
-'于尔岑' => '于爾岑',
 '于爾岑' => '于爾岑',
-'于尔根' => '于爾根',
+'于尔岑' => '于爾岑',
 '于爾根' => '于爾根',
+'于尔根' => '于爾根',
 '于尔里克' => '于爾里克',
 '于爾里克' => '于爾里克',
 '于特森' => '于特森',
@@ -3646,8 +3644,8 @@
 '于美人' => '于美人',
 '于耘婕' => '于耘婕',
 '于若木' => '于若木',
-'于荫霖' => '于蔭霖',
 '于蔭霖' => '于蔭霖',
+'于荫霖' => '于蔭霖',
 '于衡' => '于衡',
 '于西翰' => '于西翰',
 '于謙' => '于謙',
@@ -3663,36 +3661,36 @@
 '于道泉' => '于道泉',
 '于远伟' => '于遠偉',
 '于遠偉' => '于遠偉',
-'于都县' => '于都縣',
 '于都縣' => '于都縣',
+'于都县' => '于都縣',
 '于里察' => '于里察',
 '于阗' => '于闐',
-'于双戈' => '于雙戈',
 '于雙戈' => '于雙戈',
+'于双戈' => '于雙戈',
 '于云鹤' => '于雲鶴',
 '于震' => '于震',
 '于震寰' => '于震寰',
 '于震环' => '于震環',
 '于震環' => '于震環',
 '于靖' => '于靖',
-'于非暗' => '于非闇',
 '于非闇' => '于非闇',
+'于非暗' => '于非闇',
 '于韋斯屈萊' => '于韋斯屈萊',
 '于韦斯屈莱' => '于韋斯屈萊',
 '于风政' => '于風政',
 '于風政' => '于風政',
 '于飞' => '于飛',
-'于飛島' => '于飛島',
 '于飞岛' => '于飛島',
+'于飛島' => '于飛島',
 '于余曲折

Re: ch341.c does not work with new ch34x devices

2014-12-13 Thread Liangent
I'm connecting it to an external modem on the RS232 side. If I use
some other program to connect to the modem, it reports modem not
responding. To debug further I'm minicom'ing to it. Once connected, I
type AT then press Enter. Screen output is pasted below (with Local
Echo set to No and Hex Display set to Yes):

Good driver:

Welcome to minicom 2.7

OPTIONS: I18n
Compiled on Jan  1 2014, 09:30:18.
Port /dev/ttyUSB0, 22:53:12

Press CTRL-A Z for help on special keys

41 54 0d 0d 0a 4f 4b 0d 0a
(decoded ASCII: "AT" plus CR echoed back, then CR LF "OK" CR LF from the modem)

Bad driver:

Welcome to minicom 2.7

OPTIONS: I18n
Compiled on Jan  1 2014, 09:30:18.
Port /dev/ttyUSB0, 23:09:35

Press CTRL-A Z for help on special keys

01 14 0d
(no printable ASCII - not a valid modem response)

-Liangent

On 12/13/14, Greg KH gre...@linuxfoundation.org wrote:
 On Sat, Dec 13, 2014 at 01:15:07PM +0800, Liangent wrote:
 Hello,

 I bought a new USB-RS232 cable and the system loads ch341.ko for me
 (Debian with kernel 3.14-2-amd64 and 3.16.0-4-amd64). However this
 module does not work with my cable (/dev/ttyUSB0 appears but the
 serial device doesn't work properly) while the driver provided by chip
 vendor[1] works (this driver doesn't compile on kernel 3.16 anymore
 but 3.14 is fine).

 This is dmesg output with the vendor-provided driver.

 [ 1900.070021] usb 2-1.4: new full-speed USB device number 5 using
 ehci-pci
 [ 1900.163218] usb 2-1.4: New USB device found, idVendor=1a86,
 idProduct=7523
 [ 1900.163227] usb 2-1.4: New USB device strings: Mfr=0, Product=2,
 SerialNumber=0
 [ 1900.163232] usb 2-1.4: Product: USB2.0-Ser!
 [ 1900.163887] ch34x 2-1.4:1.0: ch34x converter detected
 [ 1900.165052] usb 2-1.4: ch34x converter now attached to ttyUSB0

 For the record I've used an old cable which works fine with the
 built-in ch341.ko driver, but unfortunately that cable is not in my
 hand anymore.

 [1] http://wch.cn/downloads.php?name=pro&proid=177

 That device should just work with the in-kernel driver.  The fact that
 it is autoloaded is a good sign something is right.

 So no data flows through the device at all when using it, or are there
 other symptoms of not working?

 thanks,

 greg k-h

--
To unsubscribe from this list: send the line unsubscribe linux-usb in
the body of a message to majord...@vger.kernel.org
More majordomo info at  http://vger.kernel.org/majordomo-info.html


[Wikidata-bugs] [Maniphest] [Closed] T69791: clear in wbeditentity does not remove claims

2014-12-01 Thread liangent
liangent closed this task as Resolved.
liangent claimed this task.
liangent added a comment.

I added some logging to my bot and it appears that similar situations are due 
to T59754.

TASK DETAIL
  https://phabricator.wikimedia.org/T69791

REPLY HANDLER ACTIONS
  Reply to comment or attach files, or !close, !claim, !unsubscribe or !assign 
username.

To: liangent
Cc: wikidata-bugs, hoo, Lydia_Pintscher, liangent, Bene, Wikidata-bugs



___
Wikidata-bugs mailing list
Wikidata-bugs@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-bugs


[Wikidata-bugs] [Maniphest] [Reopened] T59754: API: action=wbeditentity&clear=true with baserevid set doesn't throw conflicts when the existing entity has been edited

2014-12-01 Thread liangent
liangent reopened this task as Open.
liangent added a comment.

I logged the attached data about
https://www.wikidata.org/w/index.php?title=Q874074&action=history . It appears
the behavior has regressed.

TASK DETAIL
  https://phabricator.wikimedia.org/T59754

REPLY HANDLER ACTIONS
  Reply to comment or attach files, or !close, !claim, !unsubscribe or !assign 
username.

To: liangent
Cc: wikidata-bugs, Addshore, Lydia_Pintscher, liangent, Wikidata-bugs



___
Wikidata-bugs mailing list
Wikidata-bugs@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-bugs


[Wikidata-bugs] [Maniphest] [Commented On] T59754: API: action=wbeditentity&clear=true with baserevid set doesn't throw conflicts when the existing entity has been edited

2014-12-01 Thread liangent
liangent added a comment.

Files:

{F17690}

{F17691}

{F17692}

{F17693}

TASK DETAIL
  https://phabricator.wikimedia.org/T59754

REPLY HANDLER ACTIONS
  Reply to comment or attach files, or !close, !claim, !unsubscribe or !assign 
username.

To: liangent
Cc: wikidata-bugs, Addshore, Lydia_Pintscher, liangent, Wikidata-bugs



___
Wikidata-bugs mailing list
Wikidata-bugs@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-bugs


[Maniphest] [Commented On] T547: MediaWiki login for Phabricator produces broken links for Wikimedia profiles with URL encoded characters

2014-10-06 Thread liangent
liangent added a comment.

! In T547#8678, @bd808 wrote:
 This double encoding issue seems to also affect the links to OAuth accounts 
 on https://phabricator.wikimedia.org/settings/panel/external/

It appears URLs are stored in some double encoded form in Phab. We should make 
stored URLs correct, not just fix (=urldecode) them on display.
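
A quick Python illustration of the double-encoding effect (the page name
is made up):

    from urllib.parse import quote, unquote

    name = 'User:Example/页面'
    once = quote(name)   # 'User%3AExample/%E9%A1%B5%E9%9D%A2' - correct
    twice = quote(once)  # '%' itself becomes '%25' on the second pass
    print(twice)         # 'User%253AExample/%25E9%25A1%25B5%25E9%259D%25A2'
    print(unquote(twice) == once)  # True: decoding once on display masks the bug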

TASK DETAIL
  https://phabricator.wikimedia.org/T547

REPLY HANDLER ACTIONS
  Reply to comment or attach files, or !close, !claim, !unsubscribe or !assign 
username.

To: mmodell, liangent
Cc: wikibugs-l, Qgil, mmodell, liangent, Aklapper, chasemp, He7d3r, bd808



___
Wikibugs-l mailing list
Wikibugs-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikibugs-l


[MediaWiki-commits] [Gerrit] Change loading order of Chinese conversion tables - change (mediawiki/core)

2014-09-04 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/158366

Change subject: Change loading order of Chinese conversion tables
..

Change loading order of Chinese conversion tables

Previously, the zh-cn table was composed by:

(1) Load zh2Hans as zh-hans table
(2) Load zh2CN + zh2Hans as zh-cn table
(3) Load Conversiontable/zh-hans + zh-hans as zh-hans table
(4) Load Conversiontable/zh-cn + zh-cn as zh-cn table
(5) Load zh-hans + zh-cn as the final zh-cn table

The new loading order is:

(1) Load zh2Hans as zh-hans table
(2) Load zh2CN as zh-cn table
(3) Load Conversiontable/zh-hans + zh-hans as zh-hans table
(4) Load Conversiontable/zh-cn + zh-cn as zh-cn table
(5) Load zh-cn + zh-hans as the final zh-cn table
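
The effect of the reordering, sketched with Python dicts as I read the
steps above (entries are made up; in {**a, **b} the right-hand table
wins, like the second argument of PHP's array_merge, while PHP's '+'
keeps the left side):

    zh2Hans   = {'X': 'hans'}       # built-in pan-simplified entry
    zh2CN     = {'X': 'cn'}         # built-in mainland-specific entry
    wiki_hans = {'X': 'wiki-hans'}  # on-wiki Conversiontable/zh-hans
    wiki_cn   = {}                  # on-wiki Conversiontable/zh-cn

    # Old order: zh-cn is pre-merged with zh2Hans, and the final step lets
    # the (wiki-amended) zh-hans table override zh-cn again.
    old_cn    = {**zh2Hans, **zh2CN}      # step (2)
    old_hans  = {**zh2Hans, **wiki_hans}  # step (3)
    old_cn    = {**old_cn, **wiki_cn}     # step (4)
    old_final = {**old_cn, **old_hans}    # step (5): 'wiki-hans' wins

    # New order: tables stay separate, and the final step layers the more
    # specific zh-cn table on top of zh-hans.
    new_cn    = {**zh2CN, **wiki_cn}      # steps (2) and (4)
    new_hans  = {**zh2Hans, **wiki_hans}  # steps (1) and (3)
    new_final = {**new_hans, **new_cn}    # step (5): 'cn' wins

    print(old_final['X'], new_final['X'])  # wiki-hans cn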

Change-Id: Ie9d08b85d4911618946fa7efd23eb898412449e5
---
M languages/classes/LanguageZh.php
1 file changed, 12 insertions(+), 12 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/66/158366/1

diff --git a/languages/classes/LanguageZh.php b/languages/classes/LanguageZh.php
index dfdc6b1..90d538d 100644
--- a/languages/classes/LanguageZh.php
+++ b/languages/classes/LanguageZh.php
@@ -67,23 +67,23 @@
 		$this->mTables = array(
 			'zh-hans' => new ReplacementArray( $zh2Hans ),
 			'zh-hant' => new ReplacementArray( $zh2Hant ),
-			'zh-cn' => new ReplacementArray( array_merge( $zh2Hans, $zh2CN ) ),
-			'zh-hk' => new ReplacementArray( array_merge( $zh2Hant, $zh2HK ) ),
-			'zh-mo' => new ReplacementArray( array_merge( $zh2Hant, $zh2HK ) ),
-			'zh-my' => new ReplacementArray( array_merge( $zh2Hans, $zh2SG ) ),
-			'zh-sg' => new ReplacementArray( array_merge( $zh2Hans, $zh2SG ) ),
-			'zh-tw' => new ReplacementArray( array_merge( $zh2Hant, $zh2TW ) ),
+			'zh-cn' => new ReplacementArray( $zh2CN ),
+			'zh-hk' => new ReplacementArray( $zh2HK ),
+			'zh-mo' => new ReplacementArray( $zh2HK ),
+			'zh-my' => new ReplacementArray( $zh2SG ),
+			'zh-sg' => new ReplacementArray( $zh2SG ),
+			'zh-tw' => new ReplacementArray( $zh2TW ),
 			'zh' => new ReplacementArray
 		);
 	}
 
 	function postLoadTables() {
-		$this->mTables['zh-cn']->merge( $this->mTables['zh-hans'] );
-		$this->mTables['zh-hk']->merge( $this->mTables['zh-hant'] );
-		$this->mTables['zh-mo']->merge( $this->mTables['zh-hant'] );
-		$this->mTables['zh-my']->merge( $this->mTables['zh-hans'] );
-		$this->mTables['zh-sg']->merge( $this->mTables['zh-hans'] );
-		$this->mTables['zh-tw']->merge( $this->mTables['zh-hant'] );
+		$this->mTables['zh-cn']->setArray( $this->mTables['zh-cn']->getArray() + $this->mTables['zh-hans']->getArray() );
+		$this->mTables['zh-hk']->setArray( $this->mTables['zh-hk']->getArray() + $this->mTables['zh-hant']->getArray() );
+		$this->mTables['zh-mo']->setArray( $this->mTables['zh-mo']->getArray() + $this->mTables['zh-hant']->getArray() );
+		$this->mTables['zh-my']->setArray( $this->mTables['zh-my']->getArray() + $this->mTables['zh-hans']->getArray() );
+		$this->mTables['zh-sg']->setArray( $this->mTables['zh-sg']->getArray() + $this->mTables['zh-hans']->getArray() );
+		$this->mTables['zh-tw']->setArray( $this->mTables['zh-tw']->getArray() + $this->mTables['zh-hant']->getArray() );
 	}
 
/**

-- 
To view, visit https://gerrit.wikimedia.org/r/158366
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ie9d08b85d4911618946fa7efd23eb898412449e5
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Wikimedia-l] Asking Google to output Wikipedia citation format in Scholar

2014-08-03 Thread Liangent
Hmm, maybe another option is to accept BibTeX format on our side, possibly
via a Lua module?

-Liangent
On Aug 4, 2014 1:15 AM, Andy Mabbett a...@pigsonthewing.org.uk wrote:

 Google Scholar search results each have a cite link, which generates
 citation text to copy-and-paste in three formats (MLA, APA, Chicago).

 Is there someone at Google we can talk to, to get Wikipedia's citation
 format included?

 For English-language users (or results), the {{Cite journal}} template
 is probably most appropriate.

 --
 Andy Mabbett
 @pigsonthewing
 http://pigsonthewing.org.uk

 ___
 Wikimedia-l mailing list, guidelines at:
 https://meta.wikimedia.org/wiki/Mailing_lists/Guidelines
 Wikimedia-l@lists.wikimedia.org
 Unsubscribe: https://lists.wikimedia.org/mailman/listinfo/wikimedia-l,
 mailto:wikimedia-l-requ...@lists.wikimedia.org?subject=unsubscribe
___
Wikimedia-l mailing list, guidelines at: 
https://meta.wikimedia.org/wiki/Mailing_lists/Guidelines
Wikimedia-l@lists.wikimedia.org
Unsubscribe: https://lists.wikimedia.org/mailman/listinfo/wikimedia-l, 
mailto:wikimedia-l-requ...@lists.wikimedia.org?subject=unsubscribe

Re: [Pywikipedia-l] reflinks.py under GPL?

2014-08-01 Thread Liangent
cc'ing shizhao.
On Aug 1, 2014 8:58 AM, John Mark Vandenberg jay...@gmail.com wrote:

 On Fri, Jun 13, 2014 at 10:10 AM, Ricordisamoa
 ricordisa...@openmailbox.org wrote:
  Please sign off a Code-Review+1 for
 https://gerrit.wikimedia.org/r/#/c/139294/.
  Thanks in advance.
 
  Il 09/06/2014 10:02, Nicolas Dumazet ha scritto:
 
  Hey folks,
 
  Sorry about that. I'd happily relicense the original code under whatever
 is
  convenient for you.
 
  If anyone contributed to the file in the meantime, I'm afraid you'd have
 to
  ask those guys for permission, too :(

 I think we are waiting for +1's from Shi Zhao and Mayank Madan, CC:d.

 --
 John Vandenberg

 ___
 Pywikipedia-l mailing list
 Pywikipedia-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/pywikipedia-l

___
Pywikipedia-l mailing list
Pywikipedia-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/pywikipedia-l


[Wikizh-l] Fwd: [Wikimedia-l] Wikimedia User Group China

2014-07-30 Thread Liangent
-- Forwarded message --
From: Carlos M. Colina ma...@wikimedia.org.ve
Date: Thu, Jul 31, 2014 at 12:17 AM
Subject: [Wikimedia-l] Wikimedia User Group China
To: wikimedi...@lists.wikimedia.org


Dear all,

It is an honor to announce that the Affiliations Committee has
resolved [1] recognizing the Wikimedia User Group China as a Wikimedia
User Group; their main focus areas are getting more Chinese people to
know and use Wikipedia, encouraging people to become contributors to
the different Wikimedia projects, and keeping the community healthy
and growing.  Let's welcome the newest member of the family of
affiliates - and the fourth from the Sinosphere!


Regards,
Carlos

1: 
https://meta.wikimedia.org/wiki/Affiliations_Committee/Resolutions/Wikimedia_User_Group_China_-_July_2014
--
*Jülüjain wane mmakat* ein kapülain tü alijunakalirua jee
wayuukanairua junain ekerolaa alümüin supüshuwayale etijaanaka.
Ayatashi waya junain.
Carlos M. Colina
Vicepresidente, A.C. Wikimedia Venezuela | RIF J-40129321-2 |
www.wikimedia.org.ve http://wikimedia.org.ve
Chair, Wikimedia Foundation Affiliations Committee
Phone: +972-52-4869915
Twitter: @maor_x
___
Wikimedia-l mailing list, guidelines at:
https://meta.wikimedia.org/wiki/Mailing_lists/Guidelines
wikimedi...@lists.wikimedia.org
Unsubscribe: https://lists.wikimedia.org/mailman/listinfo/wikimedia-l,
mailto:wikimedia-l-requ...@lists.wikimedia.org?subject=unsubscribe

___
Wikizh-l 邮件列表
Wikizh-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikizh-l


[MediaWiki-commits] [Gerrit] Send 404 in various special pages when there're no results - change (mediawiki/core)

2014-07-24 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/148965

Change subject: Send 404 in various special pages when there're no results
..

Send 404 in various special pages when there're no results

Bug: 67182
Change-Id: I442645d8b98731282768e6cc19a8f426b1d0b519
---
M includes/specialpage/ChangesListSpecialPage.php
M includes/specials/SpecialContributions.php
M includes/specials/SpecialRecentchanges.php
M includes/specials/SpecialWhatlinkshere.php
4 files changed, 8 insertions(+), 0 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/65/148965/1

diff --git a/includes/specialpage/ChangesListSpecialPage.php 
b/includes/specialpage/ChangesListSpecialPage.php
index 008200d..80c612f 100644
--- a/includes/specialpage/ChangesListSpecialPage.php
+++ b/includes/specialpage/ChangesListSpecialPage.php
@@ -54,6 +54,7 @@
 		if ( $rows === false ) {
 			if ( !$this->including() ) {
 				$this->doHeader( $opts );
+				$this->getOutput()->setStatusCode( 404 );
 			}
 
 			return;
diff --git a/includes/specials/SpecialContributions.php 
b/includes/specials/SpecialContributions.php
index 251ac51..a884a39 100644
--- a/includes/specials/SpecialContributions.php
+++ b/includes/specials/SpecialContributions.php
@@ -255,6 +255,9 @@
 					wfEscapeWikiText( $userObj->getName() ),
 				)
 			);
+			if ( !$this->including() ) {
+				$this->getOutput()->setStatusCode( 404 );
+			}
 		}
 		$user = htmlspecialchars( $userObj->getName() );
 	} else {
diff --git a/includes/specials/SpecialRecentchanges.php 
b/includes/specials/SpecialRecentchanges.php
index aa8ed82..c3d9d3e 100644
--- a/includes/specials/SpecialRecentchanges.php
+++ b/includes/specials/SpecialRecentchanges.php
@@ -344,6 +344,9 @@
 				$this->msg( 'recentchanges-noresult' )->parse() .
 				'</div>'
 			);
+			if ( !$this->including() ) {
+				$this->getOutput()->setStatusCode( 404 );
+			}
 		} else {
 			$this->getOutput()->addHTML( $rclistOutput );
 		}
diff --git a/includes/specials/SpecialWhatlinkshere.php 
b/includes/specials/SpecialWhatlinkshere.php
index 694bc83..a3f71d1 100644
--- a/includes/specials/SpecialWhatlinkshere.php
+++ b/includes/specials/SpecialWhatlinkshere.php
@@ -208,6 +208,7 @@
 			}
 			$errMsg = is_int( $namespace ) ? 'nolinkshere-ns' : 'nolinkshere';
 			$out->addWikiMsg( $errMsg, $this->target->getPrefixedText() );
+			$out->setStatusCode( 404 );
 		}
 	}
 

-- 
To view, visit https://gerrit.wikimedia.org/r/148965
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I442645d8b98731282768e6cc19a8f426b1d0b519
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Add new variable article_first_contributor - change (mediawiki...AbuseFilter)

2014-07-09 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/145014

Change subject: Add new variable article_first_contributor
..

Add new variable article_first_contributor

Change-Id: I725b7150de52bdc6518f73a6302f850e8d596824
---
M AbuseFilter.class.php
M AbuseFilterVariableHolder.php
M i18n/en.json
M i18n/qqq.json
4 files changed, 31 insertions(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/AbuseFilter 
refs/changes/14/145014/1

diff --git a/AbuseFilter.class.php b/AbuseFilter.class.php
index a742959..3c3cc63 100644
--- a/AbuseFilter.class.php
+++ b/AbuseFilter.class.php
@@ -123,6 +123,7 @@
'article_restrictions_create' => 'restrictions-create',
'article_restrictions_upload' => 'restrictions-upload',
'article_recent_contributors' => 'recent-contributors',
+   'article_first_contributor' => 'first-contributor',
 #  'old_text' => 'old-text-stripped', # Disabled, 
performance
 #  'old_html' => 'old-html', # Disabled, performance
'old_links' => 'old-links',
@@ -342,6 +343,12 @@
'namespace' => $title->getNamespace()
) );
 
+   $vars->setLazyLoadVar( "{$prefix}_first_contributor", 
'load-first-author',
+   array(
+   'title' => $title->getText(),
+   'namespace' => $title->getNamespace()
+   ) );
+
wfRunHooks( 'AbuseFilter-generateTitleVars', array( $vars, 
$title, $prefix ) );
 
return $vars;
diff --git a/AbuseFilterVariableHolder.php b/AbuseFilterVariableHolder.php
index fcc2c00..8f09b3c 100644
--- a/AbuseFilterVariableHolder.php
+++ b/AbuseFilterVariableHolder.php
@@ -504,6 +504,27 @@
}
$result = $users;
break;
+   case 'load-first-author':
+   $title = Title::makeTitle( 
$parameters['namespace'], $parameters['title'] );
+   $result = '';
+
+   if ( !$title->exists() ) {
+   break;
+   }
+
+   $dbr = wfGetDB( DB_SLAVE );
+   $res = $dbr->selectField(
+   'revision',
+   'rev_user_text',
+   array( 'rev_page' => 
$title->getArticleID() ),
+   __METHOD__,
+   array( 'ORDER BY' => 'rev_timestamp' )
+   );
+
+   if ( $res !== false ) {
+   $result = $res;
+   }
+   break;
case 'get-page-restrictions':
$action = $parameters['action'];
$title = Title::makeTitle( 
$parameters['namespace'], $parameters['title'] );
diff --git a/i18n/en.json b/i18n/en.json
index ee959f1..cd6a187 100644
--- a/i18n/en.json
+++ b/i18n/en.json
@@ -264,6 +264,7 @@
 "abusefilter-edit-builder-vars-user-blocked": "Whether user is blocked",
 "abusefilter-edit-builder-vars-user-emailconfirm": "Time email address was 
confirmed",
 "abusefilter-edit-builder-vars-recent-contributors": "Last ten users to 
contribute to the page",
+"abusefilter-edit-builder-vars-first-contributor": "First user to 
contribute to the page",
 "abusefilter-edit-builder-vars-all-links": "All external links in the new 
text",
 "abusefilter-edit-builder-vars-added-links": "All external links added in 
the edit",
 "abusefilter-edit-builder-vars-removed-links": "All external links removed 
in the edit",
@@ -393,4 +394,4 @@
 "abusefilter-import-intro": "You can use this interface to import filters 
from other wikis.\nOn the source wiki, click 
\"{{int:abusefilter-edit-export}}\" under \"{{int:abusefilter-edit-tools}}\" on 
the editing interface.\nCopy from the textbox that appears, and paste it into 
this textbox, then click \"{{int:abusefilter-import-submit}}\".",
 "abusefilter-import-submit": "Import data",
 "abusefilter-group-default": "Default"
-}
\ No newline at end of file
+}
diff --git a/i18n/qqq.json b/i18n/qqq.json
index fb92995..63ed93c 100644
--- a/i18n/qqq.json
+++ b/i18n/qqq.json
@@ -284,6 +284,7 @@
"abusefilter-edit-builder-vars-user-groups": "See also:\n* 
{{msg-mw|Abusefilter-edit-builder-vars-global-user-groups}}",
"abusefilter-edit-builder-vars-user-rights": "Paraphrased: List of 
rights that the user has. Abuse filter syntax option

Re: [Wikidata-l] Finding image URL from Commons image name

2014-07-02 Thread Liangent
Also there are Special:FilePath and thumb.php. I'm not sure how this
affects caching though.

http://commons.wikimedia.org/wiki/Special:FilePath/Example.svg

http://commons.wikimedia.org/w/thumb.php?f=Example.svg&w=420
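
For reference, the hashed path can also be computed from the file name
itself; a minimal sketch in PHP, assuming the standard two-level md5 layout
described below:

// compute the direct upload.wikimedia.org URL for a Commons file name
$name = str_replace( ' ', '_', 'Example.svg' ); // spaces become underscores
$hash = md5( $name ); // MediaWiki hashes the underscored name
$url = 'https://upload.wikimedia.org/wikipedia/commons/'
	. $hash[0] . '/' . substr( $hash, 0, 2 ) . '/' . rawurlencode( $name );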

-Liangent
On Jul 3, 2014 4:50 AM, Emilio J. Rodríguez-Posada emi...@gmail.com
wrote:

Hello Markus;

The URL of a Commons image is build like this:

https://upload.wikimedia.org/wikipedia/commons/x/xy/File_name.ext

Where X and XY are the first char and the first two chars respectively of
the md5sum of the filename (replacing the spaces with _).

For a 200px thumb:

https://upload.wikimedia.org/wikipedia/commons/thumb/x/xy/File_name.ext/200px-File_name.ext

The SVG files are a special case: .png is appended to .ext, giving
.ext.png. For SVGs it doesn't matter if you use big thumb sizes, but when the
file is a JPG, don't try to generate a thumb bigger than the original file or
you will get a beautiful error.

Regards


2014-07-02 22:33 GMT+02:00 Markus Krötzsch mar...@semantic-mediawiki.org:

Dear Wikidatarians,

 From Commons media properties, I get the string name of a file on Commons.
 I can easily use it to build a link to the Commons page fo rthat image.

 * But how do I get the raw image URL?
 * And can I also get the raw URL of a small-scale (thumbnail) image?

 I would like to beautify my Wikidata applications to show some images. I
 know this is more of a general MediaWiki question, but it is much more
 relevant in Wikidata, so I am posting it here first. I guess somebody has
 already solved this since we have images in various Wikidata-based
 applications and gadgets.

 Thanks

 Markus

 ___
 Wikidata-l mailing list
 Wikidata-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikidata-l



___
Wikidata-l mailing list
Wikidata-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-l
___
Wikidata-l mailing list
Wikidata-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-l


Re: [Toolserver-l] 403 User account expired hit list

2014-07-01 Thread Liangent
360Spider looks like some crawler from Qihoo[1], a company creating
malware (read as: disobeying various rules here). I would personally
just ignore those hits.

Relative URLs in Location were incorrect, but they're now acceptable
with the new HTTP standard, and most, if not all, current browsers
recognize them[2].

It seems that the spider is not aware of the new standard, and/or
protocol-relative URLs. In any case, I don't think we need to care about
spiders, as long as they work fine in browsers.

[1] https://en.wikipedia.org/wiki/Qihoo
[2] https://en.wikipedia.org/wiki/HTTP_location

-Liangent

On Wed, Jul 2, 2014 at 4:47 AM, Para wikip...@gmail.com wrote:
 On 01/07/14 21:35, Marlen Caemmerer wrote:

 Honestly I see this for a number of URLs and I dont know if it has any
 impact:


 When putting redirects in place earlier, I noticed in the logs a 360Spider
 that interpreted all redirects without a protocol as a local redirect.
 Probably still going strong?


 ___
 Toolserver-l mailing list (Toolserver-l@lists.wikimedia.org)
 https://lists.wikimedia.org/mailman/listinfo/toolserver-l
 Posting guidelines for this list:
 https://wiki.toolserver.org/view/Mailing_list_etiquette

___
Toolserver-l mailing list (Toolserver-l@lists.wikimedia.org)
https://lists.wikimedia.org/mailman/listinfo/toolserver-l
Posting guidelines for this list: 
https://wiki.toolserver.org/view/Mailing_list_etiquette

Re: [QA] Can not assign a bug

2014-06-12 Thread Liangent
She can do it now!

For more information, you can read this mail:
http://lists.wikimedia.org/pipermail/wikitech-l/2014-May/076723.html

-Liangent

On Thu, Jun 12, 2014 at 5:24 PM, Željko Filipin zfili...@wikimedia.org wrote:
 Hi,

 I was pairing with Jagori today and she told me she can not assign this bug
 to herself:

 https://bugzilla.wikimedia.org/show_bug.cgi?id=66369

 When I look at the bug, I see this:

 Assigned To: Nobody - You can work on this! (edit) (take)

 There are edit and take links, but she does not see them. She was
 sharing her screen, so I was able to verify she was logged in, but the links
 were not there.

 Anybody knows what is the problem?

 Željko

 ___
 QA mailing list
 QA@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/qa


___
QA mailing list
QA@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/qa


Re: [Wikimania-l] Registration payment options

2014-06-06 Thread Liangent
At least PayPal supports more bank card networks. Not to mention
other methods like asking someone else to transfer me some money,
which is obviously easier than asking someone else to provide me with
their credit card info.

-Liangent

On Fri, Jun 6, 2014 at 7:17 PM, Bence Damokos bdamo...@gmail.com wrote:
 In the interest of providing free knowledge for all(/me), how is Paypal
 better where credit/debit cards don't work? (I would assume you still have
 to get your money into Paypal somehow.)

 Best regards,
 Bence


 On Fri, Jun 6, 2014 at 1:11 PM, Stuart Prior stuart.pr...@wikimedia.org.uk
 wrote:

 Thanks Federico for pointing that out.

 Best

 Stuart


 On 4 June 2014 17:24, Federico Leva (Nemo) nemow...@gmail.com wrote:

 Stuart Prior, 04/06/2014 16:33:

 there are effective and commonly used bank transfer systems in some
 European countries


 Some?
 https://en.wikipedia.org/wiki/Single_Euro_Payments_Area

 Nemo


 ___
 Wikimania-l mailing list
 Wikimania-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikimania-l




 --
 Stuart Prior
 Wikimania Liaison
 Wikimedia UK
 +44 20 7065 0990

 Wikimedia UK is a Company Limited by Guarantee registered in England and
 Wales, Registered No. 6741827. Registered Charity No.1144513. Registered
 Office 4th Floor, Development House, 56-64 Leonard Street, London EC2A 4LT.
 United Kingdom. Wikimedia UK is the UK chapter of a global Wikimedia
 movement. The Wikimedia projects are run by the Wikimedia Foundation (who
 operate Wikipedia, amongst other projects).

 Wikimedia UK is an independent non-profit charity with no legal control
 over Wikipedia nor responsibility for its contents.


 ___
 Wikimania-l mailing list
 Wikimania-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikimania-l



 ___
 Wikimania-l mailing list
 Wikimania-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikimania-l


___
Wikimania-l mailing list
Wikimania-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikimania-l


Re: [WikimediaMobile] Mobile tags for Wikipedia android app

2014-06-05 Thread Liangent
I think https://gerrit.wikimedia.org/r/#/c/137563/ fixes this.

-Liangent

On Thu, Jun 5, 2014 at 11:30 AM, Dario Taraborelli
dtarabore...@wikimedia.org wrote:
 This afternoon Dan and I reviewed how tags from the android app are captured
 in the change_tag and tag_summary tables.

 (1) We noticed that the “mobile app edit” tag is applied to recentchanges
 events that are not edits, but new account registrations:

 SELECT * FROM enwiki.change_tag WHERE ct_rev_id IS NULL AND ct_tag = "mobile
 app edit";

 ct_rc_id ct_log_id ct_rev_id ct_tag ct_params
 661123291 56899294 NULL mobile app edit NULL
 661123407 56899301 NULL mobile app edit NULL
 661124928 56899375 NULL mobile app edit NULL
 661127933 56899610 NULL mobile app edit NULL
 661128644 56899685 NULL mobile app edit NULL
 661131626 56899908 NULL mobile app edit NULL
 661133278 5688 NULL mobile app edit NULL
 661134572 56900072 NULL mobile app edit NULL
 661140653 56900620 NULL mobile app edit NULL
 661155198 56901558 NULL mobile app edit NULL
 661155799 56901593 NULL mobile app edit NULL
 661156983 56901659 NULL mobile app edit NULL

 AFAIK this is unusual behavior for tags and will create artifacts in tagged
 revisions unless people are aware that all these registration-related events
 always need to be excluded (it’s also confusing because the name of the tag
 explicitly refers to an edit). As suggested earlier [1], we should not track
 the source of account registrations via MediaWiki tags but via the
 ServerSideAccountCreation log.

 (2) edits made on apps should be stored with two separate tags: “mobile
 edit” and “mobile app edit”. The tags are correctly stored in the change_tag
 table with 2 records for each revisions, e.g.

 SELECT * FROM enwiki.change_tag WHERE ct_rc_id = 661110028;

 ct_rc_id ct_log_id ct_rev_id ct_tag ct_params
 661110028 NULL 611585155 mobile app edit NULL
 661110028 NULL 611585155 mobile edit NULL

 but when the tags are combined in the tag_summary table, the “mobile app
 edit” tag is lost:

 SELECT * FROM enwiki.tag_summary WHERE ts_rc_id = 661110028;

 ts_rc_id ts_log_id ts_rev_id ts_tags
 661110028 NULL 611585155 mobile edit

 This should not be the case, the 2 tags should be concatenated in the
 ts_tags field, see for example this desktop revision with 2 tags:

 SELECT * FROM enwiki.change_tag WHERE ct_rc_id = 578489188;

 ct_rc_id ct_log_id ct_rev_id ct_tag ct_params
 578489188 NULL 64321 gettingstarted edit NULL
 578489188 NULL 64321 visualeditor NULL

 SELECT * FROM enwiki.tag_summary WHERE ts_rc_id = 578489188;

 ts_rc_id ts_log_id ts_rev_id ts_tags
 578489188 NULL 64321 gettingstarted edit,visualeditor

 I believe that neither (1) nor (2) is intended behavior for apps. Can you
 guys confirm and if so, can we fix this?

 Dario


 [1] http://lists.wikimedia.org/pipermail/mobile-l/2014-May/007150.html

 ___
 Mobile-l mailing list
 Mobile-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/mobile-l


___
Mobile-l mailing list
Mobile-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mobile-l


Re: [WikimediaMobile] Mobile tags for Wikipedia android app

2014-06-05 Thread Liangent
For analysis purposes, you'll want to change_tag table, which does not
suffer this bug.

-Liangent

On Fri, Jun 6, 2014 at 2:15 AM, Maryana Pinchuk mpinc...@wikimedia.org wrote:
 Thanks, Liangent!

 This is indeed a bug and should be fixed asap, because it'll pollute our
 early release data analysis. Any chance we can do an LD or add this to the
 SWAT deploy list?


 On Thu, Jun 5, 2014 at 6:13 AM, Liangent liang...@gmail.com wrote:

 I think https://gerrit.wikimedia.org/r/#/c/137563/ fixes this.

 -Liangent

 On Thu, Jun 5, 2014 at 11:30 AM, Dario Taraborelli
 dtarabore...@wikimedia.org wrote:
  This afternoon Dan and I reviewed how tags from the android app are
  captured
  in the change_tag and tag_summary tables.
 
  (1) We noticed that the “mobile app edit” tag is applied to
  recentchanges
  events that are not edits, but new account registrations:
 
  SELECT * FROM enwiki.change_tag WHERE ct_rev_id IS NULL AND ct_tag =
  "mobile
  app edit";
 
  ct_rc_id ct_log_id ct_rev_id ct_tag ct_params
  661123291 56899294 NULL mobile app edit NULL
  661123407 56899301 NULL mobile app edit NULL
  661124928 56899375 NULL mobile app edit NULL
  661127933 56899610 NULL mobile app edit NULL
  661128644 56899685 NULL mobile app edit NULL
  661131626 56899908 NULL mobile app edit NULL
  661133278 5688 NULL mobile app edit NULL
  661134572 56900072 NULL mobile app edit NULL
  661140653 56900620 NULL mobile app edit NULL
  661155198 56901558 NULL mobile app edit NULL
  661155799 56901593 NULL mobile app edit NULL
  661156983 56901659 NULL mobile app edit NULL
 
  AFAIK this is unusual behavior for tags and will create artifacts in
  tagged
  revisions unless people are aware that all these registration-related
  events
  always need to be excluded (it’s also confusing because the name of the
  tag
  explicitly refers to an edit). As suggested earlier [1], we should not
  track
  the source of account registrations via MediaWiki tags but via the
  ServerSideAccountCreation log.
 
  (2) edits made on apps should be stored with two separate tags: “mobile
  edit” and “mobile app edit”. The tags are correctly stored in the
  change_tag
  table with 2 records for each revisions, e.g.
 
  SELECT * FROM enwiki.change_tag WHERE ct_rc_id = 661110028;
 
  ct_rc_id ct_log_id ct_rev_id ct_tag ct_params
  661110028 NULL 611585155 mobile app edit NULL
  661110028 NULL 611585155 mobile edit NULL
 
  but when the tags are combined in the tag_summary table, the “mobile app
  edit” tag is lost:
 
  SELECT * FROM enwiki.tag_summary WHERE ts_rc_id = 661110028;
 
  ts_rc_id ts_log_id ts_rev_id ts_tags
  661110028 NULL 611585155 mobile edit
 
  This should not be the case, the 2 tags should be concatenated in the
  ts_tags field, see for example this desktop revision with 2 tags:
 
  SELECT * FROM enwiki.change_tag WHERE ct_rc_id = 578489188;
 
  ct_rc_id ct_log_id ct_rev_id ct_tag ct_params
  578489188 NULL 64321 gettingstarted edit NULL
  578489188 NULL 64321 visualeditor NULL
 
  SELECT * FROM enwiki.tag_summary WHERE ts_rc_id = 578489188;
 
  ts_rc_id ts_log_id ts_rev_id ts_tags
  578489188 NULL 64321 gettingstarted edit,visualeditor
 
  I believe that neither (1) nor (2) is intended behavior for apps. Can
  you
  guys confirm and if so, can we fix this?
 
  Dario
 
 
  [1] http://lists.wikimedia.org/pipermail/mobile-l/2014-May/007150.html
 
  ___
  Mobile-l mailing list
  Mobile-l@lists.wikimedia.org
  https://lists.wikimedia.org/mailman/listinfo/mobile-l
 

 ___
 Mobile-l mailing list
 Mobile-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/mobile-l




 --
 Maryana Pinchuk
 Product Manager, Wikimedia Foundation
 wikimediafoundation.org

___
Mobile-l mailing list
Mobile-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mobile-l


[MediaWiki-commits] [Gerrit] Use master to load previous tags in ChangeTags::addTags() - change (mediawiki/core)

2014-06-05 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/137563

Change subject: Use master to load previous tags in ChangeTags::addTags()
..

Use master to load previous tags in ChangeTags::addTags()
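
Otherwise, when addTags() is called twice in quick succession, the second
call can read a stale ts_tags value from a lagged slave and overwrite the
tag that the first call just added to the tag_summary row.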

Change-Id: Ie47649ef45f9eebdfeae73a0698ea5d21b150b56
---
M includes/ChangeTags.php
1 file changed, 4 insertions(+), 2 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/63/137563/1

diff --git a/includes/ChangeTags.php b/includes/ChangeTags.php
index d3dd51a..9cc5a0c 100644
--- a/includes/ChangeTags.php
+++ b/includes/ChangeTags.php
@@ -146,7 +146,10 @@
);
 
## Update the summary row.
-   $prevTags = $dbr->selectField( 'tag_summary', 'ts_tags', 
$tsConds, __METHOD__ );
+   // $prevTags can be out of date on slaves, especially when 
addTags is called consecutively,
+   // causing loss of tags added recently in tag_summary table.
+   $dbw = wfGetDB( DB_MASTER );
+   $prevTags = $dbw->selectField( 'tag_summary', 'ts_tags', 
$tsConds, __METHOD__ );
$prevTags = $prevTags ? $prevTags : '';
$prevTags = array_filter( explode( ',', $prevTags ) );
$newTags = array_unique( array_merge( $prevTags, $tags ) );
@@ -158,7 +161,6 @@
return false;
}
 
-   $dbw = wfGetDB( DB_MASTER );
$dbw-replace(
'tag_summary',
array( 'ts_rev_id', 'ts_rc_id', 'ts_log_id' ),

-- 
To view, visit https://gerrit.wikimedia.org/r/137563
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ie47649ef45f9eebdfeae73a0698ea5d21b150b56
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Remove $dbr from ChangeTags::addTags() - change (mediawiki/core)

2014-06-05 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/137847

Change subject: Remove $dbr from ChangeTags::addTags()
..

Remove $dbr from ChangeTags::addTags()

$dbr (DB_SLAVE) is completely unused now.

Change-Id: I02bbde8366480b841d8028403e9a0efe88fa46fd
---
M includes/ChangeTags.php
1 file changed, 5 insertions(+), 8 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/47/137847/1

diff --git a/includes/ChangeTags.php b/includes/ChangeTags.php
index 7e19164..28db8a1 100644
--- a/includes/ChangeTags.php
+++ b/includes/ChangeTags.php
@@ -101,21 +101,20 @@
'specified when adding a tag to a change!' );
}
 
-   $dbr = wfGetDB( DB_SLAVE );
+   $dbw = wfGetDB( DB_MASTER );
 
// Might as well look for rcids and so on.
if ( !$rc_id ) {
// Info might be out of date, somewhat fractionally, on 
slave.
-   $dbr = wfGetDB( DB_MASTER );
if ( $log_id ) {
-   $rc_id = $dbr->selectField(
+   $rc_id = $dbw->selectField(
'recentchanges',
'rc_id',
array( 'rc_logid' = $log_id ),
__METHOD__
);
} elseif ( $rev_id ) {
-   $rc_id = $dbr->selectField(
+   $rc_id = $dbw->selectField(
'recentchanges',
'rc_id',
array( 'rc_this_oldid' = $rev_id ),
@@ -124,14 +123,13 @@
}
} elseif ( !$log_id  !$rev_id ) {
// Info might be out of date, somewhat fractionally, on 
slave.
-   $dbr = wfGetDB( DB_MASTER );
-   $log_id = $dbr->selectField(
+   $log_id = $dbw->selectField(
'recentchanges',
'rc_logid',
array( 'rc_id' => $rc_id ),
__METHOD__
);
-   $rev_id = $dbr->selectField(
+   $rev_id = $dbw->selectField(
'recentchanges',
'rc_this_oldid',
array( 'rc_id' = $rc_id ),
@@ -146,7 +144,6 @@
);
 
## Update the summary row.
-   $dbw = wfGetDB( DB_MASTER );
// $prevTags can be out of date on slaves, especially when 
addTags is called consecutively,
// causing loss of tags added recently in tag_summary table.
$prevTags = $dbw->selectField( 'tag_summary', 'ts_tags', 
$tsConds, __METHOD__ );

-- 
To view, visit https://gerrit.wikimedia.org/r/137847
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I02bbde8366480b841d8028403e9a0efe88fa46fd
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [WikimediaMobile] Android app typography changes

2014-05-28 Thread Liangent
Not really. Firefox and Chrome give me different fonts on Android, using
their own default configurations...

-Liangent

On Thu, May 29, 2014 at 2:28 AM, Steven Walling swall...@wikimedia.org wrote:


 On Wed, May 28, 2014 at 11:13 AM, Juliusz Gonera jgon...@wikimedia.org wrote:

 Roboto for text? We don't use Roboto for text in mobile web.


 We don't specify Roboto, but it's the sans-serif that matches the current
 stack, correct?


 --
 Steven Walling,
 Product Manager
 https://wikimediafoundation.org/

 ___
 Mobile-l mailing list
 Mobile-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/mobile-l


___
Mobile-l mailing list
Mobile-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mobile-l


Bug#749496: Version requirement for python-six dependency is missing

2014-05-27 Thread Liangent
Package: python-w3lib
Version: 1.5-1

Caught the following exception in my first run, and upgrading python-six
from 1.3.0-1 to 1.6.1-2 fixes it.

  File "/usr/lib/python2.7/dist-packages/w3lib/form.py", line 2, in <module>
if six.PY2:
AttributeError: 'module' object has no attribute 'PY2'
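
Presumably the fix is a versioned dependency in debian/control, e.g.
Depends: python-six (>= 1.4), assuming six.PY2 first appeared in 1.4.0
(the exact minimum version may differ).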


[Python-modules-team] Bug#749496: Version requirement for python-six dependency is missing

2014-05-27 Thread Liangent
Package: python-w3lib
Version: 1.5-1

Caught the following exception in my first run, and upgrading python-six
from 1.3.0-1 to 1.6.1-2 fixes it.

  File "/usr/lib/python2.7/dist-packages/w3lib/form.py", line 2, in <module>
if six.PY2:
AttributeError: 'module' object has no attribute 'PY2'
___
Python-modules-team mailing list
Python-modules-team@lists.alioth.debian.org
http://lists.alioth.debian.org/cgi-bin/mailman/listinfo/python-modules-team

Re: [Labs-l] paste with stikkit!

2014-05-26 Thread Liangent
Well a simple configuration file enables pastebinit on
tools.wmflabs.org/paste

https://tools.wmflabs.org/paste/view/662c89d4
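
(For anyone else trying it: pastebinit can presumably be pointed at it with
something like pastebinit -b https://tools.wmflabs.org/paste, though the
exact flag or configuration depends on the pastebinit version.)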

-Liangent


On Tue, May 27, 2014 at 4:10 AM, Marc-André Pelletier 
mpellet...@wikimedia.org wrote:

 On 05/26/2014 04:09 PM, Petr Bena wrote:
  I have .deb's but they are on launchpad and someone said that we
  shouldn't allow ppa's on tools project

 We don't, but there's nothing that prevents us from grabbing the .deb,
 reviewing it, and adding it to the local repo.  :-)

 -- Marc


 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l

___
Labs-l mailing list
Labs-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/labs-l


Re: [Labs-l] MediaWiki to LaTeX Compiler

2014-05-25 Thread Liangent
I don't really have an idea about how to go for the command line version
and use the -c command line option; I know nothing about Haskell anyway...

I hope that it's available on the web, is it possible to add a checkbox or
something?

-Liangent


On Sun, May 25, 2014 at 7:18 PM, Dirk Hünniger dirk.hunni...@googlemail.com
 wrote:

  Hi,
 I didn't take any special care about CJK. It's a bit hard for me since I
 cannot read any of these languages myself. Maybe you can have a look at the
 LaTeX source and tell me what I need to change. Currently no CJK package is
 loaded. The only thing I am doing is to switch to ttf fonts that contain
 CJK characters when I need to print them. Also I am using babel packages.
 For some languages I get proper hyphenation this way, but apparently
 something does not work here for Chinese.
 Yours Dirk

 On 2014-05-25 13:02, Liangent wrote:

  I had a try using an article on Chinese Wikipedia. Although I'm not sure
 whether the cause is in generated LaTeX source or the way you invoke LaTeX,
 the most notable problem is that in output PDF, word wrap doesn't take
 place correctly so almost every line overflows. See
https://en.wikipedia.org/wiki/Word_wrap#Word_wrapping_in_text_containing_Chinese.2C_Japanese.2C_and_Korean
for more information.

  -Liangent


 On Sun, May 25, 2014 at 5:49 PM, Dirk Hünniger 
 dirk.hunni...@googlemail.com wrote:

 Hi,
 if you want the tex source go for the command line version and use the -c
 command line option. If you want to convert from tex to mediawiki use
 pandoc. In the imprint of each pdf there is a link to the sourceforge page.
 It's slow, but I cannot make it any faster. It's mostly the runtime of LaTeX
 itself. I already invested two weeks in optimizing speed. In particular it's
 using multiple cores while in my code. But well, there is not much you can
 do with multiple cores when running LaTeX itself. You could actually get
 some speed by using native cores, but the administration is not that easy.
 It also says on the main page that it will take up to ten minutes.
 Yours Dirk


 On 2014-05-25 11:40, Gryllida wrote:

 Hi,

 On Sun, 25 May 2014, at 18:02, Dirk Hünniger wrote:

 It not a private server anymore. Its now running on Wmflabs already.

 http://mediawiki2latex.wmflabs.org

 I would probably link to the source code and a bug tracker on its main
 page.
 - I see it generated a PDF. Nicely formatted. :) But the TeX source
 would be also useful.
 - It would be nice to be able to convert back from tex to wiki markup
 also.
 - It also appears to be dog slow (about 5 minutes).

 Gryllida.

 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l



 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l




 ___
 Labs-l mailing 
 listLabs-l@lists.wikimedia.orghttps://lists.wikimedia.org/mailman/listinfo/labs-l



 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l


___
Labs-l mailing list
Labs-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/labs-l


Re: [Labs-l] MediaWiki to LaTeX Compiler

2014-05-25 Thread Liangent
I failed to compile your document in its original form:

(../headers/babel.tex
(/var/lib/texmf/tex/generic/babel/babel.sty
(/usr/share/texlive/texmf-dist/tex/generic/babel-english/english.ldf
(/usr/share/texlive/texmf-dist/tex/generic/babel/babel.def
! Undefined control sequence.
\initiate@active@char #1->\bbl@ifshorthand
   {#1}{\bbl@s@initiate@active@char
...
l.585 \initiate@active@char{~}

and I worked around it by commenting out \usepackage[english]{babel} in
../headers/babel.tex

In my experiment I added:

\usepackage{xeCJK}
\setCJKmainfont{WenQuanYi Zen Hei}

to main.tex after \usepackage{fontspec}, and it improves CJK typesetting a
lot.

WenQuanYi Zen Hei is contained in
https://packages.debian.org/sid/fonts-wqy-zenhei

-Liangent


On Sun, May 25, 2014 at 8:01 PM, Dirk Hünniger dirk.hunni...@googlemail.com
 wrote:

  Hi,
 I am sending you the latex source of the main page of the chinese
 wikipedia as attachment.
 You can look at it. But if you want to compile it you need to have ubuntu
 14.04 and do
 sudo apt-get install mediawiki2latex
 xelatex main.tex
 Yours Dirk


 On 2014-05-25 13:32, Liangent wrote:

  I don't really have an idea about how to go for the command line
 version and use the -c command line option; I know nothing about Haskell
 anyway...

 I hope that it's available on the web, is it possible to add a checkbox or
 something?

  -Liangent


 On Sun, May 25, 2014 at 7:18 PM, Dirk Hünniger 
 dirk.hunni...@googlemail.com wrote:

  Hi,
 I didn't take any special care about CJK. Its a bit hard for me since I
 cannot read any of these languages myself. Maybe you can have a look at the
 LaTeX source and tell me what I need to change. Currently no CJK package is
 loaded. The only thing I am doing is to switch to ttf fonts that contain
 CJK characters when I need to print them. Also I am using babel packages.
 For some languages I get proper hyphenation this way, but apparently
 something does not work here for Chinese.
 Yours Dirk

 On 2014-05-25 13:02, Liangent wrote:

  I had a try using an article on Chinese Wikipedia. Although I'm not
 sure whether the cause is in generated LaTeX source or the way you invoke
 LaTeX, the most notable problem is that in output PDF, word wrap doesn't
 take place correctly so almost every line overflows. See
https://en.wikipedia.org/wiki/Word_wrap#Word_wrapping_in_text_containing_Chinese.2C_Japanese.2C_and_Korean
for more information.

  -Liangent


 On Sun, May 25, 2014 at 5:49 PM, Dirk Hünniger 
 dirk.hunni...@googlemail.com wrote:

 Hi,
 if you want the tex source go for the command line version and use the
 -c command line option. If you want to convert from tex to mediawiki use
 pandoc. In the imprint of each pdf there is a link to the sourceforge page.
 Its slow, but I cannot make it any faster. Its mostly the runtime of LaTeX
 itself. I already invested two weeks in optimizing speed. In particular its
 using multiple cores, while in my code. But well there is not much you can
 do with multiple cores when running LaTeX itself. You could actully get
 some speed by using native cores, but the administration is not that easy.
 It also says on the main page that it will take up to ten minutes.
 Yours Dirk


 On 2014-05-25 11:40, Gryllida wrote:

 Hi,

 On Sun, 25 May 2014, at 18:02, Dirk Hünniger wrote:

 It not a private server anymore. Its now running on Wmflabs already.

 http://mediawiki2latex.wmflabs.org

 I would probably link to the source code and a bug tracker on its main
 page.
 - I see it generated a PDF. Nicely formatted. :) But the TeX source
 would be also useful.
 - It would be nice to be able to convert back from tex to wiki markup
 also.
 - It also appears to be dog slow (about 5 minutes).

 Gryllida.

 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l



 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l




 ___
 Labs-l mailing 
 listLabs-l@lists.wikimedia.orghttps://lists.wikimedia.org/mailman/listinfo/labs-l



 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l




 ___
 Labs-l mailing 
 listLabs-l@lists.wikimedia.orghttps://lists.wikimedia.org/mailman/listinfo/labs-l



___
Labs-l mailing list
Labs-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/labs-l


Re: [Labs-l] MediaWiki to LaTeX Compiler

2014-05-25 Thread Liangent
It looks much better now.

Regarding newcommand's, it's more natural to say:

第4章 instead of 章4
第2页 instead of 页2
图8 instead of 图形8

and in Chinese you don't need to (and shouldn't) add spaces between words.
I feel there're some extra ones added especially near links.
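
For the 第4章 / 第2页 style a prefix is needed in addition to the suffix; a
minimal sketch in LaTeX (the macro names are hypothetical, and whether the
templates support a prefix at all is an assumption on my part):

% hypothetical prefix/suffix pair; a heading would come out as 第\thechapter章
\newcommand{\mychapterprefixbabel}{第}
\newcommand{\mychapterbabel}{章}
% or redefine the chapter label directly with titlesec:
\usepackage{titlesec}
\titleformat{\chapter}[display]{\bfseries\huge}{第\thechapter章}{1em}{}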

-Liangent
On May 26, 2014 1:06 AM, Dirk Hünniger dirk.hunni...@googlemail.com
wrote:

  Hi,
 I made a language file for chinese now and installed it on the server. So
 please have a try:
 http://mediawiki2latex.wmflabs.org/
 Yours Dirk

 PS: The language file:
 \HyphSubstLet{ngerman}{ngerman-x-latest}
 \usepackage{xeCJK}
 \setCJKmainfont{WenQuanYi Zen Hei}
 \newcommand{\mychapterbabel}{章}
 \newcommand{\mypagebabel}{页}
 \newcommand{\myfigurebabel}{图形}
 \newcommand{\mylangbabel}{chinese}


 On 2014-05-25 17:58, Liangent wrote:

  I failed to compile your document in it's original form:

 (../headers/babel.tex
 (/var/lib/texmf/tex/generic/babel/babel.sty
 (/usr/share/texlive/texmf-dist/tex/generic/babel-english/english.ldf
 (/usr/share/texlive/texmf-dist/tex/generic/babel/babel.def
 ! Undefined control sequence.
 \initiate@active@char #1-\bbl@ifshorthand
{#1}{\bbl@s@initiate@active@char
 ...
 l.585 \initiate@active@char{~}

 and I worked around it by commenting out \usepackage[english]{babel} in
 ../headers/babel.tex

  In my experiment I added:

 \usepackage{xeCJK}
 \setCJKmainfont{WenQuanYi Zen Hei}

 to main.tex after \usepackage{fontspec}, and it improves CJK typesetting a
 lot.

 WenQuanYi Zen Hei is contained in
 https://packages.debian.org/sid/fonts-wqy-zenhei

  -Liangent


 On Sun, May 25, 2014 at 8:01 PM, Dirk Hünniger 
 dirk.hunni...@googlemail.com wrote:

  Hi,
 I am sending you the latex source of the main page of the chinese
 wikipedia as attachment.
 You can look at it. But if you want to compile it you need to have ubuntu
 14.04 and do
 sudo apt-get install mediawiki2latex
 xelatex main.tex
 Yours Dirk


 On 2014-05-25 13:32, Liangent wrote:

  I don't really have an idea about how to go for the command line
 version and use the -c command line option; I know nothing about Haskell
 anyway...

 I hope that it's available on the web, is it possible to add a checkbox
 or something?

  -Liangent


 On Sun, May 25, 2014 at 7:18 PM, Dirk Hünniger 
 dirk.hunni...@googlemail.com wrote:

  Hi,
 I didn't take any special care about CJK. Its a bit hard for me since I
 cannot read any of these languages myself. Maybe you can have a look at the
 LaTeX source and tell me what I need to change. Currently no CJK package is
 loaded. The only thing I am doing is to switch to ttf fonts that contain
 CJK characters when I need to print them. Also I am using babel packages.
 For some languages I get proper hyphenation this way, but apparently
 something does not work here for Chinese.
 Yours Dirk

 On 2014-05-25 13:02, Liangent wrote:

  I had a try using an article on Chinese Wikipedia. Although I'm not
 sure whether the cause is in generated LaTeX source or the way you invoke
 LaTeX, the most notable problem is that in output PDF, word wrap doesn't
 take place correctly so almost every line overflows. See
https://en.wikipedia.org/wiki/Word_wrap#Word_wrapping_in_text_containing_Chinese.2C_Japanese.2C_and_Korean
for more information.

  -Liangent


 On Sun, May 25, 2014 at 5:49 PM, Dirk Hünniger 
 dirk.hunni...@googlemail.com wrote:

 Hi,
 if you want the tex source go for the command line version and use the
 -c command line option. If you want to convert from tex to mediawiki use
 pandoc. In the imprint of each pdf there is a link to the sourceforge page.
 Its slow, but I cannot make it any faster. Its mostly the runtime of LaTeX
 itself. I already invested two weeks in optimizing speed. In particular its
 using multiple cores, while in my code. But well there is not much you can
 do with multiple cores when running LaTeX itself. You could actully get
 some speed by using native cores, but the administration is not that easy.
 It also says on the main page that it will take up to ten minutes.
 Yours Dirk


 On 2014-05-25 11:40, Gryllida wrote:

 Hi,

 On Sun, 25 May 2014, at 18:02, Dirk Hünniger wrote:

 It not a private server anymore. Its now running on Wmflabs already.

 http://mediawiki2latex.wmflabs.org

 I would probably link to the source code and a bug tracker on its main
 page.
 - I see it generated a PDF. Nicely formatted. :) But the TeX source
 would be also useful.
 - It would be nice to be able to convert back from tex to wiki markup
 also.
 - It also appears to be dog slow (about 5 minutes).

 Gryllida.

 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l



 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l




 ___
 Labs-l mailing 
 listLabs-l

Re: [Labs-l] Sharing code in GitHub

2014-05-16 Thread Liangent
I pushed three repos just now[1][2][3], and I'm not sure about the correct
way to deal with the others. One of them contains some counter-vandalism
functionality, and I fear that publishing it would make it easier for
vandals to bypass somehow. Another one is basically a MediaWiki core repo,
with a few patches on top of master to support [1]; I'm afraid pushing it
will upload a full MediaWiki history.

[1] https://github.com/liangent/mediawiki-extensions-Labs
[2] https://github.com/liangent/mediawiki-maintenance
[3] https://github.com/liangent/updatedyk

-Liangent


On Sat, May 17, 2014 at 3:04 AM, Emilio J. Rodríguez-Posada 
emi...@gmail.com wrote:

 Hi all;

 I use to publish my scripts with open source licenses. Previously I used
 SVN, but now I have moved to GitHub.

 This is my collection of repositories.[1] I have uploaded some of my tools
 (still migrating more from Toolserver) and started a special repo for code
 chunks of mine that can be widely used by other coders.[2] By now, only one
 about how to run a query over all Wikimedia projects[3] but I will add more.

 Please share your GitHub repo, I would like to follow you and read your
 code. :-)

 Regards

 [1] https://github.com/emijrp
 [2] https://github.com/emijrp/wmflabs
 [3] https://github.com/emijrp/wmflabs/blob/master/queryalldb.py

 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l


___
Labs-l mailing list
Labs-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/labs-l


[MediaWiki-commits] [Gerrit] Set unifont-5.1.20080907.ttf for timeline on ZH projects - change (operations/mediawiki-config)

2014-05-14 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/133228

Change subject: Set unifont-5.1.20080907.ttf for timeline on ZH projects
..

Set unifont-5.1.20080907.ttf for timeline on ZH projects

The original one, wqy-zenhei.ttc, doesn't actually exist in GDFONTPATH,
and EasyTimeline is not smart enough to make use of other system fonts.

Bug: 20825
Change-Id: I9dd358872265ff753226b9c8f3b7cd374e3bf609
---
M wmf-config/CommonSettings.php
1 file changed, 1 insertion(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/operations/mediawiki-config 
refs/changes/28/133228/1

diff --git a/wmf-config/CommonSettings.php b/wmf-config/CommonSettings.php
index 8b1a4ea..659c0db 100644
--- a/wmf-config/CommonSettings.php
+++ b/wmf-config/CommonSettings.php
@@ -533,7 +533,7 @@
// Generate(FreeSansWMF.ttf, , 4 );
$wgTimelineSettings->fontFile = 'FreeSansWMF.ttf';
} elseif ( $lang == 'zh' ) {
-   $wgTimelineSettings->fontFile = 'wqy-zenhei.ttc';
+   $wgTimelineSettings->fontFile = 'unifont-5.1.20080907.ttf';
}
$wgTimelineSettings-fileBackend = 'local-multiwrite';
 

-- 
To view, visit https://gerrit.wikimedia.org/r/133228
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I9dd358872265ff753226b9c8f3b7cd374e3bf609
Gerrit-PatchSet: 1
Gerrit-Project: operations/mediawiki-config
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Wikitech-l] MediaWiki 1.19.0beta2

2014-05-09 Thread Liangent
On Mar 23, 2012 3:38 AM, Sam Reed re...@wikimedia.org wrote:

 I'm happy to announce the availability of the second beta release of the
 new MediaWiki 1.19 release series.

 Please try it out and let us know what you think. Don't run it on any
 wikis that you really care about, unless you are both very brave and
 very confident in your MediaWiki administration skills.

 MediaWiki 1.19 is a large release that contains many new features and
 bug fixes. This is a summary of the major changes of interest to users.
 You can consult the RELEASE-NOTES-1.19 file for the full list of changes
 in this version.

 Five security issues were discovered.

 It was discovered that the api had a cross-site request forgery (CSRF)
 vulnerability in the block/unblock modules. It was possible for a user
 account with the block privileges to block or unblock another user without
 providing a token.

 For more details, see https://bugzilla.wikimedia.org/show_bug.cgi?id=34212

 It was discovered that the resource loader can leak certain kinds of
private
 data across domain origin boundaries, by providing the data as an
executable
 JavaScript file. In MediaWiki 1.18 and later, this includes the leaking of
 CSRF
 protection tokens. This allows compromise of the wiki's user accounts, say
 by
 changing the user's email address and then requesting a password reset.

 For more details, see https://bugzilla.wikimedia.org/show_bug.cgi?id=34907

 Jan Schejbal of Hatforce.com discovered a cross-site request forgery
(CSRF)
 vulnerability in Special:Upload. Modern browsers (since at least as early
as
 December 2010) are able to post file uploads without user interaction,
 violating previous security assumptions within MediaWiki.

 Depending on the wiki's configuration, this vulnerability could lead to
 further
 compromise, especially on private wikis where the set of allowed file
types
 is
 broader than on public wikis. Note that CSRF allows compromise of a wiki
 from
 an external website even if the wiki is behind a firewall.

 For more details, see https://bugzilla.wikimedia.org/show_bug.cgi?id=35317

 George Argyros and Aggelos Kiayias reported that the method used to
generate
 password reset tokens is not sufficiently secure. Instead we use various
 more
 secure random number generators, depending on what is available on the
 platform. Windows users are strongly advised to install either the openssl
 extension or the mcrypt extension for PHP so that MediaWiki can take
 advantage
 of the cryptographic random number facility provided by Windows.

 Any extension developers using mt_rand() to generate random numbers in
 contexts
 where security is required are encouraged to instead make use of the
 MWCryptRand class introduced with this release.
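
 For example (assuming MediaWiki 1.19+, where MWCryptRand::generateHex()
 returns a random hexadecimal string of the requested length):

 $token = MWCryptRand::generateHex( 32 );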

 For more details, see https://bugzilla.wikimedia.org/show_bug.cgi?id=35078

I came across this mail and found this link still not viewable.


 A long-standing bug in the wikitext parser (bug 22555) was discovered to
 have
 security implications. In the presence of the popular CharInsert
extension,
 it
 leads to cross-site scripting (XSS). XSS may be possible with other
 extensions
 or perhaps even the MediaWiki core alone, although this is not confirmed
at
 this time. A denial-of-service attack (infinite loop) is also possible
 regardless of configuration.

 For more details, see https://bugzilla.wikimedia.org/show_bug.cgi?id=35315

 *
  What's new?
 *

 MediaWiki 1.19 brings the usual host of various bugfixes and new features.

 Comprehensive list of what's new is in the release notes.

 * Bumped MySQL version requirement to 5.0.2.
 * Disable the partial HTML and MathML rendering options for Math,
   and render as PNG by  default.
   * MathML mode was so incomplete most people thought it simply didn't
work.
 * New skins/common/*.css files usable by skins instead of having to copy
 piles of
   generic styles from MonoBook or Vector's css.
 * The default user signature now contains a talk link in addition to the
 user link.
 * Searching blocked usernames in block log is now clearer.
 * Better timezone recognition in user preferences.
 * Extensions can now participate in the extraction of titles from URL
paths.
 * The command-line installer supports various RDBMSes better.
 * The interwiki links table can now be accessed also when the interwiki
 cache
   is used (used in the API and the Interwiki extension).

 Internationalization
 - 
 * More gender support (for instance in user lists).
 * Add languages: Canadian English.
 * Language converter improved, e.g. it now works depending on the page
   content language.
 * Time and number-formatting magic words also now depend on the page
   content language.
 * Bidirectional support further improved after 1.18.

 Release notes
 - -
 Full release notes:


[MediaWiki-commits] [Gerrit] LanguageConverter fix of empty and numeric strings - change (mediawiki/core)

2014-05-06 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/131717

Change subject: LanguageConverter fix of empty and numeric strings
..

LanguageConverter fix of empty and numeric strings
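
Use strict comparisons so that rule texts like '0' are not mistaken for
empty ones, and array union (+) instead of array_merge() so that
numeric-string conversion keys are not renumbered when tables are combined.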

Bug: 49072
Bug: 46634
Bug: 51551
Change-Id: I2c88f1cf7c0014bebf5c798916b660b334a0b78b
---
M languages/LanguageConverter.php
M tests/parser/parserTests.txt
2 files changed, 44 insertions(+), 9 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/17/131717/1

diff --git a/languages/LanguageConverter.php b/languages/LanguageConverter.php
index 57e73da..4ab4e3f 100644
--- a/languages/LanguageConverter.php
+++ b/languages/LanguageConverter.php
@@ -1004,7 +1004,7 @@
if ( $recursive ) {
foreach ( $sublinks as $link ) {
$s = $this->parseCachedTable( $code, $link, 
$recursive );
-   $ret = array_merge( $ret, $s );
+   $ret = $s + $ret;
}
}
 
@@ -1260,18 +1260,20 @@
$to = trim( $v[1] );
$v = trim( $v[0] );
$u = explode( '=', $v, 2 );
-   // if $to is empty, strtr() could return a wrong result
-   if ( count( $u ) == 1 && $to && in_array( $v, $variants 
) ) {
+   // if $to is empty (which is also used as $from in 
bidtable),
+   // strtr() could return a wrong result.
+   if ( count( $u ) == 1 && $to !== '' && in_array( $v, 
$variants ) ) {
$bidtable[$v] = $to;
} elseif ( count( $u ) == 2 ) {
$from = trim( $u[0] );
$v = trim( $u[1] );
+   // if $from is empty, strtr() could return a 
wrong result.
if ( array_key_exists( $v, $unidtable )
 && !is_array( $unidtable[$v] )
-   && $to
+   && $from !== ''
 && in_array( $v, $variants ) ) {
$unidtable[$v] = array( $from => $to );
-   } elseif ( $to && in_array( $v, $variants ) ) {
+   } elseif ( $from !== '' && in_array( $v, 
$variants ) ) {
$unidtable[$v][$from] = $to;
}
}
@@ -1325,17 +1327,17 @@
// display current variant in bidirectional array
$disp = $this->getTextInBidtable( $variant );
// or display current variant in fallbacks
-   if ( !$disp ) {
+   if ( $disp === false ) {
$disp = $this->getTextInBidtable(
 
$this->mConverter->getVariantFallbacks( $variant ) );
}
// or display current variant in unidirectional array
-   if ( !$disp && array_key_exists( $variant, $unidtable ) 
) {
+   if ( $disp === false && array_key_exists( $variant, 
$unidtable ) ) {
$disp = array_values( $unidtable[$variant] );
$disp = $disp[0];
}
// or display first text under disable manual convert
-   if ( !$disp && 
$this->mConverter->mManualLevel[$variant] == 'disable' ) {
+   if ( $disp === false && 
$this->mConverter->mManualLevel[$variant] == 'disable' ) {
if ( count( $bidtable )  0 ) {
$disp = array_values( $bidtable );
$disp = $disp[0];
@@ -1430,7 +1432,7 @@
 && isset( $unidtable[$v] )
) {
if ( isset( $this->mConvTable[$v] ) ) {
-   $this->mConvTable[$v] = array_merge( 
$this->mConvTable[$v], $unidtable[$v] );
+   $this->mConvTable[$v] = $unidtable[$v] 
+ $this->mConvTable[$v];
} else {
$this->mConvTable[$v] = $unidtable[$v];
}
diff --git a/tests/parser/parserTests.txt b/tests/parser/parserTests.txt
index e8e71b8..4dc16b0 100644
--- a/tests/parser/parserTests.txt
+++ b/tests/parser/parserTests.txt
@@ -15653,6 +15653,39 @@
 !! end
 
 !! test
+Strings evaluating false shouldn't be ignored by Language converter (bug 49072)
+!! options
+language=zh variant=zh-cn
+!! input
+-{zh-cn:0;zh-sg:1

[MediaWiki-commits] [Gerrit] Correct README - change (mediawiki...FastStringSearch)

2014-05-06 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/131835

Change subject: Correct README
..

Correct README

Change-Id: I8a59f9e82fc6dfec6b35a5694a6ed84ed548cc44
---
M README
1 file changed, 1 insertion(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/php/FastStringSearch 
refs/changes/35/131835/1

diff --git a/README b/README
index a29c1a4..fb05ef8 100644
--- a/README
+++ b/README
@@ -1,5 +1,5 @@
 This is a PHP extension for fast string search and replace. It is used by 
-LanguageConverter.php. It supports multiple search terms. We use it as a
+StringUtils.php. It supports multiple search terms. We use it as a
 replacement for PHP's strtr, which is extremely slow in certain cases.
 Chinese script conversion is one of those cases. This extension uses a
 Commentz-Walter style algorithm for multiple search terms, or a Boyer-Moore

-- 
To view, visit https://gerrit.wikimedia.org/r/131835
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I8a59f9e82fc6dfec6b35a5694a6ed84ed548cc44
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/php/FastStringSearch
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Labs-l] Changes to cron on tools

2014-04-30 Thread Liangent
It would be nice to have the original crontabs kept somewhere for manual
inspection of the automatically prefixed versions later.
On May 1, 2014 1:33 AM, Marc-André Pelletier mpellet...@wikimedia.org
wrote:

 On 03/24/2014 04:43 PM, Marc A. Pelletier wrote:
  AT THE END OF THAT WEEK:  any crontabs you may have had on tools-login
  or tools-bastion will be moved to the new server, and 'crontab' will be
  made to use the new system exclusively.  You have no intervention to
  make; this will be done automatically.

 This had not been yet done to give everyone the time to perform the
 switch manually (and because I had limited availability), but will take
 place this Thursday, May 1. (That is, tomorrow).

 I will try take the time to manually inspect the edited crontabs for
 everyone to make any obvious fixes but please be aware that I cannot
 make more than a cursory inspection.

 If you notice issues in your scheduled cron jobs, simply examine the
 automatically modified crontab for correctness.

 -- Marc


 ___
 Labs-l mailing list
 Labs-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/labs-l

___
Labs-l mailing list
Labs-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/labs-l


Re: [Mediawiki-api] Bad or Invalid Token in Extension

2014-04-27 Thread Liangent
Well you can just use Title::moveTo() directly.
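
A minimal sketch (for MediaWiki core of that era; moveTo() returns true on
success, or an array of error message parameters otherwise):

$oldTitle = Title::newFromText( 'Old page' );
$newTitle = Title::newFromText( 'New page' );
$error = $oldTitle->moveTo( $newTitle, true, 'Renaming from my extension', true );
if ( $error !== true ) {
	// inspect the returned error array
}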
On Apr 28, 2014 1:13 AM, Justin Maldonado i...@keyappraising.com wrote:

 Yeah, that was just a typo copying the code over.  It is actually just the
 one $token variable.  Same with the double quotes.   I’ve tried no quotes
 as well though, along with single quotes, smiley faces, hearts, etc.



 I was also going to do some more urlencoding, but just wanted to get the
 basic page move working first.



 Thanks though, I appreciate you taking a look.  I have no doubt it could
 be something as simple as that.



 *From:* mediawiki-api-boun...@lists.wikimedia.org [mailto:
 mediawiki-api-boun...@lists.wikimedia.org] *On Behalf Of *Niklas Keller
 *Sent:* Sunday, April 27, 2014 11:56 AM
 *To:* MediaWiki API announcements  discussion
 *Subject:* Re: [Mediawiki-api] Bad or Invalid Token in Extension



 I don't have enough time right now to have a deeper look at your code, but:

$token = $token = $wgUser->editToken();

 Although this isn't an error, it's still unnecessary.

 Next, are you really using these quotes: ” instead of "? I think this
 happened when you copied the code, because it'll result in a syntax error.
 Additionally, you shouldn't put pure variables into quotes; there's no
 reason why you would do so.

 You should urlencode all parameters, not just the token.

 ___
 Mediawiki-api mailing list
 Mediawiki-api@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/mediawiki-api


___
Mediawiki-api mailing list
Mediawiki-api@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-api


[MediaWiki-commits] [Gerrit] Add special page alias for the example special page - change (mediawiki...examples)

2014-04-24 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/129426

Change subject: Add special page alias for the example special page
..

Add special page alias for the example special page

Change-Id: I9b6c6e37fdd5f2fb3fceec4aa99fe51a7f4eb506
---
M BoilerPlate/BoilerPlate.i18n.alias.php
1 file changed, 6 insertions(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/examples 
refs/changes/26/129426/1

diff --git a/BoilerPlate/BoilerPlate.i18n.alias.php 
b/BoilerPlate/BoilerPlate.i18n.alias.php
index e187c4a..9a0511a 100644
--- a/BoilerPlate/BoilerPlate.i18n.alias.php
+++ b/BoilerPlate/BoilerPlate.i18n.alias.php
@@ -6,4 +6,9 @@
  * @ingroup Extensions
  */
 
-$specialPageAliases = array();
\ No newline at end of file
+$specialPageAliases = array();
+
+/** English (English) */
+$specialPageAliases['en'] = array(
+   'HelloWorld' => array( 'HelloWorld' ),
+);

-- 
To view, visit https://gerrit.wikimedia.org/r/129426
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I9b6c6e37fdd5f2fb3fceec4aa99fe51a7f4eb506
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/extensions/examples
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Wikitech-l] Programmatically making an empty edit?

2014-04-23 Thread Liangent
refreshLinks.php[1] can fix that.

[1] https://www.mediawiki.org/wiki/Manual:RefreshLinks.php
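
(Typically run from the wiki's installation directory as
php maintenance/refreshLinks.php; see the manual page above for options to
restrict or batch the run on a large wiki.)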

On Tue, Apr 22, 2014 at 12:36 AM, Daniel Barrett d...@vistaprint.comwrote:

 I'm the original poster -- here is my real-world use case where a null
 edit is required.

 My wiki has a custom parser tag, (say) foobar.  Many articles include it
 in their wikitext.

 We modified the code for foobar to add a feature: autocategorizing. It
 automatically categorizes any article that contains it (say, in
 Category:Articles containing the foobar tag).

 After this code change is deployed, if you visit an article Blat that
 previously contained foobar, you will see it is properly categorized at
 the bottom. However, if you visit the category page for Articles
 containing the foobar tag, it is missing Blat (and tons of other articles).

 If you perform a null edit on Blat, it then shows up on the category page.
  Purging Blat (action=purge) does not have this effect.

 DanB

 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikimedia-l] Affiliation in username

2014-04-20 Thread Liangent
On Sun, Apr 20, 2014 at 4:05 PM, Federico Leva (Nemo) nemow...@gmail.com wrote:

 Amir E. Aharoni, 20/04/2014 08:39:

  Silly technical remark: Everybody, please stop doing this with
 parentheses.
 It breaks in right to left languages. Gary-WMF is just as readable, and
 doesn't have this problem. Thanks for the attention.


 Your suggestion works against the built-in assumptions of MediaWiki for
 disambiguations.
 https://meta.wikimedia.org/wiki/Help:Pipe_trick


Then Gary, WMF?



 Nemo


 ___
 Wikimedia-l mailing list
 Wikimedia-l@lists.wikimedia.org
 Unsubscribe: https://lists.wikimedia.org/mailman/listinfo/wikimedia-l,
 mailto:wikimedia-l-requ...@lists.wikimedia.org?subject=unsubscribe

___
Wikimedia-l mailing list
Wikimedia-l@lists.wikimedia.org
Unsubscribe: https://lists.wikimedia.org/mailman/listinfo/wikimedia-l, 
mailto:wikimedia-l-requ...@lists.wikimedia.org?subject=unsubscribe

Re: [Wikitech-l] Programmatically making an empty edit?

2014-04-18 Thread Liangent
On Fri, Apr 18, 2014 at 5:22 PM, Federico Leva (Nemo) nemow...@gmail.com wrote:

 This was asked just few days ago on this list...
 https://www.mediawiki.org/wiki/Manual:Pywikibot/touch.py is regularly
 used to make millions null edits, don't bother inventing something else.


It seems touch.py should be updated to make use of forcelinkupdate,
forcerecursivelinkupdate and generator parameters of api.php?action=purge,
to reduce traffic.

https://www.mediawiki.org/wiki/API:Purge
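
A rough sketch of such a batched request (the generator and its limit here
are illustrative; see the API page above for the exact parameters):

 api.php?action=purge&forcelinkupdate=1&generator=allpages&gaplimit=50

One POST like this refreshes the links tables for a whole batch of pages at
once, instead of fetching and re-saving each page individually.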

-Liangent



 Nemo


 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikitech-l] Programmatically making an empty edit?

2014-04-18 Thread Liangent
There are still cases / bugs where the *links tables can't be updated
automatically. touch.py and similar techniques work as a workaround.


On Sat, Apr 19, 2014 at 9:12 AM, MZMcBride z...@mzmcbride.com wrote:

 Liangent wrote:
 It seems touch.py should be updated to make use of forcelinkupdate,
 forcerecursivelinkupdate and generator parameters of api.php?action=purge,
 to reduce traffic.
 
 https://www.mediawiki.org/wiki/API:Purge

 As far as I can tell, touch.py should not be necessary. Purging (including
 links updates) should just work in MediaWiki. Individual users shouldn't
 feel compelled to manually null edit or aggressively purge pages. I'd
 personally rather see time and energy invested into making the need for
 null edits obsolete instead of making touch.py more robust.

 MZMcBride



 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikitech-l] English Wikipedia Homepage Framework

2014-04-17 Thread Liangent
On Wed, Apr 16, 2014 at 4:34 AM, Jon Robson jdlrob...@gmail.com wrote:

 PLEASE! :D
 This would help the mobile version a lot.


For the mobile version: I created [[zh:User:Liangent/首页]] with wikitext {{
:{{MediaWiki:MainPage}} }} (comparison: [[en:User:Liangent/Main Page]]) and
found https://zh.m.wikipedia.org/wiki/User:Liangent/%E9%A6%96%E9%A1%B5 quite
mobile usable. I feel it's even possible to disable any special
handling of main page contents on zhwiki (which caused
https://bugzilla.wikimedia.org/show_bug.cgi?id=63963 btw).



 On Tue, Apr 15, 2014 at 1:25 PM, Derric Atzrott
 datzr...@alizeepathology.com wrote:
  Hello,
 
  I just wanted to make everyone aware of a discussion going on at the
 English
  Wikipedia.  They are discussing changing the Main Page, not visibly, but
 just
  changing the page from a table based layout to one that is a little bit
 more
  modern.  The discussion can be found here:
 
 https://en.wikipedia.org/wiki/Talk:Main_Page#Proposal_to_implement_new_framework
  _for_main_page
 
  One of the things that came up during the discussion was testing the
 framework
  with a wide variety of browsers.  A lot of the opposition to it that I
 could see
  came from the lack of testing.  I feel like I remember us discussing here
  previously a framework for testing stuff across many browsers.  If we
 have
  something of the like here, I think that it could be well deployed to
 help them
  out.  No reason for them to re-invent the wheel or do all of the testing
  manually.
 
  Thank you,
  Derric Atzrott
  Computer Specialist
  Alizee Pathology
 
 
 
  ___
  Wikitech-l mailing list
  Wikitech-l@lists.wikimedia.org
  https://lists.wikimedia.org/mailman/listinfo/wikitech-l



 --
 Jon Robson
 * http://jonrobson.me.uk
 * https://www.facebook.com/jonrobson
 * @rakugojon

 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikitech-l] English Wikipedia Homepage Framework

2014-04-15 Thread Liangent
zhwiki main page is already div-based :)
On Apr 16, 2014 4:34 AM, Jon Robson jdlrob...@gmail.com wrote:

 PLEASE! :D
 This would help the mobile version a lot.

 On Tue, Apr 15, 2014 at 1:25 PM, Derric Atzrott
 datzr...@alizeepathology.com wrote:
  Hello,
 
  I just wanted to make everyone aware of a discussion going on at the
 English
  Wikipedia.  They are discussing changing the Main Page, not visibly, but
 just
  changing the page from a table based layout to one that is a little bit
 more
  modern.  The discussion can be found here:
 
 https://en.wikipedia.org/wiki/Talk:Main_Page#Proposal_to_implement_new_framework
  _for_main_page
 
  One of the things that came up during the discussion was testing the
 framework
  with a wide variety of browsers.  A lot of the opposition to it that I
 could see
  came from the lack of testing.  I feel like I remember us discussing here
  previously a framework for testing stuff across many browsers.  If we
 have
  something of the like here, I think that it could be well deployed to
 help them
  out.  No reason for them to re-invent the wheel or do all of the testing
  manually.
 
  Thank you,
  Derric Atzrott
  Computer Specialist
  Alizee Pathology
 
 
 
  ___
  Wikitech-l mailing list
  Wikitech-l@lists.wikimedia.org
  https://lists.wikimedia.org/mailman/listinfo/wikitech-l



 --
 Jon Robson
 * http://jonrobson.me.uk
 * https://www.facebook.com/jonrobson
 * @rakugojon

 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l
___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikitech-l] mGerrit - Gerrit for Android

2014-04-15 Thread Liangent
I'm more eager to find a good bugzilla app :) I can find a few on Google
Play, but is there a best one to recommend?
On Apr 14, 2014 6:58 PM, Sam Reed re...@wikimedia.org wrote:

 As we all know, Gerrit doesn't have the best interface, but even more so,
 it's not very mobile friendly. There is however mGerrit [1] for Android
 which does a nice job.



 Also, we're not in the list of default supported Gerrit instances (yes,
 I
 know WikiMedia is in CamelCase, this is already reported and fixed
 upstream).



 If you've any issues with the software emailing the developer directly is
 quite good at yielding responses.







 Sam





 [1] https://play.google.com/store/apps/details?id=com.jbirdvegas.mgerrit



 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l
___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [WikimediaMobile] 通知;

2014-04-11 Thread Liangent
This, and a previous mail to this list written in Chinese, are spam.
On Apr 12, 2014 12:33 AM, 宇文先生 mobile-l@lists.wikimedia.org wrote:

 mobile-l@lists.wikimedia.org, hello; please forward this mail to the relevant person in charge; thanks

 ___
 Mobile-l mailing list
 Mobile-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/mobile-l


___
Mobile-l mailing list
Mobile-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mobile-l


Re: [Wikitech-l] Abandoning -1 code reviews automatically?

2014-04-09 Thread Liangent
I have some changesets where I uploaded a reworked patchset several weeks
or even months after an original -1 was given...

Anyway, a changeset can easily be renewed by rebasing it, but the side
effect is that all existing -1s get flushed. If people start using this
method to extend the period, that's not a good thing...


On Wed, Apr 9, 2014 at 11:25 PM, Jon Robson jdlrob...@gmail.com wrote:

 Yes yes yes please!

 I've been manually doing this in mobile with a short friendly note saying
 if the owner wants to resubmit it they can feel free to at a later date. My
 gerrit is just a spam queue right now.

 Just to clarify - if someone submits a patch and then, 1 month later, says
 via comment "I still want to work on it", do we abandon it for the time
 being or keep it open?
 On 9 Apr 2014 08:08, Quim Gil q...@wikimedia.org wrote:

  In relation to our discussion about code review metrics at
  http://korma.wmflabs.org/browser/gerrit_review_queue.html
 
  I just learned that OpenStack has a policy to automatically abandon
  reviews sitting in -1 for more than one week:
 
  https://bugzilla.wikimedia.org/show_bug.cgi?id=63533#c1
 
  Maybe one week is too tight for the reality of our project, but what
 about
  2-4 weeks?
 
  Brad and others said last week that they were not interested in code
 review
  queue metrics mixing pending -1 reviews. Maybe the solution is not to
 tweak
  the metrics, but to effectively abandon those changesets automatically?
 
 
  --
  Quim Gil
  Engineering Community Manager @ Wikimedia Foundation
  http://www.mediawiki.org/wiki/User:Qgil
  ___
  Wikitech-l mailing list
  Wikitech-l@lists.wikimedia.org
  https://lists.wikimedia.org/mailman/listinfo/wikitech-l
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikitech-l] MediaWiki on Google App Engine

2014-04-07 Thread Liangent
I remember I mentioned it once in IRC, and some people don't like it
because GAE is proprietary...

-Liangent


On Mon, Apr 7, 2014 at 10:44 PM, Jeremy Baron jer...@tuxmachine.com wrote:

 On Apr 7, 2014 10:36 AM, Dan Andreescu dandree...@wikimedia.org wrote:
  So you'd probably have to
  redirect mediawiki to write files somewhere else more permanent.  Google
  Drive seems like a decent place but last time I tried, integrating Drive
  with App Engine was silly hard.  This might have changed since I haven't
  tried for a few years.

 Well Google now has an s3 clone too.  But I was under the impression that
 you can't upload to an appengine host at all? i.e. you need to have the
 POST go directly from web client to the s3 or similar service and then back
 to appengine when the upload is done? (or use ajax upload?)

 Anyway, if you were buffering in RAM you would have to test for/deal with
 problems with large files.

 Sorry I forgot the thread yesterday. Will write more soon.

 -Jeremy
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikitech-l] Caching with Varnish

2014-03-28 Thread Liangent
No, they were deliberately made not cached, because otherwise, in the event
that some sensitive data got oversighted, it might remain accessible
from cached data, and there would even be no way to make it go away.
On Mar 29, 2014 11:40 AM, Brian Wolff bawo...@gmail.com wrote:

 On 3/28/14, Shawn Jones sj...@cs.odu.edu wrote:
  Hi,
 
  As part of our extension testing, we've set up varnish in accordance
  with http://www.mediawiki.org/wiki/Manual:Varnish_caching
 
  One of the things we've noticed is that our oldid URIs are cached,
  whereas Wikipedia doesn't seem to cache those pages.
 
  Is there a reason why Wikipedia doesn't do this?  Is there some
  threshold that Wikipedia uses for caching?
 
  Thanks in advance,
 
  Shawn M. Jones
  Graduate Research Assistant
  Department of Computer Science
  Old Dominion University
 
  ___
  Wikitech-l mailing list
  Wikitech-l@lists.wikimedia.org
  https://lists.wikimedia.org/mailman/listinfo/wikitech-l

 I think your caching is set up incorrectly. MediaWiki does not (afaik)
 send an smaxage caching header for requests with an oldid in them. See
 the $output->setSquidMaxage( $wgSquidMaxage ); line in
 MediaWiki::performAction (line 425 of includes/Wiki.php). The caching
 headers are only sent if the url is in the list of urls that can be
 purged (basically normal page views and history page views). If other
 pages are being cached, it probably means all pages are being cached
 for you, which is not a good thing and will cause problems, since
 there are some pages that really should not be cached.

 In the case of oldid urls, it may make sense for us to send caching
 headers with oldid urls, since they do not change (excluding of course
 oldid's that don't exist)
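
 A minimal sketch of what that could look like (hypothetical, not what core
 actually does; names as in the surrounding code):

  // Hypothetical sketch for MediaWiki::performAction():
  // old revisions are immutable, so permalink (oldid) views
  // could get the same caching headers as normal page views.
  if ( $request->getInt( 'oldid' ) > 0 ) {
      $output->setSquidMaxage( $wgSquidMaxage );
  }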

 --bawolff

 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l
___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [WikimediaMobile] Apps Feedback Email Addresses

2014-03-27 Thread Liangent
Does this mean mails sent to OTRS before this mail got lost?


On Thu, Mar 27, 2014 at 5:43 AM, Adam Baso ab...@wikimedia.org wrote:

 The OTRS queues have been created and the Google Groups removed.


 On Tue, Mar 18, 2014 at 2:29 PM, Yuvi Panda yuvipa...@gmail.com wrote:

 As an update, I asked for creation of relevant queues on OTRS. Will
 keep mobile-l updated.

 Thanks for your suggestions!


 --
 Yuvi Panda T
 http://yuvi.in/blog

 ___
 Mobile-l mailing list
 Mobile-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/mobile-l



 ___
 Mobile-l mailing list
 Mobile-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/mobile-l


___
Mobile-l mailing list
Mobile-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mobile-l


[MediaWiki-commits] [Gerrit] Add release note for removal of preference noconvertlink - change (mediawiki/core)

2014-03-11 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/118066

Change subject: Add release note for removal of preference noconvertlink
..

Add release note for removal of preference noconvertlink

Follow up 333bf3ae5b412fae1e4f57a62a220c941ef50536.

Change-Id: I7a3e391f56d2e2839b3210d79a9f5b630f0f6fed
---
M RELEASE-NOTES-1.23
1 file changed, 1 insertion(+), 0 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/66/118066/1

diff --git a/RELEASE-NOTES-1.23 b/RELEASE-NOTES-1.23
index 344c967..fb12acd 100644
--- a/RELEASE-NOTES-1.23
+++ b/RELEASE-NOTES-1.23
@@ -278,6 +278,7 @@
   table.mw-prefixindex-list-table to avoid duplicate ids when the special page
   is transcluded.
 * (bug 62198) window.$j has been deprecated.
+* Preference "Disable link title conversion" was removed.
 
  Removed classes 
 * FakeMemCachedClient (deprecated in 1.18)

-- 
To view, visit https://gerrit.wikimedia.org/r/118066
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I7a3e391f56d2e2839b3210d79a9f5b630f0f6fed
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Create plural rules for variants of zh (Chinese) - change (mediawiki/core)

2014-03-02 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/116278

Change subject: Create plural rules for variants of zh (Chinese)
..

Create plural rules for variants of zh (Chinese)

Change-Id: I50e28f601944955ac85e0a847941d7b4824e51e3
---
M languages/data/plurals-mediawiki.xml
1 file changed, 4 insertions(+), 0 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/78/116278/1

diff --git a/languages/data/plurals-mediawiki.xml 
b/languages/data/plurals-mediawiki.xml
index aafc393..6f711ca 100644
--- a/languages/data/plurals-mediawiki.xml
+++ b/languages/data/plurals-mediawiki.xml
@@ -41,5 +41,9 @@
 		<pluralRule count="few">v = 0 and i % 10 = 2..4 and i % 100 != 12..14 or f % 10 = 2..4 and f % 100 != 12..14 @integer 2~4, 22~24, 32~34, 42~44, 52~54, 62, 102, 1002, … @decimal 0.2~0.4, 1.2~1.4, 2.2~2.4, 3.2~3.4, 4.2~4.4, 5.2, 10.2, 100.2, 1000.2, …</pluralRule>
 		<pluralRule count="other"> @integer 0, 5~19, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0, 0.5~1.0, 1.5~2.0, 2.5~2.7, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …</pluralRule>
 	</pluralRules>
+	<!-- Copied from zh -->
+	<pluralRules locales="zh-hans zh-hant zh-cn zh-hk zh-mo zh-my zh-sg zh-tw">
+		<pluralRule count="other"> @integer 0~15, 100, 1000, 10000, 100000, 1000000, … @decimal 0.0~1.5, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …</pluralRule>
+	</pluralRules>
 </plurals>
 </supplementalData>

-- 
To view, visit https://gerrit.wikimedia.org/r/116278
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I50e28f601944955ac85e0a847941d7b4824e51e3
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Wikimania-l] Question for the WM 2014 London team

2014-02-08 Thread Liangent
On Mon, Jan 13, 2014 at 9:13 AM, Ellie Young eyo...@wikimedia.org wrote:

 I have already brought this up with them and waiting to hear back (first
 response didn't sound like there was much we can do frankly...)   My
 comment about the spring is that is when we will have  a guide for our
 attendees who need visas.  They  said it would be ready in the next couple
 of months.  We won't be notifying people about their scholarships until
 April.

 And while we are on this subject, I want to reiterate  that we expect
 individuals to cover their own visa expenses as well as contacting the
 British Consulate on their own or through a visa service in their home
 country. The UK accepts applications for visas 90 days prior to the
 expected date of entering the UK (applying before doesn't do any good.)


I feel this somehow goes against our general principle of giving people
from all around the world an equal opportunity. We don't want to classify
people as "no visa needed", "visa required with a simple application
process" or "visa required with requirements which are difficult to satisfy"
for access to Wikimania, but the British government does, and usually people
from a less developed country face a stricter visa policy. It would be
better to close this natural gap between applicants based on their country
of origin first, and if our budget disallows that, to do something that is
equal for all applicants.

-Liangent


 Ellie





 On Jan 12, 2014, at 3:42 PM, aude aude.w...@gmail.com wrote:

 On Sun, Jan 12, 2014 at 5:31 PM, Ellie Young eyo...@wikimedia.org wrote:

 I will be sure to take this up when we are putting together instructions
 this Spring.  UK Immigation has agreed to
 put together a how to apply guide for our event.  I will be sure this
 issue get put to them.


 I wouldn't wait until spring to bring this up with UK immigration.

 It's not too soon to start now.  The process for folks in Iran can take
 quite a while (if similar to the process for Wikimania 2012 in the US) but
 I think it can be successful.

 People in Iran will need to travel (at least once) to a UK consulate in
 Turkey or UAE to apply.


 http://www.ukba.homeoffice.gov.uk/countries/iran/applying/?langname=UK%20English

 Cheers,
 Katie



 Ellie

 WMF Conference Coordinator

 On Jan 12, 2014, at 2:08 PM, Muhammad Yahia shipmas...@gmail.com wrote:

  Hi all,
 
  I received this question about visas for Iranian citizens and am
 passing it along:
 
  As it stands now, UK visa requirements for Iranian citizens is to have
 a bank account with (on average) 1 USD or equivalent in the last six
 months, plus proof of ownership of a property in Iran, the documents have
 to be translated and notarized.
 
  Most of the volunteers interested in getting a scholarship for WM or
 trying to arrange travel on their own are mostly young people who do not
 satisfy either condition. Is there something that the UK team can do about
 that? Can they get confirmation that if they apply these requirements will
 be waived, or something similar?
 
  Thanks!
  --
  Best Regards,
  Muhammad Yahia
  ___
  Wikimania-l mailing list
  Wikimania-l@lists.wikimedia.org
  https://lists.wikimedia.org/mailman/listinfo/wikimania-l


 ___
 Wikimania-l mailing list
 Wikimania-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikimania-l




 --
 @wikimediadc / @wikidata
 ___
 Wikimania-l mailing list
 Wikimania-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikimania-l



 ___
 Wikimania-l mailing list
 Wikimania-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikimania-l


___
Wikimania-l mailing list
Wikimania-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikimania-l


[MediaWiki-commits] [Gerrit] Remove user preference noconvertlink - change (mediawiki/core)

2014-02-06 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/111968

Change subject: Remove user preference noconvertlink
..

Remove user preference noconvertlink

This toggle was introduced in 8d06ad6e, but the most useful feature
there (disabling <h1> conversion on a per-user basis) has been dropped
due to cache fragmentation. The only remaining part is not quite useful
and can be covered by the URL parameter linkconvert=no.

Change-Id: I12f2cdc9b0d44d6e47487b14fa8ef010de5c94a7
---
M includes/DefaultSettings.php
M includes/Preferences.php
M languages/LanguageConverter.php
M languages/messages/MessagesEn.php
4 files changed, 1 insertion(+), 12 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/68/111968/1

diff --git a/includes/DefaultSettings.php b/includes/DefaultSettings.php
index 9264947..38cf735 100644
--- a/includes/DefaultSettings.php
+++ b/includes/DefaultSettings.php
@@ -3994,7 +3994,6 @@
 	'minordefault' => 0,
 	'newpageshidepatrolled' => 0,
 	'nickname' => '',
-	'noconvertlink' => 0,
 	'norollbackdiff' => 0,
 	'numberheadings' => 0,
 	'previewonfirst' => 0,
diff --git a/includes/Preferences.php b/includes/Preferences.php
index 04e9114..9ced6bc 100644
--- a/includes/Preferences.php
+++ b/includes/Preferences.php
@@ -374,14 +374,6 @@
'section' = 'personal/i18n',
'help-message' = 
'prefs-help-variant',
);
-
-   if ( !$wgDisableTitleConversion ) {
-   
$defaultPreferences['noconvertlink'] = array(
-   'type' = 'toggle',
-   'section' = 
'personal/i18n',
-   'label-message' = 
'tog-noconvertlink',
-   );
-   }
} else {

$defaultPreferences[variant-$langCode] = array(
'type' = 'api',
diff --git a/languages/LanguageConverter.php b/languages/LanguageConverter.php
index bb5b49f..7231477 100644
--- a/languages/LanguageConverter.php
+++ b/languages/LanguageConverter.php
@@ -768,8 +768,7 @@
( $isredir == 'no'
|| $action == 'edit'
|| $action == 'submit'
-   || $linkconvert == 'no'
-			|| $wgUser->getOption( 'noconvertlink' ) == 1 ) ) ) {
+   || $linkconvert == 'no' ) ) ) {
return;
}
 
diff --git a/languages/messages/MessagesEn.php 
b/languages/messages/MessagesEn.php
index 300792a..b9d368c 100644
--- a/languages/messages/MessagesEn.php
+++ b/languages/messages/MessagesEn.php
@@ -694,7 +694,6 @@
 'tog-ccmeonemails'    => 'Send me copies of emails I send to other users',
 'tog-diffonly'        => 'Do not show page content below diffs',
 'tog-showhiddencats'  => 'Show hidden categories',
-'tog-noconvertlink'   => 'Disable link title conversion', # only translate this message to other languages if you have to change it
 'tog-norollbackdiff'  => 'Omit diff after performing a rollback',
 'tog-useeditwarning'  => 'Warn me when I leave an edit page with unsaved changes',
 'tog-prefershttps'    => 'Always use a secure connection when logged in',

-- 
To view, visit https://gerrit.wikimedia.org/r/111968
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I12f2cdc9b0d44d6e47487b14fa8ef010de5c94a7
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] (bug 57401) Fix padding values of div#content - change (mediawiki/core)

2014-02-05 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/111454

Change subject: (bug 57401) Fix padding values of div#content
..

(bug 57401) Fix padding values of div#content

Bug: 57401
Change-Id: I9633a6f5f8b77fe7ac98eba2afd3488219946bdd
---
M skins/vector/screen-hd.less
M skins/vector/variables.less
2 files changed, 2 insertions(+), 2 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/54/111454/1

diff --git a/skins/vector/screen-hd.less b/skins/vector/screen-hd.less
index 2683a21..5a1fc05 100644
--- a/skins/vector/screen-hd.less
+++ b/skins/vector/screen-hd.less
@@ -2,7 +2,7 @@
 
 div#content {
margin-left: 11em;
-   padding: 1.5em 1.5em 1.5em 1.75em;
+   padding: 1.25em 1.5em 1.5em 1.5em;
 }
 #p-logo {
left: @menu-main-logo-left;
diff --git a/skins/vector/variables.less b/skins/vector/variables.less
index c2d0b92..542ffe7 100644
--- a/skins/vector/variables.less
+++ b/skins/vector/variables.less
@@ -9,7 +9,7 @@
 @content-font-color: black;
 @content-font-size: 0.8em;
 @content-line-height: 1.5em;
-@content-padding: 1.25em 1.5em 1.5em 1.5em;
+@content-padding: 1em;
 @content-heading-font-size: 1.6em;
 @content-heading-font-family: sans-serif;
 @body-background-color: #fff;

-- 
To view, visit https://gerrit.wikimedia.org/r/111454
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I9633a6f5f8b77fe7ac98eba2afd3488219946bdd
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Wikitech-l] How to retrieve the page, execute some time expensive operation and edit the page ONLY if it wasn't changed meanwhile

2014-01-25 Thread Liangent
On Thu, Jan 23, 2014 at 9:22 AM, Matthew Flaschen
<mflasc...@wikimedia.org> wrote:

 On 01/22/2014 12:35 PM, Petr Bena wrote:

 this explanation should be in the documentation ;)

 anyway I guess I need to use both of them?


 basetimestamp (the timestamp of the revision your edit is based on) should
 be sufficient.  I might be missing something, but I can't think of a
 scenario where:

 1. Get the ID and timestamp of the last revision.
 2. Get the text of that revision.
 3. Do long-running computation based on that text.
 4. POST an edit (resulting from that computation) with basetimestamp set
 to the timestamp obtained in #1.

 will cause a race condition.


IIRC there'll still be some race conditions, from a program's point of view,
due to that smart merge:

1. User A and User B fetch text;
2. User A moves paragraph 1 after paragraph 10 in their copy;
3. User B moves paragraph 1 after paragraph 20 in their copy;
4. User A and User B upload their modified text. The order of uploading
actions is not critical;
5. Result: both edits get accepted; paragraph 1 gets duplicated after
paragraphs 10 and 20 and removed from its original position.

just like what we do in the Web UI. The key is that we want to detect any
mid-air edits and abort in that case, instead of trying to have my edit
merged into any mid-air revisions.
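
For reference, a conflict-detecting save then looks roughly like this
(parameter names as documented for action=edit; values are placeholders):

 api.php?action=edit
     title=<title from step 1>
     text=<new text>
     basetimestamp=<timestamp of the revision fetched in step 1>
     starttimestamp=<time the text was fetched>
     token=<edit token>

Note that with basetimestamp MediaWiki will still try to merge
non-overlapping changes; a client that wants to abort on *any* intervening
edit can additionally re-check the latest revision ID just before saving and
compare it with the one from step 1 -- which is racy in itself, and that is
exactly the point above.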

-Liangent



 Matthew Flaschen



 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikitech-l] Status of the new PDF Renderer

2014-01-25 Thread Liangent
On Sat, Jan 25, 2014 at 6:13 PM, Gerard Meijssen
<gerard.meijs...@gmail.com> wrote:

 Hoi Liangent,

 Does [1] this answer your question ? It is a page they use for testing.
 Thanks,
 Gerard

 http://zh.wikipedia.org/wiki/納粹德國海軍


It's mentioned as a test case, but where's the output (expected and
actual) for that article?

-Liangent




 On 25 January 2014 08:56, Liangent liang...@gmail.com wrote:

  I didn't look at the new renderer carefully, but I guess it's a
  Parsoid-based one. Hope that the language conversion syntax issue in PDF
  output can be resolved together with Parsoid in the future, which blocks
  the deployment of PDF output on zhwiki currently. See
  https://bugzilla.wikimedia.org/show_bug.cgi?id=34919 .
 
  -Liangent
 
 
  On Fri, Jan 24, 2014 at 2:38 AM, Matthew Walker mwal...@wikimedia.org
  wrote:
 
   Marco,
  
   Is it also possible to set this up behind a firewall?
  
   Yes; with the caveat that your wiki must be running Parsoid. It is also
   theoretically possible to still use Print on Demand services behind a
   firewall as we can POST a zip bundle to them -- likely however you'd
 just
   disable that functionality and I'm not sure our new bundle format is
   entirely compatible with the old bundle format...
  
   If you want to set this up locally; I can help with that if you jump on
  IRC
   #mediawiki-pdfhack on freenode. I'm mwalker.
  
   ~Matt Walker
   Wikimedia Foundation
   Fundraising Technology Team
   ___
   Wikitech-l mailing list
   Wikitech-l@lists.wikimedia.org
   https://lists.wikimedia.org/mailman/listinfo/wikitech-l
  
  ___
  Wikitech-l mailing list
  Wikitech-l@lists.wikimedia.org
  https://lists.wikimedia.org/mailman/listinfo/wikitech-l
 
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

[MediaWiki-commits] [Gerrit] Fix various comments from 84a2f570. - change (mediawiki/core)

2014-01-24 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/109459


Change subject: Fix various comments from 84a2f570.
..

Fix various comments from 84a2f570.

Change-Id: I03f7778bc93a255475aa132a09c6da15decd1549
---
M includes/Collation.php
M includes/utils/ArrayUtils.php
2 files changed, 14 insertions(+), 8 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/59/109459/1

diff --git a/includes/Collation.php b/includes/Collation.php
index b51256b..d2a5797 100644
--- a/includes/Collation.php
+++ b/includes/Collation.php
@@ -514,7 +514,7 @@
 * Do a binary search, and return the index of the largest item that 
sorts
 * less than or equal to the target value.
 *
-* @deprecated in 1.22; use ArrayUtils::findLowerBound() instead
+* @deprecated in 1.23; use ArrayUtils::findLowerBound() instead
 *
 * @param array $valueCallback A function to call to get the value with
 * a given array index.
@@ -528,7 +528,7 @@
 * sorts before all items.
 */
function findLowerBound( $valueCallback, $valueCount, 
$comparisonCallback, $target ) {
-   wfDeprecated( __METHOD__, '1.22' );
+   wfDeprecated( __METHOD__, '1.23' );
return ArrayUtils::findLowerBound( $valueCallback, $valueCount, 
$comparisonCallback, $target );
}
 
diff --git a/includes/utils/ArrayUtils.php b/includes/utils/ArrayUtils.php
index 802cdbc..037663c 100644
--- a/includes/utils/ArrayUtils.php
+++ b/includes/utils/ArrayUtils.php
@@ -22,6 +22,8 @@
 
 /**
  * A collection of static methods to play with arrays.
+ *
+ * @since 1.21
  */
 class ArrayUtils {
/**
@@ -94,13 +96,15 @@
 * Do a binary search, and return the index of the largest item that 
sorts
 * less than or equal to the target value.
 *
+* @since 1.23
+*
 * @param array $valueCallback A function to call to get the value with
 * a given array index.
-* @param $valueCount int The number of items accessible via 
$valueCallback,
+* @param int $valueCount The number of items accessible via 
$valueCallback,
 * indexed from 0 to $valueCount - 1
-* @param $comparisonCallback array A callback to compare two values, 
returning
+* @param array $comparisonCallback A callback to compare two values, 
returning
 * -1, 0 or 1 in the style of strcmp().
-* @param $target string The target value to find.
+* @param string $target The target value to find.
 *
 * @return int|bool The item index of the lower bound, or false if the 
target value
 * sorts before all items.
@@ -142,9 +146,11 @@
 *
 * Note: empty arrays are removed.
 *
-* @param $array1 array The array to compare from
-* @param $array2 array An array to compare against
-* @param ... array More arrays to compare against
+* @since 1.23
+*
+* @param array $array1 The array to compare from
+* @param array $array2 An array to compare against
+* @param array ... More arrays to compare against
 * @return array An array containing all the values from array1
 *   that are not present in any of the other arrays.
 */
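
For context, a hypothetical usage sketch of the method documented above (not
part of this change):

  $haystack = array( 1, 3, 5, 7 );
  $index = ArrayUtils::findLowerBound(
      function ( $i ) use ( $haystack ) { return $haystack[$i]; },
      count( $haystack ),
      function ( $a, $b ) { return $a - $b; }, // strcmp()-style numeric compare
      4
  );
  // $index is 1: 3 is the largest item that sorts <= 4.
  // A target smaller than all items yields false instead.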

-- 
To view, visit https://gerrit.wikimedia.org/r/109459
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I03f7778bc93a255475aa132a09c6da15decd1549
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Wikitech-l] Status of the new PDF Renderer

2014-01-24 Thread Liangent
I didn't look at the new renderer carefully, but I guess it's a
Parsoid-based one. I hope the language conversion syntax issue in PDF
output, which currently blocks the deployment of PDF output on zhwiki, can
be resolved together with Parsoid in the future. See
https://bugzilla.wikimedia.org/show_bug.cgi?id=34919 .
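
(For anyone unfamiliar with the syntax in question: zhwiki pages embed manual
conversion rules such as -{zh-hans:计算机;zh-tw:電腦}-, which the renderer has
to resolve per language variant rather than emit verbatim.)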

-Liangent


On Fri, Jan 24, 2014 at 2:38 AM, Matthew Walker <mwal...@wikimedia.org> wrote:

 Marco,

 Is it also possible to set this up behind a firewall?

 Yes; with the caveat that your wiki must be running Parsoid. It is also
 theoretically possible to still use Print on Demand services behind a
 firewall as we can POST a zip bundle to them -- likely however you'd just
 disable that functionality and I'm not sure our new bundle format is
 entirely compatible with the old bundle format...

 If you want to set this up locally; I can help with that if you jump on IRC
 #mediawiki-pdfhack on freenode. I'm mwalker.

 ~Matt Walker
 Wikimedia Foundation
 Fundraising Technology Team
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikitech-l] How to identify the MW version from the git tree?

2014-01-06 Thread Liangent
And to check a specific commit more easily, expand the "Included in" section
on its Gerrit page.
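
For checking from a clone instead, a rough equivalent is:

 git tag --contains <commit-sha>
 git branch -r --contains <commit-sha>

The first lists release tags containing the commit, the second the remote
branches (including the wmf deployment branches) that contain it.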

-Liangent


On Tue, Jan 7, 2014 at 12:58 AM, Chad innocentkil...@gmail.com wrote:

 There are branches for the current versions and tags for the older ones.
 Clone mediawiki/core, and "git tag" and "git branch -r" will show them.

 -Chad
 On Jan 6, 2014 8:47 AM, Strainu strain...@gmail.com wrote:

  Hi,
 
  How can I find which commits entered a certain MediaWiki build
  (especially the -wmfX ones)? Are there any tags in the repository or
  is there a page that holds that information?
 
  Thanks,
 Strainu
 
  ___
  Wikitech-l mailing list
  Wikitech-l@lists.wikimedia.org
  https://lists.wikimedia.org/mailman/listinfo/wikitech-l
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikidata-tech] locally run lua scripts

2014-01-05 Thread Liangent
On Sun, Jan 5, 2014 at 9:34 PM, Voß, Jakob jakob.v...@gbv.de wrote:

  If what you're executing is not something huge, doesn't require (m)any
  external dependencies, and doesn't have user interaction, you can try
  to (ab)use Scribunto's console AJAX interface:

 Thanks, I used your example to set up a git repository with notes. I
 planned to clone the full module-namespace with git,


Huh, this makes me think of a git-mediawiki tool (compare with git-svn).

There's already an (inactive) wikipediafs
http://wikipediafs.sourceforge.net/

Just a quick idea. Nothing exists in reality.

-Liangent


 so modules in
 MediaWiki can be managed (and locally tested) with git as well:

 https://github.com/nichtich/wikidata-lua-client

 Jakob
 ___
 Wikidata-tech mailing list
 Wikidata-tech@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikidata-tech

___
Wikidata-tech mailing list
Wikidata-tech@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-tech


[MediaWiki-commits] [Gerrit] Updated location of zhtable in comment - change (mediawiki/core)

2013-12-25 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/103655


Change subject: Updated location of zhtable in comment
..

Updated location of zhtable in comment

Follow up I239483e6cdf7d412be01ae8bd45978481c5dd518.

Change-Id: Ia909b36ed8b996057ba4e7966cac4d29ee3836f4
---
M maintenance/language/zhtable/Makefile.py
1 file changed, 1 insertion(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/55/103655/1

diff --git a/maintenance/language/zhtable/Makefile.py 
b/maintenance/language/zhtable/Makefile.py
index 7e19794..ac09246 100755
--- a/maintenance/language/zhtable/Makefile.py
+++ b/maintenance/language/zhtable/Makefile.py
@@ -352,7 +352,7 @@
 /**
  * Simplified / Traditional Chinese conversion tables
  *
- * Automatically generated using code and data in includes/zhtable/
+ * Automatically generated using code and data in maintenance/language/zhtable/
  * Do not modify directly!
  *
  * @file

-- 
To view, visit https://gerrit.wikimedia.org/r/103655
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ia909b36ed8b996057ba4e7966cac4d29ee3836f4
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Wikitech-l] FWD: [Bug 58236] New: No longer allow gadgets to be turned on by default for all users on Wikimedia sites

2013-12-09 Thread Liangent
MediaWiki:Common.js can also create disagreement, and its defaults can
change without notice.

Actually, what I sometimes do is move code from MediaWiki:Common.js to
default gadgets, so it can be disabled per user (for example, some users are
on a slow computer or network), and at the same time this makes maintenance
easier for developers, because gadgets can be modularized,
instead of living in one big JavaScript and CSS file (Common.js/css).
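
As an illustration, a default gadget is a one-line entry in
MediaWiki:Gadgets-definition along these lines (hypothetical gadget name; the
"default" flag is what makes it opt-out rather than opt-in):

 * exampleGadget[ResourceLoader|default]|exampleGadget.js|exampleGadget.css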

-Liangent


On Tue, Dec 10, 2013 at 5:02 AM, K. Peachey p858sn...@gmail.com wrote:

 For wider discussion

 ---
 From: bugzilla-daemon at wikimedia.org
 Subject: [Bug 58236] New: No longer allow gadgets to be turned on by
 default for all users on Wikimedia
 sites
 http://news.gmane.org/find-root.php?message_id=%3cbug%2d58236%2d3%40https.bugzilla.wikimedia.org%2f%3e
 
 Newsgroups: gmane.org.wikimedia.mediawiki.bugs
 http://news.gmane.org/gmane.org.wikimedia.mediawiki.bugs
 Date: 2013-12-09 20:41:41 GMT (19 minutes ago)

 https://bugzilla.wikimedia.org/show_bug.cgi?id=58236

Web browser: ---
 Bug ID: 58236
Summary: No longer allow gadgets to be turned on by default for
 all users on Wikimedia sites
Product: Wikimedia
Version: wmf-deployment
   Hardware: All
 OS: All
 Status: NEW
   Severity: normal
   Priority: Unprioritized
  Component: Site requests
   Assignee: wikibugs-l at lists.wikimedia.org
   Reporter: jared.zimmerman at wikimedia.org
 CC: benapetr at gmail.com,
 bugzilla+org.wikimedia at tuxmachine.com,
 dereckson at espace-win.org, greg at wikimedia.org
 ,
 tomasz at twkozlowski.net, wikimedia.bugs at
 snowolf.eu
 Classification: Unclassified
Mobile Platform: ---

 Gadgets being turned on for all site users (including readers) can cause a
 confusing user experience, especially when there is some disagreement and
 defaults change without notice (readers are rarely if ever part of these
 discussions)

 Move to a model where gadgets are per user rather than part of a default
 experience for users

 --
 You are receiving this mail because:
 You are the assignee for the bug.
 You are on the CC list for the bug.
 ___
 Wikibugs-l mailing list
 Wikibugs-l at
 lists.wikimedia.orghttps://lists.wikimedia.org/mailman/listinfo/wikibugs-l
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l
___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

Re: [Wikitech-l] API edit call in a maintenance script

2013-12-09 Thread Liangent
On Tue, Dec 10, 2013 at 7:13 AM, Toni Hermoso Pulido <toni...@cau.cat> wrote:

 Hello,

 I'm trying to perform an API edit call in a maintenance script using this
 example in MW 1.19.9
 http://www.mediawiki.org/wiki/API:Calling_internally


 $user = User::newFromId( 1 ); // Using WikiSysop
 $page = WikiPage::newFromID( $id );
 $titleText = $page->getTitle()->getPrefixedText();

 $text = ...;

 global $wgRequest;

 $req = new DerivativeRequest(
     $wgRequest,
     array(
         'action' => 'edit',
         'title' => $titleText,
         'text' => $text,
         'token' => $user->editToken(),
     ), true);

 $api = new ApiMain( $req, true );

 $api->execute();

 However, I get this problem:
 Unexpected non-MediaWiki exception encountered, of type UsageException
 badtoken: Invalid token

 Any idea what can be wrong?


The token is not used to look up the user. You need to call
$api->getContext()->setUser( $user ); before $api->execute();.
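
Putting it together, a minimal sketch of the adjusted call (the same code as
above, with the context user set before execution):

 $user = User::newFromId( 1 ); // using WikiSysop
 $req = new DerivativeRequest(
     $wgRequest,
     array(
         'action' => 'edit',
         'title' => $titleText,
         'text' => $text,
         'token' => $user->editToken(),
     ), true);
 $api = new ApiMain( $req, true );
 $api->getContext()->setUser( $user ); // the token is validated against this user
 $api->execute();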

-Liangent



 P.D.: I already use WikiPage::doEdit() successfully.

 --
 Toni Hermoso Pulido
 http://www.cau.cat

 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l
___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

[MediaWiki-commits] [Gerrit] Do title conversion on action=edit if redlink=1 exists - change (mediawiki/core)

2013-12-05 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/99335


Change subject: Do title conversion on action=edit if redlink=1 exists
..

Do title conversion on action=edit if redlink=1 exists

Bug: 33231
Change-Id: I33c3c9df4ff2215710bacb696b64bb4291dda24e
---
M languages/LanguageConverter.php
1 file changed, 3 insertions(+), 0 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/35/99335/1

diff --git a/languages/LanguageConverter.php b/languages/LanguageConverter.php
index cdaab2c..9c64801 100644
--- a/languages/LanguageConverter.php
+++ b/languages/LanguageConverter.php
@@ -769,6 +769,9 @@
$wgUser;
 		$isredir = $wgRequest->getText( 'redirect', 'yes' );
 		$action = $wgRequest->getText( 'action' );
+		if ( $action == 'edit' && $wgRequest->getBool( 'redlink' ) ) {
+			$action = 'view';
+		}
 		$linkconvert = $wgRequest->getText( 'linkconvert', 'yes' );
$disableLinkConversion = $wgDisableLangConversion
|| $wgDisableTitleConversion;

-- 
To view, visit https://gerrit.wikimedia.org/r/99335
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I33c3c9df4ff2215710bacb696b64bb4291dda24e
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Add fallback languages for cdo, hak and nan - change (mediawiki/core)

2013-12-02 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/98496


Change subject: Add fallback languages for cdo, hak and nan
..

Add fallback languages for cdo, hak and nan

Change-Id: I2d22bfa82e94c86f19758e5c6bb2706e274d44a0
---
M languages/messages/MessagesCdo.php
M languages/messages/MessagesHak.php
M languages/messages/MessagesNan.php
3 files changed, 6 insertions(+), 0 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/96/98496/1

diff --git a/languages/messages/MessagesCdo.php 
b/languages/messages/MessagesCdo.php
index f91ca5d..bd27279 100644
--- a/languages/messages/MessagesCdo.php
+++ b/languages/messages/MessagesCdo.php
@@ -11,6 +11,8 @@
  * @author Yejianfei
  */
 
+$fallback = 'nan, zh-hant';
+
 $namespaceNames = array(
 	NS_MEDIA    => '媒體',
 	NS_SPECIAL  => '特殊',
diff --git a/languages/messages/MessagesHak.php 
b/languages/messages/MessagesHak.php
index a2411b4..34097c7 100644
--- a/languages/messages/MessagesHak.php
+++ b/languages/messages/MessagesHak.php
@@ -16,6 +16,8 @@
  * @author Xiaomingyan
  */
 
+$fallback = 'zh-hant';
+
 $messages = array(
 # User preference toggles
 'tog-underline' = '鏈接加底線:',
diff --git a/languages/messages/MessagesNan.php 
b/languages/messages/MessagesNan.php
index 432caf8..bbb922c 100644
--- a/languages/messages/MessagesNan.php
+++ b/languages/messages/MessagesNan.php
@@ -12,6 +12,8 @@
  * @author Kaihsu
  */
 
+$fallback = 'cdo, zh-hant';
+
 $datePreferences = array(
'default',
'ISO 8601',

-- 
To view, visit https://gerrit.wikimedia.org/r/98496
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I2d22bfa82e94c86f19758e5c6bb2706e274d44a0
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[Wikidata-l] Clearly define {{#property: }} parser function

2013-11-08 Thread Liangent
Hello,

I submitted bug 56763[1] just now, then realized that this is a part of a
larger issue: what is {{#property: }} expected to output.

Theoretically, there could be two extreme answers: raw data as a string, or
whatever wikitext renders as what *readers* love best.

I guess the second answer is the goal, because of ValueFormatter,
SnakFormatter::FORMAT_WIKI and $wgContLang->listToText() on final output in
our code, but sadly this is never explicitly defined anywhere, including the
MW.org documentation[2] and development notes[3].

With this goal, I imagine the output for each datatype should be:

* Item: WikiLink to the linked article, or maybe the WikibaseRepo item page
when there's no such article, with the label as link text
* Commons media: ImageLink to the specified media file (size and other
params TBD)
* String: Wikitext-escaped form of the string data
* Time / Globe coordinate: See bug 48937[4] and bug 49387[5]
* URL: ExternalLink to the specified URL, see bug 56763[1]

However due to lack of the specification and the current behaviors of
{{#property: }} which is a mix of raw data and fully-constructed wikitext,
template writers have already invented various usages:

* [[{{#property:item-property}}]]
* [[File:{{#property:commons-media-property}}|thumb]]
* [{{#property:url-property}}
{{url-protocol-stripper|{{#property:url-property]
* {{#ifeq:{{#property:commons-media-property}}|A.png|B|C}}

and obviously, work towards the goal described above breaks them (of course,
what they want is still doable with Lua); less obviously, they're
already broken sometimes when multiple statements exist, which are imploded
using $wgContLang->listToText().

Is there any plan here, and what's the real goal of {{#property: }} ?

[1] https://bugzilla.wikimedia.org/show_bug.cgi?id=56763
[2] https://www.mediawiki.org/wiki/Wikibase_Client#Data_transclusion
[3]
https://meta.wikimedia.org/wiki/Wikidata/Notes/Inclusion_syntax#Accessing_Item_Data
[4] https://bugzilla.wikimedia.org/show_bug.cgi?id=48937
[5] https://bugzilla.wikimedia.org/show_bug.cgi?id=49387

-Liangent
___
Wikidata-l mailing list
Wikidata-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikidata-l


Re: [Wikitext-l] Problematic url http://parsoid.wmflabs.org/zh//dev/null

2013-11-04 Thread Liangent
This doesn't seem specific to this page or to zhwiki. Any page whose title
starts with a slash, on any wiki, triggers this.

-Liangent


On Mon, Nov 4, 2013 at 3:59 AM, Emmanuel Engelhart kel...@kiwix.org wrote:

 Hi,

 http://parsoid.wmflabs.org/zh//dev/null gives an error:

 TypeError: Cannot read property 'ns' of undefined at
 Function.Title.fromPrefixedText
 (/data/project/parsoid/js/lib/mediawiki.Title.js:59:40) at
 MWParserEnvironment.reset
 (/data/project/parsoid/js/lib/mediawiki.parser.environment.js:216:20) at
 Function.MWParserEnvironment.getParserEnv
 (/data/project/parsoid/js/lib/mediawiki.parser.environment.js:280:7) at
 parserEnvMw (/data/project/parsoid/js/api/ParserService.js:432:22) at
 callbacks
 (/data/project/parsoid/js/node_modules/express/lib/router/index.js:164:37)
 at interParams (/data/project/parsoid/js/api/ParserService.js:428:2) at
 callbacks
 (/data/project/parsoid/js/node_modules/express/lib/router/index.js:164:37)
 at param
 (/data/project/parsoid/js/node_modules/express/lib/router/index.js:138:11)
 at pass
 (/data/project/parsoid/js/node_modules/express/lib/router/index.js:145:5)
 at
 Router._dispatch
 (/data/project/parsoid/js/node_modules/express/lib/router/index.js:173:5)

 But this should give a valid result like:
 https://zh.wikipedia.org/wiki//dev/null

 Is this normal?

 Regards
 Emmanuel
 --
 Kiwix - Wikipedia Offline  more
 * Web: http://www.kiwix.org
 * Twitter: https://twitter.com/KiwixOffline
 * more: http://www.kiwix.org/wiki/Communication

 ___
 Wikitext-l mailing list
 Wikitext-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitext-l

___
Wikitext-l mailing list
Wikitext-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitext-l


Re: [Wikitech-l] Should MediaWiki CSS prefer non-free fonts?

2013-10-28 Thread Liangent
Btw, be aware of internationalization issues: it's not just that fonts are
usually tied to a (group of) alphabets; even digits can be affected by the
language info of the context they live in.

See [1]: this is the standard English Wikipedia signup screen, and [2]:
with ?uselang=zh-cn added.

[1] http://imagebin.org/275031
[2] http://imagebin.org/275032

-Liangent

On Mon, Oct 28, 2013, S Page sp...@wikimedia.org wrote:

 On Sun, Oct 27, 2013 at 11:11 PM, Faidon Liambotis fai...@wikimedia.org
 wrote:

  On Mon, Oct 28, 2013 at 01:32:30PM +1100, Tim Starling wrote:
 
  Yes, we should prefer to use free software. We should also strive to
  ensure that our support for users on non-free platforms is optimal, as
  long as that doesn't negatively impact on users of free platforms. So
  I don't think it is a problem to specify non-free fonts in font lists.
 
 
  It's a bit more complicated than that. Linux distros ship with fontconfig
  (which is used by Cairo, which in turn is used by at least Firefox).
  Fontconfig aliases fonts via a set of rules and the default rules map
  popular non-free fonts to their free metric equivalents, or generics.
 e.g.
  $ fc-match Helvetica
  n019003l.pfb: Nimbus Sans L Regular
  ...
 
  This effectively means that, for Linux, having the free fonts at the end
  of the CSS font selection is probably[1] a no-op: the browser will never
  fall back via the CSS, but match the first font on the list to an
 equivalent
  found on the system via fontconfig's fallback mechanisms.

Almost. fontconfig will use the first font in the font stack that has a
positive match. "Helvetica Neue" doesn't mean anything (so alone it would
give DejaVu Sans), but the following "Helvetica" has an alias to Nimbus
Sans L with binding=same in /etc/fonts/* , so Firefox uses that.
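
(To see which rule fires for a given stack on a particular machine, one can
compare the output of fc-match 'Helvetica Neue', fc-match Helvetica and
fc-match sans-serif.)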


  It will be an educated guess and possibly do the right thing but it won't
  be what the web designer intended.
 

 For the 2012 Login and Create account form redesign, the web designer
 (Munaf Assaf and others) intended Helvetica Neue for text and Georgia for
 some numbers. fc-match lets free software get close to that intended look.
 The right thing happens! (The Login and Create account forms looked good on
 my Ubuntu for the time when they specified a font stack.[*]) Free OSes
 sometimes improve their supplied fonts and matching rules, so it's possible
 they'll later ship something that matches even better. For example Google's
 new Roboto is a nice Helvetica Neue. Brave users can make the decision
 themselves by hacking /etc/fonts/*.

 This basically strengthens your point: free fonts should be first in the
  list.
 

 Only if the free font looks better.

 [1]: I say probably, because I vaguely remember the interactions between
 Firefox & fontconfig being complicated. Maybe they're being smarter --
 someone should test :)
 
 Firefox works this way. It seems my Chromium prefers Nimbus Sans L even for
 'sans serif'; it could be my setup, or
 https://code.google.com/p/chromium/issues/detail?id=242046  I would love
 to
 know what Android tablets do.

 [*] The local improvement to fonts on those forms made them inconsistent
 with the rest of MediaWiki, so their font stack was removed. The VectorBeta
 feature applies better typography everywhere. It's really nice IMO.

 --
 =S Page  Features engineer
 ___
 Wikitech-l mailing list
 Wikitech-l@lists.wikimedia.org
 https://lists.wikimedia.org/mailman/listinfo/wikitech-l

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

[MediaWiki-commits] [Gerrit] DO NOT MERGE - change (mediawiki/core)

2013-10-25 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/91943


Change subject: DO NOT MERGE
..

DO NOT MERGE

Change-Id: Iaceecb26062266ccd9a1c63c126ff0d5471f7d97
---
M includes/EditPage.php
1 file changed, 0 insertions(+), 6 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/43/91943/1

diff --git a/includes/EditPage.php b/includes/EditPage.php
index d7b2b5e..23bc7c1 100644
--- a/includes/EditPage.php
+++ b/includes/EditPage.php
@@ -912,10 +912,6 @@
$undo = $wgRequest-getInt( 'undo' );
 
 			if ( $undo > 0 && $undoafter > 0 ) {
-				if ( $undo < $undoafter ) {
-					# If they got undoafter and undo round the wrong way, switch them
-					list( $undo, $undoafter ) = array( $undoafter, $undo );
-				}
 
$undorev = Revision::newFromId( $undo );
$oldrev = Revision::newFromId( 
$undoafter );
@@ -924,8 +920,6 @@
# the revisions exist and they were not 
deleted.
# Otherwise, $content will be left 
as-is.
 				if ( !is_null( $undorev ) && !is_null( $oldrev ) &&
-					$undorev->getPage() == $oldrev->getPage() &&
-					$undorev->getPage() == $this->mTitle->getArticleID() &&
 					!$undorev->isDeleted( Revision::DELETED_TEXT ) &&
 					!$oldrev->isDeleted( Revision::DELETED_TEXT ) ) {
 

-- 
To view, visit https://gerrit.wikimedia.org/r/91943
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Iaceecb26062266ccd9a1c63c126ff0d5471f7d97
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent <liang...@gmail.com>

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Fix {{#property: }} calls for missing property on wikis with... - change (mediawiki...Wikibase)

2013-10-18 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/90636


Change subject: Fix {{#property: }} calls for missing property on wikis with 
LanguageConverter
..

Fix {{#property: }} calls for missing property on wikis with LanguageConverter

Change-Id: Iada5fbd8736753622af734e3494d0b3fa0109f68
---
M client/includes/parserhooks/PropertyParserFunction.php
M client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php
2 files changed, 14 insertions(+), 3 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/Wikibase 
refs/changes/36/90636/1

diff --git a/client/includes/parserhooks/PropertyParserFunction.php 
b/client/includes/parserhooks/PropertyParserFunction.php
index bc9a53d..a143ad9 100644
--- a/client/includes/parserhooks/PropertyParserFunction.php
+++ b/client/includes/parserhooks/PropertyParserFunction.php
@@ -80,11 +80,13 @@
 */
public function processRenderedArray( $textArray ) {
// We got arrays, so they must have already checked that 
variants are being used.
-   $text = '-{';
+   $text = '';
 		foreach ( $textArray as $variantCode => $variantText ) {
 			$text .= "$variantCode:$variantText;";
}
-   $text .= '}-';
+   if ( $text !== '' ) {
+   $text = '-{' . $text . '}-';
+   }
 
return $text;
}
@@ -161,7 +163,13 @@
 
foreach ( $variants as $variantCode ) {
$variantLanguage = \Language::factory( $variantCode );
-			$textArray[$variantCode] = $this->renderInLanguage( $propertyLabel, $variantLanguage );
+			$variantText = $this->renderInLanguage( $propertyLabel, $variantLanguage );
+   // LanguageConverter doesn't handle empty strings 
correctly, and it's more difficult
+   // to fix the issue there, as it's using empty string 
as a special value.
+   // Also keeping the ability to check a missing property 
with {{#if: }} is another reason.
+   if ( $variantText !== '' ) {
+   $textArray[$variantCode] = $variantText;
+   }
}
 
return $textArray;
diff --git 
a/client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php 
b/client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php
index 6b49bbd..655e203 100644
--- a/client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php
+++ b/client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php
@@ -95,6 +95,9 @@
				'zh-cn' => 'fo&#60;ob&#62;ar',
				'zh-tw' => 'FO&#60;OB&#62;AR',
			), '-{zh-cn:fo&#60;ob&#62;ar;zh-tw:FO&#60;OB&#62;AR;}-' ),
+			// Don't create -{}- for empty input,
+			// to keep the ability to check a missing property with {{#if: }}.
+			array( \Parser::OT_HTML, array(), '' ),
);
}
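
For reference, the wrapped output uses MediaWiki's LanguageConverter markup. A minimal
standalone sketch of the same logic in PHP (the helper name is hypothetical, not part
of this patch):

	// Build "code:text;" pairs, then wrap them in -{ ... }- only when at least
	// one variant produced output, so an empty result still reads as a missing
	// property to {{#if: }} checks in wikitext.
	function buildVariantMarkup( array $textArray ) {
		$text = '';
		foreach ( $textArray as $variantCode => $variantText ) {
			$text .= "$variantCode:$variantText;";
		}
		return $text === '' ? '' : '-{' . $text . '}-';
	}

	// buildVariantMarkup( array( 'zh-cn' => 'foo', 'zh-tw' => 'FOO' ) )
	// returns '-{zh-cn:foo;zh-tw:FOO;}-'; buildVariantMarkup( array() ) returns ''.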
 

-- 
To view, visit https://gerrit.wikimedia.org/r/90636
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Iada5fbd8736753622af734e3494d0b3fa0109f68
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/extensions/Wikibase
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Remove one hard-coded constant - change (mediawiki...Wikibase)

2013-10-18 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/90643


Change subject: Remove one hard-coded constant
..

Remove one hard-coded constant

Change-Id: Iea8f8fb9c9ec2f2bf41879edaead36deb460e0ff
---
M lib/includes/ClaimGuidGenerator.php
1 file changed, 2 insertions(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/Wikibase 
refs/changes/43/90643/1

diff --git a/lib/includes/ClaimGuidGenerator.php 
b/lib/includes/ClaimGuidGenerator.php
index 4ef3ad3..1231309 100644
--- a/lib/includes/ClaimGuidGenerator.php
+++ b/lib/includes/ClaimGuidGenerator.php
@@ -2,6 +2,7 @@
 
 namespace Wikibase\Lib;
 
+use Wikibase\DataModel\Claim\ClaimGuid;
 use Wikibase\DataModel\Entity\EntityId;
 
 /**
@@ -42,7 +43,7 @@
 * @return string
 */
public function newGuid() {
-		return $this->entityId->getSerialization() . '$' . $this->baseGenerator->newGuid();
+		return $this->entityId->getSerialization() . ClaimGuid::SEPARATOR . $this->baseGenerator->newGuid();
}
 
 }
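
For context, a claim GUID is the entity ID serialization, then ClaimGuid::SEPARATOR
(the '$' that was hard-coded here), then a freshly generated GUID. A hedged
illustration (the constructor shape is assumed from the $entityId field above, and
the UUID is made up):

	// $generator = new ClaimGuidGenerator( new ItemId( 'Q42' ) ); // assumed ctor
	// $generator->newGuid(); // e.g. "Q42$5627445f-43cb-ed6d-3adb-760e85bd17ee"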

-- 
To view, visit https://gerrit.wikimedia.org/r/90643
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Iea8f8fb9c9ec2f2bf41879edaead36deb460e0ff
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/extensions/Wikibase
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


Re: [Wikitech-l] Google Code-in: are you in?

2013-10-15 Thread Liangent
On 10/16/2013, Quim Gil <q...@wikimedia.org> wrote:
> On 10/11/2013 02:59 PM, Quim Gil wrote:
>> Let's discuss and eventually decide our next steps next at
>> Engineering Community Team office hour
>
> Today we decided that we will attempt to apply as Wikimedia to Google
> Code-in by October 28.
>
> There is a lot of work to be done, and by the nature of this program the
> best approach is to have dozens of contributors (you) committing to mentor
> just 1-2 tasks. To have an idea, these tasks should take a couple of hours
> of a skilled contributor to be completed, probably meaning a couple of days
> of a student landing to our project for the first time.
>
> The tasks are organized in 5 areas:
>
> * Code
> * Documentation/Training
> * Outreach/Research
> * Quality Assurance
> * User Interface
>
> Get involved! Propose one task at
> https://www.mediawiki.org/wiki/Google_Code-In


Is Lua / ParserFunctions templating, or conversion of templates to Lua (either
generic work on meta-templates / meta-modules, or work for a specific purpose
requested by a local community, etc.), eligible as a task in the Code or User
Interface category?

-Liangent




> --
> Quim Gil
> Technical Contributor Coordinator @ Wikimedia Foundation
> http://www.mediawiki.org/wiki/User:Qgil

___
Wikitech-l mailing list
Wikitech-l@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/wikitech-l

[MediaWiki-commits] [Gerrit] Initial version of the extension - change (mediawiki...PageLanguage)

2013-10-06 Thread Liangent (Code Review)
Liangent has submitted this change and it was merged.

Change subject: Initial version of the extension
..


Initial version of the extension

Bug: 49588
Change-Id: Ib754e512b6923a043d93c565ea3af9c936409ca3
---
A PageLanguage.body.php
A PageLanguage.i18n.php
A PageLanguage.magic.php
A PageLanguage.php
4 files changed, 135 insertions(+), 0 deletions(-)

Approvals:
  Liangent: Verified; Looks good to me, approved



diff --git a/PageLanguage.body.php b/PageLanguage.body.php
new file mode 100644
index 000..f3f4583
--- /dev/null
+++ b/PageLanguage.body.php
@@ -0,0 +1,69 @@
+<?php
+
+class PageLanguage {
+
+	private static $cache = array();
+
+	public static function onPageContentLanguage( Title $title, &$pageLang ) {
+		if ( isset( self::$cache[$title->getPrefixedDBKey()] ) ) {
+			$pageLang = self::$cache[$title->getPrefixedDBKey()];
+		} elseif ( $title->getArticleID() > 0 ) {
+			$dbr = wfGetDB( DB_SLAVE );
+			$langCode = $dbr->selectField(
+				'page_props', 'pp_value', array(
+					'pp_page' => $title->getArticleID(),
+					'pp_propname' => 'pagelanguage',
+				), __METHOD__
+			);
+
+			if ( $langCode !== false && Language::isValidCode( $langCode ) ) {
+				$pageLang = Language::factory( $langCode );
+			}
+		}
+
+		return true;
+	}
+
+	public static function onParserFirstCallInit( Parser $parser ) {
+		$parser->setFunctionHook( 'pagelanguage', 'PageLanguage::funcPageLanguage', SFH_NO_HASH );
+
+		return true;
+	}
+
+	public static function funcPageLanguage( Parser $parser, $langCode, $uarg = '' ) {
+		static $magicWords = null;
+		if ( is_null( $magicWords ) ) {
+			$magicWords = new MagicWordArray( array( 'pagelanguage_noerror', 'pagelanguage_noreplace' ) );
+		}
+		$arg = $magicWords->matchStartToEnd( $uarg );
+
+		$langCode = trim( $langCode );
+		if ( strlen( $langCode ) === 0 ) {
+			return '';
+		}
+
+		if ( Language::isValidCode( $langCode ) ) {
+			$lang = Language::factory( $langCode );
+		} else {
+			return '<span class="error">' .
+				wfMessage( 'pagelanguage-invalid' )->inContentLanguage()
+				->params( wfEscapeWikiText( $langCode ) )->text() .
+				'</span>';
+		}
+
+		$old = $parser->getOutput()->getProperty( 'pagelanguage' );
+		if ( $old === false || $arg !== 'pagelanguage_noreplace' ) {
+			$parser->getOutput()->setProperty( 'pagelanguage', $lang->getCode() );
+			self::$cache[$parser->getTitle()->getPrefixedDBKey()] = $lang;
+		}
+
+		if ( $old === false || $old === $lang->getCode() || $arg ) {
+			return '';
+		} else {
+			return '<span class="error">' .
+				wfMessage( 'pagelanguage-duplicate' )->inContentLanguage()->params(
+					wfEscapeWikiText( $old ), wfEscapeWikiText( $lang->getCode() ) )->text() .
+				'</span>';
+		}
+	}
+}
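
For reference, a hedged sketch of how these hooks would typically be registered.
The setup file PageLanguage.php is part of the change but not shown in this message,
so this wiring is an assumption based on the conventions of the era:

	// Hypothetical registration in PageLanguage.php:
	$wgHooks['PageContentLanguage'][] = 'PageLanguage::onPageContentLanguage';
	$wgHooks['ParserFirstCallInit'][] = 'PageLanguage::onParserFirstCallInit';

Because the parser function is registered with SFH_NO_HASH, it is invoked without a
leading '#', e.g. {{pagelanguage:zh-hans}} or {{pagelanguage:zh-hans|noreplace}}.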
diff --git a/PageLanguage.i18n.php b/PageLanguage.i18n.php
new file mode 100644
index 000..615e1bc
--- /dev/null
+++ b/PageLanguage.i18n.php
@@ -0,0 +1,27 @@
+<?php
+/**
+ * Internationalisation file for PageLanguage extension.
+ *
+ * @file
+ * @ingroup Extensions
+ */
+
+$messages = array();
+
+/** English */
+$messages['en'] = array(
+	'pagelanguage-desc' => "Define page language per page",
+	'pagelanguage-invalid' => "'''Warning:''' Ignoring invalid language code \"$1\" for page language.",
+	'pagelanguage-duplicate' => "'''Warning:''' Page language \"$2\" overrides earlier page language \"$1\".",
+);
+
+/** Message documentation (Message documentation) */
+$messages['qqq'] = array(
+	'pagelanguage-desc' => '{{desc|name=Page Language|url=http://www.mediawiki.org/wiki/Extension:PageLanguage}}',
+	'pagelanguage-invalid' => 'Error message when an invalid language is used. $1 for the language code used.',
+	'pagelanguage-duplicate' => 'Error message when a different language is set overriding a previous one.
+
+Parameters:
+* $1: the language code set previously
+* $2: the language code being set now',
+);
diff --git a/PageLanguage.magic.php b/PageLanguage.magic.php
new file mode 100644
index 000..825bfc2
--- /dev/null
+++ b/PageLanguage.magic.php
@@ -0,0 +1,16

[MediaWiki-commits] [Gerrit] Remove one $wgContLang usage. - change (mediawiki...Math)

2013-10-04 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/87519


Change subject: Remove one $wgContLang usage.
..

Remove one $wgContLang usage.

Change-Id: Ic988d17c2e7026a17bde9d53bcc4d10c42d22118
---
M Math.hooks.php
1 file changed, 2 insertions(+), 2 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/Math 
refs/changes/19/87519/1

diff --git a/Math.hooks.php b/Math.hooks.php
index 3489da5..b029f38 100644
--- a/Math.hooks.php
+++ b/Math.hooks.php
@@ -42,7 +42,7 @@
 * @return string
 */
	static function mathTagHook( $content, $attributes, $parser ) {
-		global $wgContLang, $wgUseMathJax;
+		global $wgUseMathJax;
		if ( trim( $content ) === "" ) { // bug 8372
			return "";
		}
@@ -55,7 +55,7 @@
			$parser->getOutput()->addModules( array( 'ext.math.mathjax.enabler' ) );
		}
		$renderer->writeCache();
-		return $wgContLang->armourMath( $renderedMath );
+		return $parser->getConverterLanguage()->armourMath( $renderedMath );
}
 
/**
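
For orientation (not part of the patch): on wikis with language variants, the
language returned by $parser->getConverterLanguage() can differ from $wgContLang,
so the rendered math is armoured against the converter that actually applies to
the current parse. A minimal sketch of the distinction:

	$lang = $parser->getConverterLanguage(); // language whose converter applies here
	$safe = $lang->armourMath( $renderedMath ); // shield output from conversion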

-- 
To view, visit https://gerrit.wikimedia.org/r/87519
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ic988d17c2e7026a17bde9d53bcc4d10c42d22118
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/extensions/Math
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] New collations: zh-pinyin and zh-stroke - change (mediawiki/core)

2013-10-02 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/87273


Change subject: New collations: zh-pinyin and zh-stroke
..

New collations: zh-pinyin and zh-stroke

Change-Id: I763f60f86a6b000d6f8187bff3d598d784c92d94
(cherry picked from commit bc5f9afd297ccfcc174abb5011c5c4daa02fcf4f)
---
M RELEASE-NOTES-1.22
M includes/Collation.php
A maintenance/language/generateCollationDataZh.php
A serialized/first-letters-zh@collation=pinyin.ser
A serialized/first-letters-zh@collation=stroke.ser
5 files changed, 144 insertions(+), 3 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/73/87273/1

diff --git a/RELEASE-NOTES-1.22 b/RELEASE-NOTES-1.22
index d0c9d1d..37f8803 100644
--- a/RELEASE-NOTES-1.22
+++ b/RELEASE-NOTES-1.22
@@ -213,6 +213,7 @@
   against allowed proxy lists.
 * Add deferrable update support for callback/closure
 * Add TitleMove hook before page renames
+* New category collations: zh-pinyin and zh-stroke (require ICU above 4.8rc1).
 
 === Bug fixes in 1.22 ===
 * Disable Special:PasswordReset when $wgEnableEmail is false. Previously one
diff --git a/includes/Collation.php b/includes/Collation.php
index b0252c7..61f47ee 100644
--- a/includes/Collation.php
+++ b/includes/Collation.php
@@ -47,6 +47,10 @@
				return new IdentityCollation;
			case 'uca-default':
				return new IcuCollation( 'root' );
+			case 'zh-pinyin':
+				return new IcuCollation( 'zh@collation=pinyin' );
+			case 'zh-stroke':
+				return new IcuCollation( 'zh@collation=stroke' );
			default:
				$match = array();
				if ( preg_match( '/^uca-([a-z@=-]+)$/', $collationName, $match ) ) {
@@ -318,8 +322,13 @@
	}
 
		// Check for CJK
+		// Always sort Chinese if this is using a Chinese locale.
+		// self::isCjk() checks Chinese only though it's called 'CJK'.
		$firstChar = mb_substr( $string, 0, 1, 'UTF-8' );
+		$localePieces = explode( '@', $this->locale );
+		$localePieces = explode( '-', $localePieces[0] );
		if ( ord( $firstChar ) > 0x7f
+			&& $localePieces[0] !== 'zh'
			&& self::isCjk( utf8ToCodepoint( $firstChar ) ) )
		{
			return $firstChar;
@@ -385,11 +394,16 @@
		// We also take this opportunity to remove primary collisions.
		$letterMap = array();
		foreach ( $letters as $letter ) {
-			$key = $this->getPrimarySortKey( $letter );
+			// Chinese collations don't display real first letters.
+			if ( !is_array( $letter ) ) {
+				// array( $letterSort, $letterDisplay )
+				$letter = array( $letter, $letter );
+			}
+			$key = $this->getPrimarySortKey( $letter[0] );
			if ( isset( $letterMap[$key] ) ) {
				// Primary collision
				// Keep whichever one sorts first in the main collator
-				if ( $this->mainCollator->compare( $letter, $letterMap[$key] ) < 0 ) {
+				if ( $this->mainCollator->compare( $letter[0], $letterMap[$key][0] ) < 0 ) {
					$letterMap[$key] = $letter;
				}
			} else {
@@ -464,7 +478,9 @@
			// This code assumes that unsetting does not change sort order.
		}
		$data = array(
-			'chars' => array_values( $letterMap ),
+			'chars' => array_map( function( $letter ) {
+				return $letter[1];
+			}, array_values( $letterMap ) ),
			'keys' => array_keys( $letterMap ),
			'version' => self::FIRST_LETTER_VERSION,
		);
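
As an aside, the ICU locales used above can be exercised directly through PHP's
intl extension. A minimal sketch (assumes ext-intl built against ICU 4.8 or later,
matching the release note):

	$pinyin = new Collator( 'zh@collation=pinyin' );
	$stroke = new Collator( 'zh@collation=stroke' );

	$words = array( '中文', '维基', '百科' );
	$pinyin->sort( $words ); // orders by pinyin reading
	$stroke->sort( $words ); // orders by stroke count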
diff --git a/maintenance/language/generateCollationDataZh.php 
b/maintenance/language/generateCollationDataZh.php
new file mode 100644
index 000..1b7990a
--- /dev/null
+++ b/maintenance/language/generateCollationDataZh.php
@@ -0,0 +1,122 @@
+<?php
+/**
+ * Maintenance script to generate first letter data files of Chinese
+ * collations for Collation.php.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope

[MediaWiki-commits] [Gerrit] (bug 35378) Support multiple collations at the same time - change (mediawiki/core)

2013-10-02 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/87288


Change subject: (bug 35378) Support multiple collations at the same time
..

(bug 35378) Support multiple collations at the same time

Change-Id: I84248ac208065de23127f6a5268bca316f2f4147
---
M RELEASE-NOTES-1.22
M docs/hooks.txt
M includes/CategoryViewer.php
M includes/Collation.php
M includes/DefaultSettings.php
M includes/LinksUpdate.php
M includes/Preferences.php
M includes/Setup.php
M includes/Title.php
M includes/api/ApiQueryCategoryMembers.php
M includes/installer/DatabaseUpdater.php
M includes/installer/MysqlUpdater.php
M languages/messages/MessagesEn.php
M languages/messages/MessagesQqq.php
A maintenance/archives/patch-categorylinks-multiple-collations.sql
M maintenance/language/messages.inc
M maintenance/tables.sql
17 files changed, 198 insertions(+), 47 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/88/87288/1

diff --git a/RELEASE-NOTES-1.22 b/RELEASE-NOTES-1.22
index 0e3e222..10e4a65 100644
--- a/RELEASE-NOTES-1.22
+++ b/RELEASE-NOTES-1.22
@@ -234,6 +234,7 @@
referenced in LESS '@import' statements are looked up here first.
 * Added meta=filerepoinfo API module for getting information about foreign
   image repositories, and related ForeignAPIRepo methods getInfo and getApiUrl.
+* (bug 35378) Support multiple collations at the same time
 
 === Bug fixes in 1.22 ===
 * Disable Special:PasswordReset when $wgEnableEmail is false. Previously one
diff --git a/docs/hooks.txt b/docs/hooks.txt
index 2d1001b..0c3afdd 100644
--- a/docs/hooks.txt
+++ b/docs/hooks.txt
@@ -800,7 +800,7 @@
 $unpatrolled: Whether or not we are showing unpatrolled changes.
 $watched: Whether or not the change is watched by the user.
 
-'Collation::factory': Called if $wgCategoryCollation is an unknown collation.
+'Collation::factory': Called if $wgCategoryCollations contains an unknown collation.
 $collationName: Name of the collation in question
 $collationObject: Null. Replace with a subclass of the Collation class that
   implements the collation given in $collationName.
diff --git a/includes/CategoryViewer.php b/includes/CategoryViewer.php
index 55d9c1e..183552a 100644
--- a/includes/CategoryViewer.php
+++ b/includes/CategoryViewer.php
@@ -49,6 +49,11 @@
	var $collation;
 
	/**
+	 * @var Collation name
+	 */
+	var $collationName;
+
+	/**
	 * @var ImageGallery
	 */
	var $gallery;
@@ -77,7 +82,7 @@
	 * @param $query Array
	 */
	function __construct( $title, IContextSource $context, $from = array(), $until = array(), $query = array() ) {
-		global $wgCategoryPagingLimit;
+		global $wgCategoryPagingLimit, $wgCategoryCollations;
		$this->title = $title;
		$this->setContext( $context );
		$this->from = $from;
@@ -85,7 +90,8 @@
		$this->limit = $wgCategoryPagingLimit;
		$this->cat = Category::newFromTitle( $title );
		$this->query = $query;
-		$this->collation = Collation::singleton();
+		list( $this->collationName, $this->collation ) = Collation::getInstanceByContext(
+			isset( $query['collation'] ) ? $query['collation'] : null, $title, $context );
		unset( $this->query['title'] );
	}
 
@@ -295,13 +301,17 @@
		foreach ( array( 'page', 'subcat', 'file' ) as $type ) {
			# Get the sortkeys for start/end, if applicable.  Note that if
			# the collation in the database differs from the one
-			# set in $wgCategoryCollation, pagination might go totally haywire.
-			$extraConds = array( 'cl_type' => $type );
+			# set in $wgCategoryCollations, pagination might go totally haywire.
+			$conds = array(
+				'cl_type' => $type,
+				'cl_to' => $this->title->getDBkey(),
+				'cl_collation' => array( '', $this->collationName ),
+			);
			if ( isset( $this->from[$type] ) && $this->from[$type] !== null ) {
-				$extraConds[] = 'cl_sortkey >= '
+				$conds[] = 'cl_sortkey >= '
					. $dbr->addQuotes( $this->collation->getSortKey( $this->from[$type] ) );
			} elseif ( isset( $this->until[$type] ) && $this->until[$type] !== null ) {
-				$extraConds[] = 'cl_sortkey < '
+				$conds[] = 'cl_sortkey < '
					. $dbr->addQuotes( $this->collation->getSortKey( $this->until[$type] ) );
				$this->flip[$type] = true
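
For orientation, a hedged sketch of how the new setting might look in
LocalSettings.php; the exact value format of $wgCategoryCollations is an
assumption inferred from the plural name and the per-request lookup above:

	// Hypothetical configuration enabling two collations side by side:
	$wgCategoryCollations = array( 'uppercase', 'zh-pinyin' );

The constructor above also reads a 'collation' query parameter, so a category page
could presumably be rendered under a specific collation via ?collation=zh-pinyin.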

[MediaWiki-commits] [Gerrit] Create and move some functions for class ArrayUtils - change (mediawiki/core)

2013-10-02 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/87290


Change subject: Create and move some functions for class ArrayUtils
..

Create and move some functions for class ArrayUtils

Change-Id: Id9ca20925f49e314918810fb54b3819ba9cf9c39
---
M includes/ArrayUtils.php
M includes/Collation.php
A tests/phpunit/includes/ArrayUtilsTest.php
3 files changed, 427 insertions(+), 30 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/90/87290/1

diff --git a/includes/ArrayUtils.php b/includes/ArrayUtils.php
index 985271f..7867b93 100644
--- a/includes/ArrayUtils.php
+++ b/includes/ArrayUtils.php
@@ -1,5 +1,28 @@
 <?php
+/**
+ * Methods to play with arrays.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @file
+ */
 
+/**
+ * A collection of static methods to play with arrays.
+ */
 class ArrayUtils {
/**
 * Sort the given array in a pseudo-random order which depends only on 
the
@@ -66,4 +89,92 @@
}
return $i;
}
+
+	/**
+	 * Do a binary search, and return the index of the largest item that sorts
+	 * less than or equal to the target value.
+	 *
+	 * @param array $valueCallback A function to call to get the value with
+	 * a given array index.
+	 * @param $valueCount int The number of items accessible via $valueCallback,
+	 * indexed from 0 to $valueCount - 1
+	 * @param $comparisonCallback array A callback to compare two values, returning
+	 * -1, 0 or 1 in the style of strcmp().
+	 * @param $target string The target value to find.
+	 *
+	 * @return int|bool The item index of the lower bound, or false if the target value
+	 * sorts before all items.
+	 */
+	public static function findLowerBound( $valueCallback, $valueCount, $comparisonCallback, $target ) {
+		if ( $valueCount === 0 ) {
+			return false;
+		}
+
+		$min = 0;
+		$max = $valueCount;
+		do {
+			$mid = $min + ( ( $max - $min ) >> 1 );
+			$item = call_user_func( $valueCallback, $mid );
+			$comparison = call_user_func( $comparisonCallback, $target, $item );
+			if ( $comparison > 0 ) {
+				$min = $mid;
+			} elseif ( $comparison == 0 ) {
+				$min = $mid;
+				break;
+			} else {
+				$max = $mid;
+			}
+		} while ( $min < $max - 1 );
+
+		if ( $min == 0 ) {
+			$item = call_user_func( $valueCallback, $min );
+			$comparison = call_user_func( $comparisonCallback, $target, $item );
+			if ( $comparison < 0 ) {
+				// Before the first item
+				return false;
+			}
+		}
+		return $min;
+	}
+
+   /**
+* Do array_diff_assoc() on multi-dimensional arrays.
+*
+* Note: empty arrays are removed.
+*
+* @param $array1 array The array to compare from
+* @param $array2 array An array to compare against
+* @param ... array More arrays to compare against
+* @return array An array containing all the values from array1
+*   that are not present in any of the other arrays.
+*/
+	public static function arrayDiffAssocRecursive( $array1 ) {
+		$arrays = func_get_args();
+		array_shift( $arrays );
+		$ret = array();
+
+		foreach ( $array1 as $key => $value ) {
+			if ( is_array( $value ) ) {
+				$args = array( $value );
+				foreach ( $arrays as $array ) {
+					if ( isset( $array[$key] ) ) {
+						$args[] = $array[$key
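
A usage sketch of ArrayUtils::findLowerBound() as defined above (the data is
made up): find the index of the largest value that sorts at or below the target.

	$values = array( 10, 20, 30, 40 );
	$index = ArrayUtils::findLowerBound(
		function ( $i ) use ( $values ) { return $values[$i]; },
		count( $values ),
		function ( $a, $b ) { return $a - $b; }, // strcmp-style comparison
		25
	);
	// $index === 1, i.e. the value 20; a target below 10 would return false.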

[MediaWiki-commits] [Gerrit] New diff variables regarding pre-save transformed wikitext - change (mediawiki...AbuseFilter)

2013-10-01 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/86815


Change subject: New diff variables regarding pre-save transformed wikitext
..

New diff variables regarding pre-save transformed wikitext

Change-Id: Ie21041d96f1c4cf37d697fffcaffa1ff8242f886
---
M AbuseFilter.class.php
M AbuseFilter.i18n.php
2 files changed, 15 insertions(+), 0 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/AbuseFilter 
refs/changes/15/86815/1

diff --git a/AbuseFilter.class.php b/AbuseFilter.class.php
index f41cbc7..fe5d602 100644
--- a/AbuseFilter.class.php
+++ b/AbuseFilter.class.php
@@ -108,6 +108,9 @@
		'removed_links' => 'removed-links',
		'all_links' => 'all-links',
		'new_pst' => 'new-pst',
+		'edit_diff_pst' => 'diff-pst',
+		'added_lines_pst' => 'addedlines-pst',
+		'removed_lines_pst' => 'removedlines-pst',
		'new_text' => 'new-text-stripped',
		'new_html' => 'new-html',
		'article_restrictions_edit' => 'restrictions-edit',
@@ -1990,6 +1993,8 @@
 
		$vars->setLazyLoadVar( 'edit_diff', 'diff',
			array( 'oldtext-var' => 'old_wikitext', 'newtext-var' => 'new_wikitext' ) );
+		$vars->setLazyLoadVar( 'edit_diff_pst', 'diff',
+			array( 'oldtext-var' => 'old_wikitext', 'newtext-var' => 'new_pst' ) );
		$vars->setLazyLoadVar( 'new_size', 'length', array( 'length-var' => 'new_wikitext' ) );
		$vars->setLazyLoadVar( 'old_size', 'length', array( 'length-var' => 'old_wikitext' ) );
		$vars->setLazyLoadVar( 'edit_delta', 'subtract',
@@ -2000,6 +2005,10 @@
			array( 'diff-var' => 'edit_diff', 'line-prefix' => '+' ) );
		$vars->setLazyLoadVar( 'removed_lines', 'diff-split',
			array( 'diff-var' => 'edit_diff', 'line-prefix' => '-' ) );
+		$vars->setLazyLoadVar( 'added_lines_pst', 'diff-split',
+			array( 'diff-var' => 'edit_diff_pst', 'line-prefix' => '+' ) );
+		$vars->setLazyLoadVar( 'removed_lines_pst', 'diff-split',
+			array( 'diff-var' => 'edit_diff_pst', 'line-prefix' => '-' ) );
 
		// Links
		$vars->setLazyLoadVar( 'added_links', 'link-diff-added',
diff --git a/AbuseFilter.i18n.php b/AbuseFilter.i18n.php
index 1d1fe55..30f232d 100644
--- a/AbuseFilter.i18n.php
+++ b/AbuseFilter.i18n.php
@@ -337,6 +337,9 @@
	'abusefilter-edit-builder-vars-old-text' => 'Old page wikitext, before the edit',
	'abusefilter-edit-builder-vars-new-text' => 'New page wikitext, after the edit',
	'abusefilter-edit-builder-vars-new-pst' => 'New page wikitext, pre-save transformed',
+	'abusefilter-edit-builder-vars-diff-pst' => 'Unified diff of changes made by edit, pre-save transformed',
+	'abusefilter-edit-builder-vars-addedlines-pst' => 'Lines added in edit, pre-save transformed',
+	'abusefilter-edit-builder-vars-removedlines-pst' => 'Lines removed in edit, pre-save transformed',
	'abusefilter-edit-builder-vars-new-text-stripped' => 'New page text, stripped of any markup',
	'abusefilter-edit-builder-vars-new-html' => 'Parsed HTML source of the new revision',
	'abusefilter-edit-builder-vars-recent-contributors' => 'Last ten users to contribute to the page',
@@ -922,6 +925,9 @@
 * {{msg-mw|Abusefilter-edit-builder-vars-global-user-groups}}',
	'abusefilter-edit-builder-vars-user-blocked' => 'Paraphrased: "Boolean value on whether the user is blocked". Abuse filter syntax option in a dropdown from the group {{msg-mw|abusefilter-edit-builder-group-vars}}.',
	'abusefilter-edit-builder-vars-new-pst' => 'Paraphrased: "The output wikitext after pre-save transform is applied to new_wikitext". Abuse filter syntax option in a dropdown from the group {{msg-mw|abusefilter-edit-builder-group-vars}}.',
+	'abusefilter-edit-builder-vars-diff-pst' => 'Paraphrased: "Edit diff of new_pst against old_wikitext". Abuse filter syntax option in a dropdown from the group {{msg-mw|abusefilter-edit-builder-group-vars}}.',
+	'abusefilter-edit-builder-vars-addedlines-pst' => 'Paraphrased: "Added lines in edit_diff_pst". Abuse filter syntax option in a dropdown from the group {{msg-mw|abusefilter-edit-builder-group-vars}}.',
+	'abusefilter-edit-builder-vars-removedlines-pst' => 'Paraphrased: "Removed lines in edit_diff_pst". Abuse filter syntax option in a dropdown from the group {{msg-mw|abusefilter-edit-builder-group-vars}}.',
	'abusefilter-edit-builder-vars-restrictions-edit' => 'This variable contains the level of protection required to edit the page. ("Edit" here is not a verb, but an adjective, like "Edit-related protection level
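
As a usage sketch: with these variables a filter can match on text as it will
actually be saved, catching content that only appears after pre-save transform
(e.g. template substitution). A hypothetical rule in AbuseFilter's own rule
syntax (the domain is made up):

	added_lines_pst irlike "spamdomain\.com"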

[MediaWiki-commits] [Gerrit] bug 54264 - change (mediawiki/core)

2013-09-18 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/84740


Change subject: bug 54264
..

bug 54264

Change-Id: I8175174d6baf3c03f327775b9db54ef559c3e8b4
---
A tests/phpunit/includes/GlobalFunctions/wfFailingTest.php
1 file changed, 10 insertions(+), 0 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/core 
refs/changes/40/84740/1

diff --git a/tests/phpunit/includes/GlobalFunctions/wfFailingTest.php 
b/tests/phpunit/includes/GlobalFunctions/wfFailingTest.php
new file mode 100644
index 000..43b2fd6
--- /dev/null
+++ b/tests/phpunit/includes/GlobalFunctions/wfFailingTest.php
@@ -0,0 +1,10 @@
+<?php
+
+function titleExpected( Title $t ) {
+}
+
+class WfFailingTest extends MediaWikiTestCase {
+   function testFailure() {
+   titleExpected();
+   }
+}

-- 
To view, visit https://gerrit.wikimedia.org/r/84740
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I8175174d6baf3c03f327775b9db54ef559c3e8b4
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/core
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] bug 54264 - change (mediawiki...Wikibase)

2013-09-18 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/84741


Change subject: bug 54264
..

bug 54264

Change-Id: I1849e1ccaefaf171fcd601a76521a0e677314b0c
---
M repo/tests/phpunit/includes/ItemViewTest.php
1 file changed, 1 insertion(+), 1 deletion(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/Wikibase 
refs/changes/41/84741/1

diff --git a/repo/tests/phpunit/includes/ItemViewTest.php 
b/repo/tests/phpunit/includes/ItemViewTest.php
index d65a49c..166f802 100644
--- a/repo/tests/phpunit/includes/ItemViewTest.php
+++ b/repo/tests/phpunit/includes/ItemViewTest.php
@@ -59,7 +59,7 @@
$dataTypeLookup = new InMemoryDataTypeLookup();
 
		// test whether we get the right EntityView from an EntityContent
-		$view = ItemView::newForEntityContent( $entityContent, $valueFormatters, $dataTypeLookup, $entityLoader );
+		$view = ItemView::newForEntityContent();
 
		$this->assertType(
			ItemView::$typeMap[ $entityContent->getEntity()->getType() ],

-- 
To view, visit https://gerrit.wikimedia.org/r/84741
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I1849e1ccaefaf171fcd601a76521a0e677314b0c
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/extensions/Wikibase
Gerrit-Branch: master
Gerrit-Owner: Liangent liang...@gmail.com

___
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits


[MediaWiki-commits] [Gerrit] Revert Revert 3fab17d4 and 61dacb15 for PropertyParserFunct... - change (mediawiki...Wikibase)

2013-09-17 Thread Liangent (Code Review)
Liangent has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/84528


Change subject: Revert Revert 3fab17d4 and 61dacb15 for PropertyParserFunction
..

Revert Revert 3fab17d4 and 61dacb15 for PropertyParserFunction

This reverts commit bf35e4ca9df3f959f58107185e1d7383e6baf400.

Change-Id: I69eb825d7c2b6468a719bb8d3bcf56f83a6845c7
---
M client/includes/parserhooks/PropertyParserFunction.php
M client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php
2 files changed, 33 insertions(+), 17 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/mediawiki/extensions/Wikibase 
refs/changes/28/84528/1

diff --git a/client/includes/parserhooks/PropertyParserFunction.php 
b/client/includes/parserhooks/PropertyParserFunction.php
index 4941640..55bc65f 100644
--- a/client/includes/parserhooks/PropertyParserFunction.php
+++ b/client/includes/parserhooks/PropertyParserFunction.php
@@ -2,9 +2,11 @@
 
 namespace Wikibase;
 
+use ValueFormatters\FormatterOptions;
 use Wikibase\Client\WikibaseClient;
 use Wikibase\DataModel\SimpleSiteLink;
 use Wikibase\Lib\SnakFormatter;
+use Wikibase\Lib\SnakFormatterFactory;
 
 /**
  * Handler of the {{#property}} parser function.
@@ -34,6 +36,7 @@
  * @licence GNU GPL v2+
  * @author Katie Filbert <aude.w...@gmail.com>
  * @author Jeroen De Dauw <jeroended...@gmail.com>
+ * @author Daniel Kinzler
  */
 class PropertyParserFunction {
 
@@ -59,7 +62,7 @@
	 * @param EntityLookup                $entityLookup
	 * @param PropertyLabelResolver       $propertyLabelResolver
	 * @param ParserErrorMessageFormatter $errorFormatter
-	 * @param Lib\SnakFormatter           $snaksFormatter
+	 * @param SnakFormatter               $snaksFormatter
 */
	public function __construct( \Language $language,
		EntityLookup $entityLookup, PropertyLabelResolver $propertyLabelResolver,
@@ -114,12 +117,18 @@
	 * @return string - wikitext format
	 */
	private function formatSnakList( $snaks ) {
-		$languageFallbackChainFactory = WikibaseClient::getDefaultInstance()->getLanguageFallbackChainFactory();
-		$languageFallbackChain = $languageFallbackChainFactory->newFromLanguage( $this->language,
-			LanguageFallbackChainFactory::FALLBACK_SELF | LanguageFallbackChainFactory::FALLBACK_VARIANTS
-		);
-		$formattedValues = $this->snaksFormatter->formatSnaks( $snaks, $languageFallbackChain );
+		$formattedValues = $this->formatSnaks( $snaks );
		return $this->language->commaList( $formattedValues );
+	}
+
+	private function formatSnaks( $snaks ) {
+		$strings = array();
+
+		foreach ( $snaks as $snak ) {
+			$strings[] = $this->snaksFormatter->formatSnak( $snak );
+		}
+
+		return $strings;
	}
 
/**
@@ -191,7 +200,18 @@
 
		$entityLookup = $wikibaseClient->getStore()->getEntityLookup();
		$propertyLabelResolver = $wikibaseClient->getStore()->getPropertyLabelResolver();
-		$formatter = $wikibaseClient->newSnakFormatter();
+
+		$languageFallbackChainFactory = WikibaseClient::getDefaultInstance()->getLanguageFallbackChainFactory();
+		$languageFallbackChain = $languageFallbackChainFactory->newFromLanguage( $targetLanguage,
+			LanguageFallbackChainFactory::FALLBACK_SELF | LanguageFallbackChainFactory::FALLBACK_VARIANTS
+		);
+
+		$options = new FormatterOptions( array(
+			'languages' => $languageFallbackChain,
+			// ...more options...
+		) );
+
+		$formatter = $wikibaseClient->newSnakFormatter( SnakFormatterFactory::FORMAT_WIKI, $options );
 
		$instance = new self( $targetLanguage,
			$entityLookup, $propertyLabelResolver,
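
For orientation: FALLBACK_SELF | FALLBACK_VARIANTS requests a chain that tries the
target language itself and then its language variants, so, for example, a value
stored only in zh-hans can still be rendered for a zh-tw parse. A condensed,
hedged sketch of the wiring above (behaviour inferred from the flag names, not
verified against this exact revision):

	$chain = $languageFallbackChainFactory->newFromLanguage(
		$targetLanguage,
		LanguageFallbackChainFactory::FALLBACK_SELF | LanguageFallbackChainFactory::FALLBACK_VARIANTS
	);
	$options = new FormatterOptions( array( 'languages' => $chain ) );
	$formatter = $wikibaseClient->newSnakFormatter( SnakFormatterFactory::FORMAT_WIKI, $options );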
diff --git 
a/client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php 
b/client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php
index 0e5d6b3..4b24dbe 100644
--- a/client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php
+++ b/client/tests/phpunit/includes/parserhooks/PropertyParserFunctionTest.php
@@ -8,9 +8,6 @@
 use Wikibase\DataModel\Entity\ItemId;
 use Wikibase\DataModel\Entity\PropertyId;
 use Wikibase\Item;
-use Wikibase\Lib\EntityRetrievingDataTypeLookup;
-use Wikibase\Lib\SnakFormatter;
-use Wikibase\Lib\TypedValueFormatter;
 use Wikibase\ParserErrorMessageFormatter;
 use Wikibase\Property;
 use Wikibase\PropertyParserFunction;
@@ -43,11 +40,10 @@
		$mockRepo = $this->newMockRepository();
		$mockResolver = new MockPropertyLabelResolver( $targetLanguage->getCode(), $mockRepo );
 
-		$formatter = new SnakFormatter
