Xqt has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/227646

Change subject: Use sitename property
......................................................................

Use sitename property

sitename is a property in core but a method in compat. For compatibility
purposes sitename() can still be called, but using the property is preferred
(see the sketch below).

Change-Id: I9c36512f7796a160ba40a30ccb62e665fde8c0de
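
For illustration only: a minimal sketch of how a property can keep accepting
the deprecated sitename() call form. The _CallableString helper and the
stripped-down BaseSite below are assumptions made for this example, not
pywikibot's actual compat implementation.

    import warnings

    class _CallableString(str):
        """String that still tolerates the old sitename() call syntax."""

        def __call__(self):
            # Hypothetical deprecation shim; the real compat code may differ.
            warnings.warn('sitename() is deprecated; use the sitename property',
                          DeprecationWarning, stacklevel=2)
            return self

    class BaseSite(object):  # simplified stand-in, not the real class
        def __init__(self, family, code):
            self.family = family
            self.code = code

        @property
        def sitename(self):
            """Site identifier such as 'wikipedia:de'."""
            return _CallableString('%s:%s' % (self.family, self.code))

    site = BaseSite('wikipedia', 'de')
    assert site.sitename == 'wikipedia:de'    # preferred property access
    assert site.sitename() == 'wikipedia:de'  # deprecated call still works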
---
M pywikibot/cosmetic_changes.py
M pywikibot/textlib.py
M scripts/blockreview.py
M scripts/fixing_redirects.py
M scripts/imagetransfer.py
M scripts/interwiki.py
M scripts/noreferences.py
7 files changed, 21 insertions(+), 19 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/pywikibot/core refs/changes/46/227646/1

diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index c21868f..3020f74 100755
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -415,7 +415,7 @@
     def translateAndCapitalizeNamespaces(self, text):
         """Use localized namespace names."""
         # arz uses english stylish codes
-        if self.site.sitename() == 'wikipedia:arz':
+        if self.site.sitename == 'wikipedia:arz':
             return text
         family = self.site.family
         # wiki links aren't parsed here.
@@ -565,7 +565,7 @@
                         # TODO: Add a configuration variable for each site,
                         # which determines if the link target is written in
                         # uppercase
-                        if self.site.sitename() == 'wikipedia:de':
+                        if self.site.sitename == 'wikipedia:de':
                             titleWithSection = first_upper(titleWithSection)
                         newLink = "[[%s|%s]]" % (titleWithSection, label)
                     # re-add spaces that were pulled out of the link.
@@ -898,7 +898,7 @@
 
         [1]: https://commons.wikimedia.org/wiki/Commons:Tools/pywiki_file_description_cleanup
         """
-        if self.site.sitename() != u'commons:commons' or self.namespace == 6:
+        if self.site.sitename != u'commons:commons' or self.namespace == 6:
             return
         # section headers to {{int:}} versions
         exceptions = ['comment', 'includeonly', 'math', 'noinclude', 'nowiki',
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index f56fa9b..77454fc 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -1118,7 +1118,7 @@
     marker = findmarker(oldtext)
     if site is None:
         site = pywikibot.Site()
-    if site.sitename() == 'wikipedia:de' and "{{Personendaten" in oldtext:
+    if site.sitename == 'wikipedia:de' and '{{Personendaten' in oldtext:
         raise pywikibot.Error(
             'The Pywikibot is no longer allowed to touch categories on the '
             'German\nWikipedia on pages that contain the Personendaten '
diff --git a/scripts/blockreview.py b/scripts/blockreview.py
index 62ec90b..08ec0ff 100755
--- a/scripts/blockreview.py
+++ b/scripts/blockreview.py
@@ -130,7 +130,7 @@
                 # Notify the blocking admin
                 if templates[1] == [] or templates[1][0] == u'1':
                     if self.info['action'] == 'block' or user.isBlocked():
-                        if self.site.sitename() == 'wikipedia:de':
+                        if self.site.sitename == 'wikipedia:de':
                             admin = pywikibot.User(self.site, self.info['user'])
                             adminPage = admin.getUserTalkPage()
                             adminText = adminPage.get()
@@ -144,7 +144,7 @@
                             self.save(adminText, adminPage, comment, False)
                         # test for pt-wiki
                         # just print all sysops talk pages
-                        elif self.site.sitename() == 'wikipedia:pt':
+                        elif self.site.sitename == 'wikipedia:pt':
                             from pywikibot import pagegenerators as pg
                             gen = pg.PreloadingGenerator(self.SysopGenerator())
                             for sysop in gen:
diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py
index f689ec4..2cb94ec 100755
--- a/scripts/fixing_redirects.py
+++ b/scripts/fixing_redirects.py
@@ -127,7 +127,7 @@
             genFactory.handleArg(arg)
 
     mysite = pywikibot.Site()
-    if mysite.sitename() == 'wikipedia:nl':
+    if mysite.sitename == 'wikipedia:nl':
         pywikibot.output(
             '\03{lightred}There is consensus on the Dutch Wikipedia that '
             'bots should not be used to fix redirects.\03{default}')
diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py
index 8f4329f..4dc25b7 100755
--- a/scripts/imagetransfer.py
+++ b/scripts/imagetransfer.py
@@ -178,9 +178,11 @@
         try:
             description = sourceImagePage.get()
             # try to translate license templates
-            if (sourceSite.sitename(), self.targetSite.sitename()) in licenseTemplates:
-                for old, new in licenseTemplates[(sourceSite.sitename(),
-                                                  self.targetSite.sitename())].items():
+            if (sourceSite.sitename,
+                    self.targetSite.sitename) in licenseTemplates:
+                for old, new in licenseTemplates[
+                        (sourceSite.sitename,
+                         self.targetSite.sitename)].items():
                     new = '{{%s}}' % new
                     old = re.compile('{{%s}}' % old)
                     description = textlib.replaceExcept(description, old, new,
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index fb05532..1355798 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1722,9 +1722,10 @@
             for (site, page) in new.items():
                 # edit restriction for some templates on zh-wiki where
                 # interlanguage keys are included by /doc subpage
-                smallWikiAllowed = not (page.site.sitename() == 'wikipedia:zh' and
+                smallWikiAllowed = not (page.site.sitename == 'wikipedia:zh' and
                                         page.namespace() == 10 and
-                                        u'Country data' in page.title(withNamespace=False))
+                                        u'Country data' in
+                                        page.title(withNamespace=False))
                 # edit restriction on is-wiki
                 # https://is.wikipedia.org/wiki/Wikipediaspjall:V%C3%A9lmenni
                 # and zh-wiki for template namespace which prevents increasing the queue
@@ -1733,8 +1734,8 @@
                 # or the last edit was 1 month ago
                 if (smallWikiAllowed and
                     globalvar.autonomous and
-                    (page.site.sitename() == 'wikipedia:is' or
-                     page.site.sitename() == 'wikipedia:zh' and
+                    (page.site.sitename == 'wikipedia:is' or
+                     page.site.sitename == 'wikipedia:zh' and
                      page.namespace() == 10
                      )):
                     old = {}
@@ -1772,7 +1773,7 @@
                             else:
                                 pywikibot.output(
                                     u'NOTE: number of edits are restricted at %s'
-                                    % page.site.sitename()
+                                    % page.site.sitename
                                 )
 
                 # if we have an account for this site
@@ -1914,8 +1915,8 @@
                     not globalvar.cleanup or
                     unicode(rmPage) not in globalvar.remove or
                     (
-                        rmPage.site.sitename() == 'wikipedia:hi' and
-                        page.site.sitename() != 'wikipedia:de'  # work-arround for bug #3081100 (do not remove hi-pages)
+                        rmPage.site.sitename == 'wikipedia:hi' and
+                        page.site.sitename != 'wikipedia:de'  # work-arround for bug #3081100 (do not remove hi-pages)
                     )
                 ):
                     new[rmsite] = rmPage
diff --git a/scripts/noreferences.py b/scripts/noreferences.py
index 709c1e8..df65e8c 100755
--- a/scripts/noreferences.py
+++ b/scripts/noreferences.py
@@ -671,8 +671,7 @@
                 pywikibot.output(u"Page %s is a disambig; skipping."
                                  % page.title(asLink=True))
                 continue
-            if self.site.sitename() == 'wikipedia:en' and \
-               page.isIpEdit():
+            if self.site.sitename == 'wikipedia:en' and page.isIpEdit():
                 pywikibot.output(
                     u"Page %s is edited by IP. Possible vandalized"
                     % page.title(asLink=True))

-- 
To view, visit https://gerrit.wikimedia.org/r/227646
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I9c36512f7796a160ba40a30ccb62e665fde8c0de
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <i...@gno.de>
