Ladsgroup has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/157334

Change subject: PEP257 on pywikibot folder part III
......................................................................

PEP257 on pywikibot folder part III

files: logentries, login, page, pagegenerators, textlib, throttle,
tools, version, xmlreader
Checked by hand and added some suggestions based on this change:
I910fdd7ce36752fa44b9a6654d7b8509e5020dee

Change-Id: Ic65452262d16ba2ec83633125ce1e3145a6f3aef
---
M pywikibot/logentries.py
M pywikibot/login.py
M pywikibot/page.py
M pywikibot/pagegenerators.py
M pywikibot/textlib.py
M pywikibot/throttle.py
M pywikibot/tools.py
M pywikibot/version.py
M pywikibot/xmlreader.py
9 files changed, 109 insertions(+), 93 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/pywikibot/core 
refs/changes/34/157334/1

diff --git a/pywikibot/logentries.py b/pywikibot/logentries.py
index d0e753f..96b520f 100644
--- a/pywikibot/logentries.py
+++ b/pywikibot/logentries.py
@@ -1,7 +1,5 @@
 # -*- coding: utf-8  -*-
-"""
-Objects representing Mediawiki log entries
-"""
+"""Objects representing Mediawiki log entries."""
 #
 # (C) Pywikibot team, 2007-2013
 #
@@ -22,6 +20,7 @@
     Simple custom dictionary that raises a custom KeyError and logs
     debugging information when a key is missing
     """
+
     def __missing__(self, key):
         pywikibot.debug(u"API log entry received:\n" + repr(self),
                         _logger)
@@ -30,7 +29,7 @@
 
 class LogEntry(object):
 
-    """Generic log entry"""
+    """Generic log entry."""
 
     # Log type expected. None for every type, or one of the (letype) str :
     # block/patrol/etc...
@@ -38,7 +37,7 @@
     _expectedType = None
 
     def __init__(self, apidata):
-        """Initialize object from a logevent dict returned by MW API"""
+        """Initialize object from a logevent dict returned by MW API."""
         self.data = LogDict(apidata)
         if self._expectedType is not None and self._expectedType != 
self.type():
             raise Error("Wrong log type! Expecting %s, received %s instead."
@@ -57,7 +56,7 @@
         return self.data['ns']
 
     def title(self):
-        """Page on which action was performed"""
+        """Return Page object on which action was performed."""
         if not hasattr(self, '_title'):
             self._title = pywikibot.Page(pywikibot.Link(self.data['title']))
         return self._title
@@ -73,7 +72,7 @@
         return self.data['user']
 
     def timestamp(self):
-        """Timestamp object corresponding to event timestamp"""
+        """Return Timestamp object corresponding to event timestamp."""
         if not hasattr(self, '_timestamp'):
             self._timestamp = 
pywikibot.Timestamp.fromISOformat(self.data['timestamp'])
         return self._timestamp
@@ -178,7 +177,7 @@
         return self.data['move']['new_ns']
 
     def new_title(self):
-        """Page object of the new title"""
+        """Page object of the new title."""
         if not hasattr(self, '_new_title'):
             self._new_title = 
pywikibot.Page(pywikibot.Link(self.data['move']['new_title']))
         return self._new_title
@@ -213,6 +212,7 @@
 
     Only available method is create()
     """
+
     _logtypes = {
         'block': BlockEntry,
         'protect': ProtectEntry,
@@ -252,8 +252,9 @@
     @staticmethod
     def _getEntryClass(logtype):
         """
-        Returns the class corresponding to the @logtype string parameter.
-        Returns LogEntry if logtype is unknown or not supported
+        Return the class corresponding to the @logtype string parameter.
+
+        Return LogEntry if logtype is unknown or not supported
         """
         try:
             return LogEntryFactory._logtypes[logtype]
@@ -261,9 +262,7 @@
             return LogEntry
 
     def _createFromData(self, logdata):
-        """
-        Checks for logtype from data, and creates the correct LogEntry
-        """
+        """Check for logtype from data, and creates the correct LogEntry."""
         try:
             logtype = logdata['type']
             return LogEntryFactory._getEntryClass(logtype)(logdata)
diff --git a/pywikibot/login.py b/pywikibot/login.py
index 0026aba..290afa0 100644
--- a/pywikibot/login.py
+++ b/pywikibot/login.py
@@ -1,8 +1,6 @@
 #!/usr/bin/python
 # -*- coding: utf-8  -*-
-"""
-Library to log the robot in to a wiki account.
-"""
+"""Library to log the robot in to a wiki account."""
 #
 # (C) Rob W.W. Hooft, 2003
 # (C) Pywikibot team, 2003-2012
diff --git a/pywikibot/page.py b/pywikibot/page.py
index e77b23e..cabcece 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -131,9 +131,11 @@
         return self.site.data_repository()
 
     def namespace(self):
-        """Return the number of the namespace of the page.
+        """
+        Return the number of the namespace of the page.
 
-        @return: int
+        @return: namespace of the page
+        @rtype: int
         """
         return self._link.namespace
 
@@ -259,7 +261,8 @@
         return hash(unicode(self))
 
     def autoFormat(self):
-        """Return L{date.autoFormat} dictName and value, if any.
+        """
+        Return L{date.autoFormat} dictName and value, if any.
 
         Value can be a year, date, etc., and dictName is 'YearBC',
         'Year_December', or another dictionary name. Please note that two
@@ -283,7 +286,8 @@
     @deprecate_arg("throttle", None)
     @deprecate_arg("change_edit_time", None)
     def get(self, force=False, get_redirect=False, sysop=False):
-        """Return the wiki-text of the page.
+        """
+        Return the wiki-text of the page.
 
         This will retrieve the page from the server if it has not been
         retrieved yet, or if force is True. This can raise the following
@@ -320,7 +324,8 @@
         return self._revisions[self._revid].text
 
     def _getInternals(self, sysop):
-        """Helper function for get().
+        """
+        Helper function for get().
 
         Stores latest revision in self if it doesn't contain it, doesn't think.
         * Raises exceptions from previous runs.
@@ -365,7 +370,8 @@
         return self._revisions[oldid].text
 
     def permalink(self, oldid=None):
-        """Return the permalink URL of an old revision of this page.
+        """
+        Return the permalink URL of an old revision of this page.
 
         @param oldid: The revid of the revision desired.
 
@@ -384,9 +390,11 @@
 
     @property
     def text(self):
-        """Return the current (edited) wikitext, loading it if necessary.
+        """
+        Return the current (edited) wikitext, loading it if necessary.
 
-        @return: unicode
+        @return: text of the page
+        @rtype: unicode
         """
         if not hasattr(self, '_text') or self._text is None:
             try:
@@ -3454,7 +3462,8 @@
             self.sources.remove(source_dict)
 
     def addQualifier(self, qualifier, **kwargs):
-        """Add the given qualifier.
+        """
+        Add the given qualifier.
 
         @param qualifier: the qualifier to add
         @type qualifier: Claim
@@ -3468,7 +3477,8 @@
         """
         Format the target into the proper JSON value that Wikibase wants.
 
-        @return: dict
+        @return: JSON value
+        @rtype: dict
         """
         if self.type == 'wikibase-item':
             value = {'entity-type': 'item',
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index a46e27c..9a5b436 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -206,6 +206,7 @@
     This factory is responsible for processing command line arguments
     that are used by many scripts and that determine which pages to work on.
     """
+
     def __init__(self, site=None):
         self.gens = []
         self.namespaces = []
@@ -221,7 +222,6 @@
 
         Only call this after all arguments have been parsed.
         """
-
         if gen:
             self.gens.insert(0, gen)
 
@@ -589,9 +589,7 @@
 @deprecate_arg("repeat", None)
 def NewpagesPageGenerator(get_redirect=False, site=None,
                           namespaces=[0, ], step=None, total=None):
-    """
-    Iterate Page objects for all new titles in a single namespace.
-    """
+    """Iterate Page objects for all new titles in a single namespace."""
     # API does not (yet) have a newpages function, so this tries to duplicate
     # it by filtering the recentchanges output
     # defaults to namespace 0 because that's how Special:Newpages defaults
@@ -879,7 +877,7 @@
 
     @classmethod
     def __precompile(cls, regex, flag):
-        """ precompile the regex list if needed """
+        """precompile the regex list if needed."""
         # Enable multiple regexes
         if not isinstance(regex, list):
             regex = [regex]
@@ -987,7 +985,6 @@
 @deprecate_arg("lookahead", None)
 def PreloadingGenerator(generator, step=50):
     """Yield preloaded pages taken from another generator."""
-
     # pages may be on more than one site, for example if an interwiki
     # generator is used, so use a separate preloader for each site
     sites = {}
@@ -1205,9 +1202,7 @@
 
 def SearchPageGenerator(query, step=None, total=None, namespaces=None,
                         site=None):
-    """
-    Provides a list of results using the internal MediaWiki search engine
-    """
+    """Provide a list of results using the internal MediaWiki search engine"""
     if site is None:
         site = pywikibot.Site()
     for page in site.search(query, step=step, total=total,
@@ -1216,8 +1211,10 @@
 
 
 def UntaggedPageGenerator(untaggedProject, limit=500):
-    """ Function to get the pages returned by this tool:
-    https://toolserver.org/~daniel/WikiSense/UntaggedImages.php
+    """
+    Function to get the pages returned by UntaggedImages tool.
+
+    More info at U{https://toolserver.org/~daniel/WikiSense/UntaggedImages.php}
     """
     URL = "https://toolserver.org/~daniel/WikiSense/UntaggedImages.php?";
     REGEXP = r"<td valign='top' title='Name'><a href='http[s]?://.*?" \
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index d11154e..74f2830 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -36,7 +36,7 @@
 
 
 def unescape(s):
-    """Replace escaped HTML-special characters by their originals"""
+    """Replace escaped HTML-special characters by their originals."""
     if '&' not in s:
         return s
     s = s.replace("&lt;", "<")
@@ -334,8 +334,8 @@
 def isDisabled(text, index, tags=['*']):
     """
     Return True if text[index] is disabled, e.g. by a comment or by nowiki 
tags.
-    For the tags parameter, see removeDisabledParts() above.
 
+    For the tags parameter, see removeDisabledParts() above.
     """
     # Find a marker that is not already in the text.
     marker = findmarker(text)
@@ -457,7 +457,8 @@
 
 
 def removeLanguageLinks(text, site=None, marker=''):
-    """Return text with all inter-language links removed.
+    """
+    Return text with all inter-language links removed.
 
     If a link to an unknown language is encountered, a warning is printed.
     If a marker is defined, that string is placed at the location of the
@@ -503,12 +504,12 @@
 
 def replaceLanguageLinks(oldtext, new, site=None, addOnly=False,
                          template=False, template_subpage=False):
-    """Replace inter-language links in the text with a new set of links.
+    """
+    Replace inter-language links in the text with a new set of links.
 
     'new' should be a dict with the Site objects as keys, and Page or Link
     objects as values (i.e., just like the dict returned by getLanguageLinks
     function).
-
     """
     # Find a marker that is not already in the text.
     marker = findmarker(oldtext)
@@ -596,7 +597,6 @@
 
     Return a unicode string that is formatted for inclusion in insite
     (defaulting to the current site).
-
     """
     if insite is None:
         insite = pywikibot.Site()
@@ -651,11 +651,11 @@
 # -------------------------------------
 
 def getCategoryLinks(text, site=None):
-    """Return a list of category links found in text.
+    """
+    Return a list of category links found in text.
 
     @return: all category links found
-    @returntype: list of Category objects
-
+    @rtype: list of Category objects
     """
     result = []
     if site is None:
@@ -678,11 +678,11 @@
 
 
 def removeCategoryLinks(text, site=None, marker=''):
-    """Return text with all category links removed.
+    """
+    Return text with all category links removed.
 
     Put the string marker after the last replacement (at the end of the text
     if there is no replacement).
-
     """
     # This regular expression will find every link that is possibly an
     # interwiki link, plus trailing whitespace. The language code is grouped.
@@ -723,9 +723,14 @@
 
 
 def replaceCategoryInPlace(oldtext, oldcat, newcat, site=None):
-    """Replace the category oldcat with the category newcat and return
-       the modified text.
+    """
+    Replace old category with new one and return the modified text.
 
+    @param oldtext: Content of the old category
+    @param oldcat: pywikibot.Category object of the old category
+    @param newcat: pywikibot.Category object of the new category
+    @return: Content of the new category
+    @rtype: unicode
     """
     if site is None:
         site = pywikibot.Site()
@@ -747,7 +752,7 @@
         r'^[^\S\n]*\[\[\s*(%s)\s*:\s*%s\s*((?:\|[^]]+)?\]\])[^\S\n]*\n'
         % (catNamespace, title), re.I | re.M)
     if newcat is None:
-        """ First go through and try the more restrictive regex that removes
+        """First go through and try the more restrictive regex that removes
         an entire line, if the category is the only thing on that line (this
         prevents blank lines left over in category lists following a removal.)
         """
@@ -774,7 +779,6 @@
 
     If addOnly is True, the old category won't be deleted and the
     category(s) given will be added (and so they won't replace anything).
-
     """
     # Find a marker that is not already in the text.
     marker = findmarker(oldtext)
@@ -831,13 +835,13 @@
 
 
 def categoryFormat(categories, insite=None):
-    """Return a string containing links to all categories in a list.
+    """
+    Return a string containing links to all categories in a list.
 
     'categories' should be a list of Category objects or strings
         which can be either the raw name or [[Category:..]].
 
     The string is formatted for inclusion in insite.
-
     """
     if not categories:
         return ''
@@ -901,7 +905,8 @@
 # --------------------------------
 
 def extract_templates_and_params(text):
-    """Return a list of templates found in text.
+    """
+    Return a list of templates found in text.
 
     Return value is a list of tuples. There is one tuple for each use of a
     template in the page, with the template title as the first entry and a
@@ -917,7 +922,6 @@
 
     @param text: The wikitext from which templates are extracted
     @type text: unicode or string
-
     """
 
     if not (config.use_mwparserfromhell and mwparserfromhell):
@@ -934,12 +938,12 @@
 
 def extract_templates_and_params_regex(text):
     """
-    See the documentation for extract_templates_and_params
-    This does basically the same thing, but uses regex.
-    @param text:
-    @return:
-    """
+    See the documentation for extract_templates_and_params.
 
+    This does basically the same thing, but uses regex.
+    @param text: text to parse
+    @return: list of templates
+    """
     # remove commented-out stuff etc.
     thistxt = removeDisabledParts(text)
 
@@ -1091,12 +1095,12 @@
 
 
 def glue_template_and_params(template_and_params):
-    """Return wiki text of template glued from params.
+    """
+    Return wiki text of template glued from params.
 
     You can use items from extract_templates_and_params here to get
     an equivalent template wiki text (it may happen that the order
     of the params changes).
-
     """
     (template, params) = template_and_params
     text = u''
@@ -1124,7 +1128,6 @@
           If a section parameter contains a internal link, it will match the
           section with or without a preceding colon which is required for a
           text link e.g. for categories and files.
-
     """
     # match preceding colon for text links
     section = re.sub(r'\\\[\\\[(\\\:)?', '\[\[\:?', re.escape(section))
@@ -1143,8 +1146,9 @@
     """
     Class building tzinfo objects for fixed-offset time zones
 
-    @offset: a number indicating fixed offset in minutes east from UTC
-    @name: a string with name of the timezone"""
+    @param offset: a number indicating fixed offset in minutes east from UTC
+    @param name: a string with name of the timezone
+    """
 
     def __init__(self, offset, name):
         self.__offset = datetime.timedelta(minutes=offset)
@@ -1169,9 +1173,7 @@
 
 class TimeStripper(object):
 
-    """
-    Find timestamp in page text and returns it as timezone aware datetime 
object
-    """
+    """Find timestamp in page text and returns it as timezone aware datetime 
object."""
 
     def __init__(self, site=None):
         if site is None:
@@ -1236,7 +1238,9 @@
 
     def last_match_and_replace(self, txt, pat):
         """
-        Take the rightmost match, to prevent spurious earlier matches, and 
replace with marker
+        Take the rightmost match and replace with marker.
+
+        It does it to prevent spurious earlier matches.
         """
         m = None
         cnt = 0
@@ -1264,9 +1268,9 @@
     def timestripper(self, line):
         """
         Find timestamp in line and convert it to time zone aware datetime.
+
         All the following items must be matched, otherwise None is returned:
         -. year, month, hour, time, day, minute, tzinfo
-
         """
         # match date fields
         dateDict = dict()
diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py
index ffd4367..003b522 100644
--- a/pywikibot/throttle.py
+++ b/pywikibot/throttle.py
@@ -1,7 +1,5 @@
 # -*- coding: utf-8  -*-
-"""
-Mechanics to slow down wiki read and/or write rate.
-"""
+"""Mechanics to slow down wiki read and/or write rate."""
 #
 # (C) Pywikibot team, 2008
 #
@@ -180,7 +178,7 @@
         return thisdelay
 
     def waittime(self, write=False):
-        """Return waiting time in seconds if a query would be made right now"""
+        """Return waiting time in seconds if a query would be made right 
now."""
         # Take the previous requestsize in account calculating the desired
         # delay this time
         thisdelay = self.getDelay(write=write)
diff --git a/pywikibot/tools.py b/pywikibot/tools.py
index 26c56fb..aa1fe66 100644
--- a/pywikibot/tools.py
+++ b/pywikibot/tools.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8  -*-
-"""Miscellaneous helper functions (not wiki-dependent)"""
+"""Miscellaneous helper functions (not wiki-dependent)."""
 #
 # (C) Pywikibot team, 2008
 #
@@ -28,8 +28,10 @@
 
 class UnicodeMixin(object):
 
-    """Mixin class to handle defining the proper __str__/__unicode__
-       methods in Python 2 or 3.
+    """
+    Mixin class to handle defining the proper methods in Python 2 or 3.
+
+    By methods it means __str__/__unicode__ methods.
     """
 
     if sys.version_info[0] >= 3:
diff --git a/pywikibot/version.py b/pywikibot/version.py
index 8826a4d..a29dd64 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -21,7 +21,7 @@
 
 class ParseError(Exception):
 
-    """ Parsing went wrong. """
+    """Parsing went wrong."""
 
 
 def _get_program_dir():
@@ -30,7 +30,9 @@
 
 
 def getversion(online=True):
-    """Return a pywikibot version string
+    """
+    Return a pywikibot version string.
+
     @param online: (optional) Include information obtained online
     """
     data = dict(getversiondict())  # copy dict to prevent changes in 'chache'
@@ -189,7 +191,8 @@
 
 
 def getversion_onlinerepo(repo=None):
-    """Retrieve current framework revision number from online repository.
+    """
+    Retrieve current framework revision number from online repository.
 
     @param repo: (optional) Online repository location
     @type repo: URL or string
@@ -205,8 +208,12 @@
 
 
 def getfileversion(filename):
-    """ Retrieve revision number of file (__version__ variable containing Id 
tag)
-        without importing it (thus can be done for any file)
+    """
+    Retrieve revision number of file (__version__ variable containing Id tag)
+
+    Without importing it (thus can be done for any file)
+    @param filename: Name of the file to get version
+    @type filename: string
     """
     _program_dir = _get_program_dir()
     __version__ = None
@@ -226,7 +233,8 @@
 
 
 def package_versions(modules=None, builtins=False, standard_lib=None):
-    """ Retrieve package version information.
+    """
+    Retrieve package version information.
 
     When builtins or standard_lib are None, they will be included only
     if a version was found in the package.
diff --git a/pywikibot/xmlreader.py b/pywikibot/xmlreader.py
index 4f7988e..f8ea87b 100644
--- a/pywikibot/xmlreader.py
+++ b/pywikibot/xmlreader.py
@@ -23,8 +23,9 @@
 
 def parseRestrictions(restrictions):
     """
-    Parses the characters within a restrictions tag and returns
-    strings representing user groups allowed to edit and to move
+    Parse the characters within a restrictions tag.
+
+    It returns strings representing user groups allowed to edit and to move
     a page, where None means there are no restrictions.
     """
     if not restrictions:
@@ -45,9 +46,8 @@
 
 class XmlEntry:
 
-    """
-    Represents a page.
-    """
+    """Represent a page."""
+
     def __init__(self, title, ns, id, text, username, ipedit, timestamp,
                  editRestriction, moveRestriction, revisionid, comment,
                  redirect):
@@ -102,7 +102,7 @@
             self._parse = self._parse_only_latest
 
     def parse(self):
-        """Generator using cElementTree iterparse function"""
+        """Generator using cElementTree iterparse function."""
         if self.filename.endswith('.bz2'):
             import bz2
             source = bz2.BZ2File(self.filename)
@@ -133,7 +133,7 @@
                 yield rev
 
     def _parse_only_latest(self, event, elem):
-        """Parser that yields only the latest revision"""
+        """Parser that yields only the latest revision."""
         if event == "end" and elem.tag == "{%s}page" % self.uri:
             self._headers(elem)
             revision = elem.find("{%s}revision" % self.uri)
@@ -142,7 +142,7 @@
             self.root.clear()
 
     def _parse_all(self, event, elem):
-        """Parser that yields all revisions"""
+        """Parser that yields all revisions."""
         if event == "start" and elem.tag == "{%s}page" % self.uri:
             self._headers(elem)
         if event == "end" and elem.tag == "{%s}revision" % self.uri:
@@ -160,7 +160,7 @@
             self.restrictions)
 
     def _create_revision(self, revision):
-        """Create a Single revision"""
+        """Create a Single revision."""
         revisionid = revision.findtext("{%s}id" % self.uri)
         timestamp = revision.findtext("{%s}timestamp" % self.uri)
         comment = revision.findtext("{%s}comment" % self.uri)

-- 
To view, visit https://gerrit.wikimedia.org/r/157334
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ic65452262d16ba2ec83633125ce1e3145a6f3aef
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Ladsgroup <ladsgr...@gmail.com>

_______________________________________________
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits

Reply via email to