Hello community,

here is the log from the commit of package python-w3lib for openSUSE:Factory checked in at 2020-07-24 10:00:44
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-w3lib (Old)
 and      /work/SRC/openSUSE:Factory/.python-w3lib.new.3592 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-w3lib"

Fri Jul 24 10:00:44 2020 rev:6 rq:822239 version:1.22.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-w3lib/python-w3lib.changes       2019-09-04 09:10:08.478981525 +0200
+++ /work/SRC/openSUSE:Factory/.python-w3lib.new.3592/python-w3lib.changes     2020-07-24 10:02:57.269810534 +0200
@@ -1,0 +2,9 @@
+Wed Jul 22 11:05:23 UTC 2020 - Marketa Calabkova <mcalabk...@suse.com>
+
+- update to 1.22.0
+  * Python 3.4 is no longer supported (issue #156)
+  * :func:`w3lib.url.safe_url_string` now supports an optional ``quote_path``
+    parameter to disable the percent-encoding of the URL path
+  * more small fixes and improvements
+
+-------------------------------------------------------------------
@@ -4 +13 @@
-- update to 1.21.1
+- update to 1.21.0

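As a quick illustration of the headline change above (the new quote_path
argument of safe_url_string), here is a minimal sketch based on the test
cases added in this release; the URLs are taken from those tests:

    from w3lib.url import safe_url_string

    # Default behaviour: the path component is percent-encoded.
    safe_url_string('http://google.com/"hello"')
    # -> 'http://google.com/%22hello%22'

    # quote_path=False (new in 1.22.0) leaves the path component as-is.
    safe_url_string('http://google.com/"hello"', quote_path=False)
    # -> 'http://google.com/"hello"'
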
Old:
----
  w3lib-1.21.0.tar.gz

New:
----
  w3lib-1.22.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-w3lib.spec ++++++
--- /var/tmp/diff_new_pack.9P8fiz/_old  2020-07-24 10:03:00.185812868 +0200
+++ /var/tmp/diff_new_pack.9P8fiz/_new  2020-07-24 10:03:00.189812870 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-w3lib
 #
-# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2020 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -18,19 +18,18 @@
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-w3lib
-Version:        1.21.0
+Version:        1.22.0
 Release:        0
 Summary:        Library of Web-Related Functions
 License:        BSD-3-Clause
 Group:          Development/Languages/Python
-Url:            http://github.com/scrapy/w3lib
+URL:            https://github.com/scrapy/w3lib
 Source:         https://files.pythonhosted.org/packages/source/w/w3lib/w3lib-%{version}.tar.gz
 BuildRequires:  %{python_module setuptools}
-BuildRequires:  %{python_module six} >= 1.4.1
+BuildRequires:  %{python_module six >= 1.4.1}
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros
 BuildArch:      noarch
-
 %python_subpackages
 
 %description

++++++ w3lib-1.21.0.tar.gz -> w3lib-1.22.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/PKG-INFO new/w3lib-1.22.0/PKG-INFO
--- old/w3lib-1.21.0/PKG-INFO   2019-08-09 13:00:36.000000000 +0200
+++ new/w3lib-1.22.0/PKG-INFO   2020-05-13 21:29:57.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: w3lib
-Version: 1.21.0
+Version: 1.22.0
 Summary: Library of web-related functions
 Home-page: https://github.com/scrapy/w3lib
 Author: Scrapy project
@@ -15,10 +15,10 @@
 Classifier: Programming Language :: Python :: 2
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Topic :: Internet :: WWW/HTTP
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/README.rst new/w3lib-1.22.0/README.rst
--- old/w3lib-1.21.0/README.rst 2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/README.rst 2020-05-13 21:29:23.000000000 +0200
@@ -27,7 +27,7 @@
 Requirements
 ============
 
-Python 2.7 or Python 3.4+
+Python 2.7 or Python 3.5+
 
 Install
 =======
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/docs/conf.py 
new/w3lib-1.22.0/docs/conf.py
--- old/w3lib-1.21.0/docs/conf.py       2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/docs/conf.py       2020-05-13 21:29:23.000000000 +0200
@@ -26,6 +26,8 @@
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 extensions = [
+    'hoverxref.extension',
+    'notfound.extension',
     'sphinx.ext.autodoc',
     'sphinx.ext.doctest',
     'sphinx.ext.intersphinx',
@@ -53,7 +55,7 @@
 # built documents.
 #
 # The full version, including alpha/beta/rc tags.
-release = '1.21.0'
+release = '1.22.0'
 # The short X.Y version.
 version = '.'.join(release.split('.')[:2])
 
@@ -125,7 +127,7 @@
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = []
 
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
@@ -248,4 +250,19 @@
 
 
 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'http://docs.python.org/': None}
+intersphinx_mapping = {
+    'pytest': ('https://docs.pytest.org/en/latest', None),
+    'python': ('https://docs.python.org/3', None),
+    'scrapy': ('https://scrapy.readthedocs.io/en/latest', None),
+    'tox': ('https://tox.readthedocs.io/en/latest', None),
+}
+
+
+# -- Nitpicking options -------------------------------------------------------
+
+nitpicky = True
+
+
+# -- sphinx-hoverxref options -------------------------------------------------
+
+hoverxref_auto_ref = True
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/docs/index.rst 
new/w3lib-1.22.0/docs/index.rst
--- old/w3lib-1.21.0/docs/index.rst     2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/docs/index.rst     2020-05-13 21:29:23.000000000 +0200
@@ -39,17 +39,15 @@
 Tests
 =====
 
-`pytest`_ is the preferred way to run tests. Just run: ``pytest`` from the
-root directory to execute tests using the default Python interpreter.
+:doc:`pytest <pytest:index>` is the preferred way to run tests. Just run:
+``pytest`` from the root directory to execute tests using the default Python
+interpreter.
 
-`tox`_ could be used to run tests for all supported Python versions.
-Install it (using 'pip install tox') and then run ``tox`` from
+:doc:`tox <tox:index>` could be used to run tests for all supported Python
+versions. Install it (using 'pip install tox') and then run ``tox`` from
 the root directory - tests will be executed for all available
 Python interpreters.
 
-.. _tox: http://tox.testrun.org
-.. _pytest: https://docs.pytest.org/en/latest/
-
 
 Changelog
 =========
@@ -60,12 +58,10 @@
 History
 -------
 
-The code of w3lib was originally part of the `Scrapy framework`_ but was later
-stripped out of Scrapy, with the aim of make it more reusable and to provide a
-useful library of web functions without depending on Scrapy.
-
-.. _Scrapy framework: http://scrapy.org
-.. _NEWS file: https://github.com/scrapy/w3lib/blob/master/NEWS
+The code of w3lib was originally part of the :doc:`Scrapy framework
+<scrapy:index>` but was later stripped out of Scrapy, with the aim of make it
+more reusable and to provide a useful library of web functions without
+depending on Scrapy.
 
 
 Indices and tables
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/docs/w3lib.rst 
new/w3lib-1.22.0/docs/w3lib.rst
--- old/w3lib-1.21.0/docs/w3lib.rst     2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/docs/w3lib.rst     2020-05-13 21:29:23.000000000 +0200
@@ -1,28 +1,28 @@
 w3lib Package
 =============
 
-:mod:`encoding` Module
-----------------------
+:mod:`~w3lib.encoding` Module
+-----------------------------
 
 .. automodule:: w3lib.encoding
     :members:
 
 
-:mod:`html` Module
-------------------
+:mod:`~w3lib.html` Module
+-------------------------
 
 .. automodule:: w3lib.html
     :members:
 
 
-:mod:`http` Module
-------------------
+:mod:`~w3lib.http` Module
+-------------------------
 
 .. automodule:: w3lib.http
     :members:
 
-:mod:`url` Module
------------------
+:mod:`~w3lib.url` Module
+------------------------
 
 .. automodule:: w3lib.url
     :members:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/pytest.ini new/w3lib-1.22.0/pytest.ini
--- old/w3lib-1.21.0/pytest.ini 2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/pytest.ini 2020-05-13 21:29:23.000000000 +0200
@@ -1,2 +1,15 @@
 [pytest]
 doctest_optionflags = ALLOW_UNICODE ALLOW_BYTES
+flake8-ignore =
+    docs/conf.py E121 E122 E265 E401 E501
+    tests/test_encoding.py E128 E221 E241 E302 E401 E501 E731
+    tests/test_form.py E265 E501
+    tests/test_html.py E123 E128 E241 E303 E501 E502
+    tests/test_http.py E128 E261 E302 W291
+    tests/test_url.py E126 E127 E128 E226 E261 E303 E501 W293 W391
+    w3lib/encoding.py E126 E128 E302 E305 E401 E501
+    w3lib/form.py E402 E501 E721
+    w3lib/html.py E128 E302 E501 E502 W504
+    w3lib/http.py E501
+    w3lib/url.py E128 E261 E302 E305 E501 F841 W291 W293 W504
+    w3lib/util.py E302
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/setup.py new/w3lib-1.22.0/setup.py
--- old/w3lib-1.21.0/setup.py   2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/setup.py   2020-05-13 21:29:23.000000000 +0200
@@ -3,7 +3,7 @@
 
 setup(
     name='w3lib',
-    version='1.21.0',
+    version='1.22.0',
     license='BSD',
     description='Library of web-related functions',
     author='Scrapy project',
@@ -21,10 +21,10 @@
         'Programming Language :: Python :: 2',
         'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.4',
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
         'Programming Language :: Python :: Implementation :: CPython',
         'Programming Language :: Python :: Implementation :: PyPy',
         'Topic :: Internet :: WWW/HTTP',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/tests/test_html.py 
new/w3lib-1.22.0/tests/test_html.py
--- old/w3lib-1.21.0/tests/test_html.py 2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/tests/test_html.py 2020-05-13 21:29:23.000000000 +0200
@@ -211,7 +211,7 @@
         # text with escape chars
         self.assertEqual(replace_escape_chars(u'escape\n\n'), u'escape')
         self.assertEqual(replace_escape_chars(u'escape\n', which_ones=('\t',)), u'escape\n')
-        self.assertEqual(replace_escape_chars(u'escape\tchars\n', which_ones=('\t')), 'escapechars\n')
+        self.assertEqual(replace_escape_chars(u'escape\tchars\n', which_ones=('\t',)), 'escapechars\n')
         self.assertEqual(replace_escape_chars(u'escape\tchars\n', replace_by=' '), 'escape chars ')
         self.assertEqual(replace_escape_chars(u'escape\tchars\n', replace_by=u'\xa3'), u'escape\xa3chars\xa3')
         self.assertEqual(replace_escape_chars(u'escape\tchars\n', replace_by=b'\xc2\xa3'), u'escape\xa3chars\xa3')
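
The one-character fix above is easy to miss: ('\t') is just a parenthesized
string, not a tuple, so the old call effectively passed which_ones='\t';
the trailing comma is what makes it a one-element tuple. Plain Python,
independent of w3lib:

    >>> type(('\t'))   # parentheses alone do not create a tuple
    <class 'str'>
    >>> type(('\t',))  # the trailing comma does
    <class 'tuple'>
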
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/tests/test_url.py 
new/w3lib-1.22.0/tests/test_url.py
--- old/w3lib-1.21.0/tests/test_url.py  2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/tests/test_url.py  2020-05-13 21:29:23.000000000 +0200
@@ -76,6 +76,17 @@
     def test_safe_url_string_unsafe_chars(self):
         safeurl = safe_url_string(r"http://localhost:8001/unwise{,},|,\,^,[,],`?|=[]&[]=|")
         self.assertEqual(safeurl, r"http://localhost:8001/unwise%7B,%7D,|,%5C,%5E,[,],%60?|=[]&[]=|")
+        
+    def test_safe_url_string_quote_path(self):
+        safeurl = safe_url_string(u'http://google.com/"hello"', quote_path=True)
+        self.assertEqual(safeurl, u'http://google.com/%22hello%22')
+        
+        safeurl = safe_url_string(u'http://google.com/"hello"', quote_path=False)
+        self.assertEqual(safeurl, u'http://google.com/"hello"')
+        
+        safeurl = safe_url_string(u'http://google.com/"hello"')
+        self.assertEqual(safeurl, u'http://google.com/%22hello%22')
+        
 
     def test_safe_url_string_with_query(self):
         safeurl = safe_url_string(u"http://www.example.com/£?unit=µ")
@@ -310,6 +321,12 @@
         self.assertEqual(add_or_replace_parameter(url, 'pageurl', 'test'),
                          'http://example.com/?version=1&pageurl=test&param2=value2')
 
+        url = 'http://domain/test?arg1=v1&arg2=v2&arg1=v3'
+        self.assertEqual(add_or_replace_parameter(url, 'arg4', 'v4'),
+                         'http://domain/test?arg1=v1&arg2=v2&arg1=v3&arg4=v4')
+        self.assertEqual(add_or_replace_parameter(url, 'arg1', 'v3'),
+                         'http://domain/test?arg1=v3&arg2=v2')
+
     def test_add_or_replace_parameters(self):
         url = 'http://domain/test'
         self.assertEqual(add_or_replace_parameters(url, {'arg': 'v'}),
@@ -319,6 +336,17 @@
                          'http://domain/test?arg1=v1&arg2=v2&arg3=v3&arg4=v4')
         self.assertEqual(add_or_replace_parameters(url, {'arg4': 'v4', 'arg3': 'v3new'}),
                          'http://domain/test?arg1=v1&arg2=v2&arg3=v3new&arg4=v4')
+        url = 'http://domain/test?arg1=v1&arg2=v2&arg1=v3'
+        self.assertEqual(add_or_replace_parameters(url, {'arg4': 'v4'}),
+                         'http://domain/test?arg1=v1&arg2=v2&arg1=v3&arg4=v4')
+        self.assertEqual(add_or_replace_parameters(url, {'arg1': 'v3'}),
+                         'http://domain/test?arg1=v3&arg2=v2')
+
+    def test_add_or_replace_parameters_does_not_change_input_param(self):
+        url = 'http://domain/test?arg=original'
+        input_param = {'arg': 'value'}
+        new_url = add_or_replace_parameters(url, input_param)  # noqa
+        self.assertEqual(input_param, {'arg': 'value'})
 
     def test_url_query_cleaner(self):
         self.assertEqual('product.html',
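
The new test cases above pin down two behaviours of add_or_replace_parameter
and add_or_replace_parameters: duplicate query parameters are only collapsed
for the key that is actually replaced, and the caller's dict is left
unmodified. A short sketch using the same values as the tests:

    from w3lib.url import add_or_replace_parameter, add_or_replace_parameters

    url = 'http://domain/test?arg1=v1&arg2=v2&arg1=v3'
    add_or_replace_parameter(url, 'arg4', 'v4')
    # -> 'http://domain/test?arg1=v1&arg2=v2&arg1=v3&arg4=v4'  (duplicates kept)
    add_or_replace_parameter(url, 'arg1', 'v3')
    # -> 'http://domain/test?arg1=v3&arg2=v2'  (replaced key is collapsed)

    params = {'arg': 'value'}
    add_or_replace_parameters('http://domain/test?arg=original', params)
    # params is still {'arg': 'value'}
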
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/tox.ini new/w3lib-1.22.0/tox.ini
--- old/w3lib-1.21.0/tox.ini    2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/tox.ini    2020-05-13 21:29:23.000000000 +0200
@@ -4,7 +4,7 @@
 # and then run "tox" from this directory.
 
 [tox]
-envlist = py27, pypy, py34, py35, py36, py37, pypy3
+envlist = py27, pypy, py35, py36, py37, py38, pypy3, docs
 
 [testenv]
 deps =
@@ -15,3 +15,34 @@
         --doctest-modules \
         --cov=w3lib --cov-report=term \
         {posargs:w3lib tests}
+
+[testenv:security]
+deps =
+    bandit
+commands =
+    bandit -r -c .bandit.yml {posargs:w3lib}
+
+[testenv:flake8]
+basepython = python3
+deps =
+    {[testenv]deps}
+    pytest-flake8
+commands =
+    pytest --flake8
+
+[testenv:pylint]
+deps =
+    {[testenv]deps}
+    pylint
+commands =
+    pylint conftest.py docs setup.py tests w3lib
+
+[docs]
+changedir = docs
+deps = -rdocs/requirements.txt
+
+[testenv:docs]
+changedir = {[docs]changedir}
+deps = {[docs]deps}
+commands =
+    sphinx-build -W -b html . {envtmpdir}/html
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/w3lib/__init__.py 
new/w3lib-1.22.0/w3lib/__init__.py
--- old/w3lib-1.21.0/w3lib/__init__.py  2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/w3lib/__init__.py  2020-05-13 21:29:23.000000000 +0200
@@ -1,3 +1,3 @@
-__version__ = "1.21.0"
+__version__ = "1.22.0"
 version_info = tuple(int(v) if v.isdigit() else v
                      for v in __version__.split('.'))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/w3lib/encoding.py 
new/w3lib-1.22.0/w3lib/encoding.py
--- old/w3lib-1.21.0/w3lib/encoding.py  2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/w3lib/encoding.py  2020-05-13 21:29:23.000000000 +0200
@@ -109,7 +109,7 @@
 }
 
 def _c18n_encoding(encoding):
-    """Cannonicalize an encoding name
+    """Canonicalize an encoding name
 
     This performs normalization and translates aliases using python's
     encoding aliases
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/w3lib/html.py 
new/w3lib-1.22.0/w3lib/html.py
--- old/w3lib-1.21.0/w3lib/html.py      2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/w3lib/html.py      2020-05-13 21:29:23.000000000 +0200
@@ -175,15 +175,13 @@
 
     >>> w3lib.html.remove_tags(doc, which_ones=('a',), keep=('p',))
     Traceback (most recent call last):
-      File "<stdin>", line 1, in <module>
-      File "/usr/local/lib/python2.7/dist-packages/w3lib/html.py", line 101, 
in remove_tags
-        assert not (which_ones and keep), 'which_ones and keep can not be 
given at the same time'
-    AssertionError: which_ones and keep can not be given at the same time
+        ...
+    ValueError: Cannot use both which_ones and keep
     >>>
 
     """
-
-    assert not (which_ones and keep), 'which_ones and keep can not be given at the same time'
+    if which_ones and keep:
+        raise ValueError('Cannot use both which_ones and keep')
 
     which_ones = {tag.lower() for tag in which_ones}
     keep = {tag.lower() for tag in keep}
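
One practical consequence of the change above: passing both which_ones and
keep to remove_tags now raises ValueError instead of tripping an assert, so
the check also survives python -O and can be caught explicitly. A small
sketch (the HTML snippet is just an arbitrary example):

    from w3lib.html import remove_tags

    doc = '<p><a href="http://www.example.com">example</a></p>'
    try:
        remove_tags(doc, which_ones=('a',), keep=('p',))
    except ValueError as exc:
        print(exc)  # Cannot use both which_ones and keep
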
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/w3lib/url.py 
new/w3lib-1.22.0/w3lib/url.py
--- old/w3lib-1.21.0/w3lib/url.py       2019-08-09 13:00:00.000000000 +0200
+++ new/w3lib-1.22.0/w3lib/url.py       2020-05-13 21:29:23.000000000 +0200
@@ -9,7 +9,7 @@
 import posixpath
 import warnings
 import string
-from collections import namedtuple, OrderedDict
+from collections import namedtuple
 import six
 from six.moves.urllib.parse import (urljoin, urlsplit, urlunsplit,
                                     urldefrag, urlencode, urlparse,
@@ -36,15 +36,18 @@
 
 _ascii_tab_newline_re = re.compile(r'[\t\n\r]')  # see https://infra.spec.whatwg.org/#ascii-tab-or-newline
 
-def safe_url_string(url, encoding='utf8', path_encoding='utf8'):
+def safe_url_string(url, encoding='utf8', path_encoding='utf8', quote_path=True):
     """Convert the given URL into a legal URL by escaping unsafe characters
     according to RFC-3986. Also, ASCII tabs and newlines are removed
     as per https://url.spec.whatwg.org/#url-parsing.
 
     If a bytes URL is given, it is first converted to `str` using the given
-    encoding (which defaults to 'utf-8'). 'utf-8' encoding is used for
-    URL path component (unless overriden by path_encoding), and given
-    encoding is used for query string or form data.
+    encoding (which defaults to 'utf-8'). If quote_path is True (default), 
+    path_encoding ('utf-8' by default) is used to encode URL path component
+    which is then quoted. Otherwise, if quote_path is False, path component
+    is not encoded or quoted. Given encoding is used for query string 
+    or form data.
+
     When passing an encoding, you should use the encoding of the
     original page (the page from which the URL was extracted from).
 
@@ -69,15 +72,18 @@
     except UnicodeError:
         netloc = parts.netloc
 
+    # default encoding for path component SHOULD be UTF-8
+    if quote_path:
+        path = quote(to_bytes(parts.path, path_encoding), _safe_chars)
+    else:
+        path = to_native_str(parts.path)
+    
     # quote() in Python2 return type follows input type;
     # quote() in Python3 always returns Unicode (native str)
     return urlunsplit((
         to_native_str(parts.scheme),
         to_native_str(netloc).rstrip(':'),
-
-        # default encoding for path component SHOULD be UTF-8
-        quote(to_bytes(parts.path, path_encoding), _safe_chars),
-
+        path,
         # encoding of query and fragment follows page encoding
         # or form-charset (if known and passed)
         quote(to_bytes(parts.query, encoding), _safe_chars),
@@ -202,13 +208,21 @@
         url += '#' + fragment
     return url
 
-
 def _add_or_replace_parameters(url, params):
     parsed = urlsplit(url)
-    args = parse_qsl(parsed.query, keep_blank_values=True)
+    current_args = parse_qsl(parsed.query, keep_blank_values=True)
+
+    new_args = []
+    seen_params = set()
+    for name, value in current_args:
+        if name not in params:
+            new_args.append((name, value))
+        elif name not in seen_params:
+            new_args.append((name, params[name]))
+            seen_params.add(name)
 
-    new_args = OrderedDict(args)
-    new_args.update(params)
+    not_modified_args = [(name, value) for name, value in params.items() if name not in seen_params]
+    new_args += not_modified_args
 
     query = urlencode(new_args)
     return urlunsplit(parsed._replace(query=query))
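
For readability, the merge logic introduced above can be read as a standalone
function; replace_params below is a hypothetical name used only for this
sketch. Compared with the earlier OrderedDict-based merge, it preserves
duplicate occurrences of parameters that are not being replaced and rewrites
only the first occurrence of each replaced key:

    def replace_params(current_args, params):
        # current_args: list of (name, value) pairs in original URL order
        # params: mapping of parameter names to their new values
        new_args, seen = [], set()
        for name, value in current_args:
            if name not in params:
                new_args.append((name, value))         # untouched parameter, kept as-is
            elif name not in seen:
                new_args.append((name, params[name]))  # first occurrence gets the new value
                seen.add(name)                         # later duplicates of a replaced key are dropped
        # parameters that were not in the URL at all are appended at the end
        new_args += [(n, v) for n, v in params.items() if n not in seen]
        return new_args

    replace_params([('arg1', 'v1'), ('arg2', 'v2'), ('arg1', 'v3')], {'arg4': 'v4'})
    # -> [('arg1', 'v1'), ('arg2', 'v2'), ('arg1', 'v3'), ('arg4', 'v4')]
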
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/w3lib-1.21.0/w3lib.egg-info/PKG-INFO 
new/w3lib-1.22.0/w3lib.egg-info/PKG-INFO
--- old/w3lib-1.21.0/w3lib.egg-info/PKG-INFO    2019-08-09 13:00:36.000000000 +0200
+++ new/w3lib-1.22.0/w3lib.egg-info/PKG-INFO    2020-05-13 21:29:57.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: w3lib
-Version: 1.21.0
+Version: 1.22.0
 Summary: Library of web-related functions
 Home-page: https://github.com/scrapy/w3lib
 Author: Scrapy project
@@ -15,10 +15,10 @@
 Classifier: Programming Language :: Python :: 2
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Topic :: Internet :: WWW/HTTP

