Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-peewee for openSUSE:Factory 
checked in at 2024-02-12 18:52:48
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-peewee (Old)
 and      /work/SRC/openSUSE:Factory/.python-peewee.new.1815 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-peewee"

Mon Feb 12 18:52:48 2024 rev:26 rq:1146064 version:3.17.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-peewee/python-peewee.changes      2024-01-10 21:52:44.889168373 +0100
+++ /work/SRC/openSUSE:Factory/.python-peewee.new.1815/python-peewee.changes    2024-02-12 18:55:17.321169593 +0100
@@ -1,0 +2,16 @@
+Mon Feb 12 02:57:29 UTC 2024 - Steve Kowalik <steven.kowa...@suse.com>
+
+- Update to 3.17.1:
+  * Add bitwise and other helper methods to `BigBitField`, #2802.
+  * Add `add_column_default` and `drop_column_default` migrator methods for
+    specifying a server-side default value, #2803.
+  * The new `star` attribute was causing issues for users who had a field named
+    star on their models. This attribute is now renamed to `__star__`. #2796.
+  * Fix compatibility issues with 3.12 related to utcnow() deprecation.
+  * Add stricter locking on connection pool to prevent race conditions.
+  * Add adapters and converters to Sqlite to replace ones deprecated in 3.12.
+  * Fix bug in `model_to_dict()` when only aliases are present.
+  * Fix version check for Sqlite native drop column support.
+  * Do not specify a `reconnect=` argument to `ping()` if using MySQL 8.x.
+
+-------------------------------------------------------------------

Old:
----
  peewee-3.17.0.tar.gz

New:
----
  peewee-3.17.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-peewee.spec ++++++
--- /var/tmp/diff_new_pack.Fy20XA/_old  2024-02-12 18:55:17.773185916 +0100
+++ /var/tmp/diff_new_pack.Fy20XA/_new  2024-02-12 18:55:17.777186060 +0100
@@ -18,7 +18,7 @@
 
 %{?sle15_python_module_pythons}
 Name:           python-peewee
-Version:        3.17.0
+Version:        3.17.1
 Release:        0
 Summary:        An expressive ORM that supports multiple SQL backends
 License:        BSD-3-Clause
@@ -40,7 +40,7 @@
 BuildRequires:  unzip
 BuildRequires:  pkgconfig(sqlite3)
 Requires(post): update-alternatives
-Requires(postun):update-alternatives
+Requires(postun): update-alternatives
 %python_subpackages
 
 %description
@@ -74,7 +74,7 @@
 %license LICENSE
 %doc CHANGELOG.md README.rst TODO.rst
 %python_alternative %{_bindir}/pwiz.py
-%{python_sitearch}/peewee-%{version}*-info
+%{python_sitearch}/peewee-%{version}.dist-info
 %{python_sitearch}/peewee.py
 %{python_sitearch}/pwiz.py
 %{python_sitearch}/playhouse

++++++ peewee-3.17.0.tar.gz -> peewee-3.17.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/peewee-3.17.0/CHANGELOG.md new/peewee-3.17.1/CHANGELOG.md
--- old/peewee-3.17.0/CHANGELOG.md      2023-10-13 17:45:33.000000000 +0200
+++ new/peewee-3.17.1/CHANGELOG.md      2024-02-05 16:02:22.000000000 +0100
@@ -7,7 +7,23 @@
 
 ## master
 
-[View commits](https://github.com/coleifer/peewee/compare/3.17.0...master)
+[View commits](https://github.com/coleifer/peewee/compare/3.17.1...master)
+
+## 3.17.1
+
+* Add bitwise and other helper methods to `BigBitField`, #2802.
+* Add `add_column_default` and `drop_column_default` migrator methods for
+  specifying a server-side default value, #2803.
+* The new `star` attribute was causing issues for users who had a field named
+  star on their models. This attribute is now renamed to `__star__`. #2796.
+* Fix compatibility issues with 3.12 related to utcnow() deprecation.
+* Add stricter locking on connection pool to prevent race conditions.
+* Add adapters and converters to Sqlite to replace ones deprecated in 3.12.
+* Fix bug in `model_to_dict()` when only aliases are present.
+* Fix version check for Sqlite native drop column support.
+* Do not specify a `reconnect=` argument to `ping()` if using MySQL 8.x.
+
+[View commits](https://github.com/coleifer/peewee/compare/3.17.0...3.17.1)
 
 ## 3.17.0
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/docs/peewee/api.rst 
new/peewee-3.17.1/docs/peewee/api.rst
--- old/peewee-3.17.0/docs/peewee/api.rst       2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/docs/peewee/api.rst       2024-02-05 16:02:22.000000000 
+0100
@@ -3093,6 +3093,21 @@
         assert bitmap.data.toggle_bit(63) is True
         assert bitmap.data.is_set(63)
 
+        # BigBitField supports item accessor by bit-number, e.g.:
+        assert bitmap.data[63]
+        bitmap.data[0] = 1
+        del bitmap.data[0]
+
+        # We can also combine bitmaps using bitwise operators, e.g.
+        b = Bitmap(data=b'\x01')
+        b.data |= b'\x02'
+        assert list(b.data) == [1, 1, 0, 0, 0, 0, 0, 0]
+        assert len(b.data) == 1
+
+    .. py:method:: clear()
+
+        Clears the bitmap and sets length to 0.
+
     .. py:method:: set_bit(idx)
 
         :param int idx: Bit to set, indexed starting from zero.
@@ -3130,6 +3145,44 @@
 
         Returns boolean indicating whether the *idx*-th bit is set or not.
 
+    .. py:method:: __getitem__(idx)
+
+        Same as :py:meth:`~BigBitField.is_set`
+
+    .. py:method:: __setitem__(idx, value)
+
+        Set the bit at ``idx`` to value (True or False).
+
+    .. py:method:: __delitem__(idx)
+
+        Same as :py:meth:`~BigBitField.clear_bit`
+
+    .. py:method:: __len__()
+
+        Return the length of the bitmap **in bytes**.
+
+    .. py:method:: __iter__()
+
+        Returns an iterator yielding 1 or 0 for each bit in the bitmap.
+
+    .. py:method:: __and__(other)
+
+        :param other: Either :py:class:`BigBitField`, ``bytes``, ``bytearray``
+            or ``memoryview`` object.
+        :returns: bitwise ``and`` of two bitmaps.
+
+    .. py:method:: __or__(other)
+
+        :param other: Either :py:class:`BigBitField`, ``bytes``, ``bytearray``
+            or ``memoryview`` object.
+        :returns: bitwise ``or`` of two bitmaps.
+
+    .. py:method:: __xor__(other)
+
+        :param other: Either :py:class:`BigBitField`, ``bytes``, ``bytearray``
+            or ``memoryview`` object.
+        :returns: bitwise ``xor`` of two bitmaps.
+
 
 .. py:class:: UUIDField
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/docs/peewee/database.rst 
new/peewee-3.17.1/docs/peewee/database.rst
--- old/peewee-3.17.0/docs/peewee/database.rst  2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/docs/peewee/database.rst  2024-02-05 16:02:22.000000000 
+0100
@@ -538,7 +538,7 @@
 
 Peewee also comes with an alternate SQLite database that uses :ref:`apsw`.
 More information on APSW can be obtained on the
-`APSW project website <https://code.google.com/p/apsw/>`_. APSW provides
+`APSW project website <https://rogerbinns.github.io/apsw/>`_. APSW provides
 special features like:
 
 * Virtual tables, virtual file-systems, Blob I/O, backups and file control.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/docs/peewee/models.rst 
new/peewee-3.17.1/docs/peewee/models.rst
--- old/peewee-3.17.0/docs/peewee/models.rst    2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/docs/peewee/models.rst    2024-02-05 16:02:22.000000000 
+0100
@@ -556,6 +556,17 @@
     assert bitmap.data.toggle_bit(63) is True
     assert bitmap.data.is_set(63)
 
+    # BigBitField supports item accessor by bit-number, e.g.:
+    assert bitmap.data[63]
+    bitmap.data[0] = 1
+    del bitmap.data[0]
+
+    # We can also combine bitmaps using bitwise operators, e.g.
+    b = Bitmap(data=b'\x01')
+    b.data |= b'\x02'
+    assert list(b.data) == [1, 1, 0, 0, 0, 0, 0, 0]
+    assert len(b.data) == 1
+
 BareField
 ^^^^^^^^^
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/docs/peewee/playhouse.rst 
new/peewee-3.17.1/docs/peewee/playhouse.rst
--- old/peewee-3.17.0/docs/peewee/playhouse.rst 2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/docs/peewee/playhouse.rst 2024-02-05 16:02:22.000000000 
+0100
@@ -3116,6 +3116,31 @@
     # Add a UNIQUE constraint on the first and last names.
     migrate(migrator.add_unique('person', 'first_name', 'last_name'))
 
+Adding or dropping a database-level default value for a column:
+
+.. code-block:: python
+
+    # Add a default value for a status column.
+    migrate(migrator.add_column_default(
+        'entries',
+        'status',
+        'draft'))
+
+    # Remove the default.
+    migrate(migrator.drop_column_default('entries', 'status'))
+
+    # Use a function for the default value (does not work with Sqlite):
+    migrate(migrator.add_column_default(
+        'entries',
+        'timestamp',
+        fn.now()))
+
+    # Or alternatively (works with Sqlite):
+    migrate(migrator.add_column_default(
+        'entries',
+        'timestamp',
+        'now()'))
+
 .. note::
     Postgres users may need to set the search-path when using a non-standard
     schema. This can be done as follows:
@@ -3189,6 +3214,23 @@
         :param str table: Name of table containing column.
         :param str column: Name of the column to make nullable.
 
+    .. py:method:: add_column_default(table, column, default)
+
+        :param str table: Name of table containing column.
+        :param str column: Name of the column to add default to.
+        :param default: New default value for column. See notes below.
+
+        Peewee attempts to properly quote the default if it appears to be a
+        string literal. Otherwise the default will be treated literally.
+        Postgres and MySQL support specifying the default as a peewee
+        expression, e.g. ``fn.NOW()``, but Sqlite users will need to use
+        ``default='now()'`` instead.
+
+    .. py:method:: drop_column_default(table, column)
+
+        :param str table: Name of table containing column.
+        :param str column: Name of the column to remove default from.
+
     .. py:method:: alter_column_type(table, column, field[, cast=None])
 
         :param str table: Name of the table.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/docs/peewee/querying.rst 
new/peewee-3.17.1/docs/peewee/querying.rst
--- old/peewee-3.17.0/docs/peewee/querying.rst  2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/docs/peewee/querying.rst  2024-02-05 16:02:22.000000000 
+0100
@@ -386,7 +386,8 @@
 Peewee provides support for varying types of upsert functionality. With SQLite
 prior to 3.24.0 and MySQL, Peewee offers the :py:meth:`~Model.replace`, which
 allows you to insert a record or, in the event of a constraint violation,
-replace the existing record.
+replace the existing record. For Sqlite 3.24+ and Postgres, peewee provides full
+support for ``ON CONFLICT`` queries.
 
 Example of using :py:meth:`~Model.replace` and :py:meth:`~Insert.on_conflict_replace`:
 
@@ -509,6 +510,64 @@
     # updated, as the new value (10) is now less than the value in the
     # original row (11).
 
+There are several important concepts to understand when using ``ON CONFLICT``:
+
+* ``conflict_target=``: which column(s) have the UNIQUE constraint. For a user
+  table, this might be the user's email.
+* ``preserve=``: if a conflict occurs, this parameter is used to indicate which
+  values from the **new** data we wish to update.
+* ``update=``: if a conflict occurs, this is a mapping of data to apply to the
+  pre-existing row.
+* ``EXCLUDED``: this "magic" namespace allows you to reference the new data
+  that would have been inserted if the constraint hadn't failed.
+
+Full example:
+
+.. code-block:: python
+
+    class User(Model):
+        email = CharField(unique=True)  # Unique identifier for user.
+        last_login = DateTimeField()
+        login_count = IntegerField(default=0)
+        ip_log = TextField(default='')
+
+
+    # Demonstrates the above 4 concepts.
+    def login(email, ip):
+        rowid = (User
+                 .insert({User.email: email,
+                          User.last_login: datetime.now(),
+                          User.login_count: 1,
+                          User.ip_log: ip})
+                 .on_conflict(
+                     # If the INSERT fails due to a constraint violation on the
+                     # user email, then perform an UPDATE instead.
+                     conflict_target=[User.email],
+
+                     # Set the "last_login" to the value we would have inserted
+                     # (our call to datetime.now()).
+                     preserve=[User.last_login],
+
+                     # Increment the user's login count and prepend the new IP
+                     # to the user's ip history.
+                     update={User.login_count: User.login_count + 1,
+                             User.ip_log: fn.CONCAT(EXCLUDED.ip_log, ',', User.ip_log)})
+                 .execute())
+
+        return rowid
+
+    # This will insert the initial row, returning the new row id (1).
+    print(login('t...@example.com', '127.1'))
+
+    # Because t...@example.com exists, this will trigger the UPSERT. The row id
+    # from above is returned again (1).
+    print(login('t...@example.com', '127.2'))
+
+    u = User.get()
+    print(u.login_count, u.ip_log)
+
+    # Prints "2 127.2,127.1"
+
 For more information, see :py:meth:`Insert.on_conflict` and
 :py:class:`OnConflict`.
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/examples/blog/app.py 
new/peewee-3.17.1/examples/blog/app.py
--- old/peewee-3.17.0/examples/blog/app.py      2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/examples/blog/app.py      2024-02-05 16:02:22.000000000 
+0100
@@ -4,11 +4,12 @@
 import re
 import urllib
 
-from flask import (Flask, flash, Markup, redirect, render_template, request,
+from flask import (Flask, flash, redirect, render_template, request,
                    Response, session, url_for)
 from markdown import markdown
 from markdown.extensions.codehilite import CodeHiliteExtension
 from markdown.extensions.extra import ExtraExtension
+from markupsafe import Markup
 from micawber import bootstrap_basic, parse_html
 from micawber.cache import Cache as OEmbedCache
 from peewee import *
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/peewee.py new/peewee-3.17.1/peewee.py
--- old/peewee-3.17.0/peewee.py 2023-10-13 17:45:33.000000000 +0200
+++ new/peewee-3.17.1/peewee.py 2024-02-05 16:02:22.000000000 +0100
@@ -70,7 +70,7 @@
         mysql = None
 
 
-__version__ = '3.17.0'
+__version__ = '3.17.1'
 __all__ = [
     'AnyField',
     'AsIs',
@@ -192,11 +192,43 @@
             raise value.with_traceback(tb)
         raise value
 
+# Other compat issues.
+if sys.version_info < (3, 12):
+    utcfromtimestamp = datetime.datetime.utcfromtimestamp
+    utcnow = datetime.datetime.utcnow
+else:
+    def utcfromtimestamp(ts):
+        return (datetime.datetime
+                .fromtimestamp(ts, tz=datetime.timezone.utc)
+                .replace(tzinfo=None))
+    def utcnow():
+        return (datetime.datetime
+                .now(datetime.timezone.utc)
+                .replace(tzinfo=None))
+
 
 if sqlite3:
     sqlite3.register_adapter(decimal.Decimal, str)
     sqlite3.register_adapter(datetime.date, str)
     sqlite3.register_adapter(datetime.time, str)
+    if sys.version_info >= (3, 12):
+        # We need to register datetime adapters as these are deprecated.
+        def datetime_adapter(d): return d.isoformat(' ')
+        def convert_date(d): return datetime.date(*map(int, d.split(b'-')))
+        def convert_timestamp(t):
+            date, time = t.split(b' ')
+            y, m, d = map(int, date.split(b'-'))
+            t_full = time.split(b'.')
+            h, m, s = map(int, t_full[0].split(b':'))
+            if len(t_full) == 2:
+                usec = int('{:0<6.6}'.format(t_full[1].decode()))
+            else:
+                usec = 0
+            return datetime.datetime(y, m, d, h, m, s, usec)
+        sqlite3.register_adapter(datetime.datetime, datetime_adapter)
+        sqlite3.register_converter('date', convert_date)
+        sqlite3.register_converter('timestamp', convert_timestamp)
+
     __sqlite_version__ = sqlite3.sqlite_version_info
 else:
     __sqlite_version__ = (0, 0, 0)
@@ -780,6 +812,13 @@
         return self
 
 
+class Star(Node):
+    def __init__(self, source):
+        self.source = source
+    def __sql__(self, ctx):
+        return ctx.sql(QualifiedNames(self.source)).literal('.*')
+
+
 class Source(Node):
     c = _DynamicColumn()
 
@@ -797,8 +836,8 @@
         return Select((self,), columns)
 
     @property
-    def star(self):
-        return NodeList((QualifiedNames(self), SQL('.*')), glue='')
+    def __star__(self):
+        return Star(self)
 
     def join(self, dest, join_type=JOIN.INNER, on=None):
         return Join(self, dest, join_type, on)
@@ -4188,8 +4227,12 @@
 
         conn = self._state.conn
         if hasattr(conn, 'ping'):
+            if self.server_version[0] == 8:
+                args = ()
+            else:
+                args = (False,)
             try:
-                conn.ping(False)
+                conn.ping(*args)
             except Exception:
                 return False
         return True
@@ -5040,6 +5083,9 @@
             value = bytearray(value)
         self._buffer = self.instance.__data__[self.name] = value
 
+    def clear(self):
+        self._buffer.clear()
+
     def _ensure_length(self, idx):
         byte_num, byte_offset = divmod(idx, 8)
         cur_size = len(self._buffer)
@@ -5061,9 +5107,56 @@
         return bool(self._buffer[byte_num] & (1 << byte_offset))
 
     def is_set(self, idx):
-        byte_num, byte_offset = self._ensure_length(idx)
+        byte_num, byte_offset = divmod(idx, 8)
+        cur_size = len(self._buffer)
+        if cur_size <= byte_num:
+            return False
         return bool(self._buffer[byte_num] & (1 << byte_offset))
 
+    __getitem__ = is_set
+    def __setitem__(self, item, value):
+        self.set_bit(item) if value else self.clear_bit(item)
+    __delitem__ = clear_bit
+
+    def __len__(self):
+        return len(self._buffer)
+
+    def _get_compatible_data(self, other):
+        if isinstance(other, BigBitFieldData):
+            data = other._buffer
+        elif isinstance(other, (bytes, bytearray, memoryview)):
+            data = other
+        else:
+            raise ValueError('Incompatible data-type')
+        diff = len(data) - len(self)
+        if diff > 0: self._buffer.extend(b'\x00' * diff)
+        return data
+
+    def _bitwise_op(self, other, op):
+        if isinstance(other, BigBitFieldData):
+            data = other._buffer
+        elif isinstance(other, (bytes, bytearray, memoryview)):
+            data = other
+        else:
+            raise ValueError('Incompatible data-type')
+        buf = bytearray(b'\x00' * max(len(self), len(other)))
+        it = itertools.zip_longest(self._buffer, data, fillvalue=0)
+        for i, (a, b) in enumerate(it):
+            buf[i] = op(a, b)
+        return buf
+
+    def __and__(self, other):
+        return self._bitwise_op(other, operator.and_)
+    def __or__(self, other):
+        return self._bitwise_op(other, operator.or_)
+    def __xor__(self, other):
+        return self._bitwise_op(other, operator.xor)
+
+    def __iter__(self):
+        for b in self._buffer:
+            for j in range(8):
+                yield 1 if (b & (1 << j)) else 0
+
     def __repr__(self):
         return repr(self._buffer)
     if sys.version_info[0] < 3:
@@ -5292,7 +5385,7 @@
         self.ticks_to_microsecond = 1000000 // self.resolution
 
         self.utc = kwargs.pop('utc', False) or False
-        dflt = datetime.datetime.utcnow if self.utc else datetime.datetime.now
+        dflt = utcnow if self.utc else datetime.datetime.now
         kwargs.setdefault('default', dflt)
         super(TimestampField, self).__init__(*args, **kwargs)
 
@@ -5344,7 +5437,7 @@
                 microseconds = 0
 
             if self.utc:
-                value = datetime.datetime.utcfromtimestamp(value)
+                value = utcfromtimestamp(value)
             else:
                 value = datetime.datetime.fromtimestamp(value)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/playhouse/migrate.py 
new/peewee-3.17.1/playhouse/migrate.py
--- old/peewee-3.17.0/playhouse/migrate.py      2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/playhouse/migrate.py      2024-02-05 16:02:22.000000000 
+0100
@@ -390,6 +390,32 @@
                 .literal(' DROP NOT NULL'))
 
     @operation
+    def add_column_default(self, table, column, default):
+        if default is None:
+            raise ValueError('`default` must be not None/NULL.')
+        if callable_(default):
+            default = default()
+        # Try to handle SQL functions and string literals, otherwise pass as a
+        # bound value.
+        if isinstance(default, str) and default.endswith((')', "'")):
+            default = SQL(default)
+
+        return (self
+                ._alter_table(self.make_context(), table)
+                .literal(' ALTER COLUMN ')
+                .sql(Entity(column))
+                .literal(' SET DEFAULT ')
+                .sql(default))
+
+    @operation
+    def drop_column_default(self, table, column):
+        return (self
+                ._alter_table(self.make_context(), table)
+                .literal(' ALTER COLUMN ')
+                .sql(Entity(column))
+                .literal(' DROP DEFAULT'))
+
+    @operation
     def alter_column_type(self, table, column, field, cast=None):
         # ALTER TABLE <table> ALTER COLUMN <column>
         ctx = self.make_context()
@@ -668,7 +694,7 @@
     SQLite supports a subset of ALTER TABLE queries, view the docs for the
     full details http://sqlite.org/lang_altertable.html
     """
-    column_re = re.compile('(.+?)\((.+)\)')
+    column_re = re.compile(r'(.+?)\((.+)\)')
     column_split_re = re.compile(r'(?:[^,(]|\([^)]*\))+')
     column_name_re = re.compile(r'''["`']?([\w]+)''')
     fk_re = re.compile(r'FOREIGN KEY\s+\("?([\w]+)"?\)\s+', re.I)
@@ -825,7 +851,7 @@
         # Strip out any junk after the column name.
         clean = []
         for column in columns:
-            if re.match('%s(?:[\'"`\]]?\s|$)' % column_to_update, column):
+            if re.match(r'%s(?:[\'"`\]]?\s|$)' % column_to_update, column):
                 column = new_column + column[len(column_to_update):]
             clean.append(column)
 
@@ -833,7 +859,7 @@
 
     @operation
     def drop_column(self, table, column_name, cascade=True, legacy=False):
-        if sqlite3.sqlite_version_info >= (3, 25, 0) and not legacy:
+        if sqlite3.sqlite_version_info >= (3, 35, 0) and not legacy:
             ctx = self.make_context()
             (self._alter_table(ctx, table)
              .literal(' DROP COLUMN ')
@@ -867,6 +893,27 @@
         return self._update_column(table, column, _drop_not_null)
 
     @operation
+    def add_column_default(self, table, column, default):
+        if default is None:
+            raise ValueError('`default` must be not None/NULL.')
+        if callable_(default):
+            default = default()
+        if (isinstance(default, str) and not default.endswith((')', "'"))
+            and not default.isdigit()):
+            default = "'%s'" % default
+        def _add_default(column_name, column_def):
+            # Try to handle SQL functions and string literals, otherwise quote.
+            return column_def + ' DEFAULT %s' % default
+        return self._update_column(table, column, _add_default)
+
+    @operation
+    def drop_column_default(self, table, column):
+        def _drop_default(column_name, column_def):
+            col = re.sub(r'DEFAULT\s+[\w"\'\(\)]+(\s|$)', '', column_def, re.I)
+            return col.strip()
+        return self._update_column(table, column, _drop_default)
+
+    @operation
     def alter_column_type(self, table, column, field, cast=None):
         if cast is not None:
             raise ValueError('alter_column_type() does not support cast with '
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/playhouse/pool.py 
new/peewee-3.17.1/playhouse/pool.py
--- old/peewee-3.17.0/playhouse/pool.py 2023-10-13 17:45:33.000000000 +0200
+++ new/peewee-3.17.1/playhouse/pool.py 2024-02-05 16:02:22.000000000 +0100
@@ -31,9 +31,11 @@
 
 That's it!
 """
+import functools
 import heapq
 import logging
 import random
+import threading
 import time
 from collections import namedtuple
 from itertools import chain
@@ -67,6 +69,14 @@
                                                'checked_out'))
 
 
+def locked(fn):
+    @functools.wraps(fn)
+    def inner(self, *args, **kwargs):
+        with self._pool_lock:
+            return fn(self, *args, **kwargs)
+    return inner
+
+
 class PooledDatabase(object):
     def __init__(self, database, max_connections=20, stale_timeout=None,
                  timeout=None, **kwargs):
@@ -76,6 +86,8 @@
         if self._wait_timeout == 0:
             self._wait_timeout = float('inf')
 
+        self._pool_lock = threading.RLock()
+
         # Available / idle connections stored in a heap, sorted oldest first.
         self._connections = []
 
@@ -119,6 +131,7 @@
         raise MaxConnectionsExceeded('Max connections exceeded, timed out '
                                      'attempting to connect.')
 
+    @locked
     def _connect(self):
         while True:
             try:
@@ -154,7 +167,7 @@
                     len(self._in_use) >= self._max_connections):
                 raise MaxConnectionsExceeded('Exceeded maximum connections.')
             conn = super(PooledDatabase, self)._connect()
-            ts = time.time() - random.random() / 1000
+            ts = time.time()
             key = self.conn_key(conn)
             logger.debug('Created new connection %s.', key)
 
@@ -173,6 +186,7 @@
         # Called on check-in to make sure the connection can be re-used.
         return True
 
+    @locked
     def _close(self, conn, close_conn=False):
         key = self.conn_key(conn)
         if close_conn:
@@ -188,6 +202,7 @@
             else:
                 logger.debug('Closed %s.', key)
 
+    @locked
     def manual_close(self):
         """
         Close the underlying connection without returning it to the pool.
@@ -206,46 +221,50 @@
         self.close()
         self._close(conn, close_conn=True)
 
+    @locked
     def close_idle(self):
         # Close any open connections that are not currently in-use.
-        with self._lock:
-            for _, conn in self._connections:
-                self._close(conn, close_conn=True)
-            self._connections = []
+        for _, conn in self._connections:
+            self._close(conn, close_conn=True)
+        self._connections = []
 
+    @locked
     def close_stale(self, age=600):
         # Close any connections that are in-use but were checked out quite some
         # time ago and can be considered stale.
-        with self._lock:
-            in_use = {}
-            cutoff = time.time() - age
-            n = 0
-            for key, pool_conn in self._in_use.items():
-                if pool_conn.checked_out < cutoff:
-                    self._close(pool_conn.connection, close_conn=True)
-                    n += 1
-                else:
-                    in_use[key] = pool_conn
-            self._in_use = in_use
+        in_use = {}
+        cutoff = time.time() - age
+        n = 0
+        for key, pool_conn in self._in_use.items():
+            if pool_conn.checked_out < cutoff:
+                self._close(pool_conn.connection, close_conn=True)
+                n += 1
+            else:
+                in_use[key] = pool_conn
+        self._in_use = in_use
         return n
 
+    @locked
     def close_all(self):
         # Close all connections -- available and in-use. Warning: may break any
         # active connections used by other threads.
         self.close()
-        with self._lock:
-            for _, conn in self._connections:
-                self._close(conn, close_conn=True)
-            for pool_conn in self._in_use.values():
-                self._close(pool_conn.connection, close_conn=True)
-            self._connections = []
-            self._in_use = {}
+        for _, conn in self._connections:
+            self._close(conn, close_conn=True)
+        for pool_conn in self._in_use.values():
+            self._close(pool_conn.connection, close_conn=True)
+        self._connections = []
+        self._in_use = {}
 
 
 class PooledMySQLDatabase(PooledDatabase, MySQLDatabase):
     def _is_closed(self, conn):
+        if self.server_version[0] == 8:
+            args = ()
+        else:
+            args = (False,)
         try:
-            conn.ping(False)
+            conn.ping(*args)
         except:
             return True
         else:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/playhouse/reflection.py 
new/peewee-3.17.1/playhouse/reflection.py
--- old/peewee-3.17.0/playhouse/reflection.py   2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/playhouse/reflection.py   2024-02-05 16:02:22.000000000 
+0100
@@ -419,13 +419,13 @@
         'varchar': CharField,
     }
 
-    begin = '(?:["\[\(]+)?'
-    end = '(?:["\]\)]+)?'
+    begin = r'(?:["\[\(]+)?'
+    end = r'(?:["\]\)]+)?'
     re_foreign_key = (
-        '(?:FOREIGN KEY\s*)?'
-        '{begin}(.+?){end}\s+(?:.+\s+)?'
-        'references\s+{begin}(.+?){end}'
-        '\s*\(["|\[]?(.+?)["|\]]?\)').format(begin=begin, end=end)
+        r'(?:FOREIGN KEY\s*)?'
+        r'{begin}(.+?){end}\s+(?:.+\s+)?'
+        r'references\s+{begin}(.+?){end}'
+        r'\s*\(["|\[]?(.+?)["|\]]?\)').format(begin=begin, end=end)
     re_varchar = r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$'
 
     def _map_col(self, column_type):
@@ -435,7 +435,7 @@
         elif re.search(self.re_varchar, raw_column_type):
             field_class = CharField
         else:
-            column_type = re.sub('\(.+\)', '', raw_column_type)
+            column_type = re.sub(r'\(.+\)', '', raw_column_type)
             if column_type == '':
                 field_class = BareField
             else:
@@ -847,7 +847,7 @@
         sql = sql.replace(model._meta.database.param, '%s')
 
     # Format and indent the table declaration, simplest possible approach.
-    match_obj = re.match('^(.+?\()(.+)(\).*)', sql)
+    match_obj = re.match(r'^(.+?\()(.+)(\).*)', sql)
     create, columns, extra = match_obj.groups()
     indented = ',\n'.join('  %s' % column for column in columns.split(', '))
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/playhouse/shortcuts.py 
new/peewee-3.17.1/playhouse/shortcuts.py
--- old/peewee-3.17.0/playhouse/shortcuts.py    2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/playhouse/shortcuts.py    2024-02-05 16:02:22.000000000 
+0100
@@ -39,6 +39,7 @@
     should_skip = lambda n: (n in exclude) or (only and (n not in only))
 
     if fields_from_query is not None:
+        only.add('__sentinel__')  # Add a placeholder to make non-empty.
         for item in fields_from_query._returning:
             if isinstance(item, Field):
                 only.add(item)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/tests/base.py 
new/peewee-3.17.1/tests/base.py
--- old/peewee-3.17.0/tests/base.py     2023-10-13 17:45:33.000000000 +0200
+++ new/peewee-3.17.1/tests/base.py     2024-02-05 16:02:22.000000000 +0100
@@ -96,6 +96,7 @@
 IS_SQLITE_24 = IS_SQLITE and sqlite3.sqlite_version_info >= (3, 24)
 IS_SQLITE_25 = IS_SQLITE and sqlite3.sqlite_version_info >= (3, 25)
 IS_SQLITE_30 = IS_SQLITE and sqlite3.sqlite_version_info >= (3, 30)
+IS_SQLITE_35 = IS_SQLITE and sqlite3.sqlite_version_info >= (3, 35)
 IS_SQLITE_37 = IS_SQLITE and sqlite3.sqlite_version_info >= (3, 37)
 IS_SQLITE_9 = IS_SQLITE and sqlite3.sqlite_version_info >= (3, 9)
 IS_MYSQL_ADVANCED_FEATURES = False
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/tests/fields.py 
new/peewee-3.17.1/tests/fields.py
--- old/peewee-3.17.0/tests/fields.py   2023-10-13 17:45:33.000000000 +0200
+++ new/peewee-3.17.1/tests/fields.py   2024-02-05 16:02:22.000000000 +0100
@@ -678,6 +678,35 @@
         b.data.clear_bit(0)
         self.assertFalse(b.data.is_set(0))
 
+        # Out-of-bounds returns False and does not extend data.
+        self.assertFalse(b.data.is_set(1000))
+        self.assertTrue(len(b.data), 1)
+
+    def test_bigbit_item_methods(self):
+        b = Bits()
+        idxs = [0, 1, 4, 7, 8, 15, 16, 31, 32, 63]
+        for i in idxs:
+            b.data[i] = True
+        for i in range(64):
+            self.assertEqual(b.data[i], i in idxs)
+
+        data = list(b.data)
+        self.assertEqual(data, [1 if i in idxs else 0 for i in range(64)])
+
+        for i in range(64):
+            del b.data[i]
+        self.assertEqual(len(b.data), 8)
+        self.assertEqual(b.data._buffer, b'\x00' * 8)
+
+    def test_bigbit_set_clear(self):
+        b = Bits()
+        b.data = b'\x01'
+        for i in range(8):
+            self.assertEqual(b.data[i], i == 0)
+
+        b.data.clear()
+        self.assertEqual(len(b.data), 0)
+
     def test_bigbit_field(self):
         b = Bits.create()
         b.data.set_bit(1)
@@ -692,6 +721,36 @@
             else:
                 self.assertFalse(b_db.data.is_set(x))
 
+    def test_bigbit_field_bitwise(self):
+        b1 = Bits(data=b'\x11')
+        b2 = Bits(data=b'\x12')
+        b3 = Bits(data=b'\x99')
+        self.assertEqual(b1.data & b2.data, b'\x10')
+        self.assertEqual(b1.data | b2.data, b'\x13')
+        self.assertEqual(b1.data ^ b2.data, b'\x03')
+        self.assertEqual(b1.data & b3.data, b'\x11')
+        self.assertEqual(b1.data | b3.data, b'\x99')
+        self.assertEqual(b1.data ^ b3.data, b'\x88')
+
+        b1.data &= b2.data
+        self.assertEqual(b1.data._buffer, b'\x10')
+
+        b1.data |= b2.data
+        self.assertEqual(b1.data._buffer, b'\x12')
+
+        b1.data ^= b3.data
+        self.assertEqual(b1.data._buffer, b'\x8b')
+
+        b1.data = b'\x11'
+        self.assertEqual(b1.data & b'\xff\xff', b'\x11\x00')
+        self.assertEqual(b1.data | b'\xff\xff', b'\xff\xff')
+        self.assertEqual(b1.data ^ b'\xff\xff', b'\xee\xff')
+
+        b1.data = b'\x11\x11'
+        self.assertEqual(b1.data & b'\xff', b'\x11\x00')
+        self.assertEqual(b1.data | b'\xff', b'\xff\x11')
+        self.assertEqual(b1.data ^ b'\xff', b'\xee\x11')
+
     def test_bigbit_field_bulk_create(self):
         b1, b2, b3 = Bits(), Bits(), Bits()
         b1.data.set_bit(1)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/tests/migrations.py 
new/peewee-3.17.1/tests/migrations.py
--- old/peewee-3.17.0/tests/migrations.py       2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/tests/migrations.py       2024-02-05 16:02:22.000000000 
+0100
@@ -10,6 +10,7 @@
 from .base import IS_POSTGRESQL
 from .base import IS_SQLITE
 from .base import IS_SQLITE_25
+from .base import IS_SQLITE_35
 from .base import ModelTestCase
 from .base import TestModel
 from .base import db
@@ -261,7 +262,7 @@
             ('charlie',),
             ('huey',),])
 
-    @skip_unless(IS_SQLITE_25, 'Requires sqlite 3.25 or newer')
+    @skip_unless(IS_SQLITE_35, 'Requires sqlite 3.35 or newer')
     def test_drop_column_sqlite_legacy(self):
         self.test_drop_column(legacy=True)
 
@@ -330,6 +331,29 @@
     def test_rename_gh380_sqlite_legacy(self):
         self.test_rename_gh380(legacy=True)
 
+    def test_add_default_drop_default(self):
+        with self.database.transaction():
+            migrate(self.migrator.add_column_default('person', 'first_name',
+                                                     default='x'))
+
+        p = Person.create(last_name='Last')
+        p_db = Person.get(Person.last_name == 'Last')
+        self.assertEqual(p_db.first_name, 'x')
+
+        with self.database.transaction():
+            migrate(self.migrator.drop_column_default('person', 'first_name'))
+
+        if IS_MYSQL:
+            # MySQL, even though the column is NOT NULL, does not seem to be
+            # enforcing the constraint(?).
+            Person.create(last_name='Last2')
+            p_db = Person.get(Person.last_name == 'Last2')
+            self.assertEqual(p_db.first_name, '')
+        else:
+            with self.assertRaises(IntegrityError):
+                with self.database.transaction():
+                    Person.create(last_name='Last2')
+
     def test_add_not_null(self):
         self._create_people()
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/tests/shortcuts.py 
new/peewee-3.17.1/tests/shortcuts.py
--- old/peewee-3.17.0/tests/shortcuts.py        2023-10-13 17:45:33.000000000 
+0200
+++ new/peewee-3.17.1/tests/shortcuts.py        2024-02-05 16:02:22.000000000 
+0100
@@ -425,6 +425,18 @@
                              {'magic': 1337, 'content': 'u0-0',
                               'user': {'username': 'u0'}})
 
+    def test_fields_from_query_alias(self):
+        q = User.select(User.username.alias('name'))
+        res = q[0]
+        self.assertEqual(model_to_dict(res, fields_from_query=q),
+                         {'name': 'peewee'})
+
+        UA = User.alias()
+        q = UA.select(UA.username.alias('name'))
+        res = q[0]
+        self.assertEqual(model_to_dict(res, fields_from_query=q),
+                         {'name': 'peewee'})
+
     def test_only_backref(self):
         for i in range(3):
             Tweet.create(user=self.user, content=str(i))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/tests/sql.py 
new/peewee-3.17.1/tests/sql.py
--- old/peewee-3.17.0/tests/sql.py      2023-10-13 17:45:33.000000000 +0200
+++ new/peewee-3.17.1/tests/sql.py      2024-02-05 16:02:22.000000000 +0100
@@ -170,11 +170,11 @@
             'ORDER BY "t1"."x", "t1"."y"'), [])
 
     def test_star(self):
-        query = User.select(User.star)
+        query = User.select(User.__star__)
         self.assertSQL(query, ('SELECT "t1".* FROM "users" AS "t1"'), [])
 
         query = (Tweet
-                 .select(Tweet.star, User.star)
+                 .select(Tweet.__star__, User.__star__)
                  .join(User, on=(Tweet.c.user_id == User.c.id)))
         self.assertSQL(query, (
             'SELECT "t1".*, "t2".* '
@@ -182,7 +182,7 @@
             'INNER JOIN "users" AS "t2" ON ("t1"."user_id" = "t2"."id")'), [])
 
         query = (Tweet
-                 .select(Tweet.star, User.c.id)
+                 .select(Tweet.__star__, User.c.id)
                  .join(User, on=(Tweet.c.user_id == User.c.id)))
         self.assertSQL(query, (
             'SELECT "t1".*, "t2"."id" '
@@ -1386,7 +1386,7 @@
                 Register.value,
                 fn.SUM(Register.value).over(**over_kwargs))
             sql, params = __sql__(query)
-            match_obj = re.search('OVER \((.*?)\) FROM', sql)
+            match_obj = re.search(r'OVER \((.*?)\) FROM', sql)
             self.assertTrue(match_obj is not None)
             self.assertEqual(match_obj.groups()[0], expected)
             self.assertEqual(params, [])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/peewee-3.17.0/tests/sqlite.py 
new/peewee-3.17.1/tests/sqlite.py
--- old/peewee-3.17.0/tests/sqlite.py   2023-10-13 17:45:33.000000000 +0200
+++ new/peewee-3.17.1/tests/sqlite.py   2024-02-05 16:02:22.000000000 +0100
@@ -287,7 +287,7 @@
             'foo 123 45 bar 678 nuggie 9.0',
             ['123', '45', '678', '9', '0'])
         assertResults(
-            '[\w]+@[\w]+\.[\w]{2,3}',
+            r'[\w]+@[\w]+\.[\w]{2,3}',
             ('Dear char...@example.com, this is n...@baz.com. I am writing on '
              'behalf of zai...@foo.io. He dislikes your blog.'),
             ['char...@example.com', 'n...@baz.com', 'zai...@foo.io'])
@@ -314,7 +314,7 @@
                      messages)
         cur = self.execute('select posts.id, regex_search.rowid, 
regex_search.match '
                            'FROM posts, regex_search(?, posts.msg)',
-                           ('[\w]+@[\w]+\.\w{2,3}',))
+                           (r'[\w]+@[\w]+\.\w{2,3}',))
         results = cur.fetchall()
         self.assertEqual(results, [
             (1, 1, 'f...@example.fap'),
@@ -611,10 +611,10 @@
         self.assertRows((D.extract_json('$.k1') == '"v1"'), ['a', 'c'])
         self.assertRows((D.extract_text('k2') == 'v2'), ['b', 'c'])
         self.assertRows((D.extract_json('k2') == '"v2"'), ['b', 'c'])
-        self.assertRows((D.extract_text('x1.y1') == 'z1'), ['a', 'd'])
-        self.assertRows((D.extract_json('x1.y1') == '"z1"'), ['a', 'd'])
-        self.assertRows((D.extract_text('l1[1]') == 1), ['e'])
-        self.assertRows((D.extract_text('l2[1][1]') == 3), ['e'])
+        self.assertRows((D.extract_text('$.x1.y1') == 'z1'), ['a', 'd'])
+        self.assertRows((D.extract_json('$.x1.y1') == '"z1"'), ['a', 'd'])
+        self.assertRows((D.extract_text('$.l1[1]') == 1), ['e'])
+        self.assertRows((D.extract_text('$.l2[1][1]') == 3), ['e'])
         self.assertRows((D.extract_json('x1') == '{"y1":"z1"}'), ['a'])
 
     def test_extract_multiple(self):

Reply via email to