I'm glad you're still poking at this, because I'm still not seeing anything that 
would trigger this warning.  Here is that exact code in a test case; I can't get 
it to produce the warning:

from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class A(Base):
    __tablename__ = 'a'

    id = Column(Integer, primary_key=True)
    _serials = relationship("MetaSerial")

    def serials(self):
        requiredAmountOfSerials = 10

        if requiredAmountOfSerials > len(self._serials):
            a_len = len(self._serials)
            for a_ctr in range(abs(requiredAmountOfSerials) - a_len):
                self._serials.append(MetaSerial())

            session.add(self)  # MVO? is this needed?
            session.flush()

        return self._serials

class MetaSerial(Base):
    __tablename__ = 'b'

    id = Column(Integer, primary_key=True)
    a_id = Column(Integer, ForeignKey('a.id'))

e = create_engine("sqlite://", echo=True)
Base.metadata.create_all(e)

session = Session(e)

a1 = A()

# comment out or not, no warning
session.add(a1)
# same
session.flush()

a1.serials()
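
For what it's worth, the usual way I know of to get that warning is when the 
collection is replaced or otherwise de-associated from its parent while something 
still holds a reference to the old collection object; mutating that stale reference 
is what warns.  Here's a rough sketch of that scenario, appended to the bottom of 
the script above (the stale-reference part is an assumption about what might be 
going on in your app, not something your traceback shows):

# hold onto a plain reference to the current collection
stale = a1._serials

# replacing the collection on the parent should invalidate the old one
a1._serials = []

# mutating the stale reference is then expected to emit
# "SAWarning: This collection has been invalidated."
stale.append(MetaSerial())

If nothing in your serials() path replaces or reloads self._serials between the 
len() call and the append(), that sketch probably isn't your case, and a 
self-contained script that reproduces the warning would still be the fastest way 
forward.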



On Oct 13, 2013, at 12:00 PM, Marc Van Olmen <marcvanol...@gmail.com> wrote:

> I did some more digging today to see if I can pin down the exact reason:
> I tested with a unit test and reduced the code above to the bare minimum, no 
> longer keeping references to collections.
> 
>     def serials(self):
> 
>         requiredAmountOfSerials = self.quantity
> 
>         if requiredAmountOfSerials > len(self._serials):
>             a_len = len(self._serials)
>             for a_ctr in range(abs(requiredAmountOfSerials) - a_len):
>                 self._serials.append( MetaSerial(value=u''))
> 
>             Session.add(self)  # MVO? is this needed?
>             Session.flush()
> 
>         return self._serials
> 
> The above code gives a warning the first time you try to add something to the 
> self._serials collection (i.e. self._serials has no items yet).
> 
> ----------------------------------------------------------------------
> Traceback (most recent call last):
>   File 
> "/Users/marcvano/dev/Acclivity/checkout4_env/Checkout/tests/models/serial_test.py",
>  line 32, in testMakeRequest
>     self.assertEqual(len(op1.serials()), op1.allocated)
>   File 
> "/Users/marcvano/dev/Acclivity/checkout4_env/Checkout/app/db/request.py", 
> line 2161, in serials
>     self._serials.append(MetaSerial(value=u''))
>   File "build/bdist.macosx-10.6-intel/egg/sqlalchemy/orm/collections.py", 
> line 1057, in append
>     item = __set(self, item, _sa_initiator)
>   File "build/bdist.macosx-10.6-intel/egg/sqlalchemy/orm/collections.py", 
> line 1029, in __set
>     item = getattr(executor, 'fire_append_event')(item, _sa_initiator)
>   File "build/bdist.macosx-10.6-intel/egg/sqlalchemy/orm/collections.py", 
> line 729, in fire_append_event
>     self._warn_invalidated()
>   File "build/bdist.macosx-10.6-intel/egg/sqlalchemy/orm/collections.py", 
> line 600, in _warn_invalidated
>     util.warn("This collection has been invalidated.")
>   File "build/bdist.macosx-10.6-intel/egg/sqlalchemy/util/langhelpers.py", 
> line 1036, in warn
>     warnings.warn(msg, exc.SAWarning, stacklevel=stacklevel)
> SAWarning: This collection has been invalidated.
> 
> Changing the above code to:
> 
>             for a_ctr in range(abs(requiredAmountOfSerials) - a_len):
>                 a_serial = MetaSerial(value=u'')
>                 self._serials.append(a_serial)
> 
> produces no warnings.
> 
> 
> 
> 
