Hi,

We were having a similar problem with history_meta, so we created
history_meta_date (.py file attached).  In our version we use timestamps
rather than sequence numbers to track history versions, so we don't need to
worry about duplicate-key problems.  The "version_date" timestamps still keep
versions sortable, but are far less likely to have the same value issued to
separate processes.
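
The essential difference is the versioning column that forms part of the
history table's composite primary key.  Roughly (only the version_date line
below is lifted from the attached file; the stock line is shown for
comparison):

from datetime import datetime
from sqlalchemy import Column, DateTime, Integer

# stock history_meta: an integer sequence is part of the history PK, so two
# concurrent transactions that both see version N will both try to write a
# history row with version N and collide
version = Column('version', Integer, primary_key=True)

# history_meta_date: a timestamp is part of the history PK instead; it still
# sorts chronologically, and separate processes are far less likely to be
# handed exactly the same utcnow() value
version_date = Column('version_date', DateTime, default=datetime.utcnow, primary_key=True)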

Disclaimer: the attached file was developed in a hurry to solve an urgent
problem.  It has performed well for us, but it probably hasn't gone through
the testing rigour this group might expect.  Feel free to use/adapt it as
necessary, and if you have the time or inclination, post a more rigorous
version for consideration as a formal recipe.

Cheers

---------- Forwarded message ----------
From: HP3 <henddher.pedr...@gmail.com>
Date: 5 December 2014 at 10:36
Subject: [sqlalchemy] history_meta.py IntegrityError: (IntegrityError)
duplicate key value violates unique constraint "p2docs_history_pkey"
To: sqlalchemy@googlegroups.com


Hello all,

We are facing a problem when using the history_meta.py recipe.

It seems that two concurrent transactions read the same (id, version) tuple
at "#1", and then each one tries to insert a new row into the pbases_history
table with the same primary-key (id, version) combination (see #2).
By removing #2 the issue goes away, but of course there is no history kept
anywhere.
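
To make the race concrete, here is a minimal sketch of the failure (the
model, DSN and row values are illustrative, not our real schema; both
sessions point at the same Postgres database):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from history_meta import Versioned, versioned_session   # stock recipe

Base = declarative_base()

class P2Doc(Base, Versioned):
    __tablename__ = 'p2docs'
    id = Column(Integer, primary_key=True)
    title = Column(String(100))

engine = create_engine('postgresql://localhost/test')   # placeholder DSN
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
versioned_session(Session)

# assume a row with id=1 already exists and is currently at version 3
session_a, session_b = Session(), Session()
obj_a = session_a.query(P2Doc).get(1)   # reads version 3
obj_b = session_b.query(P2Doc).get(1)   # also reads version 3

obj_a.title = 'foo'
obj_b.title = 'bar'

session_a.commit()   # inserts (id=1, version=3) into p2docs_history
session_b.commit()   # tries to insert (id=1, version=3) as well ->
                     # IntegrityError: duplicate key value violates unique
                     # constraint "p2docs_history_pkey"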

from datetime import datetime

from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import mapper, class_mapper, attributes, object_mapper
from sqlalchemy.orm.exc import UnmappedClassError, UnmappedColumnError
from sqlalchemy import Table, Column, ForeignKeyConstraint, Integer
from sqlalchemy import event
from sqlalchemy.orm.properties import RelationshipProperty
from sqlalchemy.types import DateTime

def col_references_table(col, table):
    for fk in col.foreign_keys:
        if fk.references(table):
            return True
    return False

def _date_history_mapper(local_mapper):
    cls = local_mapper.class_

    # set the "active_history" flag
    # on column-mapped attributes so that the old version
    # of the info is always loaded (currently sets it on all attributes)
    for prop in local_mapper.iterate_properties:
        getattr(local_mapper.class_, prop.key).impl.active_history = True

    super_mapper = local_mapper.inherits
    super_date_history_mapper = getattr(cls, '__date_history_mapper__', None)

    polymorphic_on = None
    super_fks = []
    if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
        cols = []
        for column in local_mapper.local_table.c:
            if column.name in ['version_date']:
                continue

            col = column.copy()
            col.unique = False

            if super_mapper and col_references_table(column, super_mapper.local_table):
                super_fks.append((col.key, list(super_date_history_mapper.local_table.primary_key)[0]))

            cols.append(col)

            if column is local_mapper.polymorphic_on:
                polymorphic_on = col

        if super_mapper:
            super_fks.append(('version_date', super_date_history_mapper.base_mapper.local_table.c.version_date))
            cols.append(Column('version_date', DateTime, default=datetime.utcnow, primary_key=True))
        else:
            cols.append(Column('version_date', DateTime, default=datetime.utcnow, primary_key=True))

        if super_fks:
            cols.append(ForeignKeyConstraint(*zip(*super_fks)))

        table = Table(
            local_mapper.local_table.name + '_history_date',
            local_mapper.local_table.metadata,
            *cols
        )
    else:
        # single table inheritance.  take any additional columns that may have
        # been added and add them to the history table.
        for column in local_mapper.local_table.c:
            if column.key not in super_date_history_mapper.local_table.c:
                col = column.copy()
                col.unique = False
                super_date_history_mapper.local_table.append_column(col)
        table = None

    if super_date_history_mapper:
        bases = (super_date_history_mapper.class_,)
    else:
        bases = local_mapper.base_mapper.class_.__bases__
    versioned_cls = type.__new__(type, "%sHistoryDate" % cls.__name__, bases, {})

    m = mapper(
            versioned_cls,
            table,
            inherits=super_date_history_mapper,
            polymorphic_on=polymorphic_on,
            polymorphic_identity=local_mapper.polymorphic_identity
            )
    cls.__date_history_mapper__ = m

    if not super_date_history_mapper:
        local_mapper.local_table.append_column(
            Column('version_date', DateTime, default=datetime.utcnow, nullable=False)
        )
        local_mapper.add_property("version_date", local_mapper.local_table.c.version_date)


class VersionedDate(object):
    @declared_attr
    def __mapper_cls__(cls):
        def map(cls, *arg, **kw):
            mp = mapper(cls, *arg, **kw)
            _date_history_mapper(mp)
            return mp
        return map


def versioned_objects(iter):
    for obj in iter:
        if hasattr(obj, '__date_history_mapper__'):
            yield obj

def create_version(obj, session, deleted=False):
    obj_mapper = object_mapper(obj)
    date_history_mapper = obj.__date_history_mapper__
    history_cls = date_history_mapper.class_

    obj_state = attributes.instance_state(obj)

    attr = {}

    obj_changed = False

    for om, hm in zip(obj_mapper.iterate_to_root(), date_history_mapper.iterate_to_root()):
        if hm.single:
            continue

        for hist_col in hm.local_table.c:
            if hist_col.key == 'version_date':
                continue

            obj_col = om.local_table.c[hist_col.key]

            # get the value of the
            # attribute based on the MapperProperty related to the
            # mapped column.  this will allow usage of MapperProperties
            # that have a different keyname than that of the mapped column.
            try:
                prop = obj_mapper.get_property_by_column(obj_col)
            except UnmappedColumnError:
                # in the case of single table inheritance, there may be
                # columns on the mapped table intended for the subclass only.
                # the "unmapped" status of the subclass column on the
                # base class is a feature of the declarative module as of sqla 0.5.2.
                continue

            # expired object attributes and also deferred cols might not be in the
            # dict.  force it to load no matter what by using getattr().
            if prop.key not in obj_state.dict:
                getattr(obj, prop.key)

            a, u, d = attributes.get_history(obj, prop.key)

            if d:
                attr[hist_col.key] = d[0]
                obj_changed = True
            elif u:
                attr[hist_col.key] = u[0]
            else:
                # if the attribute had no value.
                attr[hist_col.key] = a[0]
                obj_changed = True

    if not obj_changed:
        # not changed, but we have relationships.  OK
        # check those too
        for prop in obj_mapper.iterate_properties:
            if isinstance(prop, RelationshipProperty) and \
                attributes.get_history(obj, prop.key).has_changes():
                obj_changed = True
                break

    if not obj_changed and not deleted:
        return

    attr['version_date'] = obj.version_date
    hist = history_cls()
    for key, value in attr.items():
        if key == 'version_date':
            hist.version_date = datetime.utcnow()
        else:
            setattr(hist, key, value)
    session.add(hist)
    obj.version_date = datetime.utcnow()

def versioned_session(session):
    @event.listens_for(session, 'before_flush')
    def before_flush(session, flush_context, instances):
        for obj in versioned_objects(session.dirty):
            create_version(obj, session)
        for obj in versioned_objects(session.deleted):
            create_version(obj, session, deleted=True)
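
For anyone who wants to try it, a minimal usage sketch written against the
SQLAlchemy releases current at the time (the class, table and file names here
are illustrative, not part of the attached module):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

# assuming the attached file has been saved as history_meta_date.py
from history_meta_date import VersionedDate, versioned_session

Base = declarative_base()

class Document(Base, VersionedDate):
    __tablename__ = 'documents'
    id = Column(Integer, primary_key=True)
    name = Column(String(100))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)      # also creates documents_history_date

Session = sessionmaker(bind=engine)
versioned_session(Session)            # attach the before_flush listener

session = Session()
doc = Document(name='first draft')
session.add(doc)
session.commit()                      # no history row yet; version_date filled by its default

doc.name = 'second draft'
session.commit()                      # documents_history_date gets a row carrying the
                                      # old column values; doc.version_date is re-stamped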
