I'm working with SQLAlchemy and Postgres and I have a polymorphic model whose subclasses have a field with the same name. To allow this field to co-exist with the others and not cause any name conflicts, I'm using the `declared_attr` decorator from SQLAlchemy.
This solution works well for fields consisting of primary data types, however when I try to use Postgres's HSTORE to store dictionary values, SQLAlchemy complains with:
My guess is that this happens because `declared_attr` has constraints on which data types its fields can have.
Here's an example of how my models look:
import enum

import sqlalchemy as sa
# The dialect package is "postgresql" (the old "postgres" alias was removed),
# and the mutation-tracking extension lives in "sqlalchemy.ext.mutable"
# (there is no "sqlalchemy.ext.mutables" module).
from sqlalchemy.dialects.postgresql import ARRAY, HSTORE
from sqlalchemy.ext.mutable import MutableDict
# Discriminator values for the polymorphic hierarchy; each member's value
# doubles as its name so it round-trips cleanly through the database.
ChildType = enum.Enum(
    "ChildType",
    {name: name for name in ("sub_1", "sub_2", "sub_3")},
)
class ParentModel(sa.orm.declarative_base()):
    """Polymorphic base mapped to the ``parent`` table.

    Subclasses share this single table and are discriminated by ``r_type``.
    """

    # The table name is declared via __tablename__; assigning a string to
    # __table__ is an error (__table__ expects a Table object).
    __tablename__ = 'parent'

    # The mapper requires a primary key on the table.
    id = sa.Column(sa.Integer, primary_key=True)
    general_field = sa.Column(sa.String)
    r_type = sa.Column(sa.Enum(ChildType))

    __mapper_args__ = {
        'polymorphic_identity': 'parent',
        # Must reference the actual discriminator column; "resource_type"
        # was an undefined name.
        'polymorphic_on': r_type,
    }
class Sub1(ParentModel):
    """Variant whose ``child_value`` is an integer (single-table inheritance)."""

    # declared_attr is not exported at the sqlalchemy top level; it lives in
    # sqlalchemy.orm.
    @sa.orm.declared_attr
    def child_value(cls):
        # If a sibling subclass already added the column to the shared parent
        # table, reuse it instead of creating a conflicting duplicate.
        return ParentModel.__table__.c.get(
            'child_value', sa.Column(sa.Integer, nullable=True)
        )

    __mapper_args__ = {
        'polymorphic_identity': ChildType.sub_1,
    }
class Sub2(ParentModel):
    """Variant whose ``child_value`` is a boolean (single-table inheritance)."""

    # declared_attr is not exported at the sqlalchemy top level; it lives in
    # sqlalchemy.orm.
    @sa.orm.declared_attr
    def child_value(cls):
        # If a sibling subclass already added the column to the shared parent
        # table, reuse it instead of creating a conflicting duplicate.
        return ParentModel.__table__.c.get(
            'child_value', sa.Column(sa.Boolean, nullable=True)
        )

    __mapper_args__ = {
        'polymorphic_identity': ChildType.sub_2,
    }
class Sub3(ParentModel):
    """Variant whose ``child_value`` stores a dictionary as Postgres HSTORE."""

    # declared_attr is not exported at the sqlalchemy top level; it lives in
    # sqlalchemy.orm.
    @sa.orm.declared_attr
    def child_value(cls):
        # MutableDict.as_mutable must wrap the HSTORE type directly; nesting
        # it inside ARRAY(...) is what SQLAlchemy rejects, and change tracking
        # would never fire for elements of an ARRAY anyway. If a *list* of
        # hstores is truly needed, use MutableList.as_mutable(ARRAY(HSTORE))
        # from sqlalchemy.ext.mutable instead.
        return ParentModel.__table__.c.get(
            'child_value',
            sa.Column(MutableDict.as_mutable(HSTORE), nullable=True),
        )

    __mapper_args__ = {
        'polymorphic_identity': ChildType.sub_3,
    }