feat: Add cascade deletion for datasets and data

Added cascade deletion so that when a dataset or a data entry is deleted, the corresponding rows in the dataset_data association table are also deleted.

Feature #COG-455
This commit is contained in:
Igor Ilic 2024-11-01 12:55:20 +01:00
parent eca1b9fddd
commit de1ba5cd7c
4 changed files with 22 additions and 6 deletions

View file

@@ -89,10 +89,22 @@ class SQLAlchemyAdapter():
        """
        Delete data in given table based on id. Table must have an id Column.
        """
        if self.engine.dialect.name == "sqlite":
            async with self.get_async_session() as session:
                TableModel = await self.get_table(table_name, schema_name)
                # Foreign key constraints are disabled by default in SQLite (for backwards compatibility),
                # so must be enabled for each database connection/session separately.
                await session.execute(text("PRAGMA foreign_keys = ON;"))
                await session.execute(TableModel.delete().where(TableModel.c.id == data_id))
                await session.commit()
        else:
            async with self.get_async_session() as session:
                TableModel = await self.get_table(table_name, schema_name)
                await session.execute(TableModel.delete().where(TableModel.c.id == data_id))
                await session.commit()

    async def get_table(self, table_name: str, schema_name: Optional[str] = "public") -> Table:
        """

View file

@@ -20,9 +20,11 @@ class Data(Base):
    updated_at = Column(DateTime(timezone = True), onupdate = lambda: datetime.now(timezone.utc))

    datasets: Mapped[List["Dataset"]] = relationship(
        "Dataset",
        secondary = DatasetData.__tablename__,
        back_populates = "data",
        lazy = "noload",
        cascade="all, delete"
    )

    def to_json(self) -> dict:

View file

@@ -19,9 +19,11 @@ class Dataset(Base):
    owner_id = Column(UUID, index = True)

    data: Mapped[List["Data"]] = relationship(
        "Data",
        secondary = DatasetData.__tablename__,
        back_populates = "datasets",
        lazy = "noload",
        cascade="all, delete"
    )

    def to_json(self) -> dict:

View file

@@ -7,5 +7,5 @@ class DatasetData(Base):
    created_at = Column(DateTime(timezone = True), default = lambda: datetime.now(timezone.utc))

    dataset_id = Column(UUID, ForeignKey("datasets.id", ondelete="CASCADE"), primary_key = True)
    data_id = Column(UUID, ForeignKey("data.id", ondelete="CASCADE"), primary_key = True)