diff --git a/cognee/api/v1/cognify/cognify_v2.py b/cognee/api/v1/cognify/cognify_v2.py
index 497815ddd..616c710cb 100644
--- a/cognee/api/v1/cognify/cognify_v2.py
+++ b/cognee/api/v1/cognify/cognify_v2.py
@@ -89,18 +89,6 @@ async def cognify(datasets: Union[str, list[str]] = None, user: User = None):
     cognee_config = get_cognify_config()
     graph_config = get_graph_config()
     root_node_id = None
-    #
-    # if graph_config.infer_graph_topology and graph_config.graph_topology_task:
-    #     from cognee.modules.topology.topology import TopologyEngine
-    #     topology_engine = TopologyEngine(infer=graph_config.infer_graph_topology)
-    #     root_node_id = await topology_engine.add_graph_topology(files = data)
-    # elif graph_config.infer_graph_topology and not graph_config.infer_graph_topology:
-    #     from cognee.modules.topology.topology import TopologyEngine
-    #     topology_engine = TopologyEngine(infer=graph_config.infer_graph_topology)
-    #     await topology_engine.add_graph_topology(graph_config.topology_file_path)
-    # elif not graph_config.graph_topology_task:
-    #     root_node_id = "ROOT"
-
     tasks = [
         Task(document_to_ontology, root_node_id = root_node_id),
         Task(source_documents_to_chunks, parent_node_id = root_node_id), # Classify documents and save them as a nodes in graph db, extract text chunks based on the document type
diff --git a/cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py b/cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py
index 0bec5cd90..c52ad92cf 100644
--- a/cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py
+++ b/cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py
@@ -70,11 +70,13 @@ class SQLAlchemyAdapter():
     async def delete_table(self, table_name: str):
         async with self.engine.begin() as connection:
             await connection.execute(text(f"DROP TABLE IF EXISTS {table_name} CASCADE;"))
+            await connection.close()
 
     async def insert_data(self, schema_name: str, table_name: str, data: list[dict]):
         columns = ", ".join(data[0].keys())
         values = ", ".join([f"({', '.join([f':{key}' for key in row.keys()])})" for row in data])
         insert_query = text(f"INSERT INTO {schema_name}.{table_name} ({columns}) VALUES {values};")
+
        async with self.engine.begin() as connection:
            await connection.execute(insert_query, data)
            await connection.close()