fix: Resolve issue with table names in SQL commands (#13)
Some SQL commands require lowercase table names unless the table name is wrapped in quotes. Renamed all new tables to use lowercase names. Fixes COG-677.
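For context, a minimal sketch of the naming issue and the new convention, assuming a backend such as PostgreSQL that folds unquoted identifiers to lowercase (so a mixed-case name like DocumentChunk is only reachable when double-quoted). The classes below are trimmed-down illustrations of the models touched in this diff, not the full cognee implementations.

# Illustrative sketch: declaring an explicit lowercase __tablename__ means the
# generated SQL never needs quoted identifiers, and names derived from it stay
# lowercase as well.
from uuid import UUID, uuid4
from pydantic import BaseModel, Field

class DataPoint(BaseModel):
    __tablename__ = "data_point"  # lowercase, safe to use unquoted in SQL

    id: UUID = Field(default_factory = uuid4)

class DocumentChunk(DataPoint):
    __tablename__ = "document_chunk"

    text: str

# Vector collection names derived from the table name replace the old
# class-name-based ones, e.g. "document_chunk_text" instead of "DocumentChunk_text"
# (matching the search calls updated elsewhere in this diff).
print(f"{DocumentChunk.__tablename__}_text")  # -> document_chunk_text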
commit acd88e83e2
15 changed files with 19 additions and 14 deletions
@@ -8,6 +8,7 @@ class MetaData(TypedDict):
     index_fields: list[str]
 
 class DataPoint(BaseModel):
+    __tablename__ = "data_point"
     id: UUID = Field(default_factory = uuid4)
     updated_at: Optional[datetime] = datetime.now(timezone.utc)
     _metadata: Optional[MetaData] = {

@@ -3,6 +3,7 @@ from cognee.infrastructure.engine import DataPoint
 from cognee.modules.data.processing.document_types import Document
 
 class DocumentChunk(DataPoint):
+    __tablename__ = "document_chunk"
     text: str
     word_count: int
     chunk_index: int

@@ -3,6 +3,7 @@ from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
 from .EntityType import EntityType
 
 class Entity(DataPoint):
+    __tablename__ = "entity"
     name: str
     is_a: EntityType
     description: str

@@ -2,6 +2,7 @@ from cognee.infrastructure.engine import DataPoint
 from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
 
 class EntityType(DataPoint):
+    __tablename__ = "entity_type"
     name: str
     type: str
     description: str

@@ -10,7 +10,7 @@ async def query_chunks(query: str) -> list[dict]:
     """
     vector_engine = get_vector_engine()
 
-    found_chunks = await vector_engine.search("DocumentChunk_text", query, limit = 5)
+    found_chunks = await vector_engine.search("document_chunk_text", query, limit = 5)
 
     chunks = [result.payload for result in found_chunks]
 

@@ -27,8 +27,8 @@ async def query_graph_connections(query: str, exploration_levels = 1) -> list[(s
     else:
         vector_engine = get_vector_engine()
         results = await asyncio.gather(
-            vector_engine.search("Entity_name", query_text = query, limit = 5),
-            vector_engine.search("EntityType_name", query_text = query, limit = 5),
+            vector_engine.search("entity_name", query_text = query, limit = 5),
+            vector_engine.search("entity_type_name", query_text = query, limit = 5),
         )
         results = [*results[0], *results[1]]
         relevant_results = [result for result in results if result.score < 0.5][:5]

@@ -16,10 +16,10 @@ async def index_data_points(data_points: list[DataPoint]):
         data_point_type = type(data_point)
 
         for field_name in data_point._metadata["index_fields"]:
-            index_name = f"{data_point_type.__name__}.{field_name}"
+            index_name = f"{data_point_type.__tablename__}.{field_name}"
 
             if index_name not in created_indexes:
-                await vector_engine.create_vector_index(data_point_type.__name__, field_name)
+                await vector_engine.create_vector_index(data_point_type.__tablename__, field_name)
                 created_indexes[index_name] = True
 
             if index_name not in index_points:

@@ -3,6 +3,7 @@ from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
 from cognee.modules.data.processing.document_types import Document
 
 class TextSummary(DataPoint):
+    __tablename__ = "text_summary"
     text: str
     made_from: DocumentChunk
 

@@ -10,7 +10,7 @@ async def query_summaries(query: str) -> list:
     """
     vector_engine = get_vector_engine()
 
-    summaries_results = await vector_engine.search("TextSummary_text", query, limit = 5)
+    summaries_results = await vector_engine.search("text_summary_text", query, limit = 5)
 
     summaries = [summary.payload for summary in summaries_results]
 

@@ -32,7 +32,7 @@ async def main():
 
     from cognee.infrastructure.databases.vector import get_vector_engine
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "AI"))[0]
+    random_node = (await vector_engine.search("entity_name", "AI"))[0]
     random_node_name = random_node.payload["text"]
 
     search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)

@@ -36,7 +36,7 @@ async def main():
 
     from cognee.infrastructure.databases.vector import get_vector_engine
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
+    random_node = (await vector_engine.search("entity_name", "Quantum computer"))[0]
     random_node_name = random_node.payload["text"]
 
     search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)

@@ -65,7 +65,7 @@ async def main():
     from cognee.infrastructure.databases.vector import get_vector_engine
 
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
+    random_node = (await vector_engine.search("entity_name", "Quantum computer"))[0]
     random_node_name = random_node.payload["text"]
 
     search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)

@@ -37,7 +37,7 @@ async def main():
 
     from cognee.infrastructure.databases.vector import get_vector_engine
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
+    random_node = (await vector_engine.search("entity_name", "Quantum computer"))[0]
     random_node_name = random_node.payload["text"]
 
     search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)

@@ -35,7 +35,7 @@ async def main():
 
     from cognee.infrastructure.databases.vector import get_vector_engine
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
+    random_node = (await vector_engine.search("entity_name", "Quantum computer"))[0]
     random_node_name = random_node.payload["text"]
 
     search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)

@@ -758,7 +758,7 @@
     "from cognee.infrastructure.databases.vector import get_vector_engine\n",
     "\n",
     "vector_engine = get_vector_engine()\n",
-    "results = await search(vector_engine, \"Entity_name\", \"sarah.nguyen@example.com\")\n",
+    "results = await search(vector_engine, \"entity_name\", \"sarah.nguyen@example.com\")\n",
     "for result in results:\n",
     "    print(result)"
    ]

@@ -788,7 +788,7 @@
   "source": [
    "from cognee.api.v1.search import SearchType\n",
    "\n",
-   "node = (await vector_engine.search(\"Entity_name\", \"sarah.nguyen@example.com\"))[0]\n",
+   "node = (await vector_engine.search(\"entity_name\", \"sarah.nguyen@example.com\"))[0]\n",
    "node_name = node.payload[\"text\"]\n",
    "\n",
    "search_results = await cognee.search(SearchType.SUMMARIES, query_text = node_name)\n",

@@ -881,7 +881,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.11.8"
+   "version": "3.9.6"
   }
  },
  "nbformat": 4,