fix: Fix chunk naive llm classifier
Fixed a UUID issue in the chunk naive LLM classifier and fixed deletion of data points in the LanceDB adapter. Fix #COG-472
This commit is contained in:
parent
50f5712f43
commit
3567e0d7e7
2 changed files with 5 additions and 3 deletions
@@ -164,7 +164,10 @@ class LanceDBAdapter(VectorDBInterface):
     async def delete_data_points(self, collection_name: str, data_point_ids: list[str]):
         connection = await self.get_connection()
         collection = await connection.open_table(collection_name)
-        results = await collection.delete(f"id IN {tuple(data_point_ids)}")
+        if len(data_point_ids) == 1:
+            results = await collection.delete(f"id = '{data_point_ids[0]}'")
+        else:
+            results = await collection.delete(f"id IN {tuple(data_point_ids)}")
         return results
 
     async def prune(self):
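For context on the LanceDB change: `collection.delete()` takes an SQL-like filter string, and Python's tuple formatting keeps a trailing comma for a single element, so the old `IN`-based filter was malformed for one-id deletes. A minimal sketch of that reasoning; `build_delete_filter` is a hypothetical helper for illustration, not part of the adapter:

```python
# Minimal sketch (not adapter code) of why one-id deletes need their own branch:
# Python's tuple repr keeps a trailing comma for a single element, so the old
# f"id IN {tuple(ids)}" filter is malformed when only one id is passed.

def build_delete_filter(data_point_ids: list[str]) -> str:
    # Hypothetical helper mirroring the adapter's new branching logic.
    if len(data_point_ids) == 1:
        return f"id = '{data_point_ids[0]}'"
    return f"id IN {tuple(data_point_ids)}"

print(f"id IN {tuple(['point-1'])}")        # id IN ('point-1',)  <- trailing comma
print(build_delete_filter(["point-1"]))     # id = 'point-1'
print(build_delete_filter(["p-1", "p-2"]))  # id IN ('p-1', 'p-2')
```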
@@ -21,7 +21,6 @@ async def chunk_naive_llm_classifier(data_chunks: list[DocumentChunk], classific
     for chunk_index, chunk in enumerate(data_chunks):
         chunk_classification = chunk_classifications[chunk_index]
-        classification_data_points.append(uuid5(NAMESPACE_OID, chunk_classification.label.type))
         classification_data_points.append(uuid5(NAMESPACE_OID, chunk_classification.label.type))
 
         for classification_subclass in chunk_classification.label.subclass:
             classification_data_points.append(uuid5(NAMESPACE_OID, classification_subclass.value))
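On the classifier change: `uuid5` is deterministic for a given namespace and name, so the duplicated append produced the same UUID twice and the later deduplication collapses it anyway; removing it just drops the redundant work. A small illustration (the label values below are made up):

```python
from uuid import NAMESPACE_OID, uuid5

# uuid5 is deterministic: the same namespace and name always produce the
# same UUID, so appending the same label twice adds nothing new.
label = "text_classification"  # illustrative label value, not from the repo
assert uuid5(NAMESPACE_OID, label) == uuid5(NAMESPACE_OID, label)

# Deduplicating with set() keeps one data point per distinct label.
points = [uuid5(NAMESPACE_OID, name) for name in ["a", "b", "a"]]
assert len(set(points)) == 2
```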
@@ -39,7 +38,7 @@ async def chunk_naive_llm_classifier(data_chunks: list[DocumentChunk], classific
     if await vector_engine.has_collection(collection_name):
         existing_data_points = await vector_engine.retrieve(
             collection_name,
-            list(set(classification_data_points)),
+            [str(classification_data) for classification_data in list(set(classification_data_points))],
         ) if len(classification_data_points) > 0 else []
 
         existing_points_map = {point.id: True for point in existing_data_points}
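On the retrieve change: the deduplicated `uuid5` values are UUID objects, and the call site now stringifies them before handing them to the vector engine, presumably because points are looked up by string id. A rough sketch of the pattern, assuming a `retrieve(collection_name, ids)` signature as shown in the diff (label values are illustrative):

```python
from uuid import NAMESPACE_OID, uuid5

classification_data_points = [
    uuid5(NAMESPACE_OID, name) for name in ["label_a", "label_b", "label_a"]
]

# Deduplicate, then stringify the UUIDs before querying the vector engine,
# mirroring the updated call site in chunk_naive_llm_classifier.
ids_to_fetch = [str(point_id) for point_id in set(classification_data_points)]
# e.g. existing_data_points = await vector_engine.retrieve(collection_name, ids_to_fetch)
```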