Compare commits: main...testing/br (2 commits: 54a0791d7c, 2c2f3ce453)

5 changed files with 3477 additions and 853 deletions

@@ -0,0 +1,359 @@
import asyncio
from os import path
import lancedb
from pydantic import BaseModel
from lancedb.pydantic import LanceModel, Vector
from typing import Generic, List, Optional, TypeVar, Union, get_args, get_origin, get_type_hints

from cognee.infrastructure.databases.exceptions import MissingQueryParameterError
from cognee.infrastructure.engine import DataPoint
from cognee.infrastructure.engine.utils import parse_id
from cognee.infrastructure.files.storage import get_file_storage
from cognee.modules.storage.utils import copy_model, get_own_properties
from cognee.infrastructure.databases.vector.exceptions import CollectionNotFoundError

from ..embeddings.EmbeddingEngine import EmbeddingEngine
from ..models.ScoredResult import ScoredResult
from ..utils import normalize_distances
from ..vector_db_interface import VectorDBInterface


class IndexSchema(DataPoint):
    """
    Represents a schema for an index data point containing an ID and text.

    Attributes:

    - id: A string representing the unique identifier for the data point.
    - text: A string representing the content of the data point.
    - metadata: A dictionary with default index fields for the schema, currently configured
      to include 'text'.
    """

    id: str
    text: str

    metadata: dict = {"index_fields": ["text"]}

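# Note: IndexSchema above is the payload type used by create_vector_index() and
# index_data_points() further down; each vector index collection stores rows built as
# IndexSchema(id=str(data_point.id), text=<embeddable text>).
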
class LanceDBAdapter(VectorDBInterface):
    name = "LanceDB"
    url: str
    api_key: str
    connection: lancedb.AsyncConnection = None

    def __init__(
        self,
        url: Optional[str],
        api_key: Optional[str],
        embedding_engine: EmbeddingEngine,
    ):
        self.url = url
        self.api_key = api_key
        self.embedding_engine = embedding_engine
        self.VECTOR_DB_LOCK = asyncio.Lock()

    async def get_connection(self):
        """
        Establishes and returns a connection to the LanceDB.

        If a connection already exists, it will return the existing connection.

        Returns:
        --------

        - lancedb.AsyncConnection: An active connection to the LanceDB.
        """
        if self.connection is None:
            self.connection = await lancedb.connect_async(self.url, api_key=self.api_key)

        return self.connection

    async def embed_data(self, data: list[str]) -> list[list[float]]:
        """
        Embeds the provided textual data into vector representation.

        Uses the embedding engine to convert the list of strings into a list of float vectors.

        Parameters:
        -----------

        - data (list[str]): A list of strings representing the data to be embedded.

        Returns:
        --------

        - list[list[float]]: A list of embedded vectors corresponding to the input data.
        """
        return await self.embedding_engine.embed_text(data)

    async def has_collection(self, collection_name: str) -> bool:
        """
        Checks if the specified collection exists in the LanceDB.

        Returns True if the collection is present, otherwise False.

        Parameters:
        -----------

        - collection_name (str): The name of the collection to check.

        Returns:
        --------

        - bool: True if the collection exists, otherwise False.
        """
        connection = await self.get_connection()
        collection_names = await connection.table_names()
        return collection_name in collection_names

    async def create_collection(self, collection_name: str, payload_schema: BaseModel):
        vector_size = self.embedding_engine.get_vector_size()

        payload_schema = self.get_data_point_schema(payload_schema)
        data_point_types = get_type_hints(payload_schema)

        class LanceDataPoint(LanceModel):
            """
            Represents a data point in the Lance model with an ID, vector, and associated payload.

            The class inherits from LanceModel and defines the following public attributes:
            - id: A unique identifier for the data point.
            - vector: A vector representing the data point in a specified dimensional space.
            - payload: Additional data or metadata associated with the data point.
            """

            id: data_point_types["id"]
            vector: Vector(vector_size)
            payload: payload_schema

        if not await self.has_collection(collection_name):
            async with self.VECTOR_DB_LOCK:
                if not await self.has_collection(collection_name):
                    connection = await self.get_connection()
                    return await connection.create_table(
                        name=collection_name,
                        schema=LanceDataPoint,
                        exist_ok=True,
                    )

    async def get_collection(self, collection_name: str):
        if not await self.has_collection(collection_name):
            raise CollectionNotFoundError(f"Collection '{collection_name}' not found!")

        connection = await self.get_connection()
        return await connection.open_table(collection_name)

    async def create_data_points(self, collection_name: str, data_points: list[DataPoint]):
        payload_schema = type(data_points[0])

        if not await self.has_collection(collection_name):
            async with self.VECTOR_DB_LOCK:
                if not await self.has_collection(collection_name):
                    await self.create_collection(
                        collection_name,
                        payload_schema,
                    )

        collection = await self.get_collection(collection_name)

        data_vectors = await self.embed_data(
            [DataPoint.get_embeddable_data(data_point) for data_point in data_points]
        )

        IdType = TypeVar("IdType")
        PayloadSchema = TypeVar("PayloadSchema")
        vector_size = self.embedding_engine.get_vector_size()

        class LanceDataPoint(LanceModel, Generic[IdType, PayloadSchema]):
            """
            Represents a data point in the Lance model with an ID, vector, and payload.

            This class encapsulates a data point consisting of an identifier, a vector representing
            the data, and an associated payload, allowing for operations and manipulations specific
            to the Lance data structure.
            """

            id: IdType
            vector: Vector(vector_size)
            payload: PayloadSchema

        def create_lance_data_point(data_point: DataPoint, vector: list[float]) -> LanceDataPoint:
            properties = get_own_properties(data_point)
            properties["id"] = str(properties["id"])

            return LanceDataPoint[str, self.get_data_point_schema(type(data_point))](
                id=str(data_point.id),
                vector=vector,
                payload=properties,
            )

        lance_data_points = [
            create_lance_data_point(data_point, data_vectors[data_point_index])
            for (data_point_index, data_point) in enumerate(data_points)
        ]

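        # merge_insert("id") below is LanceDB's upsert path: rows whose "id" already exists
        # are updated in place and new ids are inserted, so re-running create_data_points
        # for the same data points does not create duplicates.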
        async with self.VECTOR_DB_LOCK:
            await (
                collection.merge_insert("id")
                .when_matched_update_all()
                .when_not_matched_insert_all()
                .execute(lance_data_points)
            )

    async def retrieve(self, collection_name: str, data_point_ids: list[str]):
        collection = await self.get_collection(collection_name)

        if len(data_point_ids) == 1:
            results = await collection.query().where(f"id = '{data_point_ids[0]}'").to_pandas()
        else:
            results = await collection.query().where(f"id IN {tuple(data_point_ids)}").to_pandas()

        return [
            ScoredResult(
                id=parse_id(result["id"]),
                payload=result["payload"],
                score=0,
            )
            for result in results.to_dict("index").values()
        ]

    async def search(
        self,
        collection_name: str,
        query_text: str = None,
        query_vector: List[float] = None,
        limit: int = 15,
        with_vector: bool = False,
        normalized: bool = True,
    ):
        if query_text is None and query_vector is None:
            raise MissingQueryParameterError()

        if query_text and not query_vector:
            query_vector = (await self.embedding_engine.embed_text([query_text]))[0]

        collection = await self.get_collection(collection_name)

        if limit == 0:
            limit = await collection.count_rows()

        # LanceDB search will break if limit is 0 so we must return
        if limit == 0:
            return []

        results = await collection.vector_search(query_vector).limit(limit).to_pandas()

        result_values = list(results.to_dict("index").values())

        if not result_values:
            return []

        normalized_values = normalize_distances(result_values)

        return [
            ScoredResult(
                id=parse_id(result["id"]),
                payload=result["payload"],
                score=normalized_values[value_index],
            )
            for value_index, result in enumerate(result_values)
        ]

    async def batch_search(
        self,
        collection_name: str,
        query_texts: List[str],
        limit: int = None,
        with_vectors: bool = False,
    ):
        query_vectors = await self.embedding_engine.embed_text(query_texts)

        return await asyncio.gather(
            *[
                self.search(
                    collection_name=collection_name,
                    query_vector=query_vector,
                    limit=limit,
                    with_vector=with_vectors,
                )
                for query_vector in query_vectors
            ]
        )

    async def delete_data_points(self, collection_name: str, data_point_ids: list[str]):
        collection = await self.get_collection(collection_name)

        # Delete one at a time to avoid commit conflicts
        for data_point_id in data_point_ids:
            await collection.delete(f"id = '{data_point_id}'")

    async def create_vector_index(self, index_name: str, index_property_name: str):
        await self.create_collection(
            f"{index_name}_{index_property_name}", payload_schema=IndexSchema
        )

    async def index_data_points(
        self, index_name: str, index_property_name: str, data_points: list[DataPoint]
    ):
        await self.create_data_points(
            f"{index_name}_{index_property_name}",
            [
                IndexSchema(
                    id=str(data_point.id),
                    text=getattr(data_point, data_point.metadata["index_fields"][0]),
                )
                for data_point in data_points
            ],
        )

    async def prune(self):
        connection = await self.get_connection()
        collection_names = await connection.table_names()

        for collection_name in collection_names:
            collection = await self.get_collection(collection_name)
            await collection.delete("id IS NOT NULL")
            await connection.drop_table(collection_name)

        if self.url.startswith("/"):
            db_dir_path = path.dirname(self.url)
            db_file_name = path.basename(self.url)
            await get_file_storage(db_dir_path).remove_all(db_file_name)

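    # get_data_point_schema() derives the Pydantic model stored as the LanceDB payload:
    # it keeps "id" as a plain string and excludes "metadata" plus any field that references
    # other models or DataPoints, so only the data point's own scalar properties are persisted.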
    def get_data_point_schema(self, model_type: BaseModel):
        related_models_fields = []

        for field_name, field_config in model_type.model_fields.items():
            if hasattr(field_config, "model_fields"):
                related_models_fields.append(field_name)

            elif hasattr(field_config.annotation, "model_fields"):
                related_models_fields.append(field_name)

            elif (
                get_origin(field_config.annotation) == Union
                or get_origin(field_config.annotation) is list
            ):
                models_list = get_args(field_config.annotation)
                if any(hasattr(model, "model_fields") for model in models_list):
                    related_models_fields.append(field_name)
                elif models_list and any(get_args(model) is DataPoint for model in models_list):
                    related_models_fields.append(field_name)
                elif models_list and any(
                    submodel is DataPoint for submodel in get_args(models_list[0])
                ):
                    related_models_fields.append(field_name)

            elif get_origin(field_config.annotation) == Optional:
                model = get_args(field_config.annotation)
                if hasattr(model, "model_fields"):
                    related_models_fields.append(field_name)

        return copy_model(
            model_type,
            include_fields={
                "id": (str, ...),
            },
            exclude_fields=["metadata"] + related_models_fields,
        )
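
A minimal usage sketch for the adapter above, assuming a local LanceDB path and a hypothetical
stub embedding engine that only implements the two methods the adapter calls, get_vector_size()
and embed_text(); in cognee a real EmbeddingEngine would be injected instead:

    import asyncio
    from uuid import uuid4

    class StubEmbeddingEngine:
        """Hypothetical stand-in that returns fixed-size zero vectors."""

        def get_vector_size(self) -> int:
            return 4

        async def embed_text(self, texts: list[str]) -> list[list[float]]:
            return [[0.0] * 4 for _ in texts]

    async def main():
        adapter = LanceDBAdapter(
            url="/tmp/lancedb_example",  # local path, so prune() can also remove the files
            api_key=None,
            embedding_engine=StubEmbeddingEngine(),
        )

        # Create a collection keyed by IndexSchema and upsert one data point into it.
        await adapter.create_collection("docs_text", payload_schema=IndexSchema)
        await adapter.create_data_points(
            "docs_text",
            [IndexSchema(id=str(uuid4()), text="How many programmers does it take to change a light bulb?")],
        )

        # Vector search embeds the query text and returns ScoredResult objects.
        results = await adapter.search("docs_text", query_text="light bulb", limit=5)
        for result in results:
            print(result.id, result.score, result.payload)

    asyncio.run(main())

The collection name passed to search() has to match the one used at creation time;
create_vector_index() and index_data_points() build that name as f"{index_name}_{index_property_name}".
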
1250  notebooks/cognee_demo.ipynb  (vendored; file diff suppressed because it is too large)

276  notebooks/cognee_multimedia_demo.ipynb  (vendored)

@@ -21,13 +21,15 @@
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"ExecuteTime": {
"end_time": "2025-06-30T11:54:44.613431Z",
"start_time": "2025-06-30T11:54:44.606687Z"
}
},
"cell_type": "code",
"outputs": [],
"source": [
"import os\n",
"import pathlib\n",

@@ -44,9 +46,7 @@
" \"../\",\n",
" \"examples/data/multimedia/example.png\",\n",
")"
],
"outputs": [],
"execution_count": 1
]
},
{
"cell_type": "markdown",

@@ -57,12 +57,14 @@
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"ExecuteTime": {
"end_time": "2025-06-30T11:54:46.739157Z",
"start_time": "2025-06-30T11:54:46.734808Z"
}
},
"outputs": [],
"source": [
"import os\n",
"\n",

@@ -93,9 +95,7 @@
"# os.environ[\"DB_PORT\"]=\"5432\"\n",
"# os.environ[\"DB_USERNAME\"]=\"cognee\"\n",
"# os.environ[\"DB_PASSWORD\"]=\"cognee\""
],
"outputs": [],
"execution_count": 2
]
},
{
"cell_type": "markdown",

@@ -106,7 +106,211 @@
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"[structlog output trimmed: Kuzu database files deleted, database deleted successfully; Langfuse client disabled because no LANGFUSE_PUBLIC_KEY is set]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"User f5c66ce8-859b-44d4-941a-df6eee1f1d2a has registered.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[structlog / LiteLLM log output trimmed: add and cognify pipeline runs (LiteLLM gpt-5-mini calls), loader registration, and OntologyAdapter 'no close match found' notices]\n"
]
},
{
"data": {
"text/plain": [
"{UUID('241b64d6-f023-5b87-9a8f-87056f0a442c'): PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1cde937f-ae7a-5151-a20c-dc3567bee0a9'), dataset_id=UUID('241b64d6-f023-5b87-9a8f-87056f0a442c'), dataset_name='main_dataset', payload=None, data_ingestion_info=[{'run_info': PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1cde937f-ae7a-5151-a20c-dc3567bee0a9'), dataset_id=UUID('241b64d6-f023-5b87-9a8f-87056f0a442c'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('692741cd-46e5-5988-85e9-f3901d104b7e')}, {'run_info': PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1cde937f-ae7a-5151-a20c-dc3567bee0a9'), dataset_id=UUID('241b64d6-f023-5b87-9a8f-87056f0a442c'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('899de74a-1bef-5afd-a478-1ea944503514')}])}"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import cognee\n",
"\n",

@@ -119,9 +323,7 @@
"\n",
"# Create knowledge graph with cognee\n",
"await cognee.cognify()"
],
"outputs": [],
"execution_count": null
]
},
{
"cell_type": "markdown",

@@ -132,12 +334,39 @@
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"ExecuteTime": {
"end_time": "2025-06-30T11:44:56.372628Z",
"start_time": "2025-06-30T11:44:55.978258Z"
}
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"[SummariesRetriever log output trimmed: summary retrieval for query 'What is in the multimedia files?' found 2 summaries and returned 2 payloads]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'id': '686b9e03-4505-56ec-9295-94c53d4db004', 'created_at': 1756301013713, 'updated_at': 1756301013713, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': \"Programmer light-bulb joke: none — it's a hardware problem.\"}\n",
"{'id': '82343440-bd02-5149-ad3f-80289121146c', 'created_at': 1756300957894, 'updated_at': 1756300957894, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': \"Programmers don't change light bulbs — that's a hardware problem.\"}\n"
]
}
],
"source": [
"from cognee.api.v1.search import SearchType\n",
"\n",

@@ -150,18 +379,29 @@
"# Display search results\n",
"for result_text in search_results:\n",
" print(result_text)"
],
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'id': '3b530220-7e7c-52a2-8b62-ce5adce1a46c', 'created_at': 1751283883122, 'updated_at': 1751283883122, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': \"The joke queries the number of programmers required to change a light bulb and answers, 'None. That’s a hardware issue.' This humor highlights the divide between software and hardware challenges in programming.\"}\n",
"{'id': '128eb96e-fd36-53ef-ab6d-d4884ecbfee9', 'created_at': 1751283883122, 'updated_at': 1751283883122, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': \"Changing a light bulb doesn't require programmers.\"}\n"
"ename": "",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31mThe Kernel crashed while executing code in the current cell or a previous cell. \n",
"\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. \n",
"\u001b[1;31mClick <a href='https://aka.ms/vscodeJupyterKernelCrash'>here</a> for more info. \n",
"\u001b[1;31mView Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
]
}
],
"execution_count": 5
"source": [
"import os\n",
"os._exit(0)"
]
}
],
"metadata": {

@@ -180,7 +420,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
"version": "3.12.7"
}
},
"nbformat": 4,

805  notebooks/cognee_simple_demo.ipynb  (vendored)

@@ -8,14 +8,6 @@
"# Cognee GraphRAG Simple Example"
]
},
{
"cell_type": "code",
"id": "982b897a29a26f7d",
"metadata": {},
"source": "!pip install cognee==0.2.3",
"outputs": [],
"execution_count": null
},
{
"cell_type": "markdown",
"id": "f51e92e9fdcf77b7",

@@ -28,6 +20,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"id": "initial_id",
"metadata": {
"ExecuteTime": {

@@ -35,13 +28,13 @@
"start_time": "2025-06-30T12:08:22.646047Z"
}
},
"outputs": [],
"source": [
"import os\n",
"\n",
"os.environ[\"LLM_API_KEY\"] = \"\""
],
"outputs": [],
"execution_count": 1
"if \"LLM_API_KEY\" not in os.environ:\n",
" os.environ[\"LLM_API_KEY\"] = \"YOUR KEY\""
]
},
{
"cell_type": "markdown",

@@ -53,6 +46,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"id": "5805c346f03d8070",
"metadata": {
"ExecuteTime": {

@@ -60,12 +54,11 @@
"start_time": "2025-06-30T12:08:23.555854Z"
}
},
"outputs": [],
"source": [
"current_directory = os.getcwd()\n",
"file_path = os.path.join(current_directory, \"data\", \"alice_in_wonderland.txt\")"
],
"outputs": [],
"execution_count": 2
]
},
{
"cell_type": "markdown",

@@ -77,15 +70,601 @@
},
{
"cell_type": "code",
"execution_count": 3,
"id": "875763366723ee48",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"[structlog / LiteLLM log output trimmed: logging initialized (cognee 0.2.4-local, Python 3.12.7, LanceDB vector store), add and cognify pipeline runs over alice_in_wonderland.txt, loader registration, and OntologyAdapter 'no close match found' notices]\n"
"\u001b[2m2025-08-27T13:35:55.986292\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'white kid gloves' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.986527\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bottle (drink me)' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.986754\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'thimble' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.986962\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'comfits' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.987297\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'event' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.987508\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'caucus-race' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.987735\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'grew large event' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.988057\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'creature' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.988461\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'magic bottle' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.988654\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'size-changing cake' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.988869\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pebbles' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.989079\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mushroom' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.989289\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'caterpillar' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.989498\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hookah' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.989699\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bill (lizard)' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.989916\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'guinea-pigs' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.990178\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pat' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.990390\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mary ann' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.990541\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'place' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.990742\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rabbit house' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.990894\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'window' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.991048\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'door' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.991246\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'chimney' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.991431\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cucumber-frame' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.991636\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'wood' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.991856\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'puppy' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.992038\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'father william' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.992222\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'concept' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.992402\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'size change' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.992893\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pigeon' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.993054\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'serpent' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.993213\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'trees' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.993435\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'little house' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.993576\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'fish-footman' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.993834\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'frog-footman' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.994053\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'duchess' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.994264\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cook' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.994443\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'baby' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.994661\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pig' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.994797\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cheshire cat' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.994960\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'kitchen' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.995114\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cauldron' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.995283\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'soup' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.995444\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pepper' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.995637\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'plate' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.995924\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'frying-pan' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.996079\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'invitation letter' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.996298\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'queen' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.996443\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hatter' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.996602\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'march hare' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.996794\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'game' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.996993\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'croquet' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.997436\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the cat' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.997651\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the dormouse' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.997898\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'a mad tea-party' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.998205\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'table' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.998409\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the hatters watch' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.998581\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'house of the march hare' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.998819\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'a rose-tree' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.998980\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'five' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.999133\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'two' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.999302\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'seven' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.999465\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'group' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:55.999749\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'soldiers' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.000078\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'courtiers' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.000273\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'royal children' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.001132\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'knave of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.002873\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the king of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.003453\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the queen of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.004815\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'treacle-well' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.005347\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'elsie' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.005583\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lacie' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.005904\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tillie' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.006224\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'riddle: why is a raven like a writing-desk?' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.006483\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'song: twinkle twinkle little bat' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.006698\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tea-time' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.007096\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the king' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.007449\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'executioner' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.008948\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'gryphon' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.009396\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mock turtle' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.009604\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'croquet game' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.009761\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'croquet ground' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.010011\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hedgehog' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.010238\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'flamingo' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.010525\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rose-tree' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.010795\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'flower-pot' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.011185\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'players' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.011425\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mock turtle soup' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.011802\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'old turtle (tortoise)' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.012966\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bill the lizard' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.014629\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lobster' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.015124\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'whiting' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.015383\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'snail' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.015702\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'porpoise' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.015898\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'owl' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.016462\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'panther' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.017210\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'sea' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.017407\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'sea-shore' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.017920\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'court of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.018187\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lobster quadrille (dance)' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.019379\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'creativework' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.020269\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'beautiful soup' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.021436\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'whiting song' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.022224\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'school (sea-school)' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.022613\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'reeling and writhing' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.022812\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'ambition' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.023185\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'distraction' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.023469\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'uglification' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.023674\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'derision' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.023904\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mystery (ancient and modern)' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.024157\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'seaography' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.024376\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'drawling' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.024600\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'stretching' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.024992\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'fainting in coils' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.025219\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'laughing' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.025502\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'grief' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.025949\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'conger eel (drawling-master)' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.026243\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'old crab (classics master)' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.026692\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tarts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.026920\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pencil' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.027897\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'slate' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.028631\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'duchesss cook' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.028848\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lizard (bill)' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.029560\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'guinea-pigs' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.030437\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'organization' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.030900\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'jury' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.031211\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'trial of the knave' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.031414\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'document' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.031563\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'parchment scroll' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.031771\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'trumpet' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.031975\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'teacup' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.032145\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bread-and-butter' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.032348\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pepper-box' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.032520\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'inkstand' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.032767\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'canvas bag' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.033132\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'jury-box' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.034834\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'alices dream' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.036198\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'date' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.037172\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'fourteenth of march' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.037973\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'fifteenth of march' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.038237\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'sixteenth of march' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.038606\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'she' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.038879\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'little sister' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.039022\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'wonderland' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.039194\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dull reality' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.039400\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'plant' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.039578\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'grass' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.039741\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pool' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.039913\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'reeds' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.040074\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rattling teacups' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.040285\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tinkling sheep-bells' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.040468\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'shepherd boy' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.040662\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'other queer noises' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.040836\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'busy farm-yard' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.041105\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cattle' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.041323\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'grown woman' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.042019\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'simple and loving heart' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.042329\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'other little children' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.042583\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'strange tale' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.042824\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dream of wonderland' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.043010\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'simple sorrows' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.043173\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'simple joys' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.043331\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'child-life' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:35:56.043505\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'happy summer days' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:02.674965\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[92m14:36:02 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n",
|
||||
"\u001b[92m14:36:02 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n",
|
||||
"\u001b[92m14:36:02 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n",
|
||||
"\u001b[92m14:36:02 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n",
|
||||
"\u001b[92m14:36:02 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n",
|
||||
"\u001b[92m14:36:02 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n",
|
||||
"\u001b[92m14:36:02 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n",
|
||||
"\u001b[92m14:36:02 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n",
|
||||
"\u001b[92m14:36:02 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:16.723969\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:24.505751\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:24.656760\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:24.853916\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:24.999416\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task completed: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:25.182065\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:25.379190\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:25.534779\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `52948913-bf39-51ee-a535-e4f140f34c10`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n"
]
},
{
"data": {
"text/plain": [
"{UUID('241b64d6-f023-5b87-9a8f-87056f0a442c'): PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1cde937f-ae7a-5151-a20c-dc3567bee0a9'), dataset_id=UUID('241b64d6-f023-5b87-9a8f-87056f0a442c'), dataset_name='main_dataset', payload=None, data_ingestion_info=[{'run_info': PipelineRunAlreadyCompleted(status='PipelineRunAlreadyCompleted', pipeline_run_id=UUID('1cde937f-ae7a-5151-a20c-dc3567bee0a9'), dataset_id=UUID('241b64d6-f023-5b87-9a8f-87056f0a442c'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('692741cd-46e5-5988-85e9-f3901d104b7e')}, {'run_info': PipelineRunAlreadyCompleted(status='PipelineRunAlreadyCompleted', pipeline_run_id=UUID('1cde937f-ae7a-5151-a20c-dc3567bee0a9'), dataset_id=UUID('241b64d6-f023-5b87-9a8f-87056f0a442c'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('899de74a-1bef-5afd-a478-1ea944503514')}, {'run_info': PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1cde937f-ae7a-5151-a20c-dc3567bee0a9'), dataset_id=UUID('241b64d6-f023-5b87-9a8f-87056f0a442c'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('6a0c7501-47bc-5632-b79c-3343d8a0b2a2')}])}"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import cognee\n",
"await cognee.add(file_path)\n",
"await cognee.cognify()"
],
"outputs": [],
"execution_count": null
]
},
{
"cell_type": "markdown",
@@ -97,7 +676,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 4,
"id": "29b3a1e3279100d2",
"metadata": {},
"outputs": [
@@ -105,44 +684,24 @@
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001B[92m20:16:54 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:57 - LiteLLM:INFO\u001B[0m: utils.py:3101 - \n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:30.994342\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph projection completed: 239 nodes, 745 edges in 0.01s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mCogneeGraph\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:31.598044\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mVector collection retrieval completed: Retrieved distances from 6 collections in 0.04s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[92m14:36:31 - LiteLLM:INFO\u001b[0m: utils.py:3341 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\u001B[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001B[0m\u001B[92m20:16:59 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001B[0m\u001B[92m20:16:59 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001B[0m"
|
||||
"\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"['1. Alice \\n2. The White Rabbit \\n3. The Cheshire Cat \\n4. The Hatter (Mad Hatter) \\n5. The March Hare \\n6. The Knave of Hearts \\n7. The Queen of Hearts \\n8. The King of Hearts \\n9. The Mock Turtle \\n10. The Gryphon']"
|
||||
"['Acknowledged.']"
|
||||
]
|
||||
},
|
||||
"execution_count": 5,
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
|
|
@@ -161,35 +720,35 @@
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:02 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:03 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:03 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:03 - LiteLLM:INFO\u001B[0m: utils.py:3101 - \n",
|
||||
"\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:02 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:03 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:03 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:03 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\u001B[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001B[0m\u001B[92m20:17:05 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001B[0m\u001B[92m20:17:05 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001B[0m"
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\u001b[92m20:17:05 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001b[0m\u001b[92m20:17:05 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001b[0m"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
|
@@ -217,35 +776,35 @@
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:07 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:07 - LiteLLM:INFO\u001B[0m: utils.py:3101 - \n",
|
||||
"\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:07 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:07 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\n",
|
||||
"\u001B[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001B[0m\u001B[92m20:17:08 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001B[0m\u001B[92m20:17:08 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001B[0m"
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-5-mini; provider = openai\u001b[0m\u001b[92m20:17:08 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001b[0m\u001b[92m20:17:08 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-5-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-5-mini-2024-07-18\u001b[0m"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
|
@@ -273,7 +832,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 5,
"id": "6effdae590b795d3",
"metadata": {},
"outputs": [
@@ -282,14 +841,15 @@
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001B[2m2025-06-18T18:17:08.814190\u001B[0m [\u001B[32m\u001B[1minfo \u001B[0m] \u001B[1mGraph visualization saved as /Users/borisarzentar/graph_visualization.html\u001B[0m [\u001B[0m\u001B[1m\u001B[34mcognee.shared.logging_utils\u001B[0m]\u001B[0m\n",
|
||||
"\u001B[2m2025-06-18T18:17:08.814882\u001B[0m [\u001B[32m\u001B[1minfo \u001B[0m] \u001B[1mThe HTML file has been stored on your home directory! Navigate there with cd ~\u001B[0m [\u001B[0m\u001B[1m\u001B[34mcognee.shared.logging_utils\u001B[0m]\u001B[0m"
|
||||
"\u001b[2m2025-08-27T13:36:40.283583\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph visualization saved as /Users/daulet/graph_visualization.html\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-08-27T13:36:40.284941\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mThe HTML file has been stored on your home directory! Navigate there with cd ~\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'/Users/borisarzentar/graph_visualization.html'"
|
||||
"'/Users/daulet/graph_visualization.html'"
|
||||
]
|
||||
},
|
||||
"metadata": {},
|
||||
|
|
@@ -301,7 +861,7 @@
"True"
|
||||
]
|
||||
},
|
||||
"execution_count": 8,
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
|
|
@@ -317,6 +877,29 @@
"webbrowser.open(f\"file://{html_file}\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "75054183",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"ename": "",
|
||||
"evalue": "",
|
||||
"output_type": "error",
|
||||
"traceback": [
|
||||
"\u001b[1;31mThe Kernel crashed while executing code in the current cell or a previous cell. \n",
|
||||
"\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. \n",
|
||||
"\u001b[1;31mClick <a href='https://aka.ms/vscodeJupyterKernelCrash'>here</a> for more info. \n",
|
||||
"\u001b[1;31mView Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import os\n",
|
||||
"os._exit(0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "f0945d6f1d962ab",
|
||||
|
|
@@ -342,7 +925,7 @@
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.5"
|
||||
"version": "3.12.7"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
|
|
|||
1640 notebooks/ontology_demo.ipynb (vendored)
File diff suppressed because one or more lines are too long