Merge remote-tracking branch 'origin/dev' into fix/mcp-and-ui
Commit: 9b5ea631c6

6 changed files with 487 additions and 1686 deletions

notebooks/cognee_demo.ipynb (vendored): 663 changes
@@ -68,6 +68,19 @@
    "\n"
   ]
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-30T11:38:36.328621Z",
     "start_time": "2025-06-30T11:38:36.068313Z"
    }
   },
   "cell_type": "code",
   "source": "import cognee",
   "id": "8aba58c24daec519",
   "outputs": [],
   "execution_count": 3
  },
  {
   "cell_type": "markdown",
   "id": "074f0ea8-c659-4736-be26-be4b0e5ac665",
@@ -86,15 +99,13 @@
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "df16431d0f48b006",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-09T21:32:37.309629Z",
     "start_time": "2025-02-09T21:32:37.305105Z"
     "end_time": "2025-06-30T11:38:49.122448Z",
     "start_time": "2025-06-30T11:38:49.120056Z"
    }
   },
   "outputs": [],
   "source": [
    "job_position = \"\"\"Senior Data Scientist (Machine Learning)\n",
    "\n",
@@ -121,19 +132,19 @@
    "Strong problem-solving skills and attention to detail.\n",
    "Candidate CVs\n",
    "\"\"\""
   ]
   ],
   "outputs": [],
   "execution_count": 5
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "9086abf3af077ab4",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-09T21:32:37.869475Z",
     "start_time": "2025-02-09T21:32:37.867374Z"
     "end_time": "2025-06-30T11:38:49.646981Z",
     "start_time": "2025-06-30T11:38:49.645059Z"
    }
   },
   "outputs": [],
   "source": [
    "job_1 = \"\"\"\n",
    "CV 1: Relevant\n",
@@ -166,19 +177,19 @@
    "Big Data Technologies: Hadoop, Spark\n",
    "Data Visualization: Tableau, Matplotlib\n",
    "\"\"\""
   ]
   ],
   "outputs": [],
   "execution_count": 6
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "a9de0cc07f798b7f",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-09T21:32:38.269062Z",
     "start_time": "2025-02-09T21:32:38.267194Z"
     "end_time": "2025-06-30T11:38:50.081581Z",
     "start_time": "2025-06-30T11:38:50.079653Z"
    }
   },
   "outputs": [],
   "source": [
    "job_2 = \"\"\"\n",
    "CV 2: Relevant\n",
@@ -210,19 +221,19 @@
    "Data Visualization: Seaborn, Plotly\n",
    "Databases: MySQL, MongoDB\n",
    "\"\"\""
   ]
   ],
   "outputs": [],
   "execution_count": 7
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "185ff1c102d06111",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-09T21:32:38.738093Z",
     "start_time": "2025-02-09T21:32:38.736147Z"
     "end_time": "2025-06-30T11:38:50.492793Z",
     "start_time": "2025-06-30T11:38:50.490913Z"
    }
   },
   "outputs": [],
   "source": [
    "job_3 = \"\"\"\n",
    "CV 3: Relevant\n",
@@ -254,19 +265,19 @@
    "Statistical Analysis: SAS, SPSS\n",
    "Cloud Platforms: AWS, Azure\n",
    "\"\"\""
   ]
   ],
   "outputs": [],
   "execution_count": 8
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "d55ce4c58f8efb67",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-09T21:32:39.237542Z",
     "start_time": "2025-02-09T21:32:39.235742Z"
     "end_time": "2025-06-30T11:38:50.900140Z",
     "start_time": "2025-06-30T11:38:50.898061Z"
    }
   },
   "outputs": [],
   "source": [
    "job_4 = \"\"\"\n",
    "CV 4: Not Relevant\n",
@@ -296,19 +307,19 @@
    "Web Design: HTML, CSS\n",
    "Specialties: Branding and Identity, Typography\n",
    "\"\"\""
   ]
   ],
   "outputs": [],
   "execution_count": 9
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "ca4ecc32721ad332",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-09T21:32:39.740387Z",
     "start_time": "2025-02-09T21:32:39.738768Z"
     "end_time": "2025-06-30T11:38:51.360603Z",
     "start_time": "2025-06-30T11:38:51.357573Z"
    }
   },
   "outputs": [],
   "source": [
    "job_5 = \"\"\"\n",
    "CV 5: Not Relevant\n",
@@ -338,7 +349,9 @@
    "CRM Software: Salesforce, Zoho\n",
    "Negotiation and Relationship Building\n",
    "\"\"\""
   ]
   ],
   "outputs": [],
   "execution_count": 10
  },
  {
   "cell_type": "markdown",
@@ -350,31 +363,24 @@
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "bce39dc6",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-09T21:31:46.855966Z",
     "start_time": "2025-02-09T21:31:46.847681Z"
     "end_time": "2025-06-30T11:39:26.596920Z",
     "start_time": "2025-06-30T11:39:26.592336Z"
    }
   },
   "outputs": [],
   "source": [
    "import os\n",
    "\n",
    "# Setting environment variables\n",
    "if \"GRAPHISTRY_USERNAME\" not in os.environ:\n",
    "    os.environ[\"GRAPHISTRY_USERNAME\"] = \"\"\n",
    "\n",
    "if \"GRAPHISTRY_PASSWORD\" not in os.environ:\n",
    "    os.environ[\"GRAPHISTRY_PASSWORD\"] = \"\"\n",
    "\n",
    "if \"LLM_API_KEY\" not in os.environ:\n",
    "    os.environ[\"LLM_API_KEY\"] = \"\"\n",
    "    os.environ[\"LLM_API_KEY\"] = \"YOUR KEY\"\n",
    "\n",
    "# \"neo4j\" or \"networkx\"\n",
    "os.environ[\"GRAPH_DATABASE_PROVIDER\"] = \"networkx\"\n",
    "# Not needed if using networkx\n",
    "os.environ[\"GRAPH_DATABASE_PROVIDER\"] = \"kuzu\"\n",
    "# Not needed if using kuzu\n",
    "# os.environ[\"GRAPH_DATABASE_URL\"]=\"\"\n",
    "# os.environ[\"GRAPH_DATABASE_USERNAME\"]=\"\"\n",
    "# os.environ[\"GRAPH_DATABASE_PASSWORD\"]=\"\"\n",
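For orientation while reading this hunk: the updated cell switches the notebook's graph store from NetworkX to the embedded Kuzu engine and adds a "YOUR KEY" placeholder for the LLM key. A minimal, hedged sketch of the same configuration outside the notebook (the variable names come from the cell; the key value is a placeholder):

    import os

    # Placeholder value; set a real LLM API key before running cognee.
    os.environ.setdefault("LLM_API_KEY", "YOUR KEY")

    # Embedded graph store used by the updated notebook; "networkx" and "neo4j"
    # are the alternatives mentioned in the same cell.
    os.environ["GRAPH_DATABASE_PROVIDER"] = "kuzu"

    # Only needed for a remote graph database such as Neo4j:
    # os.environ["GRAPH_DATABASE_URL"] = ""
    # os.environ["GRAPH_DATABASE_USERNAME"] = ""
    # os.environ["GRAPH_DATABASE_PASSWORD"] = ""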
@@ -396,39 +402,19 @@
"# os.environ[\"DB_PORT\"]=\"5432\"\n",
|
||||
"# os.environ[\"DB_USERNAME\"]=\"cognee\"\n",
|
||||
"# os.environ[\"DB_PASSWORD\"]=\"cognee\""
|
||||
]
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": 11
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"id": "9f1a1dbd",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-02-09T21:32:56.703003Z",
|
||||
"start_time": "2025-02-09T21:32:47.375684Z"
|
||||
"end_time": "2025-06-30T11:39:29.907387Z",
|
||||
"start_time": "2025-06-30T11:39:28.664364Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-06-18T18:00:12.657639\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDeleted old log file: /Users/borisarzentar/Projects/Topoteretes/cognee/logs/2025-06-18_16-18-52.log\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-06-18T18:00:12.658131\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLogging initialized \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m \u001b[36mcognee_version\u001b[0m=\u001b[35m0.1.42-dev\u001b[0m \u001b[36mos_info\u001b[0m=\u001b[35m'Darwin 24.5.0 (Darwin Kernel Version 24.5.0: Tue Apr 22 19:54:25 PDT 2025; root:xnu-11417.121.6~2/RELEASE_ARM64_T6020)'\u001b[0m \u001b[36mpython_version\u001b[0m=\u001b[35m3.11.5\u001b[0m \u001b[36mstructlog_version\u001b[0m=\u001b[35m25.4.0\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[1mHTTP Request: GET https://raw.githubusercontent.com/BerriAI/litellm/main/model_prices_and_context_window.json \"HTTP/1.1 200 OK\"\u001b[0m\n",
|
||||
"/Users/borisarzentar/Projects/Topoteretes/cognee/.venv/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
||||
" from .autonotebook import tqdm as notebook_tqdm\n",
|
||||
"/Users/borisarzentar/Projects/Topoteretes/cognee/.venv/lib/python3.11/site-packages/dlt/helpers/dbt/__init__.py:3: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n",
|
||||
" import pkg_resources\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-06-18T18:00:16.570235\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCleared all data from graph while preserving structure\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:16.711037\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDatabase deleted successfully.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Reset the cognee system with the following command:\n",
|
||||
"\n",
|
||||
@@ -436,7 +422,9 @@
    "\n",
    "await cognee.prune.prune_data()\n",
    "await cognee.prune.prune_system(metadata=True)"
   ]
   ],
   "outputs": [],
   "execution_count": 12
  },
  {
   "cell_type": "markdown",
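The cells touched here reset cognee's stores before re-ingesting the sample texts into an "example" dataset. A script-style sketch of that flow, under the assumption that the documents are plain strings (stand-ins for the job_* variables defined earlier in the notebook):

    import asyncio

    import cognee


    async def main() -> None:
        # Start from a clean slate: drop ingested data and system metadata.
        await cognee.prune.prune_data()
        await cognee.prune.prune_system(metadata=True)

        # Stand-ins for the job_1..job_5 and job_position strings from the notebook.
        documents = ["CV 1 ...", "CV 2 ...", "Senior Data Scientist job description ..."]
        await cognee.add(documents, "example")


    asyncio.run(main())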
@@ -448,66 +436,38 @@
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"id": "904df61ba484a8e5",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-02-09T21:33:07.412263Z",
|
||||
"start_time": "2025-02-09T21:33:04.807282Z"
|
||||
"end_time": "2025-06-30T11:39:34.803971Z",
|
||||
"start_time": "2025-06-30T11:39:32.388973Z"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"import cognee\n",
|
||||
"\n",
|
||||
"await cognee.add([job_1, job_2, job_3, job_4, job_5, job_position], \"example\")"
|
||||
],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[1mLangfuse client is disabled since no public_key was provided as a parameter or environment variable 'LANGFUSE_PUBLIC_KEY'. See our docs: https://langfuse.com/docs/sdk/python/low-level-sdk#initialize-client\u001b[0m\u001b[92m20:00:16 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:00:19 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\n",
|
||||
"\u001b[1mEmbeddingRateLimiter initialized: enabled=False, requests_limit=60, interval_seconds=60\u001b[0m\u001b[92m20:00:19 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:19 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.441863\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `428bda5b-e9ed-5271-911b-2b3085d24e15`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks(tasks: [Task], data)\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.442237\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.443087\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"User f8d1e630-36d9-482c-b9b9-3393095f35ed has registered.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.851761\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.852141\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.852401\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `428bda5b-e9ed-5271-911b-2b3085d24e15`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks(tasks: [Task], data)\u001b[0m]\u001b[0m"
|
||||
"User 18a80bea-cfa8-428c-be6f-fd0b62af7df5 has registered.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('da7c9789-2d8c-56a8-9e6a-2500534aa25d'), payload=None)"
|
||||
"PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('cd5607a9-7766-593c-859d-38db41f05379'), dataset_id=UUID('b0736e41-095a-58c2-bdc1-69644c81a662'), dataset_name='example', payload=None)"
|
||||
]
|
||||
},
|
||||
"execution_count": 9,
|
||||
"execution_count": 13,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import cognee\n",
|
||||
"\n",
|
||||
"await cognee.add([job_1, job_2, job_3, job_4, job_5, job_position], \"example\")"
|
||||
]
|
||||
"execution_count": 13
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
@@ -519,15 +479,13 @@
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "7c431fdef4921ae0",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-09T21:33:21.450585Z",
     "start_time": "2025-02-09T21:33:21.446326Z"
     "end_time": "2025-06-30T11:39:41.213655Z",
     "start_time": "2025-06-30T11:39:41.209787Z"
    }
   },
   "outputs": [],
   "source": [
    "from cognee.shared.data_models import KnowledgeGraph\n",
    "from cognee.modules.data.models import Dataset, Data\n",
@@ -579,197 +537,19 @@
"\n",
|
||||
" except Exception as error:\n",
|
||||
" raise error"
|
||||
]
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": 14
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"id": "f0a91b99c6215e09",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-02-09T21:33:50.228396Z",
|
||||
"start_time": "2025-02-09T21:33:27.935791Z"
|
||||
"end_time": "2025-06-30T11:40:17.060626Z",
|
||||
"start_time": "2025-06-30T11:39:42.388114Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.871430\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `f5945ea1-08b8-53d1-85f6-a935ff7a2932`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks(tasks: [Task], data)\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.871830\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.872288\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.875133\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task started: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:00:19.888621\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\u001b[92m20:00:19 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:00:19 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:00:19 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:00:19 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:00:19 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:00:19 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:00:36 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:36 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:39 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:39 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:39 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:39 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:44 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:44 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:52 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:00:52 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:19 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:19 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.955570\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mOntology file 'None' not found. No owl ontology will be attached to the graph.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.985119\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'person' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.985588\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'david thompson' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.986121\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'company' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.986454\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'creativeworks agency' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.986848\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'visual innovations' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.987225\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'educationalinstitution' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.987528\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rhode island school of design' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.987873\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'degree' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.988230\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'b.f.a. in graphic design' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.988558\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'software' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.988873\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'adobe creative suite' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.989215\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dr. emily carter' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.989517\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'place' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.989810\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'stanford university' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.990102\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'university of california, berkeley' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.990416\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'organization' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.990681\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'innovateai labs' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.990970\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'datawave analytics' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.991274\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'ph.d. in computer science' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.991559\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'b.s. in mathematics' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.991841\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'date' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.992072\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '2014' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.992282\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '2010' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.992697\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'technology' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.993033\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'machine learning' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.993327\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'predictive analytics' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.993696\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'natural language processing' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.994020\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'deep learning' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.994404\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cloud-based platforms' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.994801\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'application' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.995274\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'customer segmentation' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.995645\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'churn analysis' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.996031\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hadoop' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.996459\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'spark' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.996828\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'programming language' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.997218\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'python' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.997627\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'r' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.997960\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'sql' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.998346\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'machine learning library' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.998616\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tensorflow' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.998890\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'keras' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.999219\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'scikit-learn' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.999546\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'data visualization tool' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:19.999841\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tableau' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.000148\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'matplotlib' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.000577\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'sarah nguyen' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.000911\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'university of washington' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.001331\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'university of texas at austin' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.001668\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'quantumtech' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.001991\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'datacore solutions' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.002317\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'm.s. in statistics' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.002540\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'b.s. in applied mathematics' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.002823\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'year of experience' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.003312\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'jessica miller' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.003621\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'global enterprises' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.003905\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'market leaders inc.' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.004172\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'educational_institution' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.004531\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'university of southern california' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.004885\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '2010' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.005301\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '2015-present' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.005724\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '2010-2015' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.006076\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'michael rodriguez' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.006495\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'alpha analytics' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.006894\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'techinsights' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.007225\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'carnegie mellon university' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.007472\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'university of michigan' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.007735\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '2013' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.008050\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '2011' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.008364\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '2017' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.008582\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'present' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.008931\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'technova solutions' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.009207\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'job_title' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.009472\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'senior data scientist' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.009800\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'location' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.010157\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'san francisco, ca' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.010428\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'field' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.010706\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'data science' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.011026\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'framework' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.011371\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pytorch' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.011583\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'master’s or ph.d.' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.011876\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'experience' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.012163\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '5+ years of experience' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.012471\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'responsibility' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:20.012702\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mentorship' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\u001b[92m20:01:20 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:21 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:21 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:21 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:22 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:22.115646\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\u001b[92m20:01:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:01:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:01:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:01:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:01:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:01:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = azure\u001b[0m\u001b[92m20:01:27 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:27 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:28 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:28 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:28 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:28 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:30 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:30 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:30 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:30 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:01:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/gpt-4o-mini-2024-07-18\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:31.461451\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\u001b[92m20:01:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:32 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:33 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:33 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:33 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:34 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:34.168179\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:34.168797\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:34.169251\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:34.169673\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task completed: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:34.169999\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:34.170378\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:34.170769\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `f5945ea1-08b8-53d1-85f6-a935ff7a2932`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks(tasks: [Task], data)\u001b[0m]\u001b[0m"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from cognee.modules.users.methods import get_default_user\n",
|
||||
"from cognee.modules.data.methods import get_datasets_by_name\n",
|
||||
@@ -782,29 +562,19 @@
"datasets = await get_datasets_by_name([\"example\"], user.id)\n",
|
||||
"\n",
|
||||
"await run_cognify_pipeline(datasets[0], user)"
|
||||
]
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": 15
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 12,
|
||||
"id": "9dd29caf28c272d1",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-02-09T21:42:39.479622Z",
|
||||
"start_time": "2025-02-09T21:42:39.469795Z"
|
||||
"end_time": "2025-06-30T11:40:23.648015Z",
|
||||
"start_time": "2025-06-30T11:40:23.638810Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-06-18T18:01:34.182929\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph visualization saved as /Users/borisarzentar/Projects/Topoteretes/cognee/notebooks/.artifacts/graph_visualization.html\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:01:34.183348\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mThe HTML file has been stored at path: /Users/borisarzentar/Projects/Topoteretes/cognee/notebooks/.artifacts/graph_visualization.html\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import pathlib\n",
|
||||
"from cognee.api.v1.visualize import visualize_graph\n",
@@ -816,117 +586,9 @@
"\n",
|
||||
"# Make sure to convert to string if visualize_graph expects a string\n",
|
||||
"b = await visualize_graph(str(graph_file_path))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"id": "9f941848c418d713",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-02-09T21:42:59.312945Z",
|
||||
"start_time": "2025-02-09T21:42:59.311015Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"serving at port 8001\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import http.server\n",
|
||||
"import socketserver\n",
|
||||
"from threading import Thread\n",
|
||||
"\n",
|
||||
"PORT = 8001\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class ServerThread(Thread):\n",
|
||||
" def run(self):\n",
|
||||
" Handler = http.server.SimpleHTTPRequestHandler\n",
|
||||
" with socketserver.TCPServer((\"\", PORT), Handler) as httpd:\n",
|
||||
" print(\"serving at port\", PORT)\n",
|
||||
" httpd.serve_forever()\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ServerThread().start()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 14,
|
||||
"id": "77d5794d-3561-4c9c-a001-df7d6b0c1968",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Visualization server running at: http://0.0.0.0:8002\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"<function cognee.shared.utils.start_visualization_server.<locals>.shutdown()>"
|
||||
]
|
||||
},
|
||||
"execution_count": 14,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
},
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"127.0.0.1 - - [18/Jun/2025 20:01:34] \"GET /.artifacts/graph_visualization.html HTTP/1.1\" 200 -\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from cognee.api.v1.visualize import visualization_server\n",
|
||||
"\n",
|
||||
"visualization_server(port=8002)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 15,
|
||||
"id": "030c6aac-650c-42dc-a89b-d21a5f422474",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/html": [
|
||||
"\n",
|
||||
" <iframe\n",
|
||||
" width=\"800\"\n",
|
||||
" height=\"600\"\n",
|
||||
" src=\"http://127.0.0.1:8002/.artifacts/graph_visualization.html\"\n",
|
||||
" frameborder=\"0\"\n",
|
||||
" allowfullscreen\n",
|
||||
" \n",
|
||||
" ></iframe>\n",
|
||||
" "
|
||||
],
|
||||
"text/plain": [
|
||||
"<IPython.lib.display.IFrame at 0x6802a5bd0>"
|
||||
]
|
||||
},
|
||||
"execution_count": 15,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from IPython.display import IFrame, display, HTML\n",
|
||||
"\n",
|
||||
"IFrame(\"http://127.0.0.1:8002/.artifacts/graph_visualization.html\", width=800, height=600)"
|
||||
]
|
||||
"outputs": [],
|
||||
"execution_count": 16
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
@@ -946,40 +608,13 @@
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 16,
|
||||
"id": "e5e7dfc8",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2024-12-24T13:44:16.575843Z",
|
||||
"start_time": "2024-12-24T13:44:16.047897Z"
|
||||
"end_time": "2025-06-30T11:40:31.638407Z",
|
||||
"start_time": "2025-06-30T11:40:31.331926Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[92m20:01:34 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{'id': '4d8dda57-2681-5264-a2bd-e2ddfe66a785', 'payload': {'id': '4d8dda57-2681-5264-a2bd-e2ddfe66a785', 'created_at': 1750269692257, 'updated_at': 1750269692257, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'sarah nguyen'}, 'score': 0.5709310173988342}\n",
|
||||
"{'id': '198e2ab8-75e9-5931-97ab-da9a5a8e188c', 'payload': {'id': '198e2ab8-75e9-5931-97ab-da9a5a8e188c', 'created_at': 1750269692257, 'updated_at': 1750269692257, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'san francisco, ca'}, 'score': 1.349502682685852}\n",
|
||||
"{'id': '435dbd37-ab20-503c-9e99-ab8b8a3484e5', 'payload': {'id': '435dbd37-ab20-503c-9e99-ab8b8a3484e5', 'created_at': 1750269692257, 'updated_at': 1750269692257, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'senior data scientist'}, 'score': 1.3936456441879272}\n",
|
||||
"{'id': '36a5e3c8-c5f5-5ab5-8d59-ea69d8b36932', 'payload': {'id': '36a5e3c8-c5f5-5ab5-8d59-ea69d8b36932', 'created_at': 1750269692257, 'updated_at': 1750269692257, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'jessica miller'}, 'score': 1.4041534662246704}\n",
|
||||
"{'id': '0e975b3e-7ebe-5572-ad6a-08eae2e2af97', 'payload': {'id': '0e975b3e-7ebe-5572-ad6a-08eae2e2af97', 'created_at': 1750269692256, 'updated_at': 1750269692256, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'ph.d. in computer science'}, 'score': 1.4247756004333496}\n",
|
||||
"{'id': '538550fe-80fe-5525-9a79-7da2fd8d8a70', 'payload': {'id': '538550fe-80fe-5525-9a79-7da2fd8d8a70', 'created_at': 1750269692257, 'updated_at': 1750269692257, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'year of experience'}, 'score': 1.4505990743637085}\n",
|
||||
"{'id': '73ae630f-7b09-5dce-8c18-45d0a57b30f9', 'payload': {'id': '73ae630f-7b09-5dce-8c18-45d0a57b30f9', 'created_at': 1750269692257, 'updated_at': 1750269692257, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'michael rodriguez'}, 'score': 1.4519352912902832}\n",
|
||||
"{'id': 'fe5ea90b-e586-5492-a7a4-71b918c71a20', 'payload': {'id': 'fe5ea90b-e586-5492-a7a4-71b918c71a20', 'created_at': 1750269692257, 'updated_at': 1750269692257, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '5+ years of experience'}, 'score': 1.4707216024398804}\n",
|
||||
"{'id': '29e771c8-4c3f-52de-9511-6b705878e130', 'payload': {'id': '29e771c8-4c3f-52de-9511-6b705878e130', 'created_at': 1750269692257, 'updated_at': 1750269692257, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'dr. emily carter'}, 'score': 1.4710627794265747}\n",
|
||||
"{'id': 'd82673ff-ff5e-5436-bc8b-2967d4301b4f', 'payload': {'id': 'd82673ff-ff5e-5436-bc8b-2967d4301b4f', 'created_at': 1750269692257, 'updated_at': 1750269692257, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'm.s. in statistics'}, 'score': 1.471780776977539}\n"
]
}
],
"source": [
"async def search(\n",
" vector_engine,\n",
@ -1011,7 +646,26 @@
"results = await search(vector_engine, \"Entity_name\", \"sarah.nguyen@example.com\")\n",
"for result in results:\n",
" print(result)"
]
],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'id': '4d8dda57-2681-5264-a2bd-e2ddfe66a785', 'payload': {'id': '4d8dda57-2681-5264-a2bd-e2ddfe66a785', 'created_at': 1751283613424, 'updated_at': 1751283613424, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'sarah nguyen'}, 'score': 0.5707442760467529}\n",
|
||||
"{'id': '198e2ab8-75e9-5931-97ab-da9a5a8e188c', 'payload': {'id': '198e2ab8-75e9-5931-97ab-da9a5a8e188c', 'created_at': 1751283613424, 'updated_at': 1751283613424, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'san francisco, ca'}, 'score': 1.349550724029541}\n",
|
||||
"{'id': '6b726763-e259-5e91-b505-85284f8ea5ea', 'payload': {'id': '6b726763-e259-5e91-b505-85284f8ea5ea', 'created_at': 1751283613424, 'updated_at': 1751283613424, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'sas'}, 'score': 1.3788877725601196}\n",
|
||||
"{'id': '36a5e3c8-c5f5-5ab5-8d59-ea69d8b36932', 'payload': {'id': '36a5e3c8-c5f5-5ab5-8d59-ea69d8b36932', 'created_at': 1751283613424, 'updated_at': 1751283613424, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'jessica miller'}, 'score': 1.4042645692825317}\n",
|
||||
"{'id': 'e886374e-2a54-5a38-88cc-29f081b0a0ce', 'payload': {'id': 'e886374e-2a54-5a38-88cc-29f081b0a0ce', 'created_at': 1751283613423, 'updated_at': 1751283613423, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'ph.d. in computer science'}, 'score': 1.4246981143951416}\n",
|
||||
"{'id': '4aaf788f-d1b7-56a2-a380-40ce26e51507', 'payload': {'id': '4aaf788f-d1b7-56a2-a380-40ce26e51507', 'created_at': 1751283613424, 'updated_at': 1751283613424, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'senior data scientist (machine learning)'}, 'score': 1.4415473937988281}\n",
|
||||
"{'id': '73ae630f-7b09-5dce-8c18-45d0a57b30f9', 'payload': {'id': '73ae630f-7b09-5dce-8c18-45d0a57b30f9', 'created_at': 1751283613424, 'updated_at': 1751283613424, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'michael rodriguez'}, 'score': 1.4519097805023193}\n",
|
||||
"{'id': 'df9b4927-8f23-54ca-8ac9-015ac74e348c', 'payload': {'id': 'df9b4927-8f23-54ca-8ac9-015ac74e348c', 'created_at': 1751283613424, 'updated_at': 1751283613424, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'sales manager'}, 'score': 1.453505516052246}\n",
|
||||
"{'id': '9cb28cb3-3efd-52c0-a4dc-891f3f0137c3', 'payload': {'id': '9cb28cb3-3efd-52c0-a4dc-891f3f0137c3', 'created_at': 1751283613424, 'updated_at': 1751283613424, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'java'}, 'score': 1.466707468032837}\n",
|
||||
"{'id': 'fe5ea90b-e586-5492-a7a4-71b918c71a20', 'payload': {'id': 'fe5ea90b-e586-5492-a7a4-71b918c71a20', 'created_at': 1751283613424, 'updated_at': 1751283613424, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '5+ years of experience'}, 'score': 1.4708971977233887}\n"
]
}
],
"execution_count": 17
},
{
"cell_type": "markdown",
@ -1031,41 +685,13 @@
},
{
"cell_type": "code",
"execution_count": 17,
"id": "21a3e9a6",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\u001b[92m20:01:34 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:34 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\\Extracted summaries are:\n",
"\n",
"{'id': '63284a74-214f-530b-9ec2-0197eeb71e6a', 'created_at': 1750269691541, 'updated_at': 1750269691541, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Experienced Graphic Designer with 8+ years in visual design and branding, adept in Adobe Creative Suite, dedicated to crafting impactful visuals.'}\n",
|
||||
"\n",
|
||||
"{'id': '32be736f-7529-5ffd-8a28-a3c4b491def3', 'created_at': 1750269691541, 'updated_at': 1750269691541, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Senior Data Scientist (Machine Learning) position at TechNova Solutions in San Francisco, CA.'}\n",
|
||||
"\n",
|
||||
"{'id': 'f1022c1c-f20a-5406-9d00-2a527c01cea0', 'created_at': 1750269691541, 'updated_at': 1750269691541, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Experienced Data Scientist with over 8 years specializing in machine learning and predictive analytics.'}\n",
|
||||
"\n",
|
||||
"{'id': 'ef80f687-6dd8-5cba-b1a7-4d6dfc163692', 'created_at': 1750269691541, 'updated_at': 1750269691541, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Data Scientist with expertise in machine learning and statistical analysis, adept at managing large datasets and deriving actionable insights for businesses.'}\n",
|
||||
"\n",
|
||||
"{'id': '0857c07d-c806-5e6e-ac08-d512115ca80d', 'created_at': 1750269691541, 'updated_at': 1750269691541, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Experienced Data Scientist with a focus on machine learning, possessing 6 years in the field. Committed to utilizing data for enhancing business solutions and product effectiveness.'}\n",
|
||||
"\n",
|
||||
"{'id': '5aff0f39-c4df-52df-a1d6-4bf005c6672e', 'created_at': 1750269691541, 'updated_at': 1750269691541, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Experienced Sales Manager with a proven history of driving revenue growth and developing effective teams. Strong leadership and communication abilities.'}\n",
|
||||
"\n"
]
"metadata": {
"ExecuteTime": {
"end_time": "2025-06-30T11:40:42.923695Z",
"start_time": "2025-06-30T11:40:42.104461Z"
}
],
},
"source": [
"from cognee.api.v1.search import SearchType\n",
"\n",
@ -1076,7 +702,31 @@
"print(\"\\n\\Extracted summaries are:\\n\")\n",
"for result in search_results:\n",
" print(f\"{result}\\n\")"
]
],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\\Extracted summaries are:\n",
"\n",
"{'id': 'a2f998b2-4360-5c2a-96a0-f1ac003dca23', 'created_at': 1751283611382, 'updated_at': 1751283611382, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Michael Rodriguez is an accomplished Data Scientist with expertise in machine learning and statistical analysis, proficient in managing extensive datasets and converting data into strategic business insights.'}\n",
|
||||
"\n",
|
||||
"{'id': '131a1dda-ef11-5b2d-8bca-b228bf375fa1', 'created_at': 1751283611382, 'updated_at': 1751283611382, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Senior Data Scientist with a focus on Machine Learning sought at TechNova Solutions in San Francisco, CA.'}\n",
|
||||
"\n",
|
||||
"{'id': '9f248159-ad85-5591-93df-c10b6cbbe8b3', 'created_at': 1751283611382, 'updated_at': 1751283611382, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Accomplished Data Scientist with 8+ years of expertise in machine learning and predictive analytics.'}\n",
|
||||
"\n",
|
||||
"{'id': 'cd475433-32ac-50b2-8692-3c5fc9b3f8d6', 'created_at': 1751283611382, 'updated_at': 1751283611382, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Experienced Graphic Designer specializing in visual design and branding with over 8 years in the field.'}\n",
|
||||
"\n",
|
||||
"{'id': '80af436f-9bff-5259-b4ab-5d64ecefe83c', 'created_at': 1751283611382, 'updated_at': 1751283611382, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Data Scientist with 6 years of expertise in machine learning, focused on utilizing data to enhance business solutions and optimize product performance.'}\n",
|
||||
"\n",
|
||||
"{'id': '59ae1d66-7bfa-5539-b01c-704e86d6b287', 'created_at': 1751283611382, 'updated_at': 1751283611382, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Accomplished Sales Manager with proven success in enhancing sales performance and nurturing effective teams. Strong leadership and communication capabilities.'}\n",
|
||||
"\n"
]
}
],
"execution_count": 18
},
{
"cell_type": "markdown",
@ -1088,18 +738,20 @@
},
{
"cell_type": "code",
"execution_count": 18,
"id": "c7a8abff",
"metadata": {},
"metadata": {
"ExecuteTime": {
"end_time": "2025-06-30T11:40:46.410055Z",
"start_time": "2025-06-30T11:40:46.103960Z"
}
},
"source": [
"search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text=node_name)\n",
"print(\"\\n\\nExtracted chunks are:\\n\")\n",
"for result in search_results:\n",
" print(f\"{result}\\n\")"
],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\u001b[92m20:01:34 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m"
]
},
{
"name": "stdout",
"output_type": "stream",
@ -1108,27 +760,22 @@
"\n",
"Extracted chunks are:\n",
"\n",
"{'id': '23da506e-af9a-5ba1-96c9-4c58d5ef9f4a', 'created_at': 1750269691781, 'updated_at': 1750269691781, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '\\nCV 3: Relevant\\nName: Sarah Nguyen\\nContact Information:\\n\\nEmail: sarah.nguyen@example.com\\nPhone: (555) 345-6789\\nSummary:\\n\\nData Scientist specializing in machine learning with 6 years of experience. Passionate about leveraging data to drive business solutions and improve product performance.\\n\\nEducation:\\n\\nM.S. in Statistics, University of Washington (2014)\\nB.S. in Applied Mathematics, University of Texas at Austin (2012)\\nExperience:\\n\\nData Scientist, QuantumTech (2016 – Present)\\nDesigned and implemented machine learning algorithms for financial forecasting.\\nImproved model efficiency by 20% through algorithm optimization.\\nJunior Data Scientist, DataCore Solutions (2014 – 2016)\\nAssisted in developing predictive models for supply chain optimization.\\nConducted data cleaning and preprocessing on large datasets.\\nSkills:\\n\\nProgramming Languages: Python, R\\nMachine Learning Frameworks: PyTorch, Scikit-Learn\\nStatistical Analysis: SAS, SPSS\\nCloud Platforms: AWS, Azure\\n'}\n",
|
||||
"{'id': 'f02c60d8-01f3-5099-8d63-3b669553d3b5', 'created_at': 1751283612338, 'updated_at': 1751283612338, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '\\nCV 3: Relevant\\nName: Sarah Nguyen\\nContact Information:\\n\\nEmail: sarah.nguyen@example.com\\nPhone: (555) 345-6789\\nSummary:\\n\\nData Scientist specializing in machine learning with 6 years of experience. Passionate about leveraging data to drive business solutions and improve product performance.\\n\\nEducation:\\n\\nM.S. in Statistics, University of Washington (2014)\\nB.S. in Applied Mathematics, University of Texas at Austin (2012)\\nExperience:\\n\\nData Scientist, QuantumTech (2016 – Present)\\nDesigned and implemented machine learning algorithms for financial forecasting.\\nImproved model efficiency by 20% through algorithm optimization.\\nJunior Data Scientist, DataCore Solutions (2014 – 2016)\\nAssisted in developing predictive models for supply chain optimization.\\nConducted data cleaning and preprocessing on large datasets.\\nSkills:\\n\\nProgramming Languages: Python, R\\nMachine Learning Frameworks: PyTorch, Scikit-Learn\\nStatistical Analysis: SAS, SPSS\\nCloud Platforms: AWS, Azure\\n'}\n",
|
||||
"\n",
|
||||
"{'id': '34b218a9-d931-5d67-88fe-21b6723d3c5c', 'created_at': 1750269691781, 'updated_at': 1750269691781, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': \"\\nCV 5: Not Relevant\\nName: Jessica Miller\\nContact Information:\\n\\nEmail: jessica.miller@example.com\\nPhone: (555) 567-8901\\nSummary:\\n\\nExperienced Sales Manager with a strong track record in driving sales growth and building high-performing teams. Excellent communication and leadership skills.\\n\\nEducation:\\n\\nB.A. in Business Administration, University of Southern California (2010)\\nExperience:\\n\\nSales Manager, Global Enterprises (2015 – Present)\\nManaged a sales team of 15 members, achieving a 20% increase in annual revenue.\\nDeveloped sales strategies that expanded customer base by 25%.\\nSales Representative, Market Leaders Inc. (2010 – 2015)\\nConsistently exceeded sales targets and received the 'Top Salesperson' award in 2013.\\nSkills:\\n\\nSales Strategy and Planning\\nTeam Leadership and Development\\nCRM Software: Salesforce, Zoho\\nNegotiation and Relationship Building\\n\"}\n",
|
||||
"{'id': '7f06f2e6-6294-545a-8253-502528ebe231', 'created_at': 1751283612338, 'updated_at': 1751283612338, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': \"\\nCV 5: Not Relevant\\nName: Jessica Miller\\nContact Information:\\n\\nEmail: jessica.miller@example.com\\nPhone: (555) 567-8901\\nSummary:\\n\\nExperienced Sales Manager with a strong track record in driving sales growth and building high-performing teams. Excellent communication and leadership skills.\\n\\nEducation:\\n\\nB.A. in Business Administration, University of Southern California (2010)\\nExperience:\\n\\nSales Manager, Global Enterprises (2015 – Present)\\nManaged a sales team of 15 members, achieving a 20% increase in annual revenue.\\nDeveloped sales strategies that expanded customer base by 25%.\\nSales Representative, Market Leaders Inc. (2010 – 2015)\\nConsistently exceeded sales targets and received the 'Top Salesperson' award in 2013.\\nSkills:\\n\\nSales Strategy and Planning\\nTeam Leadership and Development\\nCRM Software: Salesforce, Zoho\\nNegotiation and Relationship Building\\n\"}\n",
|
||||
"\n",
|
||||
"{'id': '8fb6cafa-0ba1-57d1-b53f-5e590f914bfa', 'created_at': 1750269691781, 'updated_at': 1750269691781, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '\\nCV 4: Not Relevant\\nName: David Thompson\\nContact Information:\\n\\nEmail: david.thompson@example.com\\nPhone: (555) 456-7890\\nSummary:\\n\\nCreative Graphic Designer with over 8 years of experience in visual design and branding. Proficient in Adobe Creative Suite and passionate about creating compelling visuals.\\n\\nEducation:\\n\\nB.F.A. in Graphic Design, Rhode Island School of Design (2012)\\nExperience:\\n\\nSenior Graphic Designer, CreativeWorks Agency (2015 – Present)\\nLed design projects for clients in various industries.\\nCreated branding materials that increased client engagement by 30%.\\nGraphic Designer, Visual Innovations (2012 – 2015)\\nDesigned marketing collateral, including brochures, logos, and websites.\\nCollaborated with the marketing team to develop cohesive brand strategies.\\nSkills:\\n\\nDesign Software: Adobe Photoshop, Illustrator, InDesign\\nWeb Design: HTML, CSS\\nSpecialties: Branding and Identity, Typography\\n'}\n",
|
||||
"{'id': 'd5e106ab-428d-586c-a25c-874e55928e72', 'created_at': 1751283612338, 'updated_at': 1751283612338, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '\\nCV 4: Not Relevant\\nName: David Thompson\\nContact Information:\\n\\nEmail: david.thompson@example.com\\nPhone: (555) 456-7890\\nSummary:\\n\\nCreative Graphic Designer with over 8 years of experience in visual design and branding. Proficient in Adobe Creative Suite and passionate about creating compelling visuals.\\n\\nEducation:\\n\\nB.F.A. in Graphic Design, Rhode Island School of Design (2012)\\nExperience:\\n\\nSenior Graphic Designer, CreativeWorks Agency (2015 – Present)\\nLed design projects for clients in various industries.\\nCreated branding materials that increased client engagement by 30%.\\nGraphic Designer, Visual Innovations (2012 – 2015)\\nDesigned marketing collateral, including brochures, logos, and websites.\\nCollaborated with the marketing team to develop cohesive brand strategies.\\nSkills:\\n\\nDesign Software: Adobe Photoshop, Illustrator, InDesign\\nWeb Design: HTML, CSS\\nSpecialties: Branding and Identity, Typography\\n'}\n",
|
||||
"\n",
|
||||
"{'id': '16510b03-cc2b-5779-9050-4dc8f4d5df09', 'created_at': 1750269691781, 'updated_at': 1750269691781, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '\\nCV 1: Relevant\\nName: Dr. Emily Carter\\nContact Information:\\n\\nEmail: emily.carter@example.com\\nPhone: (555) 123-4567\\nSummary:\\n\\nSenior Data Scientist with over 8 years of experience in machine learning and predictive analytics. Expertise in developing advanced algorithms and deploying scalable models in production environments.\\n\\nEducation:\\n\\nPh.D. in Computer Science, Stanford University (2014)\\nB.S. in Mathematics, University of California, Berkeley (2010)\\nExperience:\\n\\nSenior Data Scientist, InnovateAI Labs (2016 – Present)\\nLed a team in developing machine learning models for natural language processing applications.\\nImplemented deep learning algorithms that improved prediction accuracy by 25%.\\nCollaborated with cross-functional teams to integrate models into cloud-based platforms.\\nData Scientist, DataWave Analytics (2014 – 2016)\\nDeveloped predictive models for customer segmentation and churn analysis.\\nAnalyzed large datasets using Hadoop and Spark frameworks.\\nSkills:\\n\\nProgramming Languages: Python, R, SQL\\nMachine Learning: TensorFlow, Keras, Scikit-Learn\\nBig Data Technologies: Hadoop, Spark\\nData Visualization: Tableau, Matplotlib\\n'}\n",
|
||||
"{'id': 'a3a454c9-ba0f-5399-bf30-71f26329f198', 'created_at': 1751283612338, 'updated_at': 1751283612338, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '\\nCV 1: Relevant\\nName: Dr. Emily Carter\\nContact Information:\\n\\nEmail: emily.carter@example.com\\nPhone: (555) 123-4567\\nSummary:\\n\\nSenior Data Scientist with over 8 years of experience in machine learning and predictive analytics. Expertise in developing advanced algorithms and deploying scalable models in production environments.\\n\\nEducation:\\n\\nPh.D. in Computer Science, Stanford University (2014)\\nB.S. in Mathematics, University of California, Berkeley (2010)\\nExperience:\\n\\nSenior Data Scientist, InnovateAI Labs (2016 – Present)\\nLed a team in developing machine learning models for natural language processing applications.\\nImplemented deep learning algorithms that improved prediction accuracy by 25%.\\nCollaborated with cross-functional teams to integrate models into cloud-based platforms.\\nData Scientist, DataWave Analytics (2014 – 2016)\\nDeveloped predictive models for customer segmentation and churn analysis.\\nAnalyzed large datasets using Hadoop and Spark frameworks.\\nSkills:\\n\\nProgramming Languages: Python, R, SQL\\nMachine Learning: TensorFlow, Keras, Scikit-Learn\\nBig Data Technologies: Hadoop, Spark\\nData Visualization: Tableau, Matplotlib\\n'}\n",
|
||||
"\n",
|
||||
"{'id': 'ceb4ea98-3d56-52d2-8da8-92c6cf814255', 'created_at': 1750269691781, 'updated_at': 1750269691781, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '\\nCV 2: Relevant\\nName: Michael Rodriguez\\nContact Information:\\n\\nEmail: michael.rodriguez@example.com\\nPhone: (555) 234-5678\\nSummary:\\n\\nData Scientist with a strong background in machine learning and statistical modeling. Skilled in handling large datasets and translating data into actionable business insights.\\n\\nEducation:\\n\\nM.S. in Data Science, Carnegie Mellon University (2013)\\nB.S. in Computer Science, University of Michigan (2011)\\nExperience:\\n\\nSenior Data Scientist, Alpha Analytics (2017 – Present)\\nDeveloped machine learning models to optimize marketing strategies.\\nReduced customer acquisition cost by 15% through predictive modeling.\\nData Scientist, TechInsights (2013 – 2017)\\nAnalyzed user behavior data to improve product features.\\nImplemented A/B testing frameworks to evaluate product changes.\\nSkills:\\n\\nProgramming Languages: Python, Java, SQL\\nMachine Learning: Scikit-Learn, XGBoost\\nData Visualization: Seaborn, Plotly\\nDatabases: MySQL, MongoDB\\n'}\n",
|
||||
"{'id': '348d042d-e310-5ccf-9ed8-f6ea653868af', 'created_at': 1751283612338, 'updated_at': 1751283612338, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': '\\nCV 2: Relevant\\nName: Michael Rodriguez\\nContact Information:\\n\\nEmail: michael.rodriguez@example.com\\nPhone: (555) 234-5678\\nSummary:\\n\\nData Scientist with a strong background in machine learning and statistical modeling. Skilled in handling large datasets and translating data into actionable business insights.\\n\\nEducation:\\n\\nM.S. in Data Science, Carnegie Mellon University (2013)\\nB.S. in Computer Science, University of Michigan (2011)\\nExperience:\\n\\nSenior Data Scientist, Alpha Analytics (2017 – Present)\\nDeveloped machine learning models to optimize marketing strategies.\\nReduced customer acquisition cost by 15% through predictive modeling.\\nData Scientist, TechInsights (2013 – 2017)\\nAnalyzed user behavior data to improve product features.\\nImplemented A/B testing frameworks to evaluate product changes.\\nSkills:\\n\\nProgramming Languages: Python, Java, SQL\\nMachine Learning: Scikit-Learn, XGBoost\\nData Visualization: Seaborn, Plotly\\nDatabases: MySQL, MongoDB\\n'}\n",
|
||||
"\n",
|
||||
"{'id': 'd1c4a9fc-ea1b-55f5-91ba-7105b0918986', 'created_at': 1750269691781, 'updated_at': 1750269691781, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Senior Data Scientist (Machine Learning)\\n\\nCompany: TechNova Solutions\\nLocation: San Francisco, CA\\n\\nJob Description:\\n\\nTechNova Solutions is seeking a Senior Data Scientist specializing in Machine Learning to join our dynamic analytics team. The ideal candidate will have a strong background in developing and deploying machine learning models, working with large datasets, and translating complex data into actionable insights.\\n\\nResponsibilities:\\n\\nDevelop and implement advanced machine learning algorithms and models.\\nAnalyze large, complex datasets to extract meaningful patterns and insights.\\nCollaborate with cross-functional teams to integrate predictive models into products.\\nStay updated with the latest advancements in machine learning and data science.\\nMentor junior data scientists and provide technical guidance.\\nQualifications:\\n\\nMaster’s or Ph.D. in Data Science, Computer Science, Statistics, or a related field.\\n5+ years of experience in data science and machine learning.\\nProficient in Python, R, and SQL.\\nExperience with deep learning frameworks (e.g., TensorFlow, PyTorch).\\nStrong problem-solving skills and attention to detail.\\nCandidate CVs\\n'}\n",
|
||||
"{'id': '2fe2215d-8c72-5b67-a8a3-29a7eee87847', 'created_at': 1751283612338, 'updated_at': 1751283612338, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Senior Data Scientist (Machine Learning)\\n\\nCompany: TechNova Solutions\\nLocation: San Francisco, CA\\n\\nJob Description:\\n\\nTechNova Solutions is seeking a Senior Data Scientist specializing in Machine Learning to join our dynamic analytics team. The ideal candidate will have a strong background in developing and deploying machine learning models, working with large datasets, and translating complex data into actionable insights.\\n\\nResponsibilities:\\n\\nDevelop and implement advanced machine learning algorithms and models.\\nAnalyze large, complex datasets to extract meaningful patterns and insights.\\nCollaborate with cross-functional teams to integrate predictive models into products.\\nStay updated with the latest advancements in machine learning and data science.\\nMentor junior data scientists and provide technical guidance.\\nQualifications:\\n\\nMaster’s or Ph.D. in Data Science, Computer Science, Statistics, or a related field.\\n5+ years of experience in data science and machine learning.\\nProficient in Python, R, and SQL.\\nExperience with deep learning frameworks (e.g., TensorFlow, PyTorch).\\nStrong problem-solving skills and attention to detail.\\nCandidate CVs\\n'}\n",
"\n"
]
}
],
"source": [
"search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text=node_name)\n",
"print(\"\\n\\nExtracted chunks are:\\n\")\n",
"for result in search_results:\n",
" print(f\"{result}\\n\")"
]
"execution_count": 19
},
{
"cell_type": "markdown",
@ -1148,9 +795,9 @@
"name": "stderr",
"output_type": "stream",
"text": [
"\u001b[92m20:01:34 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m\u001b[92m20:01:34 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: azure/text-embedding-3-large\u001b[0m"
"\u001B[92m20:01:34 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
"\u001B[1mselected model name for cost calculation: azure/text-embedding-3-large\u001B[0m\u001B[92m20:01:34 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: azure/text-embedding-3-large\n",
"\u001B[1mselected model name for cost calculation: azure/text-embedding-3-large\u001B[0m"
]
},
{
143
notebooks/cognee_multimedia_demo.ipynb
vendored
@ -21,10 +21,13 @@
]
},
{
"metadata": {},
"metadata": {
"ExecuteTime": {
"end_time": "2025-06-30T11:54:44.613431Z",
"start_time": "2025-06-30T11:54:44.606687Z"
}
},
"cell_type": "code",
"outputs": [],
"execution_count": null,
"source": [
"import os\n",
"import pathlib\n",
@ -41,7 +44,9 @@
" \"../\",\n",
" \"examples/data/multimedia/example.png\",\n",
")"
]
],
"outputs": [],
"execution_count": 1
},
{
"cell_type": "markdown",
@ -52,24 +57,20 @@
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [],
"metadata": {
"ExecuteTime": {
"end_time": "2025-06-30T11:54:46.739157Z",
"start_time": "2025-06-30T11:54:46.734808Z"
}
},
"source": [
"import os\n",
"\n",
"# Setting environment variables\n",
"if \"GRAPHISTRY_USERNAME\" not in os.environ:\n",
" os.environ[\"GRAPHISTRY_USERNAME\"] = \"\"\n",
"\n",
"if \"GRAPHISTRY_PASSWORD\" not in os.environ:\n",
" os.environ[\"GRAPHISTRY_PASSWORD\"] = \"\"\n",
"\n",
"if \"LLM_API_KEY\" not in os.environ:\n",
" os.environ[\"LLM_API_KEY\"] = \"\"\n",
"\n",
"# \"neo4j\" or \"networkx\"\n",
"os.environ[\"GRAPH_DATABASE_PROVIDER\"] = \"networkx\"\n",
"os.environ[\"GRAPH_DATABASE_PROVIDER\"] = \"kuzu\"\n",
"# Not needed if using networkx\n",
"# os.environ[\"GRAPH_DATABASE_URL\"]=\"\"\n",
"# os.environ[\"GRAPH_DATABASE_USERNAME\"]=\"\"\n",
@ -92,7 +93,9 @@
"# os.environ[\"DB_PORT\"]=\"5432\"\n",
"# os.environ[\"DB_USERNAME\"]=\"cognee\"\n",
"# os.environ[\"DB_PASSWORD\"]=\"cognee\""
]
],
"outputs": [],
"execution_count": 2
},
{
"cell_type": "markdown",
@ -103,9 +106,12 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"metadata": {
"ExecuteTime": {
"end_time": "2025-06-30T11:55:01.959946Z",
"start_time": "2025-06-30T11:54:50.569659Z"
}
},
"source": [
"import cognee\n",
"\n",
@ -118,7 +124,75 @@
"\n",
"# Create knowledge graph with cognee\n",
"await cognee.cognify()"
]
],
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"/Users/vasilije/cognee/.venv/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
||||
" from .autonotebook import tqdm as notebook_tqdm\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ename": "OperationalError",
|
||||
"evalue": "(sqlite3.OperationalError) database is locked\n[SQL: \nCREATE TABLE data (\n\tid UUID NOT NULL, \n\tname VARCHAR, \n\textension VARCHAR, \n\tmime_type VARCHAR, \n\traw_data_location VARCHAR, \n\towner_id UUID, \n\tcontent_hash VARCHAR, \n\texternal_metadata JSON, \n\tnode_set JSON, \n\ttoken_count INTEGER, \n\tcreated_at DATETIME, \n\tupdated_at DATETIME, \n\tPRIMARY KEY (id)\n)\n\n]\n(Background on this error at: https://sqlalche.me/e/20/e3q8)",
|
||||
"output_type": "error",
|
||||
"traceback": [
|
||||
"\u001B[31m---------------------------------------------------------------------------\u001B[39m",
|
||||
"\u001B[31mOperationalError\u001B[39m Traceback (most recent call last)",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1964\u001B[39m, in \u001B[36mConnection._exec_single_context\u001B[39m\u001B[34m(self, dialect, context, statement, parameters)\u001B[39m\n\u001B[32m 1963\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m evt_handled:\n\u001B[32m-> \u001B[39m\u001B[32m1964\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m.\u001B[49m\u001B[43mdo_execute\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1965\u001B[39m \u001B[43m \u001B[49m\u001B[43mcursor\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstr_statement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43meffective_parameters\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcontext\u001B[49m\n\u001B[32m 1966\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1968\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m._has_events \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;28mself\u001B[39m.engine._has_events:\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/default.py:942\u001B[39m, in \u001B[36mDefaultDialect.do_execute\u001B[39m\u001B[34m(self, cursor, statement, parameters, context)\u001B[39m\n\u001B[32m 941\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34mdo_execute\u001B[39m(\u001B[38;5;28mself\u001B[39m, cursor, statement, parameters, context=\u001B[38;5;28;01mNone\u001B[39;00m):\n\u001B[32m--> \u001B[39m\u001B[32m942\u001B[39m \u001B[43mcursor\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[43mstatement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:172\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_cursor.execute\u001B[39m\u001B[34m(self, operation, parameters)\u001B[39m\n\u001B[32m 171\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mException\u001B[39;00m \u001B[38;5;28;01mas\u001B[39;00m error:\n\u001B[32m--> \u001B[39m\u001B[32m172\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_adapt_connection\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_handle_exception\u001B[49m\u001B[43m(\u001B[49m\u001B[43merror\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:323\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_connection._handle_exception\u001B[39m\u001B[34m(self, error)\u001B[39m\n\u001B[32m 322\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m323\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m error\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:154\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_cursor.execute\u001B[39m\u001B[34m(self, operation, parameters)\u001B[39m\n\u001B[32m 153\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m154\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mawait_\u001B[49m\u001B[43m(\u001B[49m\u001B[43m_cursor\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[43moperation\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\u001B[43m)\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 156\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m _cursor.description:\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:132\u001B[39m, in \u001B[36mawait_only\u001B[39m\u001B[34m(awaitable)\u001B[39m\n\u001B[32m 128\u001B[39m \u001B[38;5;66;03m# returns the control to the driver greenlet passing it\u001B[39;00m\n\u001B[32m 129\u001B[39m \u001B[38;5;66;03m# a coroutine to run. Once the awaitable is done, the driver greenlet\u001B[39;00m\n\u001B[32m 130\u001B[39m \u001B[38;5;66;03m# switches back to this greenlet with the result of awaitable that is\u001B[39;00m\n\u001B[32m 131\u001B[39m \u001B[38;5;66;03m# then returned to the caller (or raised as error)\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m132\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mcurrent\u001B[49m\u001B[43m.\u001B[49m\u001B[43mparent\u001B[49m\u001B[43m.\u001B[49m\u001B[43mswitch\u001B[49m\u001B[43m(\u001B[49m\u001B[43mawaitable\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:196\u001B[39m, in \u001B[36mgreenlet_spawn\u001B[39m\u001B[34m(fn, _require_await, *args, **kwargs)\u001B[39m\n\u001B[32m 193\u001B[39m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[32m 194\u001B[39m \u001B[38;5;66;03m# wait for a coroutine from await_only and then return its\u001B[39;00m\n\u001B[32m 195\u001B[39m \u001B[38;5;66;03m# result back to it.\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m196\u001B[39m value = \u001B[38;5;28;01mawait\u001B[39;00m result\n\u001B[32m 197\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mBaseException\u001B[39;00m:\n\u001B[32m 198\u001B[39m \u001B[38;5;66;03m# this allows an exception to be raised within\u001B[39;00m\n\u001B[32m 199\u001B[39m \u001B[38;5;66;03m# the moderated greenlet so that it can continue\u001B[39;00m\n\u001B[32m 200\u001B[39m \u001B[38;5;66;03m# its expected flow.\u001B[39;00m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/cursor.py:48\u001B[39m, in \u001B[36mCursor.execute\u001B[39m\u001B[34m(self, sql, parameters)\u001B[39m\n\u001B[32m 47\u001B[39m parameters = []\n\u001B[32m---> \u001B[39m\u001B[32m48\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m._execute(\u001B[38;5;28mself\u001B[39m._cursor.execute, sql, parameters)\n\u001B[32m 49\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/cursor.py:40\u001B[39m, in \u001B[36mCursor._execute\u001B[39m\u001B[34m(self, fn, *args, **kwargs)\u001B[39m\n\u001B[32m 39\u001B[39m \u001B[38;5;250m\u001B[39m\u001B[33;03m\"\"\"Execute the given function on the shared connection's thread.\"\"\"\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m40\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m._conn._execute(fn, *args, **kwargs)\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/core.py:132\u001B[39m, in \u001B[36mConnection._execute\u001B[39m\u001B[34m(self, fn, *args, **kwargs)\u001B[39m\n\u001B[32m 130\u001B[39m \u001B[38;5;28mself\u001B[39m._tx.put_nowait((future, function))\n\u001B[32m--> \u001B[39m\u001B[32m132\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m future\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/core.py:115\u001B[39m, in \u001B[36mConnection.run\u001B[39m\u001B[34m(self)\u001B[39m\n\u001B[32m 114\u001B[39m LOG.debug(\u001B[33m\"\u001B[39m\u001B[33mexecuting \u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m\"\u001B[39m, function)\n\u001B[32m--> \u001B[39m\u001B[32m115\u001B[39m result = \u001B[43mfunction\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 116\u001B[39m LOG.debug(\u001B[33m\"\u001B[39m\u001B[33moperation \u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m completed\u001B[39m\u001B[33m\"\u001B[39m, function)\n",
|
||||
"\u001B[31mOperationalError\u001B[39m: database is locked",
|
||||
"\nThe above exception was the direct cause of the following exception:\n",
|
||||
"\u001B[31mOperationalError\u001B[39m Traceback (most recent call last)",
|
||||
"\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[3]\u001B[39m\u001B[32m, line 8\u001B[39m\n\u001B[32m 5\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m cognee.prune.prune_system(metadata=\u001B[38;5;28;01mTrue\u001B[39;00m)\n\u001B[32m 7\u001B[39m \u001B[38;5;66;03m# Add multimedia files and make them available for cognify\u001B[39;00m\n\u001B[32m----> \u001B[39m\u001B[32m8\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m cognee.add([mp3_file_path, png_file_path])\n\u001B[32m 10\u001B[39m \u001B[38;5;66;03m# Create knowledge graph with cognee\u001B[39;00m\n\u001B[32m 11\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m cognee.cognify()\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/Projects/release_test/cognee/cognee/api/v1/add/add.py:26\u001B[39m, in \u001B[36madd\u001B[39m\u001B[34m(data, dataset_name, user, node_set, vector_db_config, graph_db_config, dataset_id)\u001B[39m\n\u001B[32m 19\u001B[39m tasks = [\n\u001B[32m 20\u001B[39m Task(resolve_data_directories),\n\u001B[32m 21\u001B[39m Task(ingest_data, dataset_name, user, node_set, dataset_id),\n\u001B[32m 22\u001B[39m ]\n\u001B[32m 24\u001B[39m pipeline_run_info = \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m26\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mfor\u001B[39;00m run_info \u001B[38;5;129;01min\u001B[39;00m cognee_pipeline(\n\u001B[32m 27\u001B[39m tasks=tasks,\n\u001B[32m 28\u001B[39m datasets=dataset_id \u001B[38;5;28;01mif\u001B[39;00m dataset_id \u001B[38;5;28;01melse\u001B[39;00m dataset_name,\n\u001B[32m 29\u001B[39m data=data,\n\u001B[32m 30\u001B[39m user=user,\n\u001B[32m 31\u001B[39m pipeline_name=\u001B[33m\"\u001B[39m\u001B[33madd_pipeline\u001B[39m\u001B[33m\"\u001B[39m,\n\u001B[32m 32\u001B[39m vector_db_config=vector_db_config,\n\u001B[32m 33\u001B[39m graph_db_config=graph_db_config,\n\u001B[32m 34\u001B[39m ):\n\u001B[32m 35\u001B[39m pipeline_run_info = run_info\n\u001B[32m 37\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m pipeline_run_info\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/Projects/release_test/cognee/cognee/modules/pipelines/operations/pipeline.py:63\u001B[39m, in \u001B[36mcognee_pipeline\u001B[39m\u001B[34m(tasks, data, datasets, user, pipeline_name, vector_db_config, graph_db_config)\u001B[39m\n\u001B[32m 60\u001B[39m context_graph_db_config.set(graph_db_config)\n\u001B[32m 62\u001B[39m \u001B[38;5;66;03m# Create tables for databases\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m63\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m create_relational_db_and_tables()\n\u001B[32m 64\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m create_pgvector_db_and_tables()\n\u001B[32m 66\u001B[39m \u001B[38;5;66;03m# Initialize first_run attribute if it doesn't exist\u001B[39;00m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/Projects/release_test/cognee/cognee/infrastructure/databases/relational/create_db_and_tables.py:13\u001B[39m, in \u001B[36mcreate_db_and_tables\u001B[39m\u001B[34m()\u001B[39m\n\u001B[32m 5\u001B[39m \u001B[38;5;250m\u001B[39m\u001B[33;03m\"\"\"\u001B[39;00m\n\u001B[32m 6\u001B[39m \u001B[33;03mCreate a database and its tables.\u001B[39;00m\n\u001B[32m 7\u001B[39m \n\u001B[32m 8\u001B[39m \u001B[33;03mThis asynchronous function retrieves the relational engine and calls its method to\u001B[39;00m\n\u001B[32m 9\u001B[39m \u001B[33;03mcreate a database.\u001B[39;00m\n\u001B[32m 10\u001B[39m \u001B[33;03m\"\"\"\u001B[39;00m\n\u001B[32m 11\u001B[39m relational_engine = get_relational_engine()\n\u001B[32m---> \u001B[39m\u001B[32m13\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m relational_engine.create_database()\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/.pyenv/versions/3.11.0/lib/python3.11/contextlib.py:222\u001B[39m, in \u001B[36m_AsyncGeneratorContextManager.__aexit__\u001B[39m\u001B[34m(self, typ, value, traceback)\u001B[39m\n\u001B[32m 220\u001B[39m value = typ()\n\u001B[32m 221\u001B[39m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m222\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m.gen.athrow(typ, value, traceback)\n\u001B[32m 223\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mStopAsyncIteration\u001B[39;00m \u001B[38;5;28;01mas\u001B[39;00m exc:\n\u001B[32m 224\u001B[39m \u001B[38;5;66;03m# Suppress StopIteration *unless* it's the same exception that\u001B[39;00m\n\u001B[32m 225\u001B[39m \u001B[38;5;66;03m# was passed to throw(). This prevents a StopIteration\u001B[39;00m\n\u001B[32m 226\u001B[39m \u001B[38;5;66;03m# raised inside the \"with\" statement from being suppressed.\u001B[39;00m\n\u001B[32m 227\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m exc \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m value\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/ext/asyncio/engine.py:1066\u001B[39m, in \u001B[36mAsyncEngine.begin\u001B[39m\u001B[34m(self)\u001B[39m\n\u001B[32m 1064\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mwith\u001B[39;00m conn:\n\u001B[32m 1065\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mwith\u001B[39;00m conn.begin():\n\u001B[32m-> \u001B[39m\u001B[32m1066\u001B[39m \u001B[38;5;28;01myield\u001B[39;00m conn\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/Projects/release_test/cognee/cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py:445\u001B[39m, in \u001B[36mSQLAlchemyAdapter.create_database\u001B[39m\u001B[34m(self)\u001B[39m\n\u001B[32m 443\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mwith\u001B[39;00m \u001B[38;5;28mself\u001B[39m.engine.begin() \u001B[38;5;28;01mas\u001B[39;00m connection:\n\u001B[32m 444\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mlen\u001B[39m(Base.metadata.tables.keys()) > \u001B[32m0\u001B[39m:\n\u001B[32m--> \u001B[39m\u001B[32m445\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m connection.run_sync(Base.metadata.create_all)\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/ext/asyncio/engine.py:887\u001B[39m, in \u001B[36mAsyncConnection.run_sync\u001B[39m\u001B[34m(self, fn, *arg, **kw)\u001B[39m\n\u001B[32m 819\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34mrun_sync\u001B[39m(\n\u001B[32m 820\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 821\u001B[39m fn: Callable[Concatenate[Connection, _P], _T],\n\u001B[32m 822\u001B[39m *arg: _P.args,\n\u001B[32m 823\u001B[39m **kw: _P.kwargs,\n\u001B[32m 824\u001B[39m ) -> _T:\n\u001B[32m 825\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m'''Invoke the given synchronous (i.e. not async) callable,\u001B[39;00m\n\u001B[32m 826\u001B[39m \u001B[33;03m passing a synchronous-style :class:`_engine.Connection` as the first\u001B[39;00m\n\u001B[32m 827\u001B[39m \u001B[33;03m argument.\u001B[39;00m\n\u001B[32m (...)\u001B[39m\u001B[32m 884\u001B[39m \n\u001B[32m 885\u001B[39m \u001B[33;03m '''\u001B[39;00m \u001B[38;5;66;03m# noqa: E501\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m887\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m greenlet_spawn(\n\u001B[32m 888\u001B[39m fn, \u001B[38;5;28mself\u001B[39m._proxied, *arg, _require_await=\u001B[38;5;28;01mFalse\u001B[39;00m, **kw\n\u001B[32m 889\u001B[39m )\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:201\u001B[39m, in \u001B[36mgreenlet_spawn\u001B[39m\u001B[34m(fn, _require_await, *args, **kwargs)\u001B[39m\n\u001B[32m 196\u001B[39m value = \u001B[38;5;28;01mawait\u001B[39;00m result\n\u001B[32m 197\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mBaseException\u001B[39;00m:\n\u001B[32m 198\u001B[39m \u001B[38;5;66;03m# this allows an exception to be raised within\u001B[39;00m\n\u001B[32m 199\u001B[39m \u001B[38;5;66;03m# the moderated greenlet so that it can continue\u001B[39;00m\n\u001B[32m 200\u001B[39m \u001B[38;5;66;03m# its expected flow.\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m201\u001B[39m result = \u001B[43mcontext\u001B[49m\u001B[43m.\u001B[49m\u001B[43mthrow\u001B[49m\u001B[43m(\u001B[49m\u001B[43m*\u001B[49m\u001B[43msys\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexc_info\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 202\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m 203\u001B[39m result = context.switch(value)\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/schema.py:5907\u001B[39m, in \u001B[36mMetaData.create_all\u001B[39m\u001B[34m(self, bind, tables, checkfirst)\u001B[39m\n\u001B[32m 5883\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34mcreate_all\u001B[39m(\n\u001B[32m 5884\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 5885\u001B[39m bind: _CreateDropBind,\n\u001B[32m 5886\u001B[39m tables: Optional[_typing_Sequence[Table]] = \u001B[38;5;28;01mNone\u001B[39;00m,\n\u001B[32m 5887\u001B[39m checkfirst: \u001B[38;5;28mbool\u001B[39m = \u001B[38;5;28;01mTrue\u001B[39;00m,\n\u001B[32m 5888\u001B[39m ) -> \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 5889\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"Create all tables stored in this metadata.\u001B[39;00m\n\u001B[32m 5890\u001B[39m \n\u001B[32m 5891\u001B[39m \u001B[33;03m Conditional by default, will not attempt to recreate tables already\u001B[39;00m\n\u001B[32m (...)\u001B[39m\u001B[32m 5905\u001B[39m \n\u001B[32m 5906\u001B[39m \u001B[33;03m \"\"\"\u001B[39;00m\n\u001B[32m-> \u001B[39m\u001B[32m5907\u001B[39m \u001B[43mbind\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_run_ddl_visitor\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 5908\u001B[39m \u001B[43m \u001B[49m\u001B[43mddl\u001B[49m\u001B[43m.\u001B[49m\u001B[43mSchemaGenerator\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcheckfirst\u001B[49m\u001B[43m=\u001B[49m\u001B[43mcheckfirst\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mtables\u001B[49m\u001B[43m=\u001B[49m\u001B[43mtables\u001B[49m\n\u001B[32m 5909\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:2456\u001B[39m, in \u001B[36mConnection._run_ddl_visitor\u001B[39m\u001B[34m(self, visitorcallable, element, **kwargs)\u001B[39m\n\u001B[32m 2444\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m_run_ddl_visitor\u001B[39m(\n\u001B[32m 2445\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 2446\u001B[39m visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]],\n\u001B[32m 2447\u001B[39m element: SchemaItem,\n\u001B[32m 2448\u001B[39m **kwargs: Any,\n\u001B[32m 2449\u001B[39m ) -> \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 2450\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"run a DDL visitor.\u001B[39;00m\n\u001B[32m 2451\u001B[39m \n\u001B[32m 2452\u001B[39m \u001B[33;03m This method is only here so that the MockConnection can change the\u001B[39;00m\n\u001B[32m 2453\u001B[39m \u001B[33;03m options given to the visitor so that \"checkfirst\" is skipped.\u001B[39;00m\n\u001B[32m 2454\u001B[39m \n\u001B[32m 2455\u001B[39m \u001B[33;03m \"\"\"\u001B[39;00m\n\u001B[32m-> \u001B[39m\u001B[32m2456\u001B[39m \u001B[43mvisitorcallable\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\u001B[43m.\u001B[49m\u001B[43mtraverse_single\u001B[49m\u001B[43m(\u001B[49m\u001B[43melement\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/visitors.py:664\u001B[39m, in \u001B[36mExternalTraversal.traverse_single\u001B[39m\u001B[34m(self, obj, **kw)\u001B[39m\n\u001B[32m 662\u001B[39m meth = \u001B[38;5;28mgetattr\u001B[39m(v, \u001B[33m\"\u001B[39m\u001B[33mvisit_\u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m\"\u001B[39m % obj.__visit_name__, \u001B[38;5;28;01mNone\u001B[39;00m)\n\u001B[32m 663\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m meth:\n\u001B[32m--> \u001B[39m\u001B[32m664\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mmeth\u001B[49m\u001B[43m(\u001B[49m\u001B[43mobj\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkw\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py:978\u001B[39m, in \u001B[36mSchemaGenerator.visit_metadata\u001B[39m\u001B[34m(self, metadata)\u001B[39m\n\u001B[32m 976\u001B[39m \u001B[38;5;28;01mfor\u001B[39;00m table, fkcs \u001B[38;5;129;01min\u001B[39;00m collection:\n\u001B[32m 977\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m table \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m978\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mtraverse_single\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 979\u001B[39m \u001B[43m \u001B[49m\u001B[43mtable\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 980\u001B[39m \u001B[43m \u001B[49m\u001B[43mcreate_ok\u001B[49m\u001B[43m=\u001B[49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[32m 981\u001B[39m \u001B[43m \u001B[49m\u001B[43minclude_foreign_key_constraints\u001B[49m\u001B[43m=\u001B[49m\u001B[43mfkcs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 982\u001B[39m \u001B[43m \u001B[49m\u001B[43m_is_metadata_operation\u001B[49m\u001B[43m=\u001B[49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[32m 983\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 984\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m 985\u001B[39m \u001B[38;5;28;01mfor\u001B[39;00m fkc \u001B[38;5;129;01min\u001B[39;00m fkcs:\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/visitors.py:664\u001B[39m, in \u001B[36mExternalTraversal.traverse_single\u001B[39m\u001B[34m(self, obj, **kw)\u001B[39m\n\u001B[32m 662\u001B[39m meth = \u001B[38;5;28mgetattr\u001B[39m(v, \u001B[33m\"\u001B[39m\u001B[33mvisit_\u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m\"\u001B[39m % obj.__visit_name__, \u001B[38;5;28;01mNone\u001B[39;00m)\n\u001B[32m 663\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m meth:\n\u001B[32m--> \u001B[39m\u001B[32m664\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mmeth\u001B[49m\u001B[43m(\u001B[49m\u001B[43mobj\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkw\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py:1016\u001B[39m, in \u001B[36mSchemaGenerator.visit_table\u001B[39m\u001B[34m(self, table, create_ok, include_foreign_key_constraints, _is_metadata_operation)\u001B[39m\n\u001B[32m 1007\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28mself\u001B[39m.dialect.supports_alter:\n\u001B[32m 1008\u001B[39m \u001B[38;5;66;03m# e.g., don't omit any foreign key constraints\u001B[39;00m\n\u001B[32m 1009\u001B[39m include_foreign_key_constraints = \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[32m 1011\u001B[39m \u001B[43mCreateTable\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1012\u001B[39m \u001B[43m \u001B[49m\u001B[43mtable\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1013\u001B[39m \u001B[43m \u001B[49m\u001B[43minclude_foreign_key_constraints\u001B[49m\u001B[43m=\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1014\u001B[39m \u001B[43m \u001B[49m\u001B[43minclude_foreign_key_constraints\u001B[49m\n\u001B[32m 1015\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m-> \u001B[39m\u001B[32m1016\u001B[39m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_invoke_with\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mconnection\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1018\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mhasattr\u001B[39m(table, \u001B[33m\"\u001B[39m\u001B[33mindexes\u001B[39m\u001B[33m\"\u001B[39m):\n\u001B[32m 1019\u001B[39m \u001B[38;5;28;01mfor\u001B[39;00m index \u001B[38;5;129;01min\u001B[39;00m table.indexes:\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py:314\u001B[39m, in \u001B[36mExecutableDDLElement._invoke_with\u001B[39m\u001B[34m(self, bind)\u001B[39m\n\u001B[32m 312\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m_invoke_with\u001B[39m(\u001B[38;5;28mself\u001B[39m, bind):\n\u001B[32m 313\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m._should_execute(\u001B[38;5;28mself\u001B[39m.target, bind):\n\u001B[32m--> \u001B[39m\u001B[32m314\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mbind\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1416\u001B[39m, in \u001B[36mConnection.execute\u001B[39m\u001B[34m(self, statement, parameters, execution_options)\u001B[39m\n\u001B[32m 1414\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m exc.ObjectNotExecutableError(statement) \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01merr\u001B[39;00m\n\u001B[32m 1415\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m-> \u001B[39m\u001B[32m1416\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mmeth\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1417\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\n\u001B[32m 1418\u001B[39m \u001B[43m \u001B[49m\u001B[43mdistilled_parameters\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1419\u001B[39m \u001B[43m \u001B[49m\u001B[43mexecution_options\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01mor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mNO_OPTIONS\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1420\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py:180\u001B[39m, in \u001B[36mExecutableDDLElement._execute_on_connection\u001B[39m\u001B[34m(self, connection, distilled_params, execution_options)\u001B[39m\n\u001B[32m 177\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m_execute_on_connection\u001B[39m(\n\u001B[32m 178\u001B[39m \u001B[38;5;28mself\u001B[39m, connection, distilled_params, execution_options\n\u001B[32m 179\u001B[39m ):\n\u001B[32m--> \u001B[39m\u001B[32m180\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mconnection\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_execute_ddl\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 181\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mdistilled_params\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mexecution_options\u001B[49m\n\u001B[32m 182\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1527\u001B[39m, in \u001B[36mConnection._execute_ddl\u001B[39m\u001B[34m(self, ddl, distilled_parameters, execution_options)\u001B[39m\n\u001B[32m 1522\u001B[39m dialect = \u001B[38;5;28mself\u001B[39m.dialect\n\u001B[32m 1524\u001B[39m compiled = ddl.compile(\n\u001B[32m 1525\u001B[39m dialect=dialect, schema_translate_map=schema_translate_map\n\u001B[32m 1526\u001B[39m )\n\u001B[32m-> \u001B[39m\u001B[32m1527\u001B[39m ret = \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_execute_context\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1528\u001B[39m \u001B[43m \u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1529\u001B[39m \u001B[43m \u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecution_ctx_cls\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_init_ddl\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1530\u001B[39m \u001B[43m \u001B[49m\u001B[43mcompiled\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1531\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43;01mNone\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[32m 1532\u001B[39m \u001B[43m \u001B[49m\u001B[43mexec_opts\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1533\u001B[39m \u001B[43m \u001B[49m\u001B[43mcompiled\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1534\u001B[39m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1535\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m._has_events \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;28mself\u001B[39m.engine._has_events:\n\u001B[32m 1536\u001B[39m \u001B[38;5;28mself\u001B[39m.dispatch.after_execute(\n\u001B[32m 1537\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 1538\u001B[39m ddl,\n\u001B[32m (...)\u001B[39m\u001B[32m 1542\u001B[39m ret,\n\u001B[32m 1543\u001B[39m )\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1843\u001B[39m, in \u001B[36mConnection._execute_context\u001B[39m\u001B[34m(self, dialect, constructor, statement, parameters, execution_options, *args, **kw)\u001B[39m\n\u001B[32m 1841\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m._exec_insertmany_context(dialect, context)\n\u001B[32m 1842\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m-> \u001B[39m\u001B[32m1843\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_exec_single_context\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1844\u001B[39m \u001B[43m \u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcontext\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstatement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\n\u001B[32m 1845\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1983\u001B[39m, in \u001B[36mConnection._exec_single_context\u001B[39m\u001B[34m(self, dialect, context, statement, parameters)\u001B[39m\n\u001B[32m 1980\u001B[39m result = context._setup_result_proxy()\n\u001B[32m 1982\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mBaseException\u001B[39;00m \u001B[38;5;28;01mas\u001B[39;00m e:\n\u001B[32m-> \u001B[39m\u001B[32m1983\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_handle_dbapi_exception\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1984\u001B[39m \u001B[43m \u001B[49m\u001B[43me\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstr_statement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43meffective_parameters\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcursor\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcontext\u001B[49m\n\u001B[32m 1985\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1987\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m result\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:2352\u001B[39m, in \u001B[36mConnection._handle_dbapi_exception\u001B[39m\u001B[34m(self, e, statement, parameters, cursor, context, is_sub_exec)\u001B[39m\n\u001B[32m 2350\u001B[39m \u001B[38;5;28;01melif\u001B[39;00m should_wrap:\n\u001B[32m 2351\u001B[39m \u001B[38;5;28;01massert\u001B[39;00m sqlalchemy_exception \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[32m-> \u001B[39m\u001B[32m2352\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m sqlalchemy_exception.with_traceback(exc_info[\u001B[32m2\u001B[39m]) \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01me\u001B[39;00m\n\u001B[32m 2353\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m 2354\u001B[39m \u001B[38;5;28;01massert\u001B[39;00m exc_info[\u001B[32m1\u001B[39m] \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1964\u001B[39m, in \u001B[36mConnection._exec_single_context\u001B[39m\u001B[34m(self, dialect, context, statement, parameters)\u001B[39m\n\u001B[32m 1962\u001B[39m \u001B[38;5;28;01mbreak\u001B[39;00m\n\u001B[32m 1963\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m evt_handled:\n\u001B[32m-> \u001B[39m\u001B[32m1964\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m.\u001B[49m\u001B[43mdo_execute\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1965\u001B[39m \u001B[43m \u001B[49m\u001B[43mcursor\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstr_statement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43meffective_parameters\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcontext\u001B[49m\n\u001B[32m 1966\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1968\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m._has_events \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;28mself\u001B[39m.engine._has_events:\n\u001B[32m 1969\u001B[39m \u001B[38;5;28mself\u001B[39m.dispatch.after_cursor_execute(\n\u001B[32m 1970\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 1971\u001B[39m cursor,\n\u001B[32m (...)\u001B[39m\u001B[32m 1975\u001B[39m context.executemany,\n\u001B[32m 1976\u001B[39m )\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/default.py:942\u001B[39m, in \u001B[36mDefaultDialect.do_execute\u001B[39m\u001B[34m(self, cursor, statement, parameters, context)\u001B[39m\n\u001B[32m 941\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34mdo_execute\u001B[39m(\u001B[38;5;28mself\u001B[39m, cursor, statement, parameters, context=\u001B[38;5;28;01mNone\u001B[39;00m):\n\u001B[32m--> \u001B[39m\u001B[32m942\u001B[39m \u001B[43mcursor\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[43mstatement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:172\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_cursor.execute\u001B[39m\u001B[34m(self, operation, parameters)\u001B[39m\n\u001B[32m 170\u001B[39m \u001B[38;5;28mself\u001B[39m._cursor = _cursor\n\u001B[32m 171\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mException\u001B[39;00m \u001B[38;5;28;01mas\u001B[39;00m error:\n\u001B[32m--> \u001B[39m\u001B[32m172\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_adapt_connection\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_handle_exception\u001B[49m\u001B[43m(\u001B[49m\u001B[43merror\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:323\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_connection._handle_exception\u001B[39m\u001B[34m(self, error)\u001B[39m\n\u001B[32m 319\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;28mself\u001B[39m.dbapi.sqlite.OperationalError(\n\u001B[32m 320\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mno active connection\u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m 321\u001B[39m ) \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01merror\u001B[39;00m\n\u001B[32m 322\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m323\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m error\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:154\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_cursor.execute\u001B[39m\u001B[34m(self, operation, parameters)\u001B[39m\n\u001B[32m 152\u001B[39m \u001B[38;5;28mself\u001B[39m.await_(_cursor.execute(operation))\n\u001B[32m 153\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m154\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mawait_\u001B[49m\u001B[43m(\u001B[49m\u001B[43m_cursor\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[43moperation\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\u001B[43m)\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 156\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m _cursor.description:\n\u001B[32m 157\u001B[39m \u001B[38;5;28mself\u001B[39m.description = _cursor.description\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:132\u001B[39m, in \u001B[36mawait_only\u001B[39m\u001B[34m(awaitable)\u001B[39m\n\u001B[32m 123\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m exc.MissingGreenlet(\n\u001B[32m 124\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mgreenlet_spawn has not been called; can\u001B[39m\u001B[33m'\u001B[39m\u001B[33mt call await_only() \u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m 125\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mhere. Was IO attempted in an unexpected place?\u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m 126\u001B[39m )\n\u001B[32m 128\u001B[39m \u001B[38;5;66;03m# returns the control to the driver greenlet passing it\u001B[39;00m\n\u001B[32m 129\u001B[39m \u001B[38;5;66;03m# a coroutine to run. Once the awaitable is done, the driver greenlet\u001B[39;00m\n\u001B[32m 130\u001B[39m \u001B[38;5;66;03m# switches back to this greenlet with the result of awaitable that is\u001B[39;00m\n\u001B[32m 131\u001B[39m \u001B[38;5;66;03m# then returned to the caller (or raised as error)\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m132\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mcurrent\u001B[49m\u001B[43m.\u001B[49m\u001B[43mparent\u001B[49m\u001B[43m.\u001B[49m\u001B[43mswitch\u001B[49m\u001B[43m(\u001B[49m\u001B[43mawaitable\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:196\u001B[39m, in \u001B[36mgreenlet_spawn\u001B[39m\u001B[34m(fn, _require_await, *args, **kwargs)\u001B[39m\n\u001B[32m 192\u001B[39m switch_occurred = \u001B[38;5;28;01mTrue\u001B[39;00m\n\u001B[32m 193\u001B[39m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[32m 194\u001B[39m \u001B[38;5;66;03m# wait for a coroutine from await_only and then return its\u001B[39;00m\n\u001B[32m 195\u001B[39m \u001B[38;5;66;03m# result back to it.\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m196\u001B[39m value = \u001B[38;5;28;01mawait\u001B[39;00m result\n\u001B[32m 197\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mBaseException\u001B[39;00m:\n\u001B[32m 198\u001B[39m \u001B[38;5;66;03m# this allows an exception to be raised within\u001B[39;00m\n\u001B[32m 199\u001B[39m \u001B[38;5;66;03m# the moderated greenlet so that it can continue\u001B[39;00m\n\u001B[32m 200\u001B[39m \u001B[38;5;66;03m# its expected flow.\u001B[39;00m\n\u001B[32m 201\u001B[39m result = context.throw(*sys.exc_info())\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/cursor.py:48\u001B[39m, in \u001B[36mCursor.execute\u001B[39m\u001B[34m(self, sql, parameters)\u001B[39m\n\u001B[32m 46\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m parameters \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 47\u001B[39m parameters = []\n\u001B[32m---> \u001B[39m\u001B[32m48\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m._execute(\u001B[38;5;28mself\u001B[39m._cursor.execute, sql, parameters)\n\u001B[32m 49\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/cursor.py:40\u001B[39m, in \u001B[36mCursor._execute\u001B[39m\u001B[34m(self, fn, *args, **kwargs)\u001B[39m\n\u001B[32m 38\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m_execute\u001B[39m(\u001B[38;5;28mself\u001B[39m, fn, *args, **kwargs):\n\u001B[32m 39\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"Execute the given function on the shared connection's thread.\"\"\"\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m40\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m._conn._execute(fn, *args, **kwargs)\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/core.py:132\u001B[39m, in \u001B[36mConnection._execute\u001B[39m\u001B[34m(self, fn, *args, **kwargs)\u001B[39m\n\u001B[32m 128\u001B[39m future = asyncio.get_event_loop().create_future()\n\u001B[32m 130\u001B[39m \u001B[38;5;28mself\u001B[39m._tx.put_nowait((future, function))\n\u001B[32m--> \u001B[39m\u001B[32m132\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m future\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/core.py:115\u001B[39m, in \u001B[36mConnection.run\u001B[39m\u001B[34m(self)\u001B[39m\n\u001B[32m 113\u001B[39m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[32m 114\u001B[39m LOG.debug(\u001B[33m\"\u001B[39m\u001B[33mexecuting \u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m\"\u001B[39m, function)\n\u001B[32m--> \u001B[39m\u001B[32m115\u001B[39m result = \u001B[43mfunction\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 116\u001B[39m LOG.debug(\u001B[33m\"\u001B[39m\u001B[33moperation \u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m completed\u001B[39m\u001B[33m\"\u001B[39m, function)\n\u001B[32m 117\u001B[39m future.get_loop().call_soon_threadsafe(set_result, future, result)\n",
|
||||
"\u001B[31mOperationalError\u001B[39m: (sqlite3.OperationalError) database is locked\n[SQL: \nCREATE TABLE data (\n\tid UUID NOT NULL, \n\tname VARCHAR, \n\textension VARCHAR, \n\tmime_type VARCHAR, \n\traw_data_location VARCHAR, \n\towner_id UUID, \n\tcontent_hash VARCHAR, \n\texternal_metadata JSON, \n\tnode_set JSON, \n\ttoken_count INTEGER, \n\tcreated_at DATETIME, \n\tupdated_at DATETIME, \n\tPRIMARY KEY (id)\n)\n\n]\n(Background on this error at: https://sqlalche.me/e/20/e3q8)"
|
||||
]
|
||||
}
|
||||
],
|
||||
"execution_count": 3
|
||||
},
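Editor's note: the traceback recorded in the cell above ends in `sqlite3.OperationalError: database is locked` while cognee is creating its relational tables (`CREATE TABLE data ...`). That error usually means another process or a second notebook kernel still holds the default SQLite store open. As a purely illustrative workaround (not part of the notebook, and the database URL and timeout value below are assumptions, not cognee's actual configuration), one can give SQLite a longer busy timeout through SQLAlchemy's async engine:

```python
# Hedged sketch: raise SQLite's busy timeout so a concurrent writer waits instead of
# failing immediately with "database is locked". Path and timeout are illustrative.
from sqlalchemy.ext.asyncio import create_async_engine

engine = create_async_engine(
    "sqlite+aiosqlite:///cognee_demo.db",  # assumed path, not cognee's real DB location
    connect_args={"timeout": 30},          # seconds sqlite3 waits on a locked database
)
```

In practice the simpler fix is to make sure only one kernel or process touches the demo database at a time, for example by shutting down the other notebook before rerunning this cell.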
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
|
|
@ -129,9 +203,12 @@
|
|||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-06-30T11:44:56.372628Z",
|
||||
"start_time": "2025-06-30T11:44:55.978258Z"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"from cognee.api.v1.search import SearchType\n",
|
||||
"\n",
|
||||
|
|
@ -144,7 +221,25 @@
|
|||
"# Display search results\n",
|
||||
"for result_text in search_results:\n",
|
||||
" print(result_text)"
|
||||
]
|
||||
],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{'id': '3b530220-7e7c-52a2-8b62-ce5adce1a46c', 'created_at': 1751283883122, 'updated_at': 1751283883122, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': \"The joke queries the number of programmers required to change a light bulb and answers, 'None. That’s a hardware issue.' This humor highlights the divide between software and hardware challenges in programming.\"}\n",
|
||||
"{'id': '128eb96e-fd36-53ef-ab6d-d4884ecbfee9', 'created_at': 1751283883122, 'updated_at': 1751283883122, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': \"Changing a light bulb doesn't require programmers.\"}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"execution_count": 5
|
||||
},
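Editor's note: the search invocation whose output appears in the cell above is not visible in this diff hunk. A minimal sketch of how such a query is typically issued in cognee demos of this vintage is shown below; the keyword names (`query_type`, `query_text`) and the chosen `SearchType` are assumptions inferred from the surrounding cells, not copied from the hidden lines.

```python
# Hedged sketch of the elided search call; argument names and query are assumed,
# not taken from the diff.
import cognee
from cognee.api.v1.search import SearchType

search_results = await cognee.search(
    query_type=SearchType.INSIGHTS,            # assumed; the hidden cell may use another type
    query_text="What is in the knowledge graph?",  # illustrative query, not the original
)

# Display search results, as the visible cell does
for result_text in search_results:
    print(result_text)
```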
|
||||
{
|
||||
"metadata": {},
|
||||
"cell_type": "code",
|
||||
"outputs": [],
|
||||
"execution_count": null,
|
||||
"source": ""
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
|
|
|
|||
109
notebooks/cognee_openai_compatable_demo.ipynb
vendored
|
|
@ -1,109 +0,0 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-05-03T09:53:28.509066Z",
|
||||
"start_time": "2025-05-03T09:52:53.464324Z"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"from openai import OpenAI\n",
|
||||
"\n",
|
||||
"# Use /api/v1/auth/login to get JWT\n",
|
||||
"\n",
|
||||
"client = OpenAI(api_key=\"COGNEE_API_KEY\", base_url=\"http://localhost:8000/api/v1/\")\n",
|
||||
"\n",
|
||||
"client.responses.create(\n",
|
||||
" model=\"cognee-v1\",\n",
|
||||
" input=\"Cognify: Natural language processing (NLP) is an interdisciplinary subfield of computer science and information retrieval.\",\n",
|
||||
")"
|
||||
],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001B[1mHTTP Request: POST http://localhost:8000/api/v1/responses \"HTTP/1.1 307 Temporary Redirect\"\u001B[0m\n",
|
||||
"\u001B[1mHTTP Request: POST http://localhost:8000/api/v1/responses/ \"HTTP/1.1 200 OK\"\u001B[0m"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"Response(id='resp_6815e775ed1c8191a7c9e5cd7d18dd6d01678e9c98ad3fea', created_at=None, error=None, incomplete_details=None, instructions=None, metadata=None, model='cognee-v1', object='response', output=None, parallel_tool_calls=None, temperature=None, tool_choice=None, tools=None, top_p=None, max_output_tokens=None, previous_response_id=None, reasoning=None, status='completed', text=None, truncation=None, usage=ResponseUsage(input_tokens=None, output_tokens=None, output_tokens_details=None, total_tokens=242, prompt_tokens=207, completion_tokens=35), user=None, created=1746266008, toolCalls=[{'id': 'call_VPk6HLbJClOEeznVbLe5l5zJ', 'type': 'function', 'function': {'name': 'cognify', 'arguments': '{\"text\":\"Natural language processing (NLP) is an interdisciplinary subfield of computer science and information retrieval.\"}'}, 'output': {'status': 'success', 'data': {'result': 'Text successfully converted into knowledge graph.'}}}])"
|
||||
]
|
||||
},
|
||||
"execution_count": 29,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"execution_count": 29
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-05-03T09:53:35.039433Z",
|
||||
"start_time": "2025-05-03T09:53:31.791117Z"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"from openai import OpenAI\n",
|
||||
"\n",
|
||||
"client = OpenAI(api_key=\"COGNEE_API_KEY\", base_url=\"http://localhost:8000/api/v1/\")\n",
|
||||
"\n",
|
||||
"client.responses.create(\n",
|
||||
" model=\"cognee-v1\",\n",
|
||||
" input=\"Search insights about NLP\",\n",
|
||||
")"
|
||||
],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001B[1mHTTP Request: POST http://localhost:8000/api/v1/responses \"HTTP/1.1 307 Temporary Redirect\"\u001B[0m\n",
|
||||
"\u001B[1mHTTP Request: POST http://localhost:8000/api/v1/responses/ \"HTTP/1.1 200 OK\"\u001B[0m"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"Response(id='resp_6815e79c2e5c8191a2b2279408544f560bbe226c4ebfeb1f', created_at=None, error=None, incomplete_details=None, instructions=None, metadata=None, model='cognee-v1', object='response', output=None, parallel_tool_calls=None, temperature=None, tool_choice=None, tools=None, top_p=None, max_output_tokens=None, previous_response_id=None, reasoning=None, status='completed', text=None, truncation=None, usage=ResponseUsage(input_tokens=None, output_tokens=None, output_tokens_details=None, total_tokens=215, prompt_tokens=188, completion_tokens=27), user=None, created=1746266015, toolCalls=[{'id': 'call_lP8Le7A76id2MIKqVYa3gvI8', 'type': 'function', 'function': {'name': 'search', 'arguments': '{\"search_query\":\"NLP\",\"search_type\":\"INSIGHTS\",\"top_k\":5}'}, 'output': {'status': 'success', 'data': {'result': [[{'created_at': 1746265986926, 'updated_at': '2025-05-03T09:53:06.926000Z', 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'metadata': {'index_fields': ['text']}, 'type': 'DocumentChunk', 'belongs_to_set': None, 'text': 'Natural language processing (NLP) is an interdisciplinary subfield of computer science and information retrieval.', 'chunk_size': 34, 'chunk_index': 0, 'cut_type': 'sentence_end', 'id': '6b85cc38-be93-5ada-bed1-90cddad3c385'}, {'source_node_id': '6b85cc38-be93-5ada-bed1-90cddad3c385', 'target_node_id': 'bc338a39-64d6-549a-acec-da60846dd90d', 'relationship_name': 'contains', 'updated_at': '2025-05-03T09:53:24.988192Z'}, {'created_at': 1746265992781, 'updated_at': '2025-05-03T09:53:12.781000Z', 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'metadata': {'index_fields': ['name']}, 'type': 'Entity', 'belongs_to_set': None, 'name': 'natural language processing', 'description': 'An interdisciplinary subfield of computer science and information retrieval.', 'id': 'bc338a39-64d6-549a-acec-da60846dd90d'}], [{'created_at': 1746265992781, 'updated_at': '2025-05-03T09:53:12.781000Z', 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'metadata': {'index_fields': ['name']}, 'type': 'Entity', 'belongs_to_set': None, 'name': 'natural language processing', 'description': 'An interdisciplinary subfield of computer science and information retrieval.', 'id': 'bc338a39-64d6-549a-acec-da60846dd90d'}, {'source_node_id': 'bc338a39-64d6-549a-acec-da60846dd90d', 'target_node_id': 'dd9713b7-dc20-5101-aad0-1c4216811147', 'relationship_name': 'is_a', 'updated_at': '2025-05-03T09:53:24.988195Z'}, {'created_at': 1746265992781, 'updated_at': '2025-05-03T09:53:12.781000Z', 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'metadata': {'index_fields': ['name']}, 'type': 'EntityType', 'belongs_to_set': None, 'name': 'concept', 'description': 'concept', 'id': 'dd9713b7-dc20-5101-aad0-1c4216811147'}], [{'created_at': 1746265992781, 'updated_at': '2025-05-03T09:53:12.781000Z', 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'metadata': {'index_fields': ['name']}, 'type': 'Entity', 'belongs_to_set': None, 'name': 'natural language processing', 'description': 'An interdisciplinary subfield of computer science and information retrieval.', 'id': 'bc338a39-64d6-549a-acec-da60846dd90d'}, {'relationship_name': 'is_a_subfield_of', 'source_node_id': 'bc338a39-64d6-549a-acec-da60846dd90d', 'target_node_id': '6218dbab-eb6a-5759-a864-b3419755ffe0', 'ontology_valid': False, 'updated_at': '2025-05-03T09:53:18.000683Z'}, {'created_at': 1746265992782, 'updated_at': '2025-05-03T09:53:12.782000Z', 'ontology_valid': False, 'version': 1, 
'topological_rank': 0, 'metadata': {'index_fields': ['name']}, 'type': 'Entity', 'belongs_to_set': None, 'name': 'computer science', 'description': 'The study of computers and computational systems.', 'id': '6218dbab-eb6a-5759-a864-b3419755ffe0'}], [{'created_at': 1746265992781, 'updated_at': '2025-05-03T09:53:12.781000Z', 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'metadata': {'index_fields': ['name']}, 'type': 'Entity', 'belongs_to_set': None, 'name': 'natural language processing', 'description': 'An interdisciplinary subfield of computer science and information retrieval.', 'id': 'bc338a39-64d6-549a-acec-da60846dd90d'}, {'relationship_name': 'is_a_subfield_of', 'source_node_id': 'bc338a39-64d6-549a-acec-da60846dd90d', 'target_node_id': '02bdab9a-0981-518c-a0d4-1684e0329447', 'ontology_valid': False, 'updated_at': '2025-05-03T09:53:18.000691Z'}, {'created_at': 1746265992782, 'updated_at': '2025-05-03T09:53:12.782000Z', 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'metadata': {'index_fields': ['name']}, 'type': 'Entity', 'belongs_to_set': None, 'name': 'information retrieval', 'description': 'The activity of obtaining information system resources that are relevant to an information request.', 'id': '02bdab9a-0981-518c-a0d4-1684e0329447'}]]}}}])"
|
||||
]
|
||||
},
|
||||
"execution_count": 30,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"execution_count": 30
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.8"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
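Editor's note: the notebook deleted above drove cognee through its OpenAI-compatible `/responses` endpoint using the official `openai` client. For reference, a rough equivalent raw HTTP request is sketched below; the JSON body shape and the bearer-token header are inferred from the SDK call and the logged `POST .../responses/` requests, so treat them as assumptions rather than documented API details.

```python
# Hedged sketch: the same "Cognify: ..." request issued directly with httpx instead of
# the openai SDK. The endpoint path comes from the logged requests above; the body
# shape ({"model", "input"}) is an assumption based on the SDK call, not cognee docs.
import httpx

response = httpx.post(
    "http://localhost:8000/api/v1/responses/",  # trailing slash avoids the 307 redirect seen above
    headers={"Authorization": "Bearer COGNEE_API_KEY"},  # placeholder token, as in the notebook
    json={
        "model": "cognee-v1",
        "input": "Cognify: Natural language processing (NLP) is an interdisciplinary "
        "subfield of computer science and information retrieval.",
    },
    timeout=60.0,
)
print(response.json())
```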
|
||||
518
notebooks/cognee_simple_demo.ipynb
vendored
|
|
@ -22,9 +22,7 @@
|
|||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"!pip install cognee==0.1.41"
|
||||
]
|
||||
"source": "!pip install cognee==0.2.0"
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
|
|
@ -38,15 +36,20 @@
|
|||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "initial_id",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-06-30T12:08:22.650719Z",
|
||||
"start_time": "2025-06-30T12:08:22.646047Z"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"import os\n",
|
||||
"\n",
|
||||
"# os.environ[\"LLM_API_KEY\"] = \"\""
|
||||
]
|
||||
"os.environ[\"LLM_API_KEY\"] = \"\""
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": 1
|
||||
},
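Editor's note: the cell above now sets `LLM_API_KEY` to an empty string, which only works if the key is pasted in before running. A small, hypothetical alternative (not part of the notebook) is to take the key from the existing environment or prompt for it, so the notebook can be committed without embedding a secret:

```python
# Hedged sketch: use the key already present in the environment if any,
# otherwise prompt for it interactively.
import os
from getpass import getpass

if not os.environ.get("LLM_API_KEY"):
    os.environ["LLM_API_KEY"] = getpass("Enter LLM API key: ")
```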
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
|
|
@ -58,14 +61,19 @@
|
|||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "5805c346f03d8070",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-06-30T12:08:23.557926Z",
|
||||
"start_time": "2025-06-30T12:08:23.555854Z"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"current_directory = os.getcwd()\n",
|
||||
"file_path = os.path.join(current_directory, \"data\", \"alice_in_wonderland.txt\")"
|
||||
]
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": 2
|
||||
},
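Editor's note: the next cell feeds `file_path` straight into `cognee.add`, so a quick existence check avoids a confusing pipeline failure when the demo text is missing. A minimal, optional sketch (not part of the notebook):

```python
# Hedged sketch: fail fast with a clear message if the demo data is not where the
# notebook expects it (data/alice_in_wonderland.txt under the current directory).
import os

if not os.path.isfile(file_path):
    raise FileNotFoundError(
        f"Expected demo data at {file_path}; "
        "place alice_in_wonderland.txt in the data/ folder before running the next cell."
    )
```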
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
|
|
@ -77,264 +85,86 @@
|
|||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "875763366723ee48",
|
||||
"metadata": {},
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-06-30T12:08:34.629436Z",
|
||||
"start_time": "2025-06-30T12:08:24.800887Z"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"import cognee\n",
|
||||
"await cognee.add(file_path)\n",
|
||||
"await cognee.cognify()"
|
||||
],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-06-18T18:16:15.096441\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDeleted old log file: /Users/borisarzentar/Projects/Topoteretes/cognee/logs/2025-06-18_19-55-02.log\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-06-18T18:16:15.096895\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLogging initialized \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m \u001b[36mcognee_version\u001b[0m=\u001b[35m0.1.42-dev\u001b[0m \u001b[36mos_info\u001b[0m=\u001b[35m'Darwin 24.5.0 (Darwin Kernel Version 24.5.0: Tue Apr 22 19:54:25 PDT 2025; root:xnu-11417.121.6~2/RELEASE_ARM64_T6020)'\u001b[0m \u001b[36mpython_version\u001b[0m=\u001b[35m3.11.5\u001b[0m \u001b[36mstructlog_version\u001b[0m=\u001b[35m25.4.0\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[1mHTTP Request: GET https://raw.githubusercontent.com/BerriAI/litellm/main/model_prices_and_context_window.json \"HTTP/1.1 200 OK\"\u001b[0m\n",
|
||||
"/Users/borisarzentar/Projects/Topoteretes/cognee/.venv/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
||||
" from .autonotebook import tqdm as notebook_tqdm\n",
|
||||
"/Users/borisarzentar/Projects/Topoteretes/cognee/.venv/lib/python3.11/site-packages/dlt/helpers/dbt/__init__.py:3: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n",
|
||||
" import pkg_resources\n",
|
||||
"\n",
|
||||
"\u001b[1mLangfuse client is disabled since no public_key was provided as a parameter or environment variable 'LANGFUSE_PUBLIC_KEY'. See our docs: https://langfuse.com/docs/sdk/python/low-level-sdk#initialize-client\u001b[0m\u001b[92m20:16:19 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:21 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\n",
|
||||
"\u001b[1mEmbeddingRateLimiter initialized: enabled=False, requests_limit=60, interval_seconds=60\u001b[0m\u001b[92m20:16:21 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:22.122387\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `fc460a84-f4ef-5b2f-a865-50bfadf0e05b`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks(tasks: [Task], data)\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:22.122779\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:22.123318\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m"
|
||||
"/Users/vasilije/cognee/.venv/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
||||
" from .autonotebook import tqdm as notebook_tqdm\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"User 7448b2a4-f6d3-4a07-9e94-300a427ce4dc has registered.\n"
|
||||
"ename": "OperationalError",
|
||||
"evalue": "(sqlite3.OperationalError) database is locked\n[SQL: \nCREATE TABLE data (\n\tid UUID NOT NULL, \n\tname VARCHAR, \n\textension VARCHAR, \n\tmime_type VARCHAR, \n\traw_data_location VARCHAR, \n\towner_id UUID, \n\tcontent_hash VARCHAR, \n\texternal_metadata JSON, \n\tnode_set JSON, \n\ttoken_count INTEGER, \n\tcreated_at DATETIME, \n\tupdated_at DATETIME, \n\tPRIMARY KEY (id)\n)\n\n]\n(Background on this error at: https://sqlalche.me/e/20/e3q8)",
|
||||
"output_type": "error",
|
||||
"traceback": [
|
||||
"\u001B[31m---------------------------------------------------------------------------\u001B[39m",
|
||||
"\u001B[31mOperationalError\u001B[39m Traceback (most recent call last)",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1964\u001B[39m, in \u001B[36mConnection._exec_single_context\u001B[39m\u001B[34m(self, dialect, context, statement, parameters)\u001B[39m\n\u001B[32m 1963\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m evt_handled:\n\u001B[32m-> \u001B[39m\u001B[32m1964\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m.\u001B[49m\u001B[43mdo_execute\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1965\u001B[39m \u001B[43m \u001B[49m\u001B[43mcursor\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstr_statement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43meffective_parameters\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcontext\u001B[49m\n\u001B[32m 1966\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1968\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m._has_events \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;28mself\u001B[39m.engine._has_events:\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/default.py:942\u001B[39m, in \u001B[36mDefaultDialect.do_execute\u001B[39m\u001B[34m(self, cursor, statement, parameters, context)\u001B[39m\n\u001B[32m 941\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34mdo_execute\u001B[39m(\u001B[38;5;28mself\u001B[39m, cursor, statement, parameters, context=\u001B[38;5;28;01mNone\u001B[39;00m):\n\u001B[32m--> \u001B[39m\u001B[32m942\u001B[39m \u001B[43mcursor\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[43mstatement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:172\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_cursor.execute\u001B[39m\u001B[34m(self, operation, parameters)\u001B[39m\n\u001B[32m 171\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mException\u001B[39;00m \u001B[38;5;28;01mas\u001B[39;00m error:\n\u001B[32m--> \u001B[39m\u001B[32m172\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_adapt_connection\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_handle_exception\u001B[49m\u001B[43m(\u001B[49m\u001B[43merror\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:323\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_connection._handle_exception\u001B[39m\u001B[34m(self, error)\u001B[39m\n\u001B[32m 322\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m323\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m error\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:154\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_cursor.execute\u001B[39m\u001B[34m(self, operation, parameters)\u001B[39m\n\u001B[32m 153\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m154\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mawait_\u001B[49m\u001B[43m(\u001B[49m\u001B[43m_cursor\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[43moperation\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\u001B[43m)\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 156\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m _cursor.description:\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:132\u001B[39m, in \u001B[36mawait_only\u001B[39m\u001B[34m(awaitable)\u001B[39m\n\u001B[32m 128\u001B[39m \u001B[38;5;66;03m# returns the control to the driver greenlet passing it\u001B[39;00m\n\u001B[32m 129\u001B[39m \u001B[38;5;66;03m# a coroutine to run. Once the awaitable is done, the driver greenlet\u001B[39;00m\n\u001B[32m 130\u001B[39m \u001B[38;5;66;03m# switches back to this greenlet with the result of awaitable that is\u001B[39;00m\n\u001B[32m 131\u001B[39m \u001B[38;5;66;03m# then returned to the caller (or raised as error)\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m132\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mcurrent\u001B[49m\u001B[43m.\u001B[49m\u001B[43mparent\u001B[49m\u001B[43m.\u001B[49m\u001B[43mswitch\u001B[49m\u001B[43m(\u001B[49m\u001B[43mawaitable\u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:196\u001B[39m, in \u001B[36mgreenlet_spawn\u001B[39m\u001B[34m(fn, _require_await, *args, **kwargs)\u001B[39m\n\u001B[32m 193\u001B[39m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[32m 194\u001B[39m \u001B[38;5;66;03m# wait for a coroutine from await_only and then return its\u001B[39;00m\n\u001B[32m 195\u001B[39m \u001B[38;5;66;03m# result back to it.\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m196\u001B[39m value = \u001B[38;5;28;01mawait\u001B[39;00m result\n\u001B[32m 197\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mBaseException\u001B[39;00m:\n\u001B[32m 198\u001B[39m \u001B[38;5;66;03m# this allows an exception to be raised within\u001B[39;00m\n\u001B[32m 199\u001B[39m \u001B[38;5;66;03m# the moderated greenlet so that it can continue\u001B[39;00m\n\u001B[32m 200\u001B[39m \u001B[38;5;66;03m# its expected flow.\u001B[39;00m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/cursor.py:48\u001B[39m, in \u001B[36mCursor.execute\u001B[39m\u001B[34m(self, sql, parameters)\u001B[39m\n\u001B[32m 47\u001B[39m parameters = []\n\u001B[32m---> \u001B[39m\u001B[32m48\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m._execute(\u001B[38;5;28mself\u001B[39m._cursor.execute, sql, parameters)\n\u001B[32m 49\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/cursor.py:40\u001B[39m, in \u001B[36mCursor._execute\u001B[39m\u001B[34m(self, fn, *args, **kwargs)\u001B[39m\n\u001B[32m 39\u001B[39m \u001B[38;5;250m\u001B[39m\u001B[33;03m\"\"\"Execute the given function on the shared connection's thread.\"\"\"\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m40\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m._conn._execute(fn, *args, **kwargs)\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/core.py:132\u001B[39m, in \u001B[36mConnection._execute\u001B[39m\u001B[34m(self, fn, *args, **kwargs)\u001B[39m\n\u001B[32m 130\u001B[39m \u001B[38;5;28mself\u001B[39m._tx.put_nowait((future, function))\n\u001B[32m--> \u001B[39m\u001B[32m132\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m future\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/core.py:115\u001B[39m, in \u001B[36mConnection.run\u001B[39m\u001B[34m(self)\u001B[39m\n\u001B[32m 114\u001B[39m LOG.debug(\u001B[33m\"\u001B[39m\u001B[33mexecuting \u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m\"\u001B[39m, function)\n\u001B[32m--> \u001B[39m\u001B[32m115\u001B[39m result = \u001B[43mfunction\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 116\u001B[39m LOG.debug(\u001B[33m\"\u001B[39m\u001B[33moperation \u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m completed\u001B[39m\u001B[33m\"\u001B[39m, function)\n",
|
||||
"\u001B[31mOperationalError\u001B[39m: database is locked",
|
||||
"\nThe above exception was the direct cause of the following exception:\n",
|
||||
"\u001B[31mOperationalError\u001B[39m Traceback (most recent call last)",
|
||||
"\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[3]\u001B[39m\u001B[32m, line 2\u001B[39m\n\u001B[32m 1\u001B[39m \u001B[38;5;28;01mimport\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01mcognee\u001B[39;00m\n\u001B[32m----> \u001B[39m\u001B[32m2\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m cognee.add(file_path)\n\u001B[32m 3\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m cognee.cognify()\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/Projects/release_test/cognee/cognee/api/v1/add/add.py:26\u001B[39m, in \u001B[36madd\u001B[39m\u001B[34m(data, dataset_name, user, node_set, vector_db_config, graph_db_config, dataset_id)\u001B[39m\n\u001B[32m 19\u001B[39m tasks = [\n\u001B[32m 20\u001B[39m Task(resolve_data_directories),\n\u001B[32m 21\u001B[39m Task(ingest_data, dataset_name, user, node_set, dataset_id),\n\u001B[32m 22\u001B[39m ]\n\u001B[32m 24\u001B[39m pipeline_run_info = \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m26\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mfor\u001B[39;00m run_info \u001B[38;5;129;01min\u001B[39;00m cognee_pipeline(\n\u001B[32m 27\u001B[39m tasks=tasks,\n\u001B[32m 28\u001B[39m datasets=dataset_id \u001B[38;5;28;01mif\u001B[39;00m dataset_id \u001B[38;5;28;01melse\u001B[39;00m dataset_name,\n\u001B[32m 29\u001B[39m data=data,\n\u001B[32m 30\u001B[39m user=user,\n\u001B[32m 31\u001B[39m pipeline_name=\u001B[33m\"\u001B[39m\u001B[33madd_pipeline\u001B[39m\u001B[33m\"\u001B[39m,\n\u001B[32m 32\u001B[39m vector_db_config=vector_db_config,\n\u001B[32m 33\u001B[39m graph_db_config=graph_db_config,\n\u001B[32m 34\u001B[39m ):\n\u001B[32m 35\u001B[39m pipeline_run_info = run_info\n\u001B[32m 37\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m pipeline_run_info\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/Projects/release_test/cognee/cognee/modules/pipelines/operations/pipeline.py:63\u001B[39m, in \u001B[36mcognee_pipeline\u001B[39m\u001B[34m(tasks, data, datasets, user, pipeline_name, vector_db_config, graph_db_config)\u001B[39m\n\u001B[32m 60\u001B[39m context_graph_db_config.set(graph_db_config)\n\u001B[32m 62\u001B[39m \u001B[38;5;66;03m# Create tables for databases\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m63\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m create_relational_db_and_tables()\n\u001B[32m 64\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m create_pgvector_db_and_tables()\n\u001B[32m 66\u001B[39m \u001B[38;5;66;03m# Initialize first_run attribute if it doesn't exist\u001B[39;00m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/Projects/release_test/cognee/cognee/infrastructure/databases/relational/create_db_and_tables.py:13\u001B[39m, in \u001B[36mcreate_db_and_tables\u001B[39m\u001B[34m()\u001B[39m\n\u001B[32m 5\u001B[39m \u001B[38;5;250m\u001B[39m\u001B[33;03m\"\"\"\u001B[39;00m\n\u001B[32m 6\u001B[39m \u001B[33;03mCreate a database and its tables.\u001B[39;00m\n\u001B[32m 7\u001B[39m \n\u001B[32m 8\u001B[39m \u001B[33;03mThis asynchronous function retrieves the relational engine and calls its method to\u001B[39;00m\n\u001B[32m 9\u001B[39m \u001B[33;03mcreate a database.\u001B[39;00m\n\u001B[32m 10\u001B[39m \u001B[33;03m\"\"\"\u001B[39;00m\n\u001B[32m 11\u001B[39m relational_engine = get_relational_engine()\n\u001B[32m---> \u001B[39m\u001B[32m13\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m relational_engine.create_database()\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/.pyenv/versions/3.11.0/lib/python3.11/contextlib.py:222\u001B[39m, in \u001B[36m_AsyncGeneratorContextManager.__aexit__\u001B[39m\u001B[34m(self, typ, value, traceback)\u001B[39m\n\u001B[32m 220\u001B[39m value = typ()\n\u001B[32m 221\u001B[39m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m222\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m.gen.athrow(typ, value, traceback)\n\u001B[32m 223\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mStopAsyncIteration\u001B[39;00m \u001B[38;5;28;01mas\u001B[39;00m exc:\n\u001B[32m 224\u001B[39m \u001B[38;5;66;03m# Suppress StopIteration *unless* it's the same exception that\u001B[39;00m\n\u001B[32m 225\u001B[39m \u001B[38;5;66;03m# was passed to throw(). This prevents a StopIteration\u001B[39;00m\n\u001B[32m 226\u001B[39m \u001B[38;5;66;03m# raised inside the \"with\" statement from being suppressed.\u001B[39;00m\n\u001B[32m 227\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m exc \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m value\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/ext/asyncio/engine.py:1066\u001B[39m, in \u001B[36mAsyncEngine.begin\u001B[39m\u001B[34m(self)\u001B[39m\n\u001B[32m 1064\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mwith\u001B[39;00m conn:\n\u001B[32m 1065\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mwith\u001B[39;00m conn.begin():\n\u001B[32m-> \u001B[39m\u001B[32m1066\u001B[39m \u001B[38;5;28;01myield\u001B[39;00m conn\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/Projects/release_test/cognee/cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py:445\u001B[39m, in \u001B[36mSQLAlchemyAdapter.create_database\u001B[39m\u001B[34m(self)\u001B[39m\n\u001B[32m 443\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mwith\u001B[39;00m \u001B[38;5;28mself\u001B[39m.engine.begin() \u001B[38;5;28;01mas\u001B[39;00m connection:\n\u001B[32m 444\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mlen\u001B[39m(Base.metadata.tables.keys()) > \u001B[32m0\u001B[39m:\n\u001B[32m--> \u001B[39m\u001B[32m445\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m connection.run_sync(Base.metadata.create_all)\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/ext/asyncio/engine.py:887\u001B[39m, in \u001B[36mAsyncConnection.run_sync\u001B[39m\u001B[34m(self, fn, *arg, **kw)\u001B[39m\n\u001B[32m 819\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34mrun_sync\u001B[39m(\n\u001B[32m 820\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 821\u001B[39m fn: Callable[Concatenate[Connection, _P], _T],\n\u001B[32m 822\u001B[39m *arg: _P.args,\n\u001B[32m 823\u001B[39m **kw: _P.kwargs,\n\u001B[32m 824\u001B[39m ) -> _T:\n\u001B[32m 825\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m'''Invoke the given synchronous (i.e. not async) callable,\u001B[39;00m\n\u001B[32m 826\u001B[39m \u001B[33;03m passing a synchronous-style :class:`_engine.Connection` as the first\u001B[39;00m\n\u001B[32m 827\u001B[39m \u001B[33;03m argument.\u001B[39;00m\n\u001B[32m (...)\u001B[39m\u001B[32m 884\u001B[39m \n\u001B[32m 885\u001B[39m \u001B[33;03m '''\u001B[39;00m \u001B[38;5;66;03m# noqa: E501\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m887\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m greenlet_spawn(\n\u001B[32m 888\u001B[39m fn, \u001B[38;5;28mself\u001B[39m._proxied, *arg, _require_await=\u001B[38;5;28;01mFalse\u001B[39;00m, **kw\n\u001B[32m 889\u001B[39m )\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:201\u001B[39m, in \u001B[36mgreenlet_spawn\u001B[39m\u001B[34m(fn, _require_await, *args, **kwargs)\u001B[39m\n\u001B[32m 196\u001B[39m value = \u001B[38;5;28;01mawait\u001B[39;00m result\n\u001B[32m 197\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mBaseException\u001B[39;00m:\n\u001B[32m 198\u001B[39m \u001B[38;5;66;03m# this allows an exception to be raised within\u001B[39;00m\n\u001B[32m 199\u001B[39m \u001B[38;5;66;03m# the moderated greenlet so that it can continue\u001B[39;00m\n\u001B[32m 200\u001B[39m \u001B[38;5;66;03m# its expected flow.\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m201\u001B[39m result = \u001B[43mcontext\u001B[49m\u001B[43m.\u001B[49m\u001B[43mthrow\u001B[49m\u001B[43m(\u001B[49m\u001B[43m*\u001B[49m\u001B[43msys\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexc_info\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 202\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m 203\u001B[39m result = context.switch(value)\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/schema.py:5907\u001B[39m, in \u001B[36mMetaData.create_all\u001B[39m\u001B[34m(self, bind, tables, checkfirst)\u001B[39m\n\u001B[32m 5883\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34mcreate_all\u001B[39m(\n\u001B[32m 5884\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 5885\u001B[39m bind: _CreateDropBind,\n\u001B[32m 5886\u001B[39m tables: Optional[_typing_Sequence[Table]] = \u001B[38;5;28;01mNone\u001B[39;00m,\n\u001B[32m 5887\u001B[39m checkfirst: \u001B[38;5;28mbool\u001B[39m = \u001B[38;5;28;01mTrue\u001B[39;00m,\n\u001B[32m 5888\u001B[39m ) -> \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 5889\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"Create all tables stored in this metadata.\u001B[39;00m\n\u001B[32m 5890\u001B[39m \n\u001B[32m 5891\u001B[39m \u001B[33;03m Conditional by default, will not attempt to recreate tables already\u001B[39;00m\n\u001B[32m (...)\u001B[39m\u001B[32m 5905\u001B[39m \n\u001B[32m 5906\u001B[39m \u001B[33;03m \"\"\"\u001B[39;00m\n\u001B[32m-> \u001B[39m\u001B[32m5907\u001B[39m \u001B[43mbind\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_run_ddl_visitor\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 5908\u001B[39m \u001B[43m \u001B[49m\u001B[43mddl\u001B[49m\u001B[43m.\u001B[49m\u001B[43mSchemaGenerator\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcheckfirst\u001B[49m\u001B[43m=\u001B[49m\u001B[43mcheckfirst\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mtables\u001B[49m\u001B[43m=\u001B[49m\u001B[43mtables\u001B[49m\n\u001B[32m 5909\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n",
|
||||
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:2456\u001B[39m, in \u001B[36mConnection._run_ddl_visitor\u001B[39m\u001B[34m(self, visitorcallable, element, **kwargs)\u001B[39m\n\u001B[32m 2444\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m_run_ddl_visitor\u001B[39m(\n\u001B[32m 2445\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 2446\u001B[39m visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]],\n\u001B[32m 2447\u001B[39m element: SchemaItem,\n\u001B[32m 2448\u001B[39m **kwargs: Any,\n\u001B[32m 2449\u001B[39m ) -> \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 2450\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"run a DDL visitor.\u001B[39;00m\n\u001B[32m 2451\u001B[39m \n\u001B[32m 2452\u001B[39m \u001B[33;03m This method is only here so that the MockConnection can change the\u001B[39;00m\n\u001B[32m 2453\u001B[39m \u001B[33;03m options given to the visitor so that \"checkfirst\" is skipped.\u001B[39;00m\n\u001B[32m 2454\u001B[39m \n\u001B[32m 2455\u001B[39m \u001B[33;03m \"\"\"\u001B[39;00m\n\u001B[32m-> \u001B[39m\u001B[32m2456\u001B[39m \u001B[43mvisitorcallable\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\u001B[43m.\u001B[49m\u001B[43mtraverse_single\u001B[49m\u001B[43m(\u001B[49m\u001B[43melement\u001B[49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/visitors.py:664\u001B[39m, in \u001B[36mExternalTraversal.traverse_single\u001B[39m\u001B[34m(self, obj, **kw)\u001B[39m\n\u001B[32m 662\u001B[39m meth = \u001B[38;5;28mgetattr\u001B[39m(v, \u001B[33m\"\u001B[39m\u001B[33mvisit_\u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m\"\u001B[39m % obj.__visit_name__, \u001B[38;5;28;01mNone\u001B[39;00m)\n\u001B[32m 663\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m meth:\n\u001B[32m--> \u001B[39m\u001B[32m664\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mmeth\u001B[49m\u001B[43m(\u001B[49m\u001B[43mobj\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkw\u001B[49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py:978\u001B[39m, in \u001B[36mSchemaGenerator.visit_metadata\u001B[39m\u001B[34m(self, metadata)\u001B[39m\n\u001B[32m 976\u001B[39m \u001B[38;5;28;01mfor\u001B[39;00m table, fkcs \u001B[38;5;129;01min\u001B[39;00m collection:\n\u001B[32m 977\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m table \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m978\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mtraverse_single\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 979\u001B[39m \u001B[43m \u001B[49m\u001B[43mtable\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 980\u001B[39m \u001B[43m \u001B[49m\u001B[43mcreate_ok\u001B[49m\u001B[43m=\u001B[49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[32m 981\u001B[39m \u001B[43m \u001B[49m\u001B[43minclude_foreign_key_constraints\u001B[49m\u001B[43m=\u001B[49m\u001B[43mfkcs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 982\u001B[39m \u001B[43m \u001B[49m\u001B[43m_is_metadata_operation\u001B[49m\u001B[43m=\u001B[49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[32m 983\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 984\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m 985\u001B[39m \u001B[38;5;28;01mfor\u001B[39;00m fkc \u001B[38;5;129;01min\u001B[39;00m fkcs:\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/visitors.py:664\u001B[39m, in \u001B[36mExternalTraversal.traverse_single\u001B[39m\u001B[34m(self, obj, **kw)\u001B[39m\n\u001B[32m 662\u001B[39m meth = \u001B[38;5;28mgetattr\u001B[39m(v, \u001B[33m\"\u001B[39m\u001B[33mvisit_\u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m\"\u001B[39m % obj.__visit_name__, \u001B[38;5;28;01mNone\u001B[39;00m)\n\u001B[32m 663\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m meth:\n\u001B[32m--> \u001B[39m\u001B[32m664\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mmeth\u001B[49m\u001B[43m(\u001B[49m\u001B[43mobj\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkw\u001B[49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py:1016\u001B[39m, in \u001B[36mSchemaGenerator.visit_table\u001B[39m\u001B[34m(self, table, create_ok, include_foreign_key_constraints, _is_metadata_operation)\u001B[39m\n\u001B[32m 1007\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28mself\u001B[39m.dialect.supports_alter:\n\u001B[32m 1008\u001B[39m \u001B[38;5;66;03m# e.g., don't omit any foreign key constraints\u001B[39;00m\n\u001B[32m 1009\u001B[39m include_foreign_key_constraints = \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[32m 1011\u001B[39m \u001B[43mCreateTable\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1012\u001B[39m \u001B[43m \u001B[49m\u001B[43mtable\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1013\u001B[39m \u001B[43m \u001B[49m\u001B[43minclude_foreign_key_constraints\u001B[49m\u001B[43m=\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1014\u001B[39m \u001B[43m \u001B[49m\u001B[43minclude_foreign_key_constraints\u001B[49m\n\u001B[32m 1015\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m-> \u001B[39m\u001B[32m1016\u001B[39m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_invoke_with\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mconnection\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1018\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mhasattr\u001B[39m(table, \u001B[33m\"\u001B[39m\u001B[33mindexes\u001B[39m\u001B[33m\"\u001B[39m):\n\u001B[32m 1019\u001B[39m \u001B[38;5;28;01mfor\u001B[39;00m index \u001B[38;5;129;01min\u001B[39;00m table.indexes:\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py:314\u001B[39m, in \u001B[36mExecutableDDLElement._invoke_with\u001B[39m\u001B[34m(self, bind)\u001B[39m\n\u001B[32m 312\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m_invoke_with\u001B[39m(\u001B[38;5;28mself\u001B[39m, bind):\n\u001B[32m 313\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m._should_execute(\u001B[38;5;28mself\u001B[39m.target, bind):\n\u001B[32m--> \u001B[39m\u001B[32m314\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mbind\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1416\u001B[39m, in \u001B[36mConnection.execute\u001B[39m\u001B[34m(self, statement, parameters, execution_options)\u001B[39m\n\u001B[32m 1414\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m exc.ObjectNotExecutableError(statement) \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01merr\u001B[39;00m\n\u001B[32m 1415\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m-> \u001B[39m\u001B[32m1416\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mmeth\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1417\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\n\u001B[32m 1418\u001B[39m \u001B[43m \u001B[49m\u001B[43mdistilled_parameters\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1419\u001B[39m \u001B[43m \u001B[49m\u001B[43mexecution_options\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01mor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mNO_OPTIONS\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1420\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/sql/ddl.py:180\u001B[39m, in \u001B[36mExecutableDDLElement._execute_on_connection\u001B[39m\u001B[34m(self, connection, distilled_params, execution_options)\u001B[39m\n\u001B[32m 177\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m_execute_on_connection\u001B[39m(\n\u001B[32m 178\u001B[39m \u001B[38;5;28mself\u001B[39m, connection, distilled_params, execution_options\n\u001B[32m 179\u001B[39m ):\n\u001B[32m--> \u001B[39m\u001B[32m180\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mconnection\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_execute_ddl\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 181\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mdistilled_params\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mexecution_options\u001B[49m\n\u001B[32m 182\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1527\u001B[39m, in \u001B[36mConnection._execute_ddl\u001B[39m\u001B[34m(self, ddl, distilled_parameters, execution_options)\u001B[39m\n\u001B[32m 1522\u001B[39m dialect = \u001B[38;5;28mself\u001B[39m.dialect\n\u001B[32m 1524\u001B[39m compiled = ddl.compile(\n\u001B[32m 1525\u001B[39m dialect=dialect, schema_translate_map=schema_translate_map\n\u001B[32m 1526\u001B[39m )\n\u001B[32m-> \u001B[39m\u001B[32m1527\u001B[39m ret = \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_execute_context\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1528\u001B[39m \u001B[43m \u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1529\u001B[39m \u001B[43m \u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecution_ctx_cls\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_init_ddl\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1530\u001B[39m \u001B[43m \u001B[49m\u001B[43mcompiled\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1531\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43;01mNone\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[32m 1532\u001B[39m \u001B[43m \u001B[49m\u001B[43mexec_opts\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1533\u001B[39m \u001B[43m \u001B[49m\u001B[43mcompiled\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 1534\u001B[39m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1535\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m._has_events \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;28mself\u001B[39m.engine._has_events:\n\u001B[32m 1536\u001B[39m \u001B[38;5;28mself\u001B[39m.dispatch.after_execute(\n\u001B[32m 1537\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 1538\u001B[39m ddl,\n\u001B[32m (...)\u001B[39m\u001B[32m 1542\u001B[39m ret,\n\u001B[32m 1543\u001B[39m )\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1843\u001B[39m, in \u001B[36mConnection._execute_context\u001B[39m\u001B[34m(self, dialect, constructor, statement, parameters, execution_options, *args, **kw)\u001B[39m\n\u001B[32m 1841\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m._exec_insertmany_context(dialect, context)\n\u001B[32m 1842\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m-> \u001B[39m\u001B[32m1843\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_exec_single_context\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1844\u001B[39m \u001B[43m \u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcontext\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstatement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\n\u001B[32m 1845\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1983\u001B[39m, in \u001B[36mConnection._exec_single_context\u001B[39m\u001B[34m(self, dialect, context, statement, parameters)\u001B[39m\n\u001B[32m 1980\u001B[39m result = context._setup_result_proxy()\n\u001B[32m 1982\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mBaseException\u001B[39;00m \u001B[38;5;28;01mas\u001B[39;00m e:\n\u001B[32m-> \u001B[39m\u001B[32m1983\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_handle_dbapi_exception\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1984\u001B[39m \u001B[43m \u001B[49m\u001B[43me\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstr_statement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43meffective_parameters\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcursor\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcontext\u001B[49m\n\u001B[32m 1985\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1987\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m result\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:2352\u001B[39m, in \u001B[36mConnection._handle_dbapi_exception\u001B[39m\u001B[34m(self, e, statement, parameters, cursor, context, is_sub_exec)\u001B[39m\n\u001B[32m 2350\u001B[39m \u001B[38;5;28;01melif\u001B[39;00m should_wrap:\n\u001B[32m 2351\u001B[39m \u001B[38;5;28;01massert\u001B[39;00m sqlalchemy_exception \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[32m-> \u001B[39m\u001B[32m2352\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m sqlalchemy_exception.with_traceback(exc_info[\u001B[32m2\u001B[39m]) \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01me\u001B[39;00m\n\u001B[32m 2353\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m 2354\u001B[39m \u001B[38;5;28;01massert\u001B[39;00m exc_info[\u001B[32m1\u001B[39m] \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/base.py:1964\u001B[39m, in \u001B[36mConnection._exec_single_context\u001B[39m\u001B[34m(self, dialect, context, statement, parameters)\u001B[39m\n\u001B[32m 1962\u001B[39m \u001B[38;5;28;01mbreak\u001B[39;00m\n\u001B[32m 1963\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m evt_handled:\n\u001B[32m-> \u001B[39m\u001B[32m1964\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mdialect\u001B[49m\u001B[43m.\u001B[49m\u001B[43mdo_execute\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1965\u001B[39m \u001B[43m \u001B[49m\u001B[43mcursor\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstr_statement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43meffective_parameters\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcontext\u001B[49m\n\u001B[32m 1966\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 1968\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m._has_events \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;28mself\u001B[39m.engine._has_events:\n\u001B[32m 1969\u001B[39m \u001B[38;5;28mself\u001B[39m.dispatch.after_cursor_execute(\n\u001B[32m 1970\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m 1971\u001B[39m cursor,\n\u001B[32m (...)\u001B[39m\u001B[32m 1975\u001B[39m context.executemany,\n\u001B[32m 1976\u001B[39m )\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/engine/default.py:942\u001B[39m, in \u001B[36mDefaultDialect.do_execute\u001B[39m\u001B[34m(self, cursor, statement, parameters, context)\u001B[39m\n\u001B[32m 941\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34mdo_execute\u001B[39m(\u001B[38;5;28mself\u001B[39m, cursor, statement, parameters, context=\u001B[38;5;28;01mNone\u001B[39;00m):\n\u001B[32m--> \u001B[39m\u001B[32m942\u001B[39m \u001B[43mcursor\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[43mstatement\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:172\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_cursor.execute\u001B[39m\u001B[34m(self, operation, parameters)\u001B[39m\n\u001B[32m 170\u001B[39m \u001B[38;5;28mself\u001B[39m._cursor = _cursor\n\u001B[32m 171\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mException\u001B[39;00m \u001B[38;5;28;01mas\u001B[39;00m error:\n\u001B[32m--> \u001B[39m\u001B[32m172\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_adapt_connection\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_handle_exception\u001B[49m\u001B[43m(\u001B[49m\u001B[43merror\u001B[49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:323\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_connection._handle_exception\u001B[39m\u001B[34m(self, error)\u001B[39m\n\u001B[32m 319\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;28mself\u001B[39m.dbapi.sqlite.OperationalError(\n\u001B[32m 320\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mno active connection\u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m 321\u001B[39m ) \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01merror\u001B[39;00m\n\u001B[32m 322\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m323\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m error\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/dialects/sqlite/aiosqlite.py:154\u001B[39m, in \u001B[36mAsyncAdapt_aiosqlite_cursor.execute\u001B[39m\u001B[34m(self, operation, parameters)\u001B[39m\n\u001B[32m 152\u001B[39m \u001B[38;5;28mself\u001B[39m.await_(_cursor.execute(operation))\n\u001B[32m 153\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m--> \u001B[39m\u001B[32m154\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mawait_\u001B[49m\u001B[43m(\u001B[49m\u001B[43m_cursor\u001B[49m\u001B[43m.\u001B[49m\u001B[43mexecute\u001B[49m\u001B[43m(\u001B[49m\u001B[43moperation\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mparameters\u001B[49m\u001B[43m)\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 156\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m _cursor.description:\n\u001B[32m 157\u001B[39m \u001B[38;5;28mself\u001B[39m.description = _cursor.description\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:132\u001B[39m, in \u001B[36mawait_only\u001B[39m\u001B[34m(awaitable)\u001B[39m\n\u001B[32m 123\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m exc.MissingGreenlet(\n\u001B[32m 124\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mgreenlet_spawn has not been called; can\u001B[39m\u001B[33m'\u001B[39m\u001B[33mt call await_only() \u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m 125\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mhere. Was IO attempted in an unexpected place?\u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m 126\u001B[39m )\n\u001B[32m 128\u001B[39m \u001B[38;5;66;03m# returns the control to the driver greenlet passing it\u001B[39;00m\n\u001B[32m 129\u001B[39m \u001B[38;5;66;03m# a coroutine to run. Once the awaitable is done, the driver greenlet\u001B[39;00m\n\u001B[32m 130\u001B[39m \u001B[38;5;66;03m# switches back to this greenlet with the result of awaitable that is\u001B[39;00m\n\u001B[32m 131\u001B[39m \u001B[38;5;66;03m# then returned to the caller (or raised as error)\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m132\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mcurrent\u001B[49m\u001B[43m.\u001B[49m\u001B[43mparent\u001B[49m\u001B[43m.\u001B[49m\u001B[43mswitch\u001B[49m\u001B[43m(\u001B[49m\u001B[43mawaitable\u001B[49m\u001B[43m)\u001B[49m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/sqlalchemy/util/_concurrency_py3k.py:196\u001B[39m, in \u001B[36mgreenlet_spawn\u001B[39m\u001B[34m(fn, _require_await, *args, **kwargs)\u001B[39m\n\u001B[32m 192\u001B[39m switch_occurred = \u001B[38;5;28;01mTrue\u001B[39;00m\n\u001B[32m 193\u001B[39m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[32m 194\u001B[39m \u001B[38;5;66;03m# wait for a coroutine from await_only and then return its\u001B[39;00m\n\u001B[32m 195\u001B[39m \u001B[38;5;66;03m# result back to it.\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m196\u001B[39m value = \u001B[38;5;28;01mawait\u001B[39;00m result\n\u001B[32m 197\u001B[39m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mBaseException\u001B[39;00m:\n\u001B[32m 198\u001B[39m \u001B[38;5;66;03m# this allows an exception to be raised within\u001B[39;00m\n\u001B[32m 199\u001B[39m \u001B[38;5;66;03m# the moderated greenlet so that it can continue\u001B[39;00m\n\u001B[32m 200\u001B[39m \u001B[38;5;66;03m# its expected flow.\u001B[39;00m\n\u001B[32m 201\u001B[39m result = context.throw(*sys.exc_info())\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/cursor.py:48\u001B[39m, in \u001B[36mCursor.execute\u001B[39m\u001B[34m(self, sql, parameters)\u001B[39m\n\u001B[32m 46\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m parameters \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 47\u001B[39m parameters = []\n\u001B[32m---> \u001B[39m\u001B[32m48\u001B[39m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m._execute(\u001B[38;5;28mself\u001B[39m._cursor.execute, sql, parameters)\n\u001B[32m 49\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/cursor.py:40\u001B[39m, in \u001B[36mCursor._execute\u001B[39m\u001B[34m(self, fn, *args, **kwargs)\u001B[39m\n\u001B[32m 38\u001B[39m \u001B[38;5;28;01masync\u001B[39;00m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m_execute\u001B[39m(\u001B[38;5;28mself\u001B[39m, fn, *args, **kwargs):\n\u001B[32m 39\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"Execute the given function on the shared connection's thread.\"\"\"\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m40\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m \u001B[38;5;28mself\u001B[39m._conn._execute(fn, *args, **kwargs)\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/core.py:132\u001B[39m, in \u001B[36mConnection._execute\u001B[39m\u001B[34m(self, fn, *args, **kwargs)\u001B[39m\n\u001B[32m 128\u001B[39m future = asyncio.get_event_loop().create_future()\n\u001B[32m 130\u001B[39m \u001B[38;5;28mself\u001B[39m._tx.put_nowait((future, function))\n\u001B[32m--> \u001B[39m\u001B[32m132\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mawait\u001B[39;00m future\n",
"\u001B[36mFile \u001B[39m\u001B[32m~/cognee/.venv/lib/python3.11/site-packages/aiosqlite/core.py:115\u001B[39m, in \u001B[36mConnection.run\u001B[39m\u001B[34m(self)\u001B[39m\n\u001B[32m 113\u001B[39m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[32m 114\u001B[39m LOG.debug(\u001B[33m\"\u001B[39m\u001B[33mexecuting \u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m\"\u001B[39m, function)\n\u001B[32m--> \u001B[39m\u001B[32m115\u001B[39m result = \u001B[43mfunction\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 116\u001B[39m LOG.debug(\u001B[33m\"\u001B[39m\u001B[33moperation \u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[33m completed\u001B[39m\u001B[33m\"\u001B[39m, function)\n\u001B[32m 117\u001B[39m future.get_loop().call_soon_threadsafe(set_result, future, result)\n",
"\u001B[31mOperationalError\u001B[39m: (sqlite3.OperationalError) database is locked\n[SQL: \nCREATE TABLE data (\n\tid UUID NOT NULL, \n\tname VARCHAR, \n\textension VARCHAR, \n\tmime_type VARCHAR, \n\traw_data_location VARCHAR, \n\towner_id UUID, \n\tcontent_hash VARCHAR, \n\texternal_metadata JSON, \n\tnode_set JSON, \n\ttoken_count INTEGER, \n\tcreated_at DATETIME, \n\tupdated_at DATETIME, \n\tPRIMARY KEY (id)\n)\n\n]\n(Background on this error at: https://sqlalche.me/e/20/e3q8)"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"\u001b[2m2025-06-18T18:16:22.634421\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:22.634807\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:22.635098\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `fc460a84-f4ef-5b2f-a865-50bfadf0e05b`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks(tasks: [Task], data)\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:22.636482\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mOntology file 'None' not found. No owl ontology will be attached to the graph.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:22.646923\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `1aace1af-5592-51b2-8d66-9b170e528df1`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks(tasks: [Task], data)\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:22.647397\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:22.647792\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:22.652423\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task started: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:22.761458\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:22 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:28 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:28 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:30 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:30 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:31 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:32 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:32 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:34 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:34 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:36 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:36 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.676794\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'person' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.677467\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'alice' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.677726\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'animal' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.678021\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'white rabbit' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.678303\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dinah' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.678553\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'location' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.678805\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'garden' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.679041\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rabbit hole' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.679272\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'object' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.679503\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mirror' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.679780\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'key' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.680027\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bottle' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.680294\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cake' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.680518\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'gloves' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.680763\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'fan' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.681009\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hall' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.681314\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mouse' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.681551\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dodo' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.681781\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lory' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.682014\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'eaglet' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.682222\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'duck' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.682487\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'historical figure' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.682765\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'william the conqueror' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.682964\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mary ann' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.683246\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'character' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.683448\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'caterpillar' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.683687\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rabbit' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.683862\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bill' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.684042\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'item' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.684261\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mushroom' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.684472\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'room' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.684688\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'concept' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.684910\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'fairy tales' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.685177\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pigeon' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.685390\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'duchess' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.685601\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cook' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.685846\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cheshire cat' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.686051\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'march hare' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.686302\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hatter' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.686775\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'queen' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.687032\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pig' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.687242\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'baby' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.687470\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'event' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.687667\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'invitation' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.688056\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dormouse' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.688292\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'queen of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.688525\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'king of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.688714\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'knave of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.688994\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rose tree' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.689294\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tea party' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.689557\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'place' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.689793\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'wonderland' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.690243\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'gryphon' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.690583\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mock turtle' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.690879\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'king' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.691149\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'soldiers' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.691382\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hedgehog' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.691635\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'flamingo' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.692037\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dance' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.692259\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lobster quadrille' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.692468\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'literary work' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.692847\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'alices adventures in wonderland' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.693271\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'duchess cook' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.693497\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lizard' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.693679\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'jurymen' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.694006\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'food' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.694342\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tarts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.694653\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'date' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.694893\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '14 march' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.695298\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '15 march' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.695644\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '16 march' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.695992\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'trial' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.696572\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'shepherd boy' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.696903\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'little sister' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.697233\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'narrator' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.697481\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'farm yard' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:36.697714\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'summer days' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\u001b[92m20:16:38 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:40 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:41 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:41 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\n",
"\u001b[2m2025-06-18T18:16:42.468100\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:42 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
"\u001b[1m\n",
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:46 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:46 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:47 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:47 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:47 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:47 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:48 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:49 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:49 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:49.213666\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\u001b[92m20:16:49 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:50 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:52 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:52 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:53 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:54 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:54.599254\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:54.599686\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:54.600001\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:54.600414\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task completed: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:54.600682\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:54.600917\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:16:54.601152\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `1aace1af-5592-51b2-8d66-9b170e528df1`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks(tasks: [Task], data)\u001b[0m]\u001b[0m"
]
},
{
"data": {
"text/plain": [
"PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('ca449755-faf5-526b-9c3c-d2ee441350ef'), payload=None)"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import cognee\n",
"await cognee.add(file_path)\n",
"await cognee.cognify()"
]
"execution_count": 3
},
{
"cell_type": "markdown",
@ -354,35 +184,35 @@
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[92m20:16:54 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:55 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:56 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:56 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:56 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:56 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:56 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:16:57 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"\u001B[92m20:16:54 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:55 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:56 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:16:57 - LiteLLM:INFO\u001B[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:16:59 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:16:59 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m"
|
||||
"\u001B[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001B[0m\u001B[92m20:16:59 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001B[0m\u001B[92m20:16:59 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001B[0m"
|
||||
]
|
||||
},
|
||||
{
@ -410,35 +240,35 @@
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:00 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:01 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:02 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:03 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:03 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:03 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:00 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:01 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:02 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:03 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:03 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:03 - LiteLLM:INFO\u001B[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:17:05 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:17:05 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m"
|
||||
"\u001B[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001B[0m\u001B[92m20:17:05 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001B[0m\u001B[92m20:17:05 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001B[0m"
|
||||
]
|
||||
},
|
||||
{
@ -466,35 +296,35 @@
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:06 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:07 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/text-embedding-3-large\u001b[0m\u001b[92m20:17:07 - LiteLLM:INFO\u001b[0m: utils.py:3101 - \n",
|
||||
"\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:06 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:07 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/text-embedding-3-large\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/text-embedding-3-large\u001B[0m\u001B[92m20:17:07 - LiteLLM:INFO\u001B[0m: utils.py:3101 - \n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\n",
|
||||
"\u001b[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001b[0m\u001b[92m20:17:08 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m\u001b[92m20:17:08 - LiteLLM:INFO\u001b[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001b[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001b[0m"
|
||||
"\u001B[1m\n",
|
||||
"LiteLLM completion() model= gpt-4o-mini; provider = openai\u001B[0m\u001B[92m20:17:08 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001B[0m\u001B[92m20:17:08 - LiteLLM:INFO\u001B[0m: cost_calculator.py:655 - selected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\n",
|
||||
"\u001B[1mselected model name for cost calculation: openai/gpt-4o-mini-2024-07-18\u001B[0m"
|
||||
]
|
||||
},
|
||||
{
@ -531,8 +361,8 @@
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-06-18T18:17:08.814190\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph visualization saved as /Users/borisarzentar/graph_visualization.html\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-06-18T18:17:08.814882\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mThe HTML file has been stored on your home directory! Navigate there with cd ~\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m"
|
||||
"\u001B[2m2025-06-18T18:17:08.814190\u001B[0m [\u001B[32m\u001B[1minfo \u001B[0m] \u001B[1mGraph visualization saved as /Users/borisarzentar/graph_visualization.html\u001B[0m [\u001B[0m\u001B[1m\u001B[34mcognee.shared.logging_utils\u001B[0m]\u001B[0m\n",
|
||||
"\u001B[2m2025-06-18T18:17:08.814882\u001B[0m [\u001B[32m\u001B[1minfo \u001B[0m] \u001B[1mThe HTML file has been stored on your home directory! Navigate there with cd ~\u001B[0m [\u001B[0m\u001B[1m\u001B[34mcognee.shared.logging_utils\u001B[0m]\u001B[0m"
|
||||
]
|
||||
},
|
||||
{
686
notebooks/node_scores.ipynb
vendored
File diff suppressed because one or more lines are too long
54
poetry.lock
generated
@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "aiobotocore"
|
||||
@ -455,7 +455,7 @@ description = "Timeout context manager for asyncio programs"
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "python_version == \"3.11\" and python_full_version < \"3.11.3\" and extra == \"falkordb\""
|
||||
markers = "extra == \"falkordb\" and python_full_version < \"3.11.3\" and python_version == \"3.11\""
|
||||
files = [
|
||||
{file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"},
|
||||
{file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
|
||||
@ -599,7 +599,7 @@ description = "Backport of CPython tarfile module"
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "(python_version == \"3.10\" or python_version == \"3.11\") and extra == \"deepeval\""
|
||||
markers = "extra == \"deepeval\" and python_version < \"3.12\""
|
||||
files = [
|
||||
{file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"},
|
||||
{file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"},
|
||||
@ -1406,6 +1406,27 @@ files = [
{file = "coverage-7.9.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0b3496922cb5f4215bf5caaef4cf12364a26b0be82e9ed6d050f3352cf2d7ef0"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9565c3ab1c93310569ec0d86b017f128f027cab0b622b7af288696d7ed43a16d"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2241ad5dbf79ae1d9c08fe52b36d03ca122fb9ac6bca0f34439e99f8327ac89f"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb5838701ca68b10ebc0937dbd0eb81974bac54447c55cd58dea5bca8451029"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a25f814591a8c0c5372c11ac8967f669b97444c47fd794926e175c4047ece"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2d04b16a6062516df97969f1ae7efd0de9c31eb6ebdceaa0d213b21c0ca1a683"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7931b9e249edefb07cd6ae10c702788546341d5fe44db5b6108a25da4dca513f"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52e92b01041151bf607ee858e5a56c62d4b70f4dac85b8c8cb7fb8a351ab2c10"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-win32.whl", hash = "sha256:684e2110ed84fd1ca5f40e89aa44adf1729dc85444004111aa01866507adf363"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:437c576979e4db840539674e68c84b3cda82bc824dd138d56bead1435f1cb5d7"},
|
||||
{file = "coverage-7.9.1-cp313-cp313t-win_arm64.whl", hash = "sha256:18a0912944d70aaf5f399e350445738a1a20b50fbea788f640751c2ed9208b6c"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f424507f57878e424d9a95dc4ead3fbdd72fd201e404e861e465f28ea469951"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:535fde4001b2783ac80865d90e7cc7798b6b126f4cd8a8c54acfe76804e54e58"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02532fd3290bb8fa6bec876520842428e2a6ed6c27014eca81b031c2d30e3f71"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56f5eb308b17bca3bbff810f55ee26d51926d9f89ba92707ee41d3c061257e55"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfa447506c1a52271f1b0de3f42ea0fa14676052549095e378d5bff1c505ff7b"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9ca8e220006966b4a7b68e8984a6aee645a0384b0769e829ba60281fe61ec4f7"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:49f1d0788ba5b7ba65933f3a18864117c6506619f5ca80326b478f72acf3f385"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:68cd53aec6f45b8e4724c0950ce86eacb775c6be01ce6e3669fe4f3a21e768ed"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-win32.whl", hash = "sha256:95335095b6c7b1cc14c3f3f17d5452ce677e8490d101698562b2ffcacc304c8d"},
|
||||
{file = "coverage-7.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:e1b5191d1648acc439b24721caab2fd0c86679d8549ed2c84d5a7ec1bedcc244"},
|
||||
{file = "coverage-7.9.1-pp39.pp310.pp311-none-any.whl", hash = "sha256:db0f04118d1db74db6c9e1cb1898532c7dcc220f1d2718f058601f7c3f499514"},
|
||||
{file = "coverage-7.9.1-py3-none-any.whl", hash = "sha256:66b974b145aa189516b6bf2d8423e888b742517d37872f6ee4c5be0073bd9a3c"},
|
||||
{file = "coverage-7.9.1.tar.gz", hash = "sha256:6cf43c78c4282708a28e466316935ec7489a9c487518a77fa68f716c67909cec"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -2118,7 +2139,7 @@ description = "Fast, light, accurate library built for retrieval embedding gener
optional = true
|
||||
python-versions = ">=3.9.0"
|
||||
groups = ["main"]
|
||||
markers = "python_version == \"3.10\" and extra == \"codegraph\" or python_version == \"3.11\" and extra == \"codegraph\" or python_version == \"3.12\" and extra == \"codegraph\""
|
||||
markers = "extra == \"codegraph\" and python_version <= \"3.12\""
|
||||
files = [
|
||||
{file = "fastembed-0.6.0-py3-none-any.whl", hash = "sha256:a08385e9388adea0529a586004f2d588c9787880a510e4e5d167127a11e75328"},
|
||||
{file = "fastembed-0.6.0.tar.gz", hash = "sha256:5c9ead25f23449535b07243bbe1f370b820dcc77ec2931e61674e3fe7ff24733"},
|
||||
@ -2857,7 +2878,7 @@ description = "HTTP/2-based RPC framework"
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"gemini\" or extra == \"deepeval\" or extra == \"weaviate\" or extra == \"qdrant\" or extra == \"milvus\""
|
||||
markers = "extra == \"gemini\" or extra == \"weaviate\" or extra == \"qdrant\" or extra == \"deepeval\" or extra == \"milvus\" or python_version == \"3.10\" and (extra == \"deepeval\" or extra == \"weaviate\" or extra == \"qdrant\" or extra == \"gemini\" or extra == \"milvus\")"
|
||||
files = [
|
||||
{file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"},
|
||||
{file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"},
|
||||
@ -3969,6 +3990,8 @@ python-versions = "*"
groups = ["main"]
|
||||
files = [
|
||||
{file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"},
|
||||
{file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"},
|
||||
{file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -5011,7 +5034,7 @@ description = "Python logging made (stupidly) simple"
optional = true
|
||||
python-versions = "<4.0,>=3.5"
|
||||
groups = ["main"]
|
||||
markers = "python_version == \"3.10\" and extra == \"codegraph\" or python_version == \"3.11\" and extra == \"codegraph\" or python_version == \"3.12\" and extra == \"codegraph\""
|
||||
markers = "extra == \"codegraph\" and python_version <= \"3.12\""
|
||||
files = [
|
||||
{file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"},
|
||||
{file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"},
|
||||
@ -5706,7 +5729,7 @@ description = "Python extension for MurmurHash (MurmurHash3), a set of fast and
optional = true
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
markers = "python_version == \"3.10\" and extra == \"codegraph\" or python_version == \"3.11\" and extra == \"codegraph\" or python_version == \"3.12\" and extra == \"codegraph\""
|
||||
markers = "extra == \"codegraph\" and python_version <= \"3.12\""
|
||||
files = [
|
||||
{file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:eaf4ac5c6ee18ca9232238364d7f2a213278ae5ca97897cafaa123fcc7bb8bec"},
|
||||
{file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48f9aa8ccb9ad1d577a16104834ac44ff640d8de8c0caed09a2300df7ce8460a"},
|
||||
@ -6208,7 +6231,7 @@ description = "Python package for creating and manipulating graphs and networks"
optional = false
|
||||
python-versions = ">=3.11"
|
||||
groups = ["main"]
|
||||
markers = "python_version >= \"3.11\""
|
||||
markers = "python_version >= \"3.11\" and python_version < \"3.13\" or python_full_version == \"3.13.0\""
|
||||
files = [
|
||||
{file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"},
|
||||
{file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"},
|
||||
@ -6347,7 +6370,7 @@ description = "Fundamental package for array computing in Python"
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
markers = "python_version == \"3.10\" or python_version == \"3.11\""
|
||||
markers = "python_version < \"3.12\""
|
||||
files = [
|
||||
{file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
|
||||
{file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
|
||||
@ -6394,7 +6417,7 @@ description = "Fundamental package for array computing in Python"
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
markers = "python_version >= \"3.12\""
|
||||
markers = "python_version == \"3.12\" or python_full_version == \"3.13.0\""
|
||||
files = [
|
||||
{file = "numpy-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6326ab99b52fafdcdeccf602d6286191a79fe2fda0ae90573c5814cd2b0bc1b8"},
|
||||
{file = "numpy-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0937e54c09f7a9a68da6889362ddd2ff584c02d015ec92672c099b61555f8911"},
|
||||
@ -7618,6 +7641,7 @@ files = [
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
|
||||
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
|
||||
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
|
||||
{file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"},
|
||||
{file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
|
||||
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
|
||||
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
|
||||
@ -7745,7 +7769,7 @@ description = "Fast and parallel snowball stemmer"
optional = true
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "python_version == \"3.10\" and extra == \"codegraph\" or python_version == \"3.11\" and extra == \"codegraph\" or python_version == \"3.12\" and extra == \"codegraph\""
|
||||
markers = "extra == \"codegraph\" and python_version <= \"3.12\""
|
||||
files = [
|
||||
{file = "py_rust_stemmers-0.1.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bfbd9034ae00419ff2154e33b8f5b4c4d99d1f9271f31ed059e5c7e9fa005844"},
|
||||
{file = "py_rust_stemmers-0.1.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7162ae66df2bb0fc39b350c24a049f5f5151c03c046092ba095c2141ec223a2"},
|
||||
@ -8654,7 +8678,7 @@ description = "Python for Window Extensions"
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "(extra == \"qdrant\" or extra == \"deepeval\") and (extra == \"qdrant\" or extra == \"deepeval\" or extra == \"notebook\" or extra == \"dev\") and platform_system == \"Windows\" or sys_platform == \"win32\""
|
||||
markers = "(sys_platform == \"win32\" or extra == \"qdrant\" or extra == \"deepeval\") and (sys_platform == \"win32\" or platform_system == \"Windows\")"
|
||||
files = [
|
||||
{file = "pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1"},
|
||||
{file = "pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d"},
|
||||
@ -9741,7 +9765,7 @@ description = "Fundamental algorithms for scientific computing in Python"
optional = false
|
||||
python-versions = ">=3.11"
|
||||
groups = ["main"]
|
||||
markers = "python_version >= \"3.11\""
|
||||
markers = "python_version >= \"3.11\" and python_version < \"3.13\" or python_full_version == \"3.13.0\""
|
||||
files = [
|
||||
{file = "scipy-1.16.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:deec06d831b8f6b5fb0b652433be6a09db29e996368ce5911faf673e78d20085"},
|
||||
{file = "scipy-1.16.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d30c0fe579bb901c61ab4bb7f3eeb7281f0d4c4a7b52dbf563c89da4fd2949be"},
|
||||
@ -11637,7 +11661,7 @@ description = ""
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
markers = "python_version == \"3.10\" and os_name == \"nt\" or python_version == \"3.11\" and os_name == \"nt\" or python_version == \"3.12\" and os_name == \"nt\""
|
||||
markers = "os_name == \"nt\" and python_version <= \"3.12\""
|
||||
files = [
|
||||
{file = "win-precise-time-1.4.2.tar.gz", hash = "sha256:89274785cbc5f2997e01675206da3203835a442c60fd97798415c6b3c179c0b9"},
|
||||
{file = "win_precise_time-1.4.2-cp310-cp310-win32.whl", hash = "sha256:7fa13a2247c2ef41cd5e9b930f40716eacc7fc1f079ea72853bd5613fe087a1a"},
|
||||
@ -11661,7 +11685,7 @@ description = "A small Python utility to set file creation time on Windows"
optional = true
|
||||
python-versions = ">=3.5"
|
||||
groups = ["main"]
|
||||
markers = "(python_version == \"3.10\" or python_version == \"3.11\" or python_version == \"3.12\") and extra == \"codegraph\" and sys_platform == \"win32\" and python_version < \"3.13\""
|
||||
markers = "sys_platform == \"win32\" and extra == \"codegraph\" and python_version <= \"3.12\""
|
||||
files = [
|
||||
{file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"},
|
||||
{file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"},