Merge branch 'main' of https://github.com/topoteretes/cognee
commit 6b7899e160
45 changed files with 9824 additions and 9539 deletions
10
.github/workflows/basic_tests.yml
vendored
@@ -162,11 +162,11 @@ jobs:
    runs-on: ubuntu-22.04
    env:
      STRUCTURED_OUTPUT_FRAMEWORK: "BAML"
      BAML_LLM_PROVIDER: azure-openai
      BAML_LLM_MODEL: ${{ secrets.LLM_MODEL }}
      BAML_LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
      BAML_LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
      BAML_LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
      BAML_LLM_PROVIDER: openai
      BAML_LLM_MODEL: ${{ secrets.OPENAI_MODEL }}
      BAML_LLM_ENDPOINT: ${{ secrets.OPENAI_ENDPOINT }}
      BAML_LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      # BAML_LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}

      LLM_PROVIDER: openai
      LLM_MODEL: ${{ secrets.LLM_MODEL }}
20
.github/workflows/dockerhub-mcp.yml
vendored
@@ -7,14 +7,29 @@ on:

jobs:
  docker-build-and-push:
    runs-on: ubuntu-latest
    runs-on:
      group: Default
      labels:
        - docker_build_runner

    steps:
      - name: Check and free disk space before build
        run: |
          echo "=== Before cleanup ==="
          df -h
          echo "Removing unused preinstalled SDKs to free space..."
          sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc || true
          docker system prune -af || true
          echo "=== After cleanup ==="
          df -h

      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          buildkitd-flags: --root /tmp/buildkit

      - name: Log in to Docker Hub
        uses: docker/login-action@v3

@@ -34,7 +49,7 @@ jobs:

      - name: Build and push
        id: build
        uses: docker/build-push-action@v5
        uses: docker/build-push-action@v6
        with:
          context: .
          platforms: linux/amd64,linux/arm64

@@ -45,5 +60,6 @@ jobs:
          cache-from: type=registry,ref=cognee/cognee-mcp:buildcache
          cache-to: type=registry,ref=cognee/cognee-mcp:buildcache,mode=max


      - name: Image digest
        run: echo ${{ steps.build.outputs.digest }}
78
.github/workflows/scorecard.yml
vendored
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
# This workflow uses actions that are not certified by GitHub. They are provided
|
||||
# by a third-party and are governed by separate terms of service, privacy
|
||||
# policy, and support documentation.
|
||||
|
||||
name: Scorecard supply-chain security
|
||||
on:
|
||||
# For Branch-Protection check. Only the default branch is supported. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
|
||||
branch_protection_rule:
|
||||
# To guarantee Maintained check is occasionally updated. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
|
||||
schedule:
|
||||
- cron: '35 8 * * 2'
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
|
||||
# Declare default permissions as read only.
|
||||
permissions: read-all
|
||||
|
||||
jobs:
|
||||
analysis:
|
||||
name: Scorecard analysis
|
||||
runs-on: ubuntu-latest
|
||||
# `publish_results: true` only works when run from the default branch. conditional can be removed if disabled.
|
||||
if: github.event.repository.default_branch == github.ref_name || github.event_name == 'pull_request'
|
||||
permissions:
|
||||
# Needed to upload the results to code-scanning dashboard.
|
||||
security-events: write
|
||||
# Needed to publish results and get a badge (see publish_results below).
|
||||
id-token: write
|
||||
# Uncomment the permissions below if installing in a private repository.
|
||||
# contents: read
|
||||
# actions: read
|
||||
|
||||
steps:
|
||||
- name: "Checkout code"
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: "Run analysis"
|
||||
uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1
|
||||
with:
|
||||
results_file: results.sarif
|
||||
results_format: sarif
|
||||
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
|
||||
# - you want to enable the Branch-Protection check on a *public* repository, or
|
||||
# - you are installing Scorecard on a *private* repository
|
||||
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional.
|
||||
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
|
||||
|
||||
# Public repositories:
|
||||
# - Publish results to OpenSSF REST API for easy access by consumers
|
||||
# - Allows the repository to include the Scorecard badge.
|
||||
# - See https://github.com/ossf/scorecard-action#publishing-results.
|
||||
# For private repositories:
|
||||
# - `publish_results` will always be set to `false`, regardless
|
||||
# of the value entered here.
|
||||
publish_results: true
|
||||
|
||||
# (Optional) Uncomment file_mode if you have a .gitattributes with files marked export-ignore
|
||||
# file_mode: git
|
||||
|
||||
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
|
||||
# format to the repository Actions tab.
|
||||
- name: "Upload artifact"
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
|
||||
with:
|
||||
name: SARIF file
|
||||
path: results.sarif
|
||||
retention-days: 5
|
||||
|
||||
# Upload the results to GitHub's code scanning dashboard (optional).
|
||||
# Commenting out will disable upload of results to your repo's Code Scanning dashboard
|
||||
- name: "Upload to code-scanning"
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
28
.github/workflows/temporal_graph_tests.yml
vendored
|
|
@ -34,10 +34,9 @@ jobs:
|
|||
- name: Run Temporal Graph with Kuzu (lancedb + sqlite)
|
||||
env:
|
||||
ENV: 'dev'
|
||||
LLM_MODEL: ${{ secrets.LLM_MODEL }}
|
||||
LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
|
||||
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
|
||||
LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
|
||||
LLM_MODEL: ${{ secrets.OPENAI_MODEL }}
|
||||
LLM_ENDPOINT: ${{ secrets.OPENAI_ENDPOINT }}
|
||||
LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
|
||||
EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
|
||||
EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
|
||||
|
|
@ -73,10 +72,9 @@ jobs:
|
|||
- name: Run Temporal Graph with Neo4j (lancedb + sqlite)
|
||||
env:
|
||||
ENV: 'dev'
|
||||
LLM_MODEL: ${{ secrets.LLM_MODEL }}
|
||||
LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
|
||||
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
|
||||
LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
|
||||
LLM_MODEL: ${{ secrets.OPENAI_MODEL }}
|
||||
LLM_ENDPOINT: ${{ secrets.OPENAI_ENDPOINT }}
|
||||
LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
|
||||
EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
|
||||
EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
|
||||
|
|
@ -125,10 +123,9 @@ jobs:
|
|||
- name: Run Temporal Graph with Kuzu (postgres + pgvector)
|
||||
env:
|
||||
ENV: dev
|
||||
LLM_MODEL: ${{ secrets.LLM_MODEL }}
|
||||
LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
|
||||
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
|
||||
LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
|
||||
LLM_MODEL: ${{ secrets.OPENAI_MODEL }}
|
||||
LLM_ENDPOINT: ${{ secrets.OPENAI_ENDPOINT }}
|
||||
LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
|
||||
EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
|
||||
EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
|
||||
|
|
@ -192,10 +189,9 @@ jobs:
|
|||
- name: Run Temporal Graph with Neo4j (postgres + pgvector)
|
||||
env:
|
||||
ENV: dev
|
||||
LLM_MODEL: ${{ secrets.LLM_MODEL }}
|
||||
LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
|
||||
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
|
||||
LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
|
||||
LLM_MODEL: ${{ secrets.OPENAI_MODEL }}
|
||||
LLM_ENDPOINT: ${{ secrets.OPENAI_ENDPOINT }}
|
||||
LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
|
||||
EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
|
||||
EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
|
||||
|
|
|
|||
132
AGENTS.md
Normal file
@@ -0,0 +1,132 @@
## Repository Guidelines

This document summarizes how to work with the cognee repository: how it’s organized, how to build, test, lint, and contribute. It mirrors our actual tooling and CI while providing quick commands for local development.

## Project Structure & Module Organization

- `cognee/`: Core Python library and API.
  - `api/`: FastAPI application and versioned routers (add, cognify, memify, search, delete, users, datasets, responses, visualize, settings, sync, update, checks).
  - `cli/`: CLI entry points and subcommands invoked via `cognee` / `cognee-cli`.
  - `infrastructure/`: Databases, LLM providers, embeddings, loaders, and storage adapters.
  - `modules/`: Domain logic (graph, retrieval, ontology, users, processing, observability, etc.).
  - `tasks/`: Reusable tasks (e.g., code graph, web scraping, storage). Extend with new tasks here.
  - `eval_framework/`: Evaluation utilities and adapters.
  - `shared/`: Cross-cutting helpers (logging, settings, utils).
  - `tests/`: Unit, integration, CLI, and end-to-end tests organized by feature.
  - `__main__.py`: Entrypoint to route to CLI.
- `cognee-mcp/`: Model Context Protocol server exposing cognee as MCP tools (SSE/HTTP/stdio). Contains its own README and Dockerfile.
- `cognee-frontend/`: Next.js UI for local development and demos.
- `distributed/`: Utilities for distributed execution (Modal, workers, queues).
- `examples/`: Example scripts demonstrating the public APIs and features (graph, code graph, multimodal, permissions, etc.).
- `notebooks/`: Jupyter notebooks for demos and tutorials.
- `alembic/`: Database migrations for relational backends.

Notes:
- Co-locate feature-specific helpers under their respective package (`modules/`, `infrastructure/`, or `tasks/`).
- Extend the system by adding new tasks, loaders, or retrievers rather than modifying core pipeline mechanisms (see the sketch below).
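To make the extension note above concrete, here is a minimal, hedged sketch of a custom task. The wrapper call mirrors `Task(resolve_data_directories, include_subdirectories=True)` shown later in this commit; the import path is an assumption and should be checked against `cognee/modules/pipelines/`.

```python
# Hedged sketch of extending cognee with a new task instead of changing
# core pipeline mechanisms. The import path below is assumed (not shown in
# this commit); verify it against cognee/modules/pipelines/ before use.
from cognee.modules.pipelines.tasks.task import Task


async def tag_short_documents(data: list[str], max_words: int = 50) -> list[dict]:
    """A task is just an async callable that transforms the data it receives."""
    return [{"text": text, "is_short": len(text.split()) <= max_words} for text in data]


# Tasks are composed into a pipeline as a list of Task wrappers; extra keyword
# arguments are passed through the wrapper, the same pattern used in
# cognee/api/v1/add later in this diff.
tasks = [
    Task(tag_short_documents, max_words=80),
]
```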
## Build, Test, and Development Commands

Python (root) – requires Python >= 3.10 and < 3.14. We recommend `uv` for speed and reproducibility.

- Create/refresh env and install dev deps:
  ```bash
  uv sync --dev --all-extras --reinstall
  ```

- Run the CLI (examples):
  ```bash
  uv run cognee-cli add "Cognee turns documents into AI memory."
  uv run cognee-cli cognify
  uv run cognee-cli search "What does cognee do?"
  uv run cognee-cli -ui   # Launches UI, backend API, and MCP server together
  ```

- Start the FastAPI server directly:
  ```bash
  uv run python -m cognee.api.client
  ```

- Run tests (CI mirrors these commands):
  ```bash
  uv run pytest cognee/tests/unit/ -v
  uv run pytest cognee/tests/integration/ -v
  ```

- Lint and format (ruff):
  ```bash
  uv run ruff check .
  uv run ruff format .
  ```

- Optional static type checks (mypy):
  ```bash
  uv run mypy cognee/
  ```

MCP Server (`cognee-mcp/`):

- Install and run locally:
  ```bash
  cd cognee-mcp
  uv sync --dev --all-extras --reinstall
  uv run python src/server.py                 # stdio (default)
  uv run python src/server.py --transport sse
  uv run python src/server.py --transport http --host 127.0.0.1 --port 8000 --path /mcp
  ```

- API Mode (connect to a running Cognee API):
  ```bash
  uv run python src/server.py --transport sse --api-url http://localhost:8000 --api-token YOUR_TOKEN
  ```

- Docker quickstart (examples): see `cognee-mcp/README.md` for full details
  ```bash
  docker run -e TRANSPORT_MODE=http --env-file ./.env -p 8000:8000 --rm -it cognee/cognee-mcp:main
  ```

Frontend (`cognee-frontend/`):
```bash
cd cognee-frontend
npm install
npm run dev     # Next.js dev server
npm run lint    # ESLint
npm run build && npm start
```

## Coding Style & Naming Conventions

Python:
- 4-space indentation, modules and functions in `snake_case`, classes in `PascalCase`.
- Public APIs should be type-annotated where practical.
- Use `ruff format` before committing; `ruff check` enforces import hygiene and style (line-length 100 configured in `pyproject.toml`).
- Prefer explicit, structured error handling. Use shared logging utilities in `cognee.shared.logging_utils` (see the sketch below).
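As a quick illustration of these conventions (a hedged sketch; the `get_logger` usage mirrors the retriever modules touched elsewhere in this commit, while the class and function names are purely illustrative):

```python
from cognee.shared.logging_utils import get_logger

logger = get_logger()


class DatasetSummary:
    """Classes use PascalCase; public APIs carry type annotations."""

    def __init__(self, name: str, item_count: int) -> None:
        self.name = name
        self.item_count = item_count


def summarize_dataset(name: str, items: list[str]) -> DatasetSummary:
    """Modules and functions use snake_case with 4-space indentation."""
    if not items:
        # Prefer explicit handling plus shared logging over silent failures.
        logger.warning("Dataset '%s' has no items to summarize", name)
    return DatasetSummary(name=name, item_count=len(items))
```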
MCP server and Frontend:
- Follow the local `README.md` and ESLint/TypeScript configuration in `cognee-frontend/`.

## Testing Guidelines

- Place Python tests under `cognee/tests/`.
  - Unit tests: `cognee/tests/unit/`
  - Integration tests: `cognee/tests/integration/`
  - CLI tests: `cognee/tests/cli_tests/`
- Name test files `test_*.py`. Use `pytest.mark.asyncio` for async tests (see the minimal sketch below).
- Avoid external state; rely on test fixtures and the CI-provided env vars when LLM/embedding providers are required. See CI workflows under `.github/workflows/` for expected environment variables.
- When adding public APIs, provide/update targeted examples under `examples/python/`.
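A minimal async test sketch following these conventions (hedged: the flow mirrors the add/cognify/search tests touched elsewhere in this commit; the file path and assertion are illustrative only):

```python
# e.g. cognee/tests/integration/test_example.py (hypothetical path)
import pytest

import cognee


@pytest.mark.asyncio
async def test_add_then_search_round_trip():
    # Start from a clean slate, as the existing tests in this commit do.
    await cognee.prune.prune_data()
    await cognee.prune.prune_system(metadata=True)

    await cognee.add("Cognee turns documents into AI memory.")
    await cognee.cognify()

    results = await cognee.search("What does cognee do?")
    assert results is not None
```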
## Commit & Pull Request Guidelines

- Use clear, imperative subjects (≤ 72 chars) and conventional commit styling in PR titles. Our CI validates semantic PR titles (see `.github/workflows/pr_lint`). Examples:
  - `feat(graph): add temporal edge weighting`
  - `fix(api): handle missing auth cookie`
  - `docs: update installation instructions`
- Reference related issues/discussions in the PR body and provide brief context.
- PRs should describe scope, list local test commands run, and mention any impacts on MCP server or UI if applicable.
- Sign commits and affirm the DCO (see `CONTRIBUTING.md`).

## CI Mirrors Local Commands

Our GitHub Actions run the same ruff checks and pytest suites shown above (`.github/workflows/basic_tests.yml` and related workflows). Use the commands in this document locally to minimize CI surprises.
@@ -97,7 +97,7 @@ Hosted platform:

### 📦 Installation

You can install Cognee using either **pip**, **poetry**, **uv** or any other python package manager.
You can install Cognee using either **pip**, **poetry**, **uv** or any other python package manager..

Cognee supports Python 3.10 to 3.12
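For example (hedged commands, assuming the published package name is `cognee`):

```bash
pip install cognee          # pip
poetry add cognee           # poetry
uv pip install cognee       # uv
```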
@@ -9,7 +9,7 @@ dependencies = [
    # For local cognee repo usage, remove the comment below and add the absolute path to cognee. Then run `uv sync --reinstall` in the mcp folder on local cognee changes.
    #"cognee[postgres,codegraph,gemini,huggingface,docs,neo4j] @ file:/Users/igorilic/Desktop/cognee",
    # TODO: Remove gemini from optional dependencies for new Cognee version after 0.3.4
    "cognee[postgres,codegraph,gemini,huggingface,docs,neo4j]==0.3.4",
    "cognee[postgres,docs,neo4j]==0.3.7",
    "fastmcp>=2.10.0,<3.0.0",
    "mcp>=1.12.0,<2.0.0",
    "uv>=0.6.3,<1.0.0",
|||
|
|
@ -37,12 +37,10 @@ async def run():
|
|||
|
||||
toolResult = await session.call_tool("prune", arguments={})
|
||||
|
||||
toolResult = await session.call_tool(
|
||||
"codify", arguments={"repo_path": "SOME_REPO_PATH"}
|
||||
)
|
||||
toolResult = await session.call_tool("cognify", arguments={})
|
||||
|
||||
toolResult = await session.call_tool(
|
||||
"search", arguments={"search_type": "CODE", "search_query": "exceptions"}
|
||||
"search", arguments={"search_type": "GRAPH_COMPLETION"}
|
||||
)
|
||||
|
||||
print(f"Cognify result: {toolResult.content}")
|
||||
|
|
|
|||
8575
cognee-mcp/uv.lock
generated
File diff suppressed because it is too large
|
|
@ -23,7 +23,7 @@ async def add(
|
|||
vector_db_config: dict = None,
|
||||
graph_db_config: dict = None,
|
||||
dataset_id: Optional[UUID] = None,
|
||||
preferred_loaders: dict[str, dict[str, Any]] = None,
|
||||
preferred_loaders: Optional[List[Union[str, dict[str, dict[str, Any]]]]] = None,
|
||||
incremental_loading: bool = True,
|
||||
data_per_batch: Optional[int] = 20,
|
||||
):
|
||||
|
|
@ -164,6 +164,15 @@ async def add(
|
|||
- TAVILY_API_KEY: YOUR_TAVILY_API_KEY
|
||||
|
||||
"""
|
||||
if preferred_loaders is not None:
|
||||
transformed = {}
|
||||
for item in preferred_loaders:
|
||||
if isinstance(item, dict):
|
||||
transformed.update(item)
|
||||
else:
|
||||
transformed[item] = {}
|
||||
preferred_loaders = transformed
|
||||
|
||||
tasks = [
|
||||
Task(resolve_data_directories, include_subdirectories=True),
|
||||
Task(
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
from uuid import UUID
|
||||
from cognee.modules.data.methods import has_dataset_data
|
||||
from cognee.modules.users.methods import get_default_user
|
||||
from cognee.modules.ingestion import discover_directory_datasets
|
||||
from cognee.modules.pipelines.operations.get_pipeline_status import get_pipeline_status
|
||||
|
|
@ -26,6 +27,16 @@ class datasets:
|
|||
|
||||
return await get_dataset_data(dataset.id)
|
||||
|
||||
@staticmethod
|
||||
async def has_data(dataset_id: str) -> bool:
|
||||
from cognee.modules.data.methods import get_dataset
|
||||
|
||||
user = await get_default_user()
|
||||
|
||||
dataset = await get_dataset(user.id, dataset_id)
|
||||
|
||||
return await has_dataset_data(dataset.id)
|
||||
|
||||
@staticmethod
|
||||
async def get_status(dataset_ids: list[UUID]) -> dict:
|
||||
return await get_pipeline_status(dataset_ids, pipeline_name="cognify_pipeline")
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
from uuid import UUID
|
||||
from typing import Union, Optional, List, Type
|
||||
|
||||
from cognee.infrastructure.databases.graph import get_graph_engine
|
||||
from cognee.modules.engine.models.node_set import NodeSet
|
||||
from cognee.modules.users.models import User
|
||||
from cognee.modules.search.types import SearchResult, SearchType, CombinedSearchResult
|
||||
|
|
|
|||
|
|
@ -56,7 +56,7 @@ class CogneeValidationError(CogneeApiError):
|
|||
self,
|
||||
message: str = "A validation error occurred.",
|
||||
name: str = "CogneeValidationError",
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||
log=True,
|
||||
log_level="ERROR",
|
||||
):
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ class DatabaseNotCreatedError(CogneeSystemError):
|
|||
self,
|
||||
message: str = "The database has not been created yet. Please call `await setup()` first.",
|
||||
name: str = "DatabaseNotCreatedError",
|
||||
status_code: int = status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
status_code: int = status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||
):
|
||||
super().__init__(message, name, status_code)
|
||||
|
||||
|
|
@ -99,7 +99,7 @@ class EmbeddingException(CogneeConfigurationError):
|
|||
self,
|
||||
message: str = "Embedding Exception.",
|
||||
name: str = "EmbeddingException",
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||
):
|
||||
super().__init__(message, name, status_code)
|
||||
|
||||
|
|
|
|||
|
|
@ -159,6 +159,11 @@ class GraphDBInterface(ABC):
|
|||
- get_connections
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
async def is_empty(self) -> bool:
|
||||
logger.warning("is_empty() is not implemented")
|
||||
return True
|
||||
|
||||
@abstractmethod
|
||||
async def query(self, query: str, params: dict) -> List[Any]:
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -198,6 +198,15 @@ class KuzuAdapter(GraphDBInterface):
|
|||
except FileNotFoundError:
|
||||
logger.warning(f"Kuzu S3 storage file not found: {self.db_path}")
|
||||
|
||||
async def is_empty(self) -> bool:
|
||||
query = """
|
||||
MATCH (n)
|
||||
RETURN true
|
||||
LIMIT 1;
|
||||
"""
|
||||
query_result = await self.query(query)
|
||||
return len(query_result) == 0
|
||||
|
||||
async def query(self, query: str, params: Optional[dict] = None) -> List[Tuple]:
|
||||
"""
|
||||
Execute a Kuzu query asynchronously with automatic reconnection.
|
||||
|
|
|
|||
|
|
@ -87,6 +87,15 @@ class Neo4jAdapter(GraphDBInterface):
|
|||
async with self.driver.session(database=self.graph_database_name) as session:
|
||||
yield session
|
||||
|
||||
async def is_empty(self) -> bool:
|
||||
query = """
|
||||
RETURN EXISTS {
|
||||
MATCH (n)
|
||||
} AS node_exists;
|
||||
"""
|
||||
query_result = await self.query(query)
|
||||
return not query_result[0]["node_exists"]
|
||||
|
||||
@deadlock_retry()
|
||||
async def query(
|
||||
self,
|
||||
|
|
|
|||
|
|
@ -124,7 +124,7 @@ class OllamaEmbeddingEngine(EmbeddingEngine):
|
|||
self.endpoint, json=payload, headers=headers, timeout=60.0
|
||||
) as response:
|
||||
data = await response.json()
|
||||
return data["embedding"]
|
||||
return data["embeddings"][0]
|
||||
|
||||
def get_vector_size(self) -> int:
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ class CollectionNotFoundError(CogneeValidationError):
|
|||
self,
|
||||
message,
|
||||
name: str = "CollectionNotFoundError",
|
||||
status_code: int = status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
status_code: int = status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||
log=True,
|
||||
log_level="DEBUG",
|
||||
):
|
||||
|
|
|
|||
|
|
@ -324,7 +324,6 @@ class LanceDBAdapter(VectorDBInterface):
|
|||
|
||||
def get_data_point_schema(self, model_type: BaseModel):
|
||||
related_models_fields = []
|
||||
|
||||
for field_name, field_config in model_type.model_fields.items():
|
||||
if hasattr(field_config, "model_fields"):
|
||||
related_models_fields.append(field_name)
|
||||
|
|
|
|||
|
|
@ -8,6 +8,6 @@ class FileContentHashingError(Exception):
|
|||
self,
|
||||
message: str = "Failed to hash content of the file.",
|
||||
name: str = "FileContentHashingError",
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||
):
|
||||
super().__init__(message, name, status_code)
|
||||
|
|
|
|||
|
|
@ -82,16 +82,16 @@ class LocalFileStorage(Storage):
|
|||
self.ensure_directory_exists(file_dir_path)
|
||||
|
||||
if overwrite or not os.path.exists(full_file_path):
|
||||
with open(
|
||||
full_file_path,
|
||||
mode="w" if isinstance(data, str) else "wb",
|
||||
encoding="utf-8" if isinstance(data, str) else None,
|
||||
) as file:
|
||||
if hasattr(data, "read"):
|
||||
data.seek(0)
|
||||
file.write(data.read())
|
||||
else:
|
||||
if isinstance(data, str):
|
||||
with open(full_file_path, mode="w", encoding="utf-8", newline="\n") as file:
|
||||
file.write(data)
|
||||
else:
|
||||
with open(full_file_path, mode="wb") as file:
|
||||
if hasattr(data, "read"):
|
||||
data.seek(0)
|
||||
file.write(data.read())
|
||||
else:
|
||||
file.write(data)
|
||||
|
||||
file.close()
|
||||
|
||||
|
|
|
|||
|
|
@ -70,18 +70,18 @@ class S3FileStorage(Storage):
|
|||
if overwrite or not await self.file_exists(file_path):
|
||||
|
||||
def save_data_to_file():
|
||||
with self.s3.open(
|
||||
full_file_path,
|
||||
mode="w" if isinstance(data, str) else "wb",
|
||||
encoding="utf-8" if isinstance(data, str) else None,
|
||||
) as file:
|
||||
if hasattr(data, "read"):
|
||||
data.seek(0)
|
||||
file.write(data.read())
|
||||
else:
|
||||
if isinstance(data, str):
|
||||
with self.s3.open(
|
||||
full_file_path, mode="w", encoding="utf-8", newline="\n"
|
||||
) as file:
|
||||
file.write(data)
|
||||
|
||||
file.close()
|
||||
else:
|
||||
with self.s3.open(full_file_path, mode="wb") as file:
|
||||
if hasattr(data, "read"):
|
||||
data.seek(0)
|
||||
file.write(data.read())
|
||||
else:
|
||||
file.write(data)
|
||||
|
||||
await run_async(save_data_to_file)
|
||||
|
||||
|
|
|
|||
|
|
@ -66,6 +66,90 @@ class BeautifulSoupLoader(LoaderInterface):
|
|||
can = extension in self.supported_extensions and mime_type in self.supported_mime_types
|
||||
return can
|
||||
|
||||
def _get_default_extraction_rules(self):
|
||||
# Comprehensive default extraction rules for common HTML content
|
||||
return {
|
||||
# Meta information
|
||||
"title": {"selector": "title", "all": False},
|
||||
"meta_description": {
|
||||
"selector": "meta[name='description']",
|
||||
"attr": "content",
|
||||
"all": False,
|
||||
},
|
||||
"meta_keywords": {
|
||||
"selector": "meta[name='keywords']",
|
||||
"attr": "content",
|
||||
"all": False,
|
||||
},
|
||||
# Open Graph meta tags
|
||||
"og_title": {
|
||||
"selector": "meta[property='og:title']",
|
||||
"attr": "content",
|
||||
"all": False,
|
||||
},
|
||||
"og_description": {
|
||||
"selector": "meta[property='og:description']",
|
||||
"attr": "content",
|
||||
"all": False,
|
||||
},
|
||||
# Main content areas (prioritized selectors)
|
||||
"article": {"selector": "article", "all": True, "join_with": "\n\n"},
|
||||
"main": {"selector": "main", "all": True, "join_with": "\n\n"},
|
||||
# Semantic content sections
|
||||
"headers_h1": {"selector": "h1", "all": True, "join_with": "\n"},
|
||||
"headers_h2": {"selector": "h2", "all": True, "join_with": "\n"},
|
||||
"headers_h3": {"selector": "h3", "all": True, "join_with": "\n"},
|
||||
"headers_h4": {"selector": "h4", "all": True, "join_with": "\n"},
|
||||
"headers_h5": {"selector": "h5", "all": True, "join_with": "\n"},
|
||||
"headers_h6": {"selector": "h6", "all": True, "join_with": "\n"},
|
||||
# Text content
|
||||
"paragraphs": {"selector": "p", "all": True, "join_with": "\n\n"},
|
||||
"blockquotes": {"selector": "blockquote", "all": True, "join_with": "\n\n"},
|
||||
"preformatted": {"selector": "pre", "all": True, "join_with": "\n\n"},
|
||||
# Lists
|
||||
"ordered_lists": {"selector": "ol", "all": True, "join_with": "\n"},
|
||||
"unordered_lists": {"selector": "ul", "all": True, "join_with": "\n"},
|
||||
"list_items": {"selector": "li", "all": True, "join_with": "\n"},
|
||||
"definition_lists": {"selector": "dl", "all": True, "join_with": "\n"},
|
||||
# Tables
|
||||
"tables": {"selector": "table", "all": True, "join_with": "\n\n"},
|
||||
"table_captions": {
|
||||
"selector": "caption",
|
||||
"all": True,
|
||||
"join_with": "\n",
|
||||
},
|
||||
# Code blocks
|
||||
"code_blocks": {"selector": "code", "all": True, "join_with": "\n"},
|
||||
# Figures and media descriptions
|
||||
"figures": {"selector": "figure", "all": True, "join_with": "\n\n"},
|
||||
"figcaptions": {"selector": "figcaption", "all": True, "join_with": "\n"},
|
||||
"image_alts": {"selector": "img", "attr": "alt", "all": True, "join_with": " "},
|
||||
# Links (text content, not URLs to avoid clutter)
|
||||
"link_text": {"selector": "a", "all": True, "join_with": " "},
|
||||
# Emphasized text
|
||||
"strong": {"selector": "strong", "all": True, "join_with": " "},
|
||||
"emphasis": {"selector": "em", "all": True, "join_with": " "},
|
||||
"marked": {"selector": "mark", "all": True, "join_with": " "},
|
||||
# Time and data elements
|
||||
"time": {"selector": "time", "all": True, "join_with": " "},
|
||||
"data": {"selector": "data", "all": True, "join_with": " "},
|
||||
# Sections and semantic structure
|
||||
"sections": {"selector": "section", "all": True, "join_with": "\n\n"},
|
||||
"asides": {"selector": "aside", "all": True, "join_with": "\n\n"},
|
||||
"details": {"selector": "details", "all": True, "join_with": "\n"},
|
||||
"summary": {"selector": "summary", "all": True, "join_with": "\n"},
|
||||
# Navigation (may contain important links/structure)
|
||||
"nav": {"selector": "nav", "all": True, "join_with": "\n"},
|
||||
# Footer information
|
||||
"footer": {"selector": "footer", "all": True, "join_with": "\n"},
|
||||
# Divs with specific content roles
|
||||
"content_divs": {
|
||||
"selector": "div[role='main'], div[role='article'], div.content, div#content",
|
||||
"all": True,
|
||||
"join_with": "\n\n",
|
||||
},
|
||||
}
|
||||
|
||||
async def load(
|
||||
self,
|
||||
file_path: str,
|
||||
|
|
@ -85,7 +169,8 @@ class BeautifulSoupLoader(LoaderInterface):
|
|||
Path to the stored extracted text file
|
||||
"""
|
||||
if extraction_rules is None:
|
||||
raise ValueError("extraction_rules required for BeautifulSoupLoader")
|
||||
extraction_rules = self._get_default_extraction_rules()
|
||||
logger.info("Using default comprehensive extraction rules for HTML content")
|
||||
|
||||
logger.info(f"Processing HTML file: {file_path}")
|
||||
|
||||
|
|
@ -115,6 +200,7 @@ class BeautifulSoupLoader(LoaderInterface):
|
|||
|
||||
full_content = " ".join(pieces).strip()
|
||||
|
||||
# remove after defaults for extraction rules
|
||||
# Fallback: If no content extracted, check if the file is plain text (not HTML)
|
||||
if not full_content:
|
||||
from bs4 import BeautifulSoup
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ class UnstructuredLibraryImportError(CogneeConfigurationError):
|
|||
self,
|
||||
message: str = "Import error. Unstructured library is not installed.",
|
||||
name: str = "UnstructuredModuleImportError",
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||
):
|
||||
super().__init__(message, name, status_code)
|
||||
|
||||
|
|
|
|||
|
|
@ -23,3 +23,6 @@ from .create_authorized_dataset import create_authorized_dataset
|
|||
|
||||
# Check
|
||||
from .check_dataset_name import check_dataset_name
|
||||
|
||||
# Boolean check
|
||||
from .has_dataset_data import has_dataset_data
|
||||
|
|
|
|||
21
cognee/modules/data/methods/has_dataset_data.py
Normal file
21
cognee/modules/data/methods/has_dataset_data.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
from uuid import UUID
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from cognee.infrastructure.databases.relational import get_relational_engine
|
||||
from cognee.modules.data.models import DatasetData
|
||||
|
||||
|
||||
async def has_dataset_data(dataset_id: UUID) -> bool:
|
||||
db_engine = get_relational_engine()
|
||||
|
||||
async with db_engine.get_async_session() as session:
|
||||
count_query = (
|
||||
select(func.count())
|
||||
.select_from(DatasetData)
|
||||
.where(DatasetData.dataset_id == dataset_id)
|
||||
)
|
||||
count = await session.execute(count_query)
|
||||
|
||||
return count.scalar_one() > 0
|
||||
|
|
@ -7,6 +7,6 @@ class PipelineRunFailedError(CogneeSystemError):
|
|||
self,
|
||||
message: str = "Pipeline run failed.",
|
||||
name: str = "PipelineRunFailedError",
|
||||
status_code: int = status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
status_code: int = status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||
):
|
||||
super().__init__(message, name, status_code)
|
||||
|
|
|
|||
|
|
@ -44,6 +44,12 @@ class CypherSearchRetriever(BaseRetriever):
|
|||
"""
|
||||
try:
|
||||
graph_engine = await get_graph_engine()
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
if is_empty:
|
||||
logger.warning("Search attempt on an empty knowledge graph")
|
||||
return []
|
||||
|
||||
result = await graph_engine.query(query)
|
||||
except Exception as e:
|
||||
logger.error("Failed to execture cypher search retrieval: %s", str(e))
|
||||
|
|
|
|||
|
|
@ -124,6 +124,13 @@ class GraphCompletionRetriever(BaseGraphRetriever):
|
|||
- str: A string representing the resolved context from the retrieved triplets, or an
|
||||
empty string if no triplets are found.
|
||||
"""
|
||||
graph_engine = await get_graph_engine()
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
if is_empty:
|
||||
logger.warning("Search attempt on an empty knowledge graph")
|
||||
return []
|
||||
|
||||
triplets = await self.get_triplets(query)
|
||||
|
||||
if len(triplets) == 0:
|
||||
|
|
|
|||
|
|
@ -122,6 +122,11 @@ class NaturalLanguageRetriever(BaseRetriever):
|
|||
query.
|
||||
"""
|
||||
graph_engine = await get_graph_engine()
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
if is_empty:
|
||||
logger.warning("Search attempt on an empty knowledge graph")
|
||||
return []
|
||||
|
||||
return await self._execute_cypher_query(query, graph_engine)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,12 +1,16 @@
|
|||
from typing import Any, List, Optional, Tuple, Type, Union
|
||||
|
||||
from cognee.infrastructure.databases.graph import get_graph_engine
|
||||
from cognee.modules.data.models.Dataset import Dataset
|
||||
from cognee.modules.engine.models.node_set import NodeSet
|
||||
from cognee.modules.graph.cognee_graph.CogneeGraphElements import Edge
|
||||
from cognee.modules.search.types import SearchType
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
|
||||
from .get_search_type_tools import get_search_type_tools
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
|
||||
async def no_access_control_search(
|
||||
query_type: SearchType,
|
||||
|
|
@ -32,6 +36,12 @@ async def no_access_control_search(
|
|||
save_interaction=save_interaction,
|
||||
last_k=last_k,
|
||||
)
|
||||
graph_engine = await get_graph_engine()
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
if is_empty:
|
||||
# TODO: we can log here, but not all search types use graph. Still keeping this here for reviewer input
|
||||
logger.warning("Search attempt on an empty knowledge graph")
|
||||
if len(search_tools) == 2:
|
||||
[get_completion, get_context] = search_tools
|
||||
|
||||
|
|
|
|||
|
|
@ -5,6 +5,8 @@ from uuid import UUID
|
|||
from fastapi.encoders import jsonable_encoder
|
||||
from typing import Any, List, Optional, Tuple, Type, Union
|
||||
|
||||
from cognee.infrastructure.databases.graph import get_graph_engine
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
from cognee.shared.utils import send_telemetry
|
||||
from cognee.context_global_variables import set_database_global_context_variables
|
||||
|
||||
|
|
@ -27,6 +29,8 @@ from .get_search_type_tools import get_search_type_tools
|
|||
from .no_access_control_search import no_access_control_search
|
||||
from ..utils.prepare_search_result import prepare_search_result
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
|
||||
async def search(
|
||||
query_text: str,
|
||||
|
|
@ -329,6 +333,25 @@ async def search_in_datasets_context(
|
|||
# Set database configuration in async context for each dataset user has access for
|
||||
await set_database_global_context_variables(dataset.id, dataset.owner_id)
|
||||
|
||||
graph_engine = await get_graph_engine()
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
if is_empty:
|
||||
# TODO: we can log here, but not all search types use graph. Still keeping this here for reviewer input
|
||||
from cognee.modules.data.methods import get_dataset_data
|
||||
|
||||
dataset_data = await get_dataset_data(dataset.id)
|
||||
|
||||
if len(dataset_data) > 0:
|
||||
logger.warning(
|
||||
f"Dataset '{dataset.name}' has {len(dataset_data)} data item(s) but the knowledge graph is empty. "
|
||||
"Please run cognify to process the data before searching."
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
"Search attempt on an empty knowledge graph - no data has been added to this dataset"
|
||||
)
|
||||
|
||||
specific_search_tools = await get_search_type_tools(
|
||||
query_type=query_type,
|
||||
query_text=query_text,
|
||||
|
|
|
|||
|
|
@ -7,6 +7,6 @@ class IngestionError(CogneeValidationError):
|
|||
self,
|
||||
message: str = "Failed to load data.",
|
||||
name: str = "IngestionError",
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||
):
|
||||
super().__init__(message, name, status_code)
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ class WrongDataDocumentInputError(CogneeValidationError):
|
|||
self,
|
||||
field: str,
|
||||
name: str = "WrongDataDocumentInputError",
|
||||
status_code: int = status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
status_code: int = status.HTTP_422_UNPROCESSABLE_CONTENT,
|
||||
):
|
||||
message = f"Missing of invalid parameter: '{field}'."
|
||||
super().__init__(message, name, status_code)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import os
|
||||
import pytest
|
||||
import cognee
|
||||
from cognee.infrastructure.files.utils.get_data_file_path import get_data_file_path
|
||||
|
|
@ -25,6 +26,13 @@ async def test_url_saves_as_html_file():
|
|||
pytest.fail(f"Failed to save data item to storage: {e}")
|
||||
|
||||
|
||||
skip_for_tavily = pytest.mark.skipif(
|
||||
os.getenv("TAVILY_API_KEY") is not None,
|
||||
reason="Skipping as Tavily already handles parsing and outputs text",
|
||||
)
|
||||
|
||||
|
||||
@skip_for_tavily
|
||||
@pytest.mark.asyncio
|
||||
async def test_saved_html_is_valid():
|
||||
try:
|
||||
|
|
@ -67,6 +75,22 @@ async def test_add_url():
|
|||
await cognee.add("https://en.wikipedia.org/wiki/Large_language_model")
|
||||
|
||||
|
||||
skip_in_ci = pytest.mark.skipif(
|
||||
os.getenv("GITHUB_ACTIONS") == "true",
|
||||
reason="Skipping in Github for now - before we get TAVILY_API_KEY",
|
||||
)
|
||||
|
||||
|
||||
@skip_in_ci
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_url_with_tavily():
|
||||
assert os.getenv("TAVILY_API_KEY") is not None
|
||||
await cognee.prune.prune_data()
|
||||
await cognee.prune.prune_system(metadata=True)
|
||||
|
||||
await cognee.add("https://en.wikipedia.org/wiki/Large_language_model")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_url_without_incremental_loading():
|
||||
await cognee.prune.prune_data()
|
||||
|
|
@ -96,7 +120,18 @@ async def test_add_url_with_incremental_loading():
|
|||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_url_with_extraction_rules(): # TODO: this'll fail due to not implemented `load()` yet
|
||||
async def test_add_url_can_define_preferred_loader_as_list_of_str():
|
||||
await cognee.prune.prune_data()
|
||||
await cognee.prune.prune_system(metadata=True)
|
||||
|
||||
await cognee.add(
|
||||
"https://en.wikipedia.org/wiki/Large_language_model",
|
||||
preferred_loaders=["beautiful_soup_loader"],
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_url_with_extraction_rules():
|
||||
await cognee.prune.prune_data()
|
||||
await cognee.prune.prune_system(metadata=True)
|
||||
|
||||
|
|
@ -185,7 +220,7 @@ async def test_beautiful_soup_loader_is_selected_loader_if_preferred_loader_prov
|
|||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_beautiful_soup_loader_raises_if_required_args_are_missing():
|
||||
async def test_beautiful_soup_loader_works_with_and_without_arguments():
|
||||
await cognee.prune.prune_data()
|
||||
await cognee.prune.prune_system(metadata=True)
|
||||
|
||||
|
|
@ -203,11 +238,10 @@ async def test_beautiful_soup_loader_raises_if_required_args_are_missing():
|
|||
bs_loader = BeautifulSoupLoader()
|
||||
loader_engine.register_loader(bs_loader)
|
||||
preferred_loaders = {"beautiful_soup_loader": {}}
|
||||
with pytest.raises(ValueError):
|
||||
await loader_engine.load_file(
|
||||
file_path,
|
||||
preferred_loaders=preferred_loaders,
|
||||
)
|
||||
await loader_engine.load_file(
|
||||
file_path,
|
||||
preferred_loaders=preferred_loaders,
|
||||
)
|
||||
extraction_rules = {
|
||||
"title": {"selector": "title"},
|
||||
"headings": {"selector": "h1, h2, h3", "all": True},
|
||||
|
|
|
|||
|
|
@ -47,10 +47,26 @@ async def main():
|
|||
pathlib.Path(__file__).parent, "test_data/Quantum_computers.txt"
|
||||
)
|
||||
|
||||
from cognee.infrastructure.databases.graph import get_graph_engine
|
||||
|
||||
graph_engine = await get_graph_engine()
|
||||
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
assert is_empty, "Kuzu graph database is not empty"
|
||||
|
||||
await cognee.add([explanation_file_path_quantum], dataset_name)
|
||||
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
assert is_empty, "Kuzu graph database should be empty before cognify"
|
||||
|
||||
await cognee.cognify([dataset_name])
|
||||
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
assert not is_empty, "Kuzu graph database should not be empty"
|
||||
|
||||
from cognee.infrastructure.databases.vector import get_vector_engine
|
||||
|
||||
vector_engine = get_vector_engine()
|
||||
|
|
@ -114,11 +130,10 @@ async def main():
|
|||
assert not os.path.isdir(data_root_directory), "Local data files are not deleted"
|
||||
|
||||
await cognee.prune.prune_system(metadata=True)
|
||||
from cognee.infrastructure.databases.graph import get_graph_engine
|
||||
|
||||
graph_engine = await get_graph_engine()
|
||||
nodes, edges = await graph_engine.get_graph_data()
|
||||
assert len(nodes) == 0 and len(edges) == 0, "Kuzu graph database is not empty"
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
assert is_empty, "Kuzu graph database is not empty"
|
||||
|
||||
finally:
|
||||
# Ensure cleanup even if tests fail
|
||||
|
|
|
|||
|
|
@ -35,6 +35,14 @@ async def main():
|
|||
explanation_file_path_nlp = os.path.join(
|
||||
pathlib.Path(__file__).parent, "test_data/Natural_language_processing.txt"
|
||||
)
|
||||
from cognee.infrastructure.databases.graph import get_graph_engine
|
||||
|
||||
graph_engine = await get_graph_engine()
|
||||
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
assert is_empty, "Graph has to be empty"
|
||||
|
||||
await cognee.add([explanation_file_path_nlp], dataset_name)
|
||||
|
||||
explanation_file_path_quantum = os.path.join(
|
||||
|
|
@ -42,9 +50,16 @@ async def main():
|
|||
)
|
||||
|
||||
await cognee.add([explanation_file_path_quantum], dataset_name)
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
assert is_empty, "Graph has to be empty before cognify"
|
||||
|
||||
await cognee.cognify([dataset_name])
|
||||
|
||||
is_empty = await graph_engine.is_empty()
|
||||
|
||||
assert not is_empty, "Graph shouldn't be empty"
|
||||
|
||||
from cognee.infrastructure.databases.vector import get_vector_engine
|
||||
|
||||
vector_engine = get_vector_engine()
|
||||
|
|
@ -117,11 +132,8 @@ async def main():
|
|||
assert not os.path.isdir(data_root_directory), "Local data files are not deleted"
|
||||
|
||||
await cognee.prune.prune_system(metadata=True)
|
||||
from cognee.infrastructure.databases.graph import get_graph_engine
|
||||
|
||||
graph_engine = await get_graph_engine()
|
||||
nodes, edges = await graph_engine.get_graph_data()
|
||||
assert len(nodes) == 0 and len(edges) == 0, "Neo4j graph database is not empty"
|
||||
is_empty = await graph_engine.is_empty()
|
||||
assert is_empty, "Neo4j graph database is not empty"
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
|||
649
notebooks/cognee_demo.ipynb
vendored
File diff suppressed because it is too large
168
notebooks/cognee_multimedia_demo.ipynb
vendored
|
|
@ -107,20 +107,18 @@
|
|||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:13.488510\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDeleted old log file: /Users/daulet/Desktop/dev/cognee-claude/logs/2025-10-07_21-16-23.log\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:21.914432\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDeleted old log file: /Users/daulet/Desktop/dev/cognee-claude/logs/2025-10-22_18-20-40.log\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:14.172414\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLogging initialized \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m \u001b[36mcognee_version\u001b[0m=\u001b[35m0.3.5-local\u001b[0m \u001b[36mdatabase_path\u001b[0m=\u001b[35m/Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases\u001b[0m \u001b[36mgraph_database_name\u001b[0m=\u001b[35m\u001b[0m \u001b[36mos_info\u001b[0m=\u001b[35m'Darwin 24.5.0 (Darwin Kernel Version 24.5.0: Tue Apr 22 19:54:43 PDT 2025; root:xnu-11417.121.6~2/RELEASE_ARM64_T8132)'\u001b[0m \u001b[36mpython_version\u001b[0m=\u001b[35m3.10.11\u001b[0m \u001b[36mrelational_config\u001b[0m=\u001b[35mcognee_db\u001b[0m \u001b[36mstructlog_version\u001b[0m=\u001b[35m25.4.0\u001b[0m \u001b[36mvector_config\u001b[0m=\u001b[35mlancedb\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:22.759223\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLogging initialized \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m \u001b[36mcognee_version\u001b[0m=\u001b[35m0.3.6-local\u001b[0m \u001b[36mdatabase_path\u001b[0m=\u001b[35m/Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases\u001b[0m \u001b[36mgraph_database_name\u001b[0m=\u001b[35m\u001b[0m \u001b[36mos_info\u001b[0m=\u001b[35m'Darwin 24.5.0 (Darwin Kernel Version 24.5.0: Tue Apr 22 19:54:43 PDT 2025; root:xnu-11417.121.6~2/RELEASE_ARM64_T8132)'\u001b[0m \u001b[36mpython_version\u001b[0m=\u001b[35m3.10.11\u001b[0m \u001b[36mrelational_config\u001b[0m=\u001b[35mcognee_db\u001b[0m \u001b[36mstructlog_version\u001b[0m=\u001b[35m25.4.0\u001b[0m \u001b[36mvector_config\u001b[0m=\u001b[35mlancedb\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:14.172932\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDatabase storage: /Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"/Users/daulet/Desktop/dev/cognee-claude/.venv/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
||||
" from .autonotebook import tqdm as notebook_tqdm\n"
|
||||
"\u001b[2m2025-10-22T17:58:22.759643\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDatabase storage: /Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"0.3.5-local\n"
|
||||
"0.3.6-local\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
|
@ -146,11 +144,11 @@
|
|||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:20.743332\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLoaded JSON extension \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:24.045051\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLoaded JSON extension \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:20.776490\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDeleted Kuzu database files at /Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases/cognee_graph_kuzu\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:24.081025\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDeleted Kuzu database files at /Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases/cognee_graph_kuzu\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:23.387773\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDatabase deleted successfully.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:26.937024\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDatabase deleted successfully.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[1mStorage manager absolute path: /Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_cache\u001b[0m\n",
|
||||
"\n",
|
||||
|
|
@ -163,7 +161,7 @@
|
|||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"User 03f552c1-331f-40b2-a99b-b3b05aa93e0d has registered.\n"
|
||||
"User 5c6da0e1-4bda-4b32-a6e3-ca70b884fb9a has registered.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
|
@ -171,133 +169,139 @@
|
|||
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[1mEmbeddingRateLimiter initialized: enabled=False, requests_limit=60, interval_seconds=60\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.397580\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `981301fd-9699-5cd2-9746-577c0076b844`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.691142\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `e16895e4-38f6-5ad7-a969-cd1629861b40`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.398001\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.691670\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.398362\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.692087\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.399412\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `981301fd-9699-5cd2-9746-577c0076b844`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.693388\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `e16895e4-38f6-5ad7-a969-cd1629861b40`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.399724\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.693668\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.400149\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.694024\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.414674\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: pypdf_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.708303\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: pypdf_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.415122\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: text_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.708776\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: text_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.415472\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: image_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.709084\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: image_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.415781\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: audio_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.709426\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: audio_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.416132\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: unstructured_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.709654\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: unstructured_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.416494\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: advanced_pdf_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:24.709898\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: advanced_pdf_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:28.416861\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: beautiful_soup_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.420233\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.666583\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.420796\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.667605\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.421255\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `e16895e4-38f6-5ad7-a969-cd1629861b40`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.668153\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `981301fd-9699-5cd2-9746-577c0076b844`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.423491\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.673512\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.423881\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.673986\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.424259\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `e16895e4-38f6-5ad7-a969-cd1629861b40`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.674429\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `981301fd-9699-5cd2-9746-577c0076b844`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.434168\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mOntology file 'None' not found. No owl ontology will be attached to the graph.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.686749\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mOntology file 'None' not found. No owl ontology will be attached to the graph.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.453069\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `453ce944-eb27-567c-9918-0d44d1614f97`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.707284\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `eea87f6e-3943-552c-b2fe-904ac1e367f0`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.453489\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.707716\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.453823\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.708080\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.454419\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `453ce944-eb27-567c-9918-0d44d1614f97`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.708748\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `eea87f6e-3943-552c-b2fe-904ac1e367f0`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.454689\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.709019\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.454948\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.709373\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.462413\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task started: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.716846\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task started: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.466745\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task started: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.720657\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task started: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.470294\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.725864\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:28.476006\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:32.731948\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.030103\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLoaded JSON extension \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:36.077494\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mReconnecting to Kuzu database...\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.065148\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'person' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:36.126562\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLoaded JSON extension \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.065868\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'programmer' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:36.161293\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'object' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.066315\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'object' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:36.161962\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'light bulb' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.066713\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'light bulb' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:36.162356\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'profession' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.067064\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'concept' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:36.162703\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'programmer' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.067410\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hardware problem' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:36.163116\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'concept' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.202761\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'profession' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:36.163438\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hardware problem' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.203355\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'programmers' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:37.300377\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.203785\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hardware' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:38.621515\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.204225\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'light bulb' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:39.290034\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'profession' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.204544\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'concept' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:39.291121\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'programmers' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:32.204964\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'humor' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:39.292185\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'object' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:34.265785\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:39.293038\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'light bulb' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:35.003525\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:39.293777\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'concept' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:35.952187\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:39.294485\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hardware problem' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:35.970171\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:39.295087\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'joke' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.024476\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:39.295651\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'humor' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.025311\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:40.433350\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.025564\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:40.434081\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.025803\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task completed: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:40.434611\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.026065\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:40.435199\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task completed: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.026413\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:40.435629\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.026663\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `453ce944-eb27-567c-9918-0d44d1614f97`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:40.435958\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.680393\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:40.436247\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `eea87f6e-3943-552c-b2fe-904ac1e367f0`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.680986\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:40.697594\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.681355\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:42.368373\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.681647\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task completed: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:43.185789\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.681917\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:43.186535\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.682229\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:43.186875\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:38.682567\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `453ce944-eb27-567c-9918-0d44d1614f97`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n"
|
||||
"\u001b[2m2025-10-22T17:58:43.187279\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task completed: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-22T17:58:43.187623\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-22T17:58:43.187953\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-22T17:58:43.188254\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `eea87f6e-3943-552c-b2fe-904ac1e367f0`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{UUID('8f486d81-4723-5f3d-b37b-5e27d9967d33'): PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1c237436-d3eb-5408-874d-91647cf2dcef'), dataset_id=UUID('8f486d81-4723-5f3d-b37b-5e27d9967d33'), dataset_name='main_dataset', payload=None, data_ingestion_info=[{'run_info': PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1c237436-d3eb-5408-874d-91647cf2dcef'), dataset_id=UUID('8f486d81-4723-5f3d-b37b-5e27d9967d33'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('56c22102-965d-592e-958c-c1ebebf0008f')}, {'run_info': PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1c237436-d3eb-5408-874d-91647cf2dcef'), dataset_id=UUID('8f486d81-4723-5f3d-b37b-5e27d9967d33'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('e26acfac-f1c2-5d9d-b95a-e970a75aedde')}])}"
|
||||
"{UUID('849137b0-173d-5a0f-9462-403398a3b1e2'): PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('8f4e8447-24c9-5d2a-afb2-f86256ca4f34'), dataset_id=UUID('849137b0-173d-5a0f-9462-403398a3b1e2'), dataset_name='main_dataset', payload=None, data_ingestion_info=[{'run_info': PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('8f4e8447-24c9-5d2a-afb2-f86256ca4f34'), dataset_id=UUID('849137b0-173d-5a0f-9462-403398a3b1e2'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('cc1ec4a6-2621-5143-ad19-ae7703db040b')}, {'run_info': PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('8f4e8447-24c9-5d2a-afb2-f86256ca4f34'), dataset_id=UUID('849137b0-173d-5a0f-9462-403398a3b1e2'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('f3d53fbe-2a29-57e4-9e55-d87a49890ecc')}])}"
|
||||
]
|
||||
},
|
||||
"execution_count": 4,
|
||||
|
|
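For orientation, the two runs logged above are the notebook's ingest (add) pipeline followed by its cognify pipeline. A minimal sketch of the kind of cell that produces this output, assuming cognee's top-level async API (`cognee.add`, `cognee.cognify`) and a hypothetical input path; this is an illustration, not the notebook's exact code:

import asyncio
import cognee

async def build_graph() -> None:
    # Ingest pipeline: registers loaders, then runs `resolve_data_directories` and `ingest_data`.
    await cognee.add("notebooks/data/example.txt")  # hypothetical path, for illustration only
    # Cognify pipeline: classify_documents, check_permissions_on_dataset,
    # extract_chunks_from_documents, extract_graph_from_data, summarize_text, add_data_points.
    await cognee.cognify()

if __name__ == "__main__":
    asyncio.run(build_graph())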
@ -341,29 +345,23 @@
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:42.668682\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mStarting summary retrieval for query: 'What is in the multimedia files?'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[2m2025-10-22T17:58:43.213961\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mStarting summary retrieval for query: 'What is in the multimedia files?'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:42.933137\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mFound 2 summaries from vector search\u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:43.495466\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mFound 2 summaries from vector search\u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:42.933995\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mReturning 2 summary payloads \u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:43.496119\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mReturning 2 summary payloads \u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:42.934301\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mStarting completion generation for query: 'What is in the multimedia files?'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:58:43.496456\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mStarting completion generation for query: 'What is in the multimedia files?'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:37:42.934604\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mReturning context with 2 item(s)\u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n"
|
||||
"\u001b[2m2025-10-22T17:58:43.496815\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mReturning context with 2 item(s)\u001b[0m [\u001b[0m\u001b[1m\u001b[34mSummariesRetriever\u001b[0m]\u001b[0m\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{'id': '766ac5d6-1a81-530e-a934-61e2bf505d9b', 'created_at': 1759869455990, 'updated_at': 1759869455990, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'A humorous take on programmers and light bulbs.'}\n",
|
||||
"{'id': '2862798a-0dfc-5994-a3ca-9f4329f42f06', 'created_at': 1759869455989, 'updated_at': 1759869455989, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': \"Programmers won't change a light bulb.\"}\n"
|
||||
"{'id': 'b4da8f65-1ab7-5816-b6ca-c3b7e16d7ea9', 'created_at': 1761155918667, 'updated_at': 1761155918667, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'Changing a light bulb is a hardware issue for programmers.'}\n",
|
||||
"{'id': '875f97da-6b05-52af-973d-54939a229a21', 'created_at': 1761155922404, 'updated_at': 1761155922404, 'ontology_valid': False, 'version': 1, 'topological_rank': 0, 'type': 'IndexSchema', 'text': 'How many coders are needed to replace a light bulb? Zero. That’s an issue for hardware.'}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
|
|
|||
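The SummariesRetriever lines above correspond to a summaries search, and the stdout records are the summary payloads it returns. A minimal sketch of such a query, assuming `cognee.search` accepts `query_type`/`query_text` keyword arguments and that `SearchType` is importable from the package root (import paths and signatures differ between cognee versions, so treat the exact call as an assumption):

import asyncio
import cognee
from cognee import SearchType  # assumption: SearchType is re-exported at package level

async def ask() -> None:
    # Runs the SUMMARIES retriever over previously cognified data.
    results = await cognee.search(
        query_type=SearchType.SUMMARIES,
        query_text="What is in the multimedia files?",
    )
    for payload in results:
        # Each payload is a summary record, e.g. {'id': ..., 'type': 'IndexSchema', 'text': ...}.
        print(payload)

if __name__ == "__main__":
    asyncio.run(ask())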
248
notebooks/cognee_simple_demo.ipynb
vendored
@ -79,20 +79,18 @@
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:23.321871\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDeleted old log file: /Users/daulet/Desktop/dev/cognee-claude/logs/2025-10-07_21-16-35.log\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:27.024379\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDeleted old log file: /Users/daulet/Desktop/dev/cognee-claude/logs/2025-10-22_18-22-03.log\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:23.924664\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLogging initialized \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m \u001b[36mcognee_version\u001b[0m=\u001b[35m0.3.5-local\u001b[0m \u001b[36mdatabase_path\u001b[0m=\u001b[35m/Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases\u001b[0m \u001b[36mgraph_database_name\u001b[0m=\u001b[35m\u001b[0m \u001b[36mos_info\u001b[0m=\u001b[35m'Darwin 24.5.0 (Darwin Kernel Version 24.5.0: Tue Apr 22 19:54:43 PDT 2025; root:xnu-11417.121.6~2/RELEASE_ARM64_T8132)'\u001b[0m \u001b[36mpython_version\u001b[0m=\u001b[35m3.10.11\u001b[0m \u001b[36mrelational_config\u001b[0m=\u001b[35mcognee_db\u001b[0m \u001b[36mstructlog_version\u001b[0m=\u001b[35m25.4.0\u001b[0m \u001b[36mvector_config\u001b[0m=\u001b[35mlancedb\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:27.837430\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLogging initialized \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m \u001b[36mcognee_version\u001b[0m=\u001b[35m0.3.6-local\u001b[0m \u001b[36mdatabase_path\u001b[0m=\u001b[35m/Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases\u001b[0m \u001b[36mgraph_database_name\u001b[0m=\u001b[35m\u001b[0m \u001b[36mos_info\u001b[0m=\u001b[35m'Darwin 24.5.0 (Darwin Kernel Version 24.5.0: Tue Apr 22 19:54:43 PDT 2025; root:xnu-11417.121.6~2/RELEASE_ARM64_T8132)'\u001b[0m \u001b[36mpython_version\u001b[0m=\u001b[35m3.10.11\u001b[0m \u001b[36mrelational_config\u001b[0m=\u001b[35mcognee_db\u001b[0m \u001b[36mstructlog_version\u001b[0m=\u001b[35m25.4.0\u001b[0m \u001b[36mvector_config\u001b[0m=\u001b[35mlancedb\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:23.925152\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDatabase storage: /Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"/Users/daulet/Desktop/dev/cognee-claude/.venv/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
||||
" from .autonotebook import tqdm as notebook_tqdm\n"
|
||||
"\u001b[2m2025-10-22T17:59:27.837973\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mDatabase storage: /Users/daulet/Desktop/dev/cognee-claude/cognee/.cognee_system/databases\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"0.3.5-local\n"
|
||||
"0.3.6-local\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
|
@ -100,225 +98,205 @@
"output_type": "stream",
|
||||
"text": [
|
||||
"\n",
|
||||
"\u001b[1mEmbeddingRateLimiter initialized: enabled=False, requests_limit=60, interval_seconds=60\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:31.188799\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mLoaded JSON extension \u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.824653\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `e16895e4-38f6-5ad7-a969-cd1629861b40`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:31.200442\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mOntology file 'None' not found. No owl ontology will be attached to the graph.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.825175\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:31.220787\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `eea87f6e-3943-552c-b2fe-904ac1e367f0`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.825559\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:31.221350\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.834754\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: pypdf_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:31.221818\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.835421\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: text_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:31.227285\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task started: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.835697\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: image_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:31.328876\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.835966\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: audio_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.869563\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'person' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.836157\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: unstructured_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.871104\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'alice' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.836754\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mRegistered loader: advanced_pdf_loader\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.infrastructure.loaders.LoaderEngine\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.871562\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'white rabbit' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.847087\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `ingest_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.872024\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'animal' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.847599\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `resolve_data_directories`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.872453\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dinah' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.847894\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `e16895e4-38f6-5ad7-a969-cd1629861b40`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.872814\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'location' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.967450\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mJSON extension already loaded or unavailable: Binder exception: Extension: JSON is already loaded. You can check loaded extensions by `CALL SHOW_LOADED_EXTENSIONS() RETURN *`.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.873190\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pool of tears' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.980303\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mOntology file 'None' not found. No owl ontology will be attached to the graph.\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.873564\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'garden' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.998286\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run started: `453ce944-eb27-567c-9918-0d44d1614f97`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.873938\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rabbit hole' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.998936\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.874286\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'object' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:30.999638\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.874576\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'glass table' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:31.006879\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task started: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.874819\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'golden key' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:31.119544\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.875154\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bottle drink me' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.668159\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'person' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.875545\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cake eat me' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.669164\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'alice' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.875895\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'large rabbit hole' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.669470\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'white rabbit' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.876245\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'well' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.669777\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'animal' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.876682\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mouse' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.670086\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dinah' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.877037\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dodo' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.670369\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'location' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.877413\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lory' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.670667\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rabbit-hole' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.877726\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'eaglet' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.670972\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'garden' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.878094\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'duck' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.671244\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'object' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.878363\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'historical figure' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.671450\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'table' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.878675\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'william the conqueror' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.671650\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'golden key' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.878919\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'edwin and morcar' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.671857\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bottle' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.879759\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'stigand' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.672094\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cake' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.880220\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'creature' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.672398\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mouse' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.880496\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'caterpillar' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.672607\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dodo' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.880805\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rabbit' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.672847\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lory' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.881102\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bill' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.673136\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'eaglet' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.881498\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'plant' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.673399\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'duck' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.881878\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mushroom' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.673634\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'historical figure' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.882194\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'literature' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.673871\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'william the conqueror' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.882509\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'fairy tales' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.674120\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mary ann' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.882829\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pigeon' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.674384\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'duchess' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.883121\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'duchess' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.674682\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'creature' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.883403\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cook' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.674931\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'caterpillar' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.883739\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cheshire cat' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.675153\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'rabbit' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.883976\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'character' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.675376\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'bill' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.884222\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'march hare' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.675611\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'plant' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.884574\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hatter' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.675837\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mushroom' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.884828\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'queen of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.676060\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'father william' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.885100\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pig' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.676333\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'character' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.885539\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dormouse' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.676573\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'pigeon' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.885826\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'knave of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.676801\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'baby' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.886132\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'king of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\n",
|
||||
"\u001b[2m2025-10-07T20:38:51.677018\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cheshire cat' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
|
||||
"\u001b[2m2025-10-22T17:59:57.886328\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tweedledee' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.677242\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hatter' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.886549\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tweedledum' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.677490\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'march hare' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.886795\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'date' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.677694\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'queen' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.887076\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '4th' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.677922\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'cook' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.887380\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'concept' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.678148\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'date' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.887610\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'treacle well' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.678396\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'may' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.887922\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'muchness' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.678677\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the cat' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.888198\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'queen' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.678911\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the dormouse' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.888461\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'two' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.679113\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the queen of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.888751\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mock turtle' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.679341\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the king of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.889022\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'gryphon' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.679587\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the knave of hearts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.889271\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'king' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.679917\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the rose tree' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.889534\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'hedgehog' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.680113\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'event' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.889769\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'flamingo' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.680347\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'the mad tea party' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.890004\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'executioner' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.680632\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'king' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.890379\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'royalty' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.681429\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mock turtle' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.890663\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'food' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.681702\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'gryphon' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.890927\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tarts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.681931\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'soldiers' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.891206\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'place' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.682200\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'gardener_1' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.891458\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'court of justice' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.682379\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'gardener_2' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.891769\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dance' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.682699\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'gardener_3' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.892054\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lobster quadrille' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.683245\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tortoise' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.892445\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lizard' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.683560\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lizards' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.892698\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'guinea pig' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.683846\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'food' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.892987\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'event' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.684025\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'tarts' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.894624\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'trial' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.684355\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'dance' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.894915\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '14 march 2023' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.684671\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lobster quadrille' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.895209\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '15 march 2023' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.684888\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'subject' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.895458\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for '16 march 2023' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.685135\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'seaography' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.895695\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'text' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.685394\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'arithmetic' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.895998\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'verse' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.685715\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'mystery' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.896305\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'wonderland' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.685998\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'concept' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.896572\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'little sister' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.686239\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'court' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.896861\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'farm-yard' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.686484\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'guinea pig' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.897134\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'happy summer days' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.686688\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'lizard' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:57.897389\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'childhood' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.686886\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'march 14' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T17:59:59.857882\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.687125\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'march 15' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:06.883659\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.687426\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'march 16' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:08.166881\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.687861\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'place' in category 'classes'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:08.167312\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.688321\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'wonderland' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:08.167622\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.688615\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'sister' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:08.168424\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task completed: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.688914\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'farm-yard' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:08.168705\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.689141\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'child-life' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:08.169005\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.689345\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'simple_joys' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:51.689662\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mNo close match found for 'simple_sorrows' in category 'individuals'\u001b[0m [\u001b[0m\u001b[1m\u001b[34mOntologyAdapter\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:38:55.033467\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:03.406344\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task started: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:07.113087\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `add_data_points`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:07.113738\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `summarize_text`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:07.114015\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `extract_graph_from_data`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:07.114579\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mAsync Generator task completed: `extract_chunks_from_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:07.114971\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `check_permissions_on_dataset`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:07.115220\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mCoroutine task completed: `classify_documents`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_base\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:07.115479\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `453ce944-eb27-567c-9918-0d44d1614f97`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n"
"\u001b[2m2025-10-22T18:00:08.169382\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mPipeline run completed: `eea87f6e-3943-552c-b2fe-904ac1e367f0`\u001b[0m [\u001b[0m\u001b[1m\u001b[34mrun_tasks_with_telemetry()\u001b[0m]\u001b[0m\n"
]
},
{
"data": {
"text/plain": [
"{UUID('8f486d81-4723-5f3d-b37b-5e27d9967d33'): PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1c237436-d3eb-5408-874d-91647cf2dcef'), dataset_id=UUID('8f486d81-4723-5f3d-b37b-5e27d9967d33'), dataset_name='main_dataset', payload=None, data_ingestion_info=[{'run_info': PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('1c237436-d3eb-5408-874d-91647cf2dcef'), dataset_id=UUID('8f486d81-4723-5f3d-b37b-5e27d9967d33'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('3ad0b58b-2b39-5bf8-97de-4db67bd2555c')}, {'run_info': PipelineRunAlreadyCompleted(status='PipelineRunAlreadyCompleted', pipeline_run_id=UUID('1c237436-d3eb-5408-874d-91647cf2dcef'), dataset_id=UUID('8f486d81-4723-5f3d-b37b-5e27d9967d33'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('56c22102-965d-592e-958c-c1ebebf0008f')}, {'run_info': PipelineRunAlreadyCompleted(status='PipelineRunAlreadyCompleted', pipeline_run_id=UUID('1c237436-d3eb-5408-874d-91647cf2dcef'), dataset_id=UUID('8f486d81-4723-5f3d-b37b-5e27d9967d33'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('e26acfac-f1c2-5d9d-b95a-e970a75aedde')}])}"
"{UUID('849137b0-173d-5a0f-9462-403398a3b1e2'): PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('8f4e8447-24c9-5d2a-afb2-f86256ca4f34'), dataset_id=UUID('849137b0-173d-5a0f-9462-403398a3b1e2'), dataset_name='main_dataset', payload=None, data_ingestion_info=[{'run_info': PipelineRunCompleted(status='PipelineRunCompleted', pipeline_run_id=UUID('8f4e8447-24c9-5d2a-afb2-f86256ca4f34'), dataset_id=UUID('849137b0-173d-5a0f-9462-403398a3b1e2'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('1140fe00-c2fd-5fc3-adec-bf8ebe41572a')}, {'run_info': PipelineRunAlreadyCompleted(status='PipelineRunAlreadyCompleted', pipeline_run_id=UUID('8f4e8447-24c9-5d2a-afb2-f86256ca4f34'), dataset_id=UUID('849137b0-173d-5a0f-9462-403398a3b1e2'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('cc1ec4a6-2621-5143-ad19-ae7703db040b')}, {'run_info': PipelineRunAlreadyCompleted(status='PipelineRunAlreadyCompleted', pipeline_run_id=UUID('8f4e8447-24c9-5d2a-afb2-f86256ca4f34'), dataset_id=UUID('849137b0-173d-5a0f-9462-403398a3b1e2'), dataset_name='main_dataset', payload=None, data_ingestion_info=None), 'data_id': UUID('f3d53fbe-2a29-57e4-9e55-d87a49890ecc')}])}"
]
},
"execution_count": 3,
@ -352,15 +330,15 @@
"output_type": "stream",
"text": [
"\n",
"\u001b[2m2025-10-07T20:39:07.164471\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph projection completed: 110 nodes, 292 edges in 0.01s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mCogneeGraph\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:08.200794\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph projection completed: 112 nodes, 294 edges in 0.01s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mCogneeGraph\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:07.474073\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mVector collection retrieval completed: Retrieved distances from 6 collections in 0.09s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
"\u001b[2m2025-10-22T18:00:08.542511\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mVector collection retrieval completed: Retrieved distances from 6 collections in 0.09s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
]
},
{
"data": {
"text/plain": [
"['1. Alice \\n2. White Rabbit \\n3. March Hare \\n4. Hatter \\n5. Cheshire Cat \\n6. Queen of Hearts \\n7. Knave of Hearts \\n8. Dormouse']"
"['The influential characters in \"Alice in Wonderland\" include:\\n1. Alice - the main character who explores Wonderland.\\n2. The White Rabbit - the creature Alice follows into Wonderland.\\n3. The King of Hearts - the ruler of Wonderland.\\n4. The Queen of Hearts - the authoritative figure known for her temper.\\n5. The Mad Hatter - a tea party host and influential character.\\n6. The March Hare - the Hatter\\'s tea party companion.\\n7. The Knave of Hearts - accused of stealing tarts.\\n8. The Mock Turtle - a character who shares stories and lessons.\\n9. The Gryphon - a creature who accompanies Alice and shares knowledge.']"
]
},
"execution_count": 4,
@ -383,15 +361,15 @@
"output_type": "stream",
"text": [
"\n",
"\u001b[2m2025-10-07T20:39:36.551739\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph projection completed: 110 nodes, 292 edges in 0.01s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mCogneeGraph\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:12.322968\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph projection completed: 112 nodes, 294 edges in 0.01s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mCogneeGraph\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:36.896038\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mVector collection retrieval completed: Retrieved distances from 6 collections in 0.09s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
"\u001b[2m2025-10-22T18:00:12.640396\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mVector collection retrieval completed: Retrieved distances from 6 collections in 0.09s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
]
},
{
"data": {
"text/plain": [
"['Alice ended up in Wonderland by following a hurried White Rabbit down a rabbit-hole after feeling bored and drowsy.']"
"['Alice ended up in Wonderland by following a White Rabbit down a rabbit hole after becoming bored while sitting by her sister. She fell into a deep well, leading her into the fantastical realm of Wonderland.']"
]
},
"execution_count": 5,
@ -414,15 +392,15 @@
"output_type": "stream",
"text": [
"\n",
"\u001b[2m2025-10-07T20:39:43.171619\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph projection completed: 110 nodes, 292 edges in 0.02s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mCogneeGraph\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:14.237335\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph projection completed: 112 nodes, 294 edges in 0.01s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mCogneeGraph\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:43.468210\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mVector collection retrieval completed: Retrieved distances from 6 collections in 0.08s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
"\u001b[2m2025-10-22T18:00:14.605330\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mVector collection retrieval completed: Retrieved distances from 6 collections in 0.09s\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
]
},
{
"data": {
"text/plain": [
"[\"Alice is described as a curious girl who exhibits a desire for adventure and exploration. She is imaginative, pondering various whimsical questions and thoughts as she navigates the oddities of Wonderland. Her personality shows signs of being thoughtful and reflective, often giving herself advice, though she doesn't always follow it. Despite her adventures and the surreal situations she encounters, she maintains a sense of bravery and a degree of confidence in her interactions with the fantastical characters she meets.\"]"
"[\"Alice's personality is characterized by her curiosity, imaginative thinking, and a thirst for adventure. She often questions the logic of the strange world around her and demonstrates a mix of bravery and innocence as she navigates through Wonderland. Additionally, she sometimes provides herself with advice, indicating a reflective nature. However, her consistent attempts to find order in the chaos and her whimsical thoughts reveal her youthful spirit.\"]"
]
},
"execution_count": 6,
@ -453,9 +431,9 @@
"output_type": "stream",
"text": [
"\n",
"\u001b[2m2025-10-07T20:39:50.413314\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph visualization saved as /Users/daulet/graph_visualization.html\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
"\u001b[2m2025-10-22T18:00:17.008072\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mGraph visualization saved as /Users/daulet/graph_visualization.html\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n",
"\n",
"\u001b[2m2025-10-07T20:39:50.413846\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mThe HTML file has been stored on your home directory! Navigate there with cd ~\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
"\u001b[2m2025-10-22T18:00:17.008546\u001b[0m [\u001b[32m\u001b[1minfo \u001b[0m] \u001b[1mThe HTML file has been stored on your home directory! Navigate there with cd ~\u001b[0m [\u001b[0m\u001b[1m\u001b[34mcognee.shared.logging_utils\u001b[0m]\u001b[0m\n"
]
},
{
754
notebooks/ontology_demo.ipynb
vendored
File diff suppressed because it is too large
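For readers skimming this commit: the notebook output diff above exercises cognee's standard ingest-and-query flow. The logged tasks (classify_documents, check_permissions_on_dataset, extract_chunks_from_documents, extract_graph_from_data, summarize_text, add_data_points) run during cognify, and the later cells issue graph-backed searches (influential characters, how Alice ended up in Wonderland, Alice's personality). Below is a minimal sketch of that flow, assuming the README-style cognee API (add / cognify / search) and an OpenAI-compatible LLM configured through environment variables; the sample text and question are placeholders, and the ontology wiring that produces the OntologyAdapter log lines is omitted because its exact parameters vary between cognee versions.

import asyncio

import cognee


async def main():
    # Ingest sample text (the demo notebook uses the full "Alice in Wonderland" text).
    await cognee.add("Alice was beginning to get very tired of sitting by her sister on the bank...")

    # Build the knowledge graph; the pipeline tasks seen in the logs above run inside this call.
    await cognee.cognify()

    # Query the graph, as in the notebook's search cells.
    results = await cognee.search("Who are the influential characters in Alice in Wonderland?")
    for result in results:
        print(result)


if __name__ == "__main__":
    asyncio.run(main())

Run with cognee installed and an LLM API key (for example OPENAI_API_KEY, or the LLM_API_KEY variable used elsewhere in this commit) exported in the environment.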
2
poetry.lock
generated
@ -13531,4 +13531,4 @@ scraping = ["APScheduler", "beautifulsoup4", "lxml", "playwright", "protego", "t
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.14"
content-hash = "bcab5420339473ec08b89cde588899b60999762fb8ca9a011240d47ea86198e3"
content-hash = "9490de8c950400c004a87333eda35311109bc1708a98e053bc2f66d883f4f702"
@ -1,7 +1,7 @@
[project]
name = "cognee"
version = "0.3.6"
version = "0.3.7"
description = "Cognee - is a library for enriching LLM context with a semantic layer for better understanding and reasoning."
authors = [
{ name = "Vasilije Markovic" },
@ -41,7 +41,7 @@ dependencies = [
"nbformat>=5.7.0,<6.0.0",
"alembic>=1.13.3,<2",
"limits>=4.4.1,<5",
"fastapi>=0.115.7,<1.0.0",
"fastapi>=0.116.2,<1.0.0",
"python-multipart>=0.0.20,<1.0.0",
"fastapi-users[sqlalchemy]>=14.0.1,<15.0.0",
"structlog>=25.2.0,<26",