fix: human readable logs (#658)

<!-- .github/pull_request_template.md -->

## Description
Introducing structlog.

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin
This commit is contained in:
Daniel Molnar 2025-03-25 11:54:40 +01:00 committed by GitHub
parent d192d1fe20
commit 73db1a5a53
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
81 changed files with 854 additions and 559 deletions

View file

@ -2,7 +2,7 @@ import asyncio
import json
import os
import cognee
import logging
from cognee.shared.logging_utils import get_logger
import importlib.util
from contextlib import redirect_stderr, redirect_stdout
@ -17,7 +17,7 @@ from cognee.modules.storage.utils import JSONEncoder
mcp = Server("cognee")
logger = logging.getLogger(__name__)
logger = get_logger()
@mcp.list_tools()

View file

@ -2,7 +2,7 @@
import os
import uvicorn
import logging
from cognee.shared.logging_utils import get_logger
import sentry_sdk
from fastapi import FastAPI, status
from fastapi.responses import JSONResponse, Response
@ -28,12 +28,7 @@ from cognee.api.v1.users.routers import (
)
from contextlib import asynccontextmanager
# Set up logging
logging.basicConfig(
level=logging.INFO, # Set the logging level (e.g., DEBUG, INFO, WARNING, ERROR, CRITICAL)
format="%(asctime)s [%(levelname)s] %(message)s", # Set the log message format
)
logger = logging.getLogger(__name__)
logger = get_logger()
if os.getenv("ENV", "prod") == "prod":
sentry_sdk.init(

View file

@ -3,13 +3,13 @@ from fastapi.responses import JSONResponse
from fastapi import APIRouter
from typing import List
import subprocess
import logging
from cognee.shared.logging_utils import get_logger
import requests
from cognee.modules.users.models import User
from cognee.modules.users.methods import get_authenticated_user
logger = logging.getLogger(__name__)
logger = get_logger()
def get_add_router() -> APIRouter:

View file

@ -1,5 +1,5 @@
import asyncio
import logging
from cognee.shared.logging_utils import get_logger
from uuid import NAMESPACE_OID, uuid5
from cognee.api.v1.search.search_v2 import search
@ -30,7 +30,7 @@ if monitoring == MonitoringTool.LANGFUSE:
from langfuse.decorators import observe
logger = logging.getLogger("code_graph_pipeline")
logger = get_logger("code_graph_pipeline")
update_status_lock = asyncio.Lock()

View file

@ -1,5 +1,5 @@
import asyncio
import logging
from cognee.shared.logging_utils import get_logger
from typing import Union, Optional
from pydantic import BaseModel
@ -28,7 +28,7 @@ from cognee.tasks.storage import add_data_points
from cognee.tasks.summarization import summarize_text
from cognee.modules.chunking.TextChunker import TextChunker
logger = logging.getLogger("cognify.v2")
logger = get_logger("cognify.v2")
update_status_lock = asyncio.Lock()

View file

@ -1,5 +1,5 @@
import json
import logging
from cognee.shared.logging_utils import get_logger
from fastapi import APIRouter
from fastapi.responses import JSONResponse
from cognee.api.DTO import InDTO
@ -7,7 +7,7 @@ from cognee.modules.retrieval.code_retriever import CodeRetriever
from cognee.modules.storage.utils import JSONEncoder
logger = logging.getLogger(__name__)
logger = get_logger()
class CodePipelineIndexPayloadDTO(InDTO):

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from fastapi import APIRouter
from datetime import datetime
from uuid import UUID
@ -14,7 +14,7 @@ from cognee.modules.users.models import User
from cognee.modules.users.methods import get_authenticated_user
from cognee.modules.pipelines.models import PipelineRunStatus
logger = logging.getLogger(__name__)
logger = get_logger()
class ErrorResponseDTO(BaseModel):

View file

@ -1,11 +1,11 @@
import logging
from fastapi import Depends
from fastapi.responses import JSONResponse
from fastapi import APIRouter
from cognee.shared.logging_utils import get_logger
from cognee.modules.users.models import User
from cognee.modules.users.methods import get_authenticated_user
logger = logging.getLogger(__name__)
logger = get_logger()
def get_visualize_router() -> APIRouter:

View file

@ -2,11 +2,13 @@ from cognee.modules.visualization.cognee_network_visualization import (
cognee_network_visualization,
)
from cognee.infrastructure.databases.graph import get_graph_engine
import logging
from cognee.shared.logging_utils import get_logger, ERROR
import asyncio
from cognee.shared.utils import setup_logging
logger = get_logger()
async def visualize_graph(destination_file_path: str = None):
@ -16,9 +18,9 @@ async def visualize_graph(destination_file_path: str = None):
graph = await cognee_network_visualization(graph_data, destination_file_path)
if destination_file_path:
logging.info(f"The HTML file has been stored at path: {destination_file_path}")
logger.info(f"The HTML file has been stored at path: {destination_file_path}")
else:
logging.info(
logger.info(
"The HTML file has been stored on your home directory! Navigate there with cd ~"
)
@ -26,7 +28,7 @@ async def visualize_graph(destination_file_path: str = None):
if __name__ == "__main__":
setup_logging(logging.ERROR)
logger = get_logger(level=ERROR)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
import json
from typing import List, Optional
from cognee.eval_framework.answer_generation.answer_generation_executor import (
@ -14,6 +14,9 @@ from cognee.modules.data.models.answers_base import AnswersBase
from cognee.modules.data.models.answers_data import Answers
logger = get_logger()
async def create_and_insert_answers_table(questions_payload):
relational_config = get_relational_config()
relational_engine = get_relational_engine()
@ -35,7 +38,7 @@ async def run_question_answering(
params: dict, system_prompt="answer_simple_question.txt", top_k: Optional[int] = None
) -> List[dict]:
if params.get("answering_questions"):
logging.info("Question answering started...")
logger.info("Question answering started...")
try:
with open(params["questions_path"], "r", encoding="utf-8") as f:
questions = json.load(f)
@ -44,7 +47,7 @@ async def run_question_answering(
except json.JSONDecodeError as e:
raise ValueError(f"Error decoding JSON from {params['questions_path']}: {e}")
logging.info(f"Loaded {len(questions)} questions from {params['questions_path']}")
logger.info(f"Loaded {len(questions)} questions from {params['questions_path']}")
answer_generator = AnswerGeneratorExecutor()
answers = await answer_generator.question_answering_non_parallel(
questions=questions,
@ -56,11 +59,11 @@ async def run_question_answering(
json.dump(answers, f, ensure_ascii=False, indent=4)
await create_and_insert_answers_table(answers)
logging.info("Question answering End...")
logger.info("Question answering End...")
return answers
else:
logging.info(
logger.info(
"The question answering module was not executed as answering_questions is not enabled"
)
return []

View file

@ -2,9 +2,9 @@ from abc import ABC, abstractmethod
from typing import List, Optional, Any, Union, Tuple
import os
import json
import logging
from cognee.shared.logging_utils import get_logger
logger = logging.getLogger(__name__)
logger = get_logger()
class BaseBenchmarkAdapter(ABC):

View file

@ -1,11 +1,12 @@
import cognee
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from typing import Optional, Tuple, List, Dict, Union, Any, Callable, Awaitable
from cognee.eval_framework.benchmark_adapters.benchmark_adapters import BenchmarkAdapter
from cognee.modules.chunking.TextChunker import TextChunker
from cognee.modules.pipelines.tasks.Task import Task
from cognee.shared.utils import setup_logging
logger = get_logger(level=ERROR)
class CorpusBuilderExecutor:
@ -53,8 +54,6 @@ class CorpusBuilderExecutor:
return self.questions
async def run_cognee(self, chunk_size=1024, chunker=TextChunker) -> None:
setup_logging(logging.ERROR)
await cognee.prune.prune_data()
await cognee.prune.prune_system(metadata=True)

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger, ERROR
import json
from typing import List
@ -13,6 +13,8 @@ from cognee.infrastructure.databases.relational.get_relational_engine import (
from cognee.modules.chunking.TextChunker import TextChunker
from cognee.eval_framework.corpus_builder.task_getters.TaskGetters import TaskGetters
logger = get_logger(level=ERROR)
async def create_and_insert_questions_table(questions_payload):
relational_config = get_relational_config()
@ -35,7 +37,7 @@ async def run_corpus_builder(
params: dict, chunk_size=1024, chunker=TextChunker, instance_filter=None
) -> List[dict]:
if params.get("building_corpus_from_scratch"):
logging.info("Corpus Builder started...")
logger.info("Corpus Builder started...")
try:
task_getter = TaskGetters(params.get("task_getter_type", "Default")).getter_func
@ -58,6 +60,6 @@ async def run_corpus_builder(
await create_and_insert_questions_table(questions_payload=questions)
logging.info("Corpus Builder End...")
logger.info("Corpus Builder End...")
return questions

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
import json
from typing import List
from cognee.eval_framework.evaluation.evaluation_executor import EvaluationExecutor
@ -13,6 +13,9 @@ from cognee.modules.data.models.metrics_data import Metrics
from cognee.modules.data.models.metrics_base import MetricsBase
logger = get_logger()
async def create_and_insert_metrics_table(questions_payload):
relational_config = get_relational_config()
relational_engine = get_relational_engine()
@ -32,7 +35,7 @@ async def create_and_insert_metrics_table(questions_payload):
async def execute_evaluation(params: dict) -> None:
"""Execute the evaluation step and save results."""
logging.info("Evaluation started...")
logger.info("Evaluation started...")
try:
with open(params["answers_path"], "r", encoding="utf-8") as f:
answers = json.load(f)
@ -41,7 +44,7 @@ async def execute_evaluation(params: dict) -> None:
except json.JSONDecodeError as e:
raise ValueError(f"Error decoding JSON from {params['answers_path']}: {e}")
logging.info(f"Loaded {len(answers)} answers from {params['answers_path']}")
logger.info(f"Loaded {len(answers)} answers from {params['answers_path']}")
evaluator = EvaluationExecutor(
evaluator_engine=params["evaluation_engine"],
evaluate_contexts=params["evaluating_contexts"],
@ -53,7 +56,7 @@ async def execute_evaluation(params: dict) -> None:
json.dump(metrics, f, ensure_ascii=False, indent=4)
await create_and_insert_metrics_table(metrics)
logging.info("Evaluation completed")
logger.info("Evaluation completed")
return metrics
@ -63,16 +66,16 @@ async def run_evaluation(params: dict) -> List[dict]:
if params.get("evaluating_answers"):
metrics = await execute_evaluation(params)
else:
logging.info("Skipping evaluation as evaluating_answers is False")
logger.info("Skipping evaluation as evaluating_answers is False")
# Step 2: Calculate metrics if requested
if params.get("calculate_metrics"):
logging.info("Calculating metrics statistics...")
logger.info("Calculating metrics statistics...")
calculate_metrics_statistics(
json_data=params["metrics_path"], aggregate_output_path=params["aggregate_metrics_path"]
)
logging.info("Metrics calculation completed")
logger.info("Metrics calculation completed")
return metrics
else:
logging.info("Skipping metrics calculation as calculate_metrics is False")
logger.info("Skipping metrics calculation as calculate_metrics is False")
return []

View file

@ -3,7 +3,7 @@ import os
import json
import asyncio
import datetime
import logging
from cognee.shared.logging_utils import get_logger
from cognee.eval_framework.eval_config import EvalConfig
from cognee.eval_framework.corpus_builder.run_corpus_builder import run_corpus_builder
from cognee.eval_framework.answer_generation.run_question_answering_module import (
@ -11,7 +11,7 @@ from cognee.eval_framework.answer_generation.run_question_answering_module impor
)
from cognee.eval_framework.evaluation.run_evaluation_module import run_evaluation
logger = logging.getLogger(__name__)
logger = get_logger()
def read_and_combine_metrics(eval_params: dict) -> dict:

View file

@ -1,6 +1,5 @@
import logging
from cognee.shared.logging_utils import get_logger
import asyncio
from cognee.shared.utils import setup_logging
from cognee.eval_framework.eval_config import EvalConfig
from cognee.eval_framework.corpus_builder.run_corpus_builder import run_corpus_builder
@ -10,8 +9,8 @@ from cognee.eval_framework.answer_generation.run_question_answering_module impor
from cognee.eval_framework.evaluation.run_evaluation_module import run_evaluation
from cognee.eval_framework.metrics_dashboard import create_dashboard
# Configure logging
setup_logging(logging.INFO)
# Configure logging(logging.INFO)
logger = get_logger()
# Define parameters and file paths.
eval_params = EvalConfig().to_dict()
@ -33,7 +32,7 @@ async def main():
await run_evaluation(eval_params)
if eval_params.get("dashboard"):
logging.info("Generating dashboard...")
logger.info("Generating dashboard...")
create_dashboard(
metrics_path=eval_params["metrics_path"],
aggregate_metrics_path=eval_params["aggregate_metrics_path"],

View file

@ -1,7 +1,7 @@
from fastapi import status
import logging
from cognee.shared.logging_utils import get_logger
logger = logging.getLogger(__name__)
logger = get_logger()
class CogneeApiError(Exception):

View file

@ -1,6 +1,6 @@
"""Adapter for Kuzu graph database."""
import logging
from cognee.shared.logging_utils import get_logger
import json
import os
import shutil
@ -19,7 +19,7 @@ from cognee.infrastructure.engine import DataPoint
from cognee.modules.storage.utils import JSONEncoder
import aiofiles
logger = logging.getLogger(__name__)
logger = get_logger()
class KuzuAdapter(GraphDBInterface):

View file

@ -1,7 +1,7 @@
"""Neo4j Adapter for Graph Database"""
import json
import logging
from cognee.shared.logging_utils import get_logger, ERROR
import asyncio
from textwrap import dedent
from typing import Optional, Any, List, Dict
@ -22,7 +22,7 @@ from .neo4j_metrics_utils import (
count_self_loops,
)
logger = logging.getLogger("Neo4jAdapter")
logger = get_logger("Neo4jAdapter", level=ERROR)
class Neo4jAdapter(GraphDBInterface):

View file

@ -4,7 +4,7 @@ from datetime import datetime, timezone
import os
import json
import asyncio
import logging
from cognee.shared.logging_utils import get_logger
from typing import Dict, Any, List, Union
from uuid import UUID
import aiofiles
@ -16,7 +16,7 @@ from cognee.infrastructure.engine.utils import parse_id
from cognee.modules.storage.utils import JSONEncoder
import numpy as np
logger = logging.getLogger(__name__)
logger = get_logger()
class NetworkXAdapter(GraphDBInterface):

View file

@ -1,6 +1,6 @@
import os
from os import path
import logging
from cognee.shared.logging_utils import get_logger
from uuid import UUID
from typing import Optional
from typing import AsyncGenerator, List
@ -16,7 +16,7 @@ from cognee.modules.data.models.Data import Data
from ..ModelBase import Base
logger = logging.getLogger(__name__)
logger = get_logger()
class SQLAlchemyAdapter:

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from typing import Dict, List, Optional, Any
import os
import json
@ -15,7 +15,7 @@ from ..embeddings.EmbeddingEngine import EmbeddingEngine
from ..vector_db_interface import VectorDBInterface
from ..utils import normalize_distances
logger = logging.getLogger("ChromaDBAdapter")
logger = get_logger("ChromaDBAdapter")
class IndexSchema(DataPoint):

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from typing import List, Optional
from fastembed import TextEmbedding
import litellm
@ -8,7 +8,7 @@ from cognee.infrastructure.databases.exceptions.EmbeddingException import Embedd
from cognee.infrastructure.llm.tokenizer.TikToken import TikTokenTokenizer
litellm.set_verbose = False
logger = logging.getLogger("FastembedEmbeddingEngine")
logger = get_logger("FastembedEmbeddingEngine")
class FastembedEmbeddingEngine(EmbeddingEngine):

View file

@ -1,5 +1,5 @@
import asyncio
import logging
from cognee.shared.logging_utils import get_logger
import math
from typing import List, Optional
import litellm
@ -12,7 +12,7 @@ from cognee.infrastructure.llm.tokenizer.Mistral import MistralTokenizer
from cognee.infrastructure.llm.tokenizer.TikToken import TikTokenTokenizer
litellm.set_verbose = False
logger = logging.getLogger("LiteLLMEmbeddingEngine")
logger = get_logger("LiteLLMEmbeddingEngine")
class LiteLLMEmbeddingEngine(EmbeddingEngine):

View file

@ -1,6 +1,6 @@
import asyncio
from cognee.shared.logging_utils import get_logger
import aiohttp
import logging
from typing import List, Optional
import os
@ -10,7 +10,7 @@ from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import Em
from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
from cognee.infrastructure.llm.tokenizer.HuggingFace import HuggingFaceTokenizer
logger = logging.getLogger("OllamaEmbeddingEngine")
logger = get_logger("OllamaEmbeddingEngine")
class OllamaEmbeddingEngine(EmbeddingEngine):

View file

@ -1,7 +1,7 @@
from __future__ import annotations
import asyncio
import logging
from cognee.shared.logging_utils import get_logger
from typing import List, Optional
from cognee.infrastructure.engine import DataPoint
@ -11,7 +11,7 @@ from ..embeddings.EmbeddingEngine import EmbeddingEngine
from ..models.ScoredResult import ScoredResult
from ..vector_db_interface import VectorDBInterface
logger = logging.getLogger("MilvusAdapter")
logger = get_logger("MilvusAdapter")
class IndexSchema(DataPoint):

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from typing import Dict, List, Optional
from cognee.infrastructure.engine.utils import parse_id
@ -11,7 +11,7 @@ from cognee.infrastructure.engine import DataPoint
from ..embeddings.EmbeddingEngine import EmbeddingEngine
from ..vector_db_interface import VectorDBInterface
logger = logging.getLogger("QDrantAdapter")
logger = get_logger("QDrantAdapter")
class IndexSchema(DataPoint):

View file

@ -1,5 +1,5 @@
import asyncio
import logging
from cognee.shared.logging_utils import get_logger
from typing import List, Optional
from cognee.exceptions import InvalidValueError
@ -10,7 +10,7 @@ from ..embeddings.EmbeddingEngine import EmbeddingEngine
from ..models.ScoredResult import ScoredResult
from ..vector_db_interface import VectorDBInterface
logger = logging.getLogger("WeaviateAdapter")
logger = get_logger("WeaviateAdapter")
class IndexSchema(DataPoint):

View file

@ -1,6 +1,6 @@
from typing import Type, Optional
from pydantic import BaseModel
import logging
from cognee.shared.logging_utils import get_logger
import litellm
from litellm import acompletion, JSONSchemaValidationError
from cognee.shared.data_models import MonitoringTool
@ -9,7 +9,7 @@ from cognee.infrastructure.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.prompts import read_query_prompt
from cognee.base_config import get_base_config
logger = logging.getLogger(__name__)
logger = get_logger()
monitoring = get_base_config().monitoring_tool

View file

@ -1,10 +1,11 @@
from os import path
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from cognee.root_dir import get_absolute_path
def read_query_prompt(prompt_file_name: str, base_directory: str = None):
"""Read a query prompt from a file."""
logger = get_logger(level=ERROR)
try:
if base_directory is None:
base_directory = get_absolute_path("./infrastructure/llm/prompts")
@ -14,8 +15,8 @@ def read_query_prompt(prompt_file_name: str, base_directory: str = None):
with open(file_path, "r", encoding="utf-8") as file:
return file.read()
except FileNotFoundError:
logging.error(f"Error: Prompt file not found. Attempted to read: %s {file_path}")
logger.error(f"Error: Prompt file not found. Attempted to read: %s {file_path}")
return None
except Exception as e:
logging.error(f"An error occurred: %s {e}")
logger.error(f"An error occurred: %s {e}")
return None

View file

@ -1,10 +1,10 @@
import logging
from cognee.shared.logging_utils import get_logger
import litellm
from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.llm.get_llm_client import get_llm_client
logger = logging.getLogger(__name__)
logger = get_logger()
def get_max_chunk_tokens():

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from uuid import NAMESPACE_OID, uuid5
from cognee.modules.chunking.Chunker import Chunker
@ -6,7 +6,7 @@ from .models.DocumentChunk import DocumentChunk
from langchain_text_splitters import RecursiveCharacterTextSplitter
from cognee.infrastructure.databases.vector import get_vector_engine
logger = logging.getLogger(__name__)
logger = get_logger()
class LangchainChunker(Chunker):

View file

@ -1,11 +1,11 @@
import logging
from cognee.shared.logging_utils import get_logger
from uuid import NAMESPACE_OID, uuid5
from cognee.tasks.chunks import chunk_by_paragraph
from cognee.modules.chunking.Chunker import Chunker
from .models.DocumentChunk import DocumentChunk
logger = logging.getLogger(__name__)
logger = get_logger()
class TextChunker(Chunker):

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
import os
from typing import Type
@ -10,7 +10,7 @@ from cognee.infrastructure.llm.prompts import read_query_prompt
from cognee.shared.data_models import SummarizedCode
from cognee.tasks.summarization.mock_summary import get_mock_summarized_code
logger = logging.getLogger("extract_summary")
logger = get_logger("extract_summary")
async def extract_summary(content: str, response_model: Type[BaseModel]):

View file

@ -1,6 +1,6 @@
import logging
from cognee.shared.logging_utils import get_logger, ERROR
logger = logging.getLogger(__name__)
logger = get_logger(level=ERROR)
async def detect_language(text: str):

View file

@ -1,8 +1,8 @@
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from cognee.exceptions import InvalidValueError
logger = logging.getLogger(__name__)
logger = get_logger(level=ERROR)
async def translate_text(

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from typing import List, Dict, Union
from cognee.exceptions import InvalidValueError
@ -8,7 +8,7 @@ from cognee.modules.graph.cognee_graph.CogneeGraphElements import Node, Edge
from cognee.modules.graph.cognee_graph.CogneeAbstractGraph import CogneeAbstractGraph
import heapq
logger = logging.getLogger(__name__)
logger = get_logger()
class CogneeGraph(CogneeAbstractGraph):

View file

@ -1,6 +1,6 @@
import os
import difflib
import logging
from cognee.shared.logging_utils import get_logger
from collections import deque
from typing import List, Tuple, Dict, Optional, Any
from owlready2 import get_ontology, ClassConstruct, Ontology, Thing
@ -11,7 +11,7 @@ from cognee.modules.ontology.exceptions import (
GetSubgraphError,
)
logger = logging.getLogger("OntologyAdapter")
logger = get_logger("OntologyAdapter")
class OntologyResolver:
@ -34,7 +34,7 @@ class OntologyResolver:
self.ontology = get_ontology(fallback_url)
self.build_lookup()
except Exception as e:
logger.error("Failed to load ontology: %s", str(e))
logger.error("Failed to load ontology", exc_info=e)
raise OntologyInitializationError() from e
def build_lookup(self):

View file

@ -1,6 +1,6 @@
import inspect
import json
import logging
from cognee.shared.logging_utils import get_logger
from uuid import UUID, uuid4
from typing import Any
@ -17,7 +17,7 @@ from uuid import uuid5, NAMESPACE_OID
from ..tasks.Task import Task
logger = logging.getLogger("run_tasks(tasks: [Task], data)")
logger = get_logger("run_tasks(tasks: [Task], data)")
async def run_tasks_base(tasks: list[Task], data=None, user: User = None):

View file

@ -1,5 +1,5 @@
from typing import Any, Optional, List
import logging
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.entities.BaseEntityExtractor import BaseEntityExtractor
from cognee.infrastructure.context.BaseContextProvider import BaseContextProvider
@ -7,7 +7,7 @@ from cognee.modules.retrieval.base_retriever import BaseRetriever
from cognee.modules.retrieval.utils.completion import generate_completion
logger = logging.getLogger("entity_completion_retriever")
logger = get_logger("entity_completion_retriever")
class EntityCompletionRetriever(BaseRetriever):

View file

@ -1,12 +1,12 @@
from typing import Any, Optional
import logging
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.graph.networkx.adapter import NetworkXAdapter
from cognee.modules.retrieval.base_retriever import BaseRetriever
from cognee.modules.retrieval.utils.completion import generate_completion
from cognee.modules.retrieval.exceptions import SearchTypeNotSupported, CypherSearchError
from cognee.shared.logging_utils import get_logger
logger = logging.getLogger("CypherSearchRetriever")
logger = get_logger("CypherSearchRetriever")
class CypherSearchRetriever(BaseRetriever):

View file

@ -1,5 +1,5 @@
import asyncio
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from typing import List, Optional
from cognee.infrastructure.databases.graph import get_graph_engine
@ -9,6 +9,8 @@ from cognee.modules.users.methods import get_default_user
from cognee.modules.users.models import User
from cognee.shared.utils import send_telemetry
logger = get_logger(level=ERROR)
def format_triplets(edges):
print("\n\n\n")
@ -134,7 +136,7 @@ async def brute_force_search(
try:
vector_engine = get_vector_engine()
except Exception as e:
logging.error("Failed to initialize vector engine: %s", e)
logger.error("Failed to initialize vector engine: %s", e)
raise RuntimeError("Initialization error") from e
send_telemetry("cognee.brute_force_triplet_search EXECUTION STARTED", user.id)
@ -159,7 +161,7 @@ async def brute_force_search(
return results
except Exception as error:
logging.error(
logger.error(
"Error during brute force search for user: %s, query: %s. Error: %s",
user.id,
query,

View file

@ -1,5 +1,5 @@
import asyncio
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from typing import List
from cognee.infrastructure.databases.graph import get_graph_engine
@ -11,7 +11,7 @@ from cognee.shared.utils import send_telemetry
from cognee.modules.search.methods import search
from cognee.infrastructure.llm.get_llm_client import get_llm_client
logger = logging.getLogger(__name__)
logger = get_logger(level=ERROR)
async def code_description_to_code_part_search(
@ -55,13 +55,11 @@ async def code_description_to_code_part(
vector_engine = get_vector_engine()
graph_engine = await get_graph_engine()
except Exception as init_error:
logging.error("Failed to initialize engines: %s", init_error, exc_info=True)
logger.error("Failed to initialize engines: %s", init_error, exc_info=True)
raise RuntimeError("System initialization error. Please try again later.") from init_error
send_telemetry("code_description_to_code_part_search EXECUTION STARTED", user.id)
logging.info(
"Search initiated by user %s with query: '%s' and top_k: %d", user.id, query, top_k
)
logger.info("Search initiated by user %s with query: '%s' and top_k: %d", user.id, query, top_k)
context_from_documents = ""
@ -89,7 +87,7 @@ async def code_description_to_code_part(
"CodeSummary_text", query_text=query, limit=top_k
)
if not code_summaries:
logging.warning("No results found for query: '%s' by user: %s", query, user.id)
logger.warning("No results found for query: '%s' by user: %s", query, user.id)
return []
memory_fragment = CogneeGraph()
@ -112,7 +110,7 @@ async def code_description_to_code_part(
node_to_search_from = memory_fragment.get_node(node_id)
if not node_to_search_from:
logging.debug("Node %s not found in memory fragment graph", node_id)
logger.debug("Node %s not found in memory fragment graph", node_id)
continue
for code_file in node_to_search_from.get_skeleton_neighbours():
@ -127,7 +125,7 @@ async def code_description_to_code_part(
if code_file_edge.get_attribute("relationship_name") == "contains":
code_pieces_to_return.add(code_file_edge.get_destination_node())
logging.info(
logger.info(
"Search completed for user: %s, query: '%s'. Found %d code pieces.",
user.id,
query,
@ -137,7 +135,7 @@ async def code_description_to_code_part(
return code_pieces_to_return, context_from_documents
except Exception as exec_error:
logging.error(
logger.error(
"Error during code description to code part search for user: %s, query: '%s'. Error: %s",
user.id,
query,

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from uuid import UUID
from sqlalchemy import select
from sqlalchemy.orm import joinedload
@ -9,7 +9,7 @@ from cognee.infrastructure.databases.relational import get_relational_engine
from ...models.User import User
from ...models.ACL import ACL
logger = logging.getLogger(__name__)
logger = get_logger()
async def check_permission_on_documents(user: User, permission_type: str, document_ids: list[UUID]):

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
import networkx as nx
import json
import os
@ -6,7 +6,7 @@ import os
from cognee.infrastructure.files.storage import LocalStorage
logger = logging.getLogger(__name__)
logger = get_logger()
async def cognee_network_visualization(graph_data, destination_file_path: str = None):

View file

@ -0,0 +1,138 @@
import sys
import logging
import structlog
# Export common log levels so callers can write e.g. ``get_logger(level=ERROR)``
# without importing the stdlib ``logging`` module themselves.
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL
# Track if logging has been configured — module-level guard so that
# ``get_logger`` runs ``setup_logging`` at most once per process.
_is_configured = False
def get_logger(name=None, level=INFO):
    """Get a configured structlog logger.

    Configures logging lazily on the first call; later calls reuse the
    existing configuration, so ``level`` only takes effect the first time
    this function runs in the process.

    Args:
        name: Logger name (default: None — lets structlog's stdlib
            ``LoggerFactory`` deduce the *caller's* module name).
        level: Logging level applied on the first (configuring) call
            (default: INFO).

    Returns:
        A configured structlog logger instance.
    """
    global _is_configured

    if not _is_configured:
        setup_logging(level)
        _is_configured = True

    # When no name is given, pass no argument at all: the previous
    # ``name if name else __name__`` fallback named every logger after
    # this utility module instead of its caller, because ``__name__``
    # here is the logging-utils module's own name.
    if name:
        return structlog.get_logger(name)
    return structlog.get_logger()
def setup_logging(log_level=INFO, name=None):
    """Sets up the logging configuration with structlog integration.

    Installs the structlog processor chain, a process-wide ``sys.excepthook``
    that logs uncaught exceptions, and a colorized console handler on the
    root logger (replacing any handlers already attached to it).

    Args:
        log_level: The logging level to use (default: INFO)
        name: Optional logger name (default: None, uses __name__)

    Returns:
        A configured structlog logger instance
    """

    def exception_handler(logger, method_name, event_dict):
        """Custom processor to handle uncaught exceptions."""
        # Check if there's an exc_info that needs to be processed
        if event_dict.get("exc_info"):
            # If it's already a tuple, use it directly
            if isinstance(event_dict["exc_info"], tuple):
                exc_type, exc_value, tb = event_dict["exc_info"]
            else:
                exc_type, exc_value, tb = sys.exc_info()

            # Guard: ``exc_info=True`` outside an active ``except`` block
            # makes sys.exc_info() return (None, None, None); without this
            # check, ``exc_type.__name__`` would raise AttributeError and
            # crash the logging call itself.
            if exc_type is not None:
                event_dict["exception_type"] = exc_type.__name__
                event_dict["exception_message"] = str(exc_value)
                event_dict["traceback"] = True

        return event_dict

    # Configure structlog: stdlib-compatible chain ending in
    # ProcessorFormatter.wrap_for_formatter so records flow through the
    # standard logging handlers configured below.
    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            exception_handler,  # Add our custom exception handler
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    # Set up system-wide exception handling
    def handle_exception(exc_type, exc_value, traceback):
        """Handle any uncaught exception."""
        if issubclass(exc_type, KeyboardInterrupt):
            # Let KeyboardInterrupt pass through
            sys.__excepthook__(exc_type, exc_value, traceback)
            return

        logger = structlog.get_logger()
        logger.error(
            "Uncaught exception",
            exc_info=(exc_type, exc_value, traceback),
        )

    # Install exception handlers
    sys.excepthook = handle_exception

    # Create formatter for standard library logging
    formatter = structlog.stdlib.ProcessorFormatter(
        processor=structlog.dev.ConsoleRenderer(
            colors=True,
            force_colors=True,
            level_styles={
                "critical": structlog.dev.RED,
                "exception": structlog.dev.RED,
                "error": structlog.dev.RED,
                "warn": structlog.dev.YELLOW,
                "warning": structlog.dev.YELLOW,
                "info": structlog.dev.GREEN,
                "debug": structlog.dev.BLUE,
            },
        ),
    )

    # Setup handler with newlines: prefix each record with a blank line
    # so multi-line structured output stays visually separated.
    class NewlineStreamHandler(logging.StreamHandler):
        def emit(self, record):
            try:
                msg = self.format(record)
                stream = self.stream
                stream.write("\n" + msg + self.terminator)
                self.flush()
            except Exception:
                self.handleError(record)

    # Use our custom handler
    stream_handler = NewlineStreamHandler(sys.stdout)
    stream_handler.setFormatter(formatter)
    stream_handler.setLevel(log_level)

    # Configure root logger, replacing any handlers installed earlier
    # (e.g. by basicConfig) so output is not duplicated.
    root_logger = logging.getLogger()
    if root_logger.hasHandlers():
        root_logger.handlers.clear()
    root_logger.addHandler(stream_handler)
    root_logger.setLevel(log_level)

    # Return a configured logger
    return structlog.get_logger(name if name else __name__)

View file

@ -13,7 +13,6 @@ import matplotlib.pyplot as plt
import http.server
import socketserver
from threading import Thread
import logging
import sys
from cognee.base_config import get_base_config
@ -235,9 +234,6 @@ async def render_graph(
# return df.replace([np.inf, -np.inf, np.nan], None)
logging.basicConfig(level=logging.INFO)
async def convert_to_serializable_graph(G):
"""
Convert a graph into a serializable format with stringified node and edge attributes.
@ -323,23 +319,6 @@ def graph_to_tuple(graph):
return (nodes, edges)
def setup_logging(log_level=logging.INFO):
"""Sets up the logging configuration."""
formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s\n")
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setFormatter(formatter)
stream_handler.setLevel(log_level)
root_logger = logging.getLogger()
if root_logger.hasHandlers():
root_logger.handlers.clear()
root_logger.addHandler(stream_handler)
root_logger.setLevel(log_level)
def start_visualization_server(
host="0.0.0.0", port=8001, handler_class=http.server.SimpleHTTPRequestHandler
):

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from typing import List
from pydantic import BaseModel
@ -9,7 +9,7 @@ from cognee.modules.engine.models.EntityType import EntityType
from cognee.infrastructure.llm.prompts import read_query_prompt, render_prompt
from cognee.infrastructure.llm.get_llm_client import get_llm_client
logger = logging.getLogger("llm_entity_extractor")
logger = get_logger("llm_entity_extractor")
class EntityList(BaseModel):

View file

@ -1,5 +1,5 @@
import json
import logging
from cognee.shared.logging_utils import get_logger
import os
import re
from typing import Dict, List, Pattern, Any
@ -7,7 +7,7 @@ from typing import Dict, List, Pattern, Any
from cognee.modules.engine.models.EntityType import EntityType
from cognee.root_dir import get_absolute_path
logger = logging.getLogger("regex_entity_config")
logger = get_logger("regex_entity_config")
class RegexEntityConfig:

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from typing import List, Optional
from cognee.infrastructure.entities.BaseEntityExtractor import BaseEntityExtractor
@ -6,7 +6,7 @@ from cognee.modules.engine.models import Entity
from cognee.root_dir import get_absolute_path
from cognee.tasks.entity_completion.entity_extractors.regex_entity_config import RegexEntityConfig
logger = logging.getLogger("regex_entity_extractor")
logger = get_logger("regex_entity_extractor")
class RegexEntityExtractor(BaseEntityExtractor):

View file

@ -2,7 +2,7 @@
import csv
import json
import logging
from cognee.shared.logging_utils import get_logger
from datetime import datetime, timezone
from fastapi import status
from typing import Any, Dict, List, Optional, Union, Type
@ -27,7 +27,7 @@ from cognee.tasks.graph.models import NodeModel, GraphOntology
from cognee.shared.data_models import KnowledgeGraph
from cognee.modules.engine.utils import generate_node_id, generate_node_name
logger = logging.getLogger("task:infer_data_ontology")
logger = get_logger("task:infer_data_ontology")
async def extract_ontology(content: str, response_model: Type[BaseModel]):

View file

@ -5,9 +5,9 @@ from uuid import NAMESPACE_OID, uuid5
from cognee.infrastructure.engine import DataPoint
from cognee.shared.CodeGraphEntities import CodeFile, CodePart
from cognee.tasks.repo_processor.extract_code_parts import extract_code_parts
import logging
from cognee.shared.logging_utils import get_logger
logger = logging.getLogger(__name__)
logger = get_logger()
def _add_code_parts_nodes_and_edges(code_file: CodeFile, part_type, code_parts) -> None:

View file

@ -1,7 +1,7 @@
from typing import Dict, List
import logging
from cognee.shared.logging_utils import get_logger, ERROR
logger = logging.getLogger(__name__)
logger = get_logger(level=ERROR)
def _extract_parts_from_module(module, parts_dict: Dict[str, List[str]]) -> Dict[str, List[str]]:

View file

@ -1,11 +1,11 @@
import os
import logging
import aiofiles
import importlib
from typing import AsyncGenerator, Optional
from uuid import NAMESPACE_OID, uuid5
import tree_sitter_python as tspython
from tree_sitter import Language, Node, Parser, Tree
from cognee.shared.logging_utils import get_logger
from cognee.low_level import DataPoint
from cognee.shared.CodeGraphEntities import (
@ -15,7 +15,7 @@ from cognee.shared.CodeGraphEntities import (
ClassDefinition,
)
logger = logging.getLogger(__name__)
logger = get_logger()
class FileParser:

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger
from typing import AsyncGenerator, Generator
from uuid import NAMESPACE_OID, uuid5
@ -7,7 +7,7 @@ from cognee.infrastructure.engine import DataPoint
from cognee.shared.CodeGraphEntities import CodeFile, CodePart, SourceCodeChunk
from cognee.infrastructure.llm import get_max_chunk_tokens
logger = logging.getLogger(__name__)
logger = get_logger()
def _get_naive_subchunk_token_counts(

View file

@ -2,9 +2,9 @@ import os
from tqdm import tqdm
import logging
from cognee.shared.logging_utils import get_logger
logger = logging.getLogger(__name__)
logger = get_logger()
_NODE_TYPE_MAP = {
"funcdef": "func_def",

View file

@ -1,10 +1,10 @@
import logging
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.engine import DataPoint
logger = logging.getLogger("index_data_points")
logger = get_logger("index_data_points")
async def index_data_points(data_points: list[DataPoint]):

View file

@ -1,10 +1,12 @@
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from collections import Counter
from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.modules.graph.models.EdgeType import EdgeType
logger = get_logger(level=ERROR)
async def index_graph_edges():
"""
@ -34,7 +36,7 @@ async def index_graph_edges():
vector_engine = get_vector_engine()
graph_engine = await get_graph_engine()
except Exception as e:
logging.error("Failed to initialize engines: %s", e)
logger.error("Failed to initialize engines: %s", e)
raise RuntimeError("Initialization error") from e
_, edges_data = await graph_engine.get_graph_data()

View file

@ -1,4 +1,4 @@
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from collections import Counter
from cognee.tasks.temporal_awareness.graphiti_model import GraphitiNode
@ -6,6 +6,8 @@ from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.modules.graph.models.EdgeType import EdgeType
logger = get_logger(level=ERROR)
async def index_and_transform_graphiti_nodes_and_edges():
try:
@ -15,7 +17,7 @@ async def index_and_transform_graphiti_nodes_and_edges():
vector_engine = get_vector_engine()
graph_engine = await get_graph_engine()
except Exception as e:
logging.error("Failed to initialize engines: %s", e)
logger.error("Failed to initialize engines: %s", e)
raise RuntimeError("Initialization error") from e
await graph_engine.query("""MATCH (n) SET n.id = n.uuid RETURN n""")

View file

@ -1,5 +1,5 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
@ -7,7 +7,7 @@ from cognee.modules.data.models import Data
from cognee.modules.search.types import SearchType
from cognee.modules.users.methods import get_default_user
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def test_local_file_deletion(data_text, file_location):

View file

@ -1,12 +1,12 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
from cognee.modules.search.types import SearchType
from cognee.shared.utils import render_graph
from cognee.low_level import DataPoint
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def main():

View file

@ -1,12 +1,12 @@
import hashlib
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
from cognee.infrastructure.databases.relational import get_relational_engine
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def test_deduplication():

View file

@ -1,11 +1,11 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
from cognee.modules.search.types import SearchType
# from cognee.shared.utils import render_graph
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def main():

View file

@ -1,5 +1,5 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
import shutil
@ -8,7 +8,7 @@ from cognee.modules.retrieval.utils.brute_force_triplet_search import brute_forc
from cognee.infrastructure.engine import DataPoint
from uuid import uuid4
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def main():

View file

@ -1,10 +1,10 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
from cognee.modules.search.types import SearchType
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def main():

View file

@ -1,10 +1,10 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
from cognee.modules.search.types import SearchType
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def main():

View file

@ -1,11 +1,11 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
from cognee.modules.search.types import SearchType
from cognee.modules.retrieval.utils.brute_force_triplet_search import brute_force_triplet_search
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def main():

View file

@ -1,5 +1,5 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
@ -8,7 +8,7 @@ from cognee.modules.search.types import SearchType
from cognee.modules.retrieval.utils.brute_force_triplet_search import brute_force_triplet_search
from cognee.modules.users.methods import get_default_user
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def test_local_file_deletion(data_text, file_location):

View file

@ -1,11 +1,11 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
from cognee.modules.search.types import SearchType
from cognee.modules.retrieval.utils.brute_force_triplet_search import brute_force_triplet_search
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def main():

View file

@ -1,11 +1,11 @@
import os
import logging
from cognee.shared.logging_utils import get_logger
import pathlib
import cognee
from cognee.modules.search.types import SearchType
from cognee.modules.retrieval.utils.brute_force_triplet_search import brute_force_triplet_search
logging.basicConfig(level=logging.DEBUG)
logger = get_logger()
async def main():

View file

@ -1,9 +1,8 @@
import argparse
import asyncio
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from cognee.api.v1.cognify.code_graph_pipeline import run_code_graph_pipeline
from cognee.shared.utils import setup_logging
async def main(repo_path, include_docs):
@ -33,7 +32,7 @@ def parse_args():
if __name__ == "__main__":
setup_logging(logging.ERROR)
logger = get_logger(level=ERROR)
args = parse_args()

View file

@ -1,9 +1,8 @@
import cognee
import asyncio
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from cognee.api.v1.search import SearchType
from cognee.shared.utils import setup_logging
job_1 = """
CV 1: Relevant
@ -197,7 +196,7 @@ async def main(enable_steps):
if __name__ == "__main__":
setup_logging(logging.ERROR)
logger = get_logger(level=ERROR)
rebuild_kg = True
retrieve = True

View file

@ -1,9 +1,8 @@
import cognee
import asyncio
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from cognee.api.v1.search import SearchType
from cognee.shared.utils import setup_logging
from cognee.modules.retrieval.EntityCompletionRetriever import EntityCompletionRetriever
from cognee.modules.retrieval.context_providers.TripletSearchContextProvider import (
TripletSearchContextProvider,
@ -143,7 +142,7 @@ async def main(enable_steps):
if __name__ == "__main__":
setup_logging(logging.ERROR)
logger = get_logger(level=ERROR)
rebuild_kg = True
retrieve = True

View file

@ -1,9 +1,8 @@
import asyncio
import cognee
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from cognee.modules.pipelines import Task, run_tasks
from cognee.shared.utils import setup_logging
from cognee.tasks.temporal_awareness import build_graph_with_temporal_awareness
from cognee.infrastructure.databases.relational import (
create_db_and_tables as create_relational_db_and_tables,
@ -71,7 +70,7 @@ async def main():
if __name__ == "__main__":
setup_logging(logging.ERROR)
logger = get_logger(level=ERROR)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:

View file

@ -1,11 +1,10 @@
import os
import asyncio
import pathlib
import logging
from cognee.shared.logging_utils import get_logger, ERROR
import cognee
from cognee.api.v1.search import SearchType
from cognee.shared.utils import setup_logging
# Prerequisites:
# 1. Copy `.env.template` and rename it to `.env`.
@ -47,7 +46,7 @@ async def main():
if __name__ == "__main__":
setup_logging(logging.ERROR)
logger = get_logger(level=ERROR)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:

View file

@ -1,11 +1,10 @@
import cognee
import asyncio
import logging
from cognee.shared.logging_utils import get_logger
import os
from cognee.api.v1.search import SearchType
from cognee.api.v1.visualize.visualize import visualize_graph
from cognee.shared.utils import setup_logging
text_1 = """
1. Audi
@ -79,7 +78,7 @@ async def main():
if __name__ == "__main__":
setup_logging(logging.INFO)
logger = get_logger()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

View file

@ -1,8 +1,7 @@
import asyncio
import cognee
import logging
from cognee.shared.logging_utils import get_logger, ERROR
from cognee.api.v1.search import SearchType
from cognee.shared.utils import setup_logging
# Prerequisites:
# 1. Copy `.env.template` and rename it to `.env`.
@ -68,7 +67,7 @@ async def main():
if __name__ == "__main__":
setup_logging(logging.ERROR)
logger = get_logger(level=ERROR)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:

View file

@ -1,11 +1,11 @@
import modal
import os
import logging
from cognee.shared.logging_utils import get_logger
import asyncio
import cognee
import signal
from cognee.shared.utils import setup_logging
from cognee.modules.search.types import SearchType
app = modal.App("cognee-runner")
@ -22,7 +22,8 @@ image = (
@app.function(image=image, concurrency_limit=10)
async def entry(text: str, query: str):
setup_logging(logging.ERROR)
logger = get_logger()
logger.info("Initializing Cognee")
await cognee.prune.prune_data()
await cognee.prune.prune_system(metadata=True)
await cognee.add(text)
@ -38,6 +39,7 @@ async def entry(text: str, query: str):
@app.local_entrypoint()
async def main():
logger = get_logger()
text_queries = [
{
"text": "NASA's Artemis program aims to return humans to the Moon by 2026, focusing on sustainable exploration and preparing for future Mars missions.",
@ -85,10 +87,10 @@ async def main():
results = await asyncio.gather(*tasks)
print("\nFinal Results:")
logger.info("Final Results:")
for result in results:
print(result)
print("----")
logger.info(result)
logger.info("----")
os.kill(os.getpid(), signal.SIGTERM)

View file

@ -25,10 +25,9 @@
"outputs": [],
"source": [
"import cognee\n",
"import logging\n",
"from cognee.shared.logging_utils import get_logger, ERROR\n",
"import warnings\n",
"from cognee.modules.pipelines import Task, run_tasks\n",
"from cognee.shared.utils import setup_logging\n",
"from cognee.tasks.temporal_awareness import build_graph_with_temporal_awareness\n",
"from cognee.infrastructure.databases.relational import (\n",
" create_db_and_tables as create_relational_db_and_tables,\n",
@ -128,7 +127,7 @@
"outputs": [],
"source": [
"# 🔧 Setting Up Logging to Suppress Errors\n",
"setup_logging(logging.ERROR) # Keeping logs clean and focused\n",
"logger = get_logger(level=ERROR) # Keeping logs clean and focused\n",
"\n",
"# 🧹 Pruning Old Data and Metadata\n",
"await cognee.prune.prune_data() # Removing outdated data\n",

849
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@ -81,6 +81,7 @@ plotly = {version = "^6.0.0", optional = true}
gdown = {version = "^5.2.0", optional = true}
qasync = {version = "^0.27.1", optional = true}
graphiti-core = {version = "^0.7.0", optional = true}
structlog = "^25.2.0"
[tool.poetry.extras]