feat: abstract logging tool integration (#787)
## Description

## DCO Affirmation

I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.

---------

Co-authored-by: Hande <159312713+hande-k@users.noreply.github.com>
Co-authored-by: Vasilije <8619304+Vasilije1990@users.noreply.github.com>
This commit is contained in:

parent 5970d964cf
commit 9729547f5a

7 changed files with 36 additions and 39 deletions
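The commit replaces the per-module "check the monitoring tool, then import langfuse" boilerplate with a shared observability helper. A minimal sketch of the before/after pattern, using only the names introduced in the hunks below (Observer and get_observe):

    # Before: every instrumented module repeated this conditional import.
    # monitoring = get_base_config().monitoring_tool
    # if monitoring == MonitoringTool.LANGFUSE:
    #     from langfuse.decorators import observe

    # After: the lookup lives in one place and modules just bind the result.
    from cognee.modules.observability.get_observe import get_observe

    observe = get_observe()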
@@ -1,18 +1,17 @@
 import os
 import pathlib
 import asyncio
-from cognee.shared.logging_utils import get_logger
 from uuid import NAMESPACE_OID, uuid5
+from cognee.shared.logging_utils import get_logger
+from cognee.modules.observability.get_observe import get_observe
 
 from cognee.api.v1.search import SearchType, search
 from cognee.api.v1.visualize.visualize import visualize_graph
 from cognee.base_config import get_base_config
 from cognee.modules.cognify.config import get_cognify_config
 from cognee.modules.pipelines import run_tasks
 from cognee.modules.pipelines.tasks.task import Task
 from cognee.modules.users.methods import get_default_user
-from cognee.shared.data_models import KnowledgeGraph, MonitoringTool
-from cognee.shared.utils import render_graph
+from cognee.shared.data_models import KnowledgeGraph
 from cognee.tasks.documents import classify_documents, extract_chunks_from_documents
 from cognee.tasks.graph import extract_graph_from_data
 from cognee.tasks.ingestion import ingest_data
@@ -22,11 +21,7 @@ from cognee.tasks.storage import add_data_points
 from cognee.tasks.summarization import summarize_text
 from cognee.infrastructure.llm import get_max_chunk_tokens
 
-monitoring = get_base_config().monitoring_tool
-
-if monitoring == MonitoringTool.LANGFUSE:
-    from langfuse.decorators import observe
-
+observe = get_observe()
 
 logger = get_logger("code_graph_pipeline")
 
@@ -1,14 +1,14 @@
 import os
 from typing import Optional
 from functools import lru_cache
-from pydantic_settings import BaseSettings, SettingsConfigDict
 from cognee.root_dir import get_absolute_path
-from cognee.shared.data_models import MonitoringTool
+from cognee.modules.observability.observers import Observer
+from pydantic_settings import BaseSettings, SettingsConfigDict
 
 
 class BaseConfig(BaseSettings):
     data_root_directory: str = get_absolute_path(".data_storage")
-    monitoring_tool: object = MonitoringTool.LANGFUSE
+    monitoring_tool: object = Observer.LANGFUSE
     graphistry_username: Optional[str] = os.getenv("GRAPHISTRY_USERNAME")
     graphistry_password: Optional[str] = os.getenv("GRAPHISTRY_PASSWORD")
     langfuse_public_key: Optional[str] = os.getenv("LANGFUSE_PUBLIC_KEY")
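Downstream code now reads the configured observer from the settings object; a hedged sketch of such a check, using only the field and enum shown in the hunk above:

    from cognee.base_config import get_base_config
    from cognee.modules.observability.observers import Observer

    # monitoring_tool defaults to Observer.LANGFUSE, so this branch runs out of the box.
    if get_base_config().monitoring_tool == Observer.LANGFUSE:
        print("Langfuse tracing is enabled")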
@@ -1,9 +1,10 @@
-from typing import Type, Optional
-from pydantic import BaseModel
-from cognee.shared.logging_utils import get_logger
 import litellm
+from pydantic import BaseModel
+from typing import Type, Optional
 from litellm import acompletion, JSONSchemaValidationError
-from cognee.shared.data_models import MonitoringTool
+
+from cognee.shared.logging_utils import get_logger
+from cognee.modules.observability.get_observe import get_observe
 from cognee.exceptions import InvalidValueError
 from cognee.infrastructure.llm.llm_interface import LLMInterface
 from cognee.infrastructure.llm.prompts import read_query_prompt
@@ -11,14 +12,9 @@ from cognee.infrastructure.llm.rate_limiter import (
     rate_limit_async,
     sleep_and_retry_async,
 )
-from cognee.base_config import get_base_config
 
 logger = get_logger()
-
-monitoring = get_base_config().monitoring_tool
-
-if monitoring == MonitoringTool.LANGFUSE:
-    from langfuse.decorators import observe
+observe = get_observe()
 
 
 class GeminiAdapter(LLMInterface):
@@ -1,14 +1,11 @@
 import os
 import base64
 from pathlib import Path
-from typing import Type
-
 import litellm
 import instructor
+from typing import Type
 from pydantic import BaseModel
-
 from cognee.modules.data.processing.document_types.open_data_file import open_data_file
-from cognee.shared.data_models import MonitoringTool
 from cognee.exceptions import InvalidValueError
 from cognee.infrastructure.llm.llm_interface import LLMInterface
 from cognee.infrastructure.llm.prompts import read_query_prompt
@@ -18,12 +15,9 @@ from cognee.infrastructure.llm.rate_limiter import (
     sleep_and_retry_async,
     sleep_and_retry_sync,
 )
-from cognee.base_config import get_base_config
+from cognee.modules.observability.get_observe import get_observe
 
-monitoring = get_base_config().monitoring_tool
-
-if monitoring == MonitoringTool.LANGFUSE:
-    from langfuse.decorators import observe
+observe = get_observe()
 
 
 class OpenAIAdapter(LLMInterface):
cognee/modules/observability/get_observe.py (new file, 11 lines)

@@ -0,0 +1,11 @@
+from cognee.base_config import get_base_config
+from .observers import Observer
+
+
+def get_observe():
+    monitoring = get_base_config().monitoring_tool
+
+    if monitoring == Observer.LANGFUSE:
+        from langfuse.decorators import observe
+
+        return observe
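A hedged usage sketch for the new helper: the modified modules call it once at import time and bind the result to observe. The decorated function below is illustrative, not part of the codebase, and the decoration only works when monitoring_tool is Observer.LANGFUSE, since get_observe() implicitly returns None for every other observer:

    from cognee.modules.observability.get_observe import get_observe

    observe = get_observe()

    @observe  # langfuse.decorators.observe; raises TypeError if observe is None
    async def traced_generation(prompt: str) -> str:
        # ... call the LLM and return its text ...
        return prompt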
cognee/modules/observability/observers.py (new file, 9 lines)

@@ -0,0 +1,9 @@
+from enum import Enum
+
+
+class Observer(str, Enum):
+    """Monitoring tools"""
+
+    LANGFUSE = "langfuse"
+    LLMLITE = "llmlite"
+    LANGSMITH = "langsmith"
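A short note on the enum design: because Observer subclasses str, its members compare equal to their raw string values, which keeps plain config strings and enum members interchangeable. A minimal sketch:

    from cognee.modules.observability.observers import Observer

    assert Observer.LANGFUSE == "langfuse"              # str subclass: equal to its value
    assert Observer("langsmith") is Observer.LANGSMITH  # lookup by value
    print([observer.value for observer in Observer])    # ['langfuse', 'llmlite', 'langsmith']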
@@ -350,11 +350,3 @@ class ChunkSummaries(BaseModel):
     """Relevant summary and chunk id"""
 
     summaries: List[ChunkSummary]
-
-
-class MonitoringTool(str, Enum):
-    """Monitoring tools"""
-
-    LANGFUSE = "langfuse"
-    LLMLITE = "llmlite"
-    LANGSMITH = "langsmith"