Merge pull request #2298 from anouar-bm/feat/langfuse-observability
feat: Add optional Langfuse observability integration
This commit is contained in:
commit
6975e69e44
2 changed files with 18 additions and 1 deletions
|
|
@@ -11,7 +11,6 @@ if not pm.is_installed("openai"):
|
|||
pm.install("openai")
|
||||
|
||||
from openai import (
|
||||
AsyncOpenAI,
|
||||
APIConnectionError,
|
||||
RateLimitError,
|
||||
APITimeoutError,
|
||||
|
|
@@ -27,6 +26,19 @@ from lightrag.utils import (
|
|||
safe_unicode_decode,
|
||||
logger,
|
||||
)
|
||||
|
||||
# Try to import Langfuse for LLM observability (optional)
|
||||
# Falls back to standard OpenAI client if not available
|
||||
try:
|
||||
from langfuse.openai import AsyncOpenAI
|
||||
|
||||
LANGFUSE_ENABLED = True
|
||||
logger.info("Langfuse observability enabled for OpenAI client")
|
||||
except ImportError:
|
||||
from openai import AsyncOpenAI
|
||||
|
||||
LANGFUSE_ENABLED = False
|
||||
logger.debug("Langfuse not available, using standard OpenAI client")
|
||||
from lightrag.types import GPTKeywordExtractionFormat
|
||||
from lightrag.api import __api_version__
|
||||
|
||||
|
|
|
|||
|
|
@@ -113,6 +113,11 @@ offline = [
|
|||
"lightrag-hku[offline-docs,offline-storage,offline-llm]",
|
||||
]
|
||||
|
||||
observability = [
|
||||
# LLM observability and tracing dependencies
|
||||
"langfuse>=3.8.1",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
lightrag-server = "lightrag.api.lightrag_server:main"
|
||||
lightrag-gunicorn = "lightrag.api.run_with_gunicorn:main"
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue