graphiti/pyproject.toml
supmo668 74a422369c feat: Add enhanced configuration system with multi-provider LLM support
This commit introduces a comprehensive configuration system that makes
Graphiti more flexible and easier to configure across different
providers and deployment environments.

## New Features

- **Unified Configuration**: New GraphitiConfig class with Pydantic validation
- **YAML Support**: Load configuration from .graphiti.yaml files
- **Multi-Provider Support**: Easy switching between OpenAI, Azure, Anthropic,
  Gemini, Groq, and LiteLLM
- **LiteLLM Integration**: Unified access to 100+ LLM providers
- **Factory Functions**: Automatic client creation from configuration
- **Full Backward Compatibility**: Existing code continues to work

## Configuration System

- graphiti_core/config/settings.py: Pydantic configuration classes
- graphiti_core/config/providers.py: Provider enumerations and defaults
- graphiti_core/config/factory.py: Factory functions for client creation

## LiteLLM Client

- graphiti_core/llm_client/litellm_client.py: New unified LLM client
- Support for Azure OpenAI, AWS Bedrock, Vertex AI, Ollama, vLLM, etc.
- Automatic structured output detection

## Documentation

- docs/CONFIGURATION.md: Comprehensive configuration guide
- examples/graphiti_config_example.yaml: Example configurations
- DOMAIN_AGNOSTIC_IMPROVEMENT_PLAN.md: Future improvement roadmap

## Tests

- tests/config/test_settings.py: 22 tests for configuration
- tests/config/test_factory.py: 12 tests for factories
- 33/34 tests passing (97%)

## Issues Addressed

- #1004: Azure OpenAI support
- #1006: Azure OpenAI reranker support
- #1007: vLLM/OpenAI-compatible provider stability
- #1074: Ollama embeddings support
- #995: Docker Azure OpenAI support

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-30 23:47:38 -08:00

108 lines
2.6 KiB
TOML

[project]
name = "graphiti-core"
description = "A temporal graph building library"
version = "0.24.1"
authors = [
    { name = "Paul Paliychuk", email = "paul@getzep.com" },
    { name = "Preston Rasmussen", email = "preston@getzep.com" },
    { name = "Daniel Chalef", email = "daniel@getzep.com" },
]
readme = "README.md"
license = "Apache-2.0"
requires-python = ">=3.10,<4"
# PEP 508 requirement strings, one per line, sorted alphabetically.
dependencies = [
    "diskcache>=5.6.3",
    "neo4j>=5.26.0",
    "numpy>=1.0.0",
    "openai>=1.91.0",
    "posthog>=3.0.0",
    "pydantic>=2.11.5",
    "python-dotenv>=1.0.1",
    "pyyaml>=6.0.0",
    "tenacity>=9.0.0",
]

[project.urls]
Homepage = "https://help.getzep.com/graphiti/graphiti/overview"
Repository = "https://github.com/getzep/graphiti"

[project.optional-dependencies]
# Provider / back-end extras, sorted alphabetically; the aggregate `dev`
# extra is kept last since it bundles most of the above plus tooling.
anthropic = ["anthropic>=0.49.0"]
falkordb = ["falkordb>=1.1.2,<2.0.0"]
google-genai = ["google-genai>=1.8.0"]
groq = ["groq>=0.2.0"]
kuzu = ["kuzu>=0.11.3"]
litellm = ["litellm>=1.52.0"]
neo4j-opensearch = ["boto3>=1.39.16", "opensearch-py>=3.0.0"]
neptune = ["boto3>=1.39.16", "langchain-aws>=0.2.29", "opensearch-py>=3.0.0"]
sentence-transformers = ["sentence-transformers>=3.2.1"]
tracing = ["opentelemetry-api>=1.20.0", "opentelemetry-sdk>=1.20.0"]
voyageai = ["voyageai>=0.2.3"]
dev = [
    "anthropic>=0.49.0",
    "boto3>=1.39.16",
    "diskcache-stubs>=5.6.3.6.20240818",
    "falkordb>=1.1.2,<2.0.0",
    "google-genai>=1.8.0",
    "groq>=0.2.0",
    "ipykernel>=6.29.5",
    "jupyterlab>=4.2.4",
    "kuzu>=0.11.3",
    "langchain-anthropic>=0.2.4",
    "langchain-aws>=0.2.29",
    "langchain-openai>=0.2.6",
    "langgraph>=0.2.15",
    "langsmith>=0.1.108",
    "opensearch-py>=3.0.0",
    "opentelemetry-sdk>=1.20.0",
    "pyright>=1.1.404",
    "pytest>=8.3.3",
    "pytest-asyncio>=0.24.0",
    "pytest-xdist>=3.6.1",
    "ruff>=0.7.1",
    "sentence-transformers>=3.2.1",
    "transformers>=4.45.2",
    "voyageai>=0.2.3",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.pytest.ini_options]
# Allow tests to import the package from the repository root without installing.
pythonpath = ["."]

[tool.ruff]
line-length = 100

[tool.ruff.lint]
select = [
    # pycodestyle
    "E",
    # Pyflakes
    "F",
    # pyupgrade
    "UP",
    # flake8-bugbear
    "B",
    # flake8-simplify
    "SIM",
    # isort
    "I",
]
# Line length is handled by the formatter (line-length above); ruff's docs
# recommend disabling E501 when the formatter is in use.
ignore = ["E501"]

[tool.ruff.lint.flake8-tidy-imports.banned-api]
# Required by Pydantic on Python < 3.12
"typing.TypedDict".msg = "Use typing_extensions.TypedDict instead."

[tool.ruff.format]
quote-style = "single"
indent-style = "space"
docstring-code-format = true

[tool.pyright]
include = ["graphiti_core"]
pythonVersion = "3.10"
typeCheckingMode = "basic"