Refactor imports (#675)

* Refactor imports

* Fix: Remove duplicate sentence-transformers dependency from dev requirements

* Refactor: Update optional import patterns across various modules for better type checking and error handling

* Update CONTRIBUTING.md

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

---------

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
Daniel Chalef 2025-07-05 08:57:07 -07:00 committed by GitHub
parent 4e3f3618fb
commit 513cfbf7b2
13 changed files with 2151 additions and 1967 deletions


@@ -116,7 +116,7 @@ Once you've found an issue tagged with "good first issue" or "help wanted," or p
We use several tools to maintain code quality:
- Ruff for linting and formatting
- Mypy for static type checking
- Pyright for static type checking
- Pytest for testing
Before submitting a pull request, please run:
@@ -127,6 +127,67 @@ make check
This command will format your code, run linting checks, and execute tests.
## Third-Party Integrations
When contributing integrations for third-party services (LLM providers, embedding services, databases, etc.), please follow these patterns:
### Optional Dependencies
All third-party integrations must be optional dependencies to keep the core library lightweight. Follow this pattern:
1. **Add to `pyproject.toml`**: Define your dependency as an optional extra AND include it in the dev extra:
```toml
[project.optional-dependencies]
your-service = ["your-package>=1.0.0"]
dev = [
    # ... existing dev dependencies
    "your-package>=1.0.0",  # Include all optional extras here
    # ... other dependencies
]
```
2. **Use TYPE_CHECKING pattern**: In your integration module, import dependencies conditionally:
```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import your_package
    from your_package import SomeType
else:
    try:
        import your_package
        from your_package import SomeType
    except ImportError:
        raise ImportError(
            'your-package is required for YourServiceClient. '
            'Install it with: pip install graphiti-core[your-service]'
        ) from None
```
3. **Benefits of this pattern**:
- Fast startup times (no import overhead during type checking)
- Clear error messages with installation instructions
- Proper type hints for development
- Consistent user experience
4. **Do NOT**:
- Add optional imports to `__init__.py` files (see the `__init__.py` sketch after this list)
- Use direct imports without error handling
- Include optional dependencies in the main `dependencies` list
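For the first point above, a minimal sketch of what a subpackage `__init__.py` can look like when it stays free of optional imports (the module and class names here are illustrative, not taken from the codebase):
```python
# graphiti_core/your_subpackage/__init__.py  (illustrative sketch)
# Only re-export clients whose dependencies ship with the core library.
from .client import BaseServiceClient

__all__ = ['BaseServiceClient']

# Optional clients are intentionally NOT imported here. Users who installed the
# extra import them from their own module, which runs the guarded import shown above:
#   from graphiti_core.your_subpackage.your_service_client import YourServiceClient
```
This mirrors the `cross_encoder` and `driver` `__init__.py` changes in this diff, where clients that depend on optional packages are dropped from `__all__`.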
### Integration Structure
- Place LLM clients in `graphiti_core/llm_client/`
- Place embedding clients in `graphiti_core/embedder/`
- Place database drivers in `graphiti_core/driver/`
- Follow existing naming conventions (e.g., `your_service_client.py`); a skeleton sketch follows this list
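Putting placement, naming, and the guarded-import pattern together, a hedged skeleton of a new embedder client might look roughly like this. The file name, `your_package`, and `YourServiceEmbedder` are placeholders, and only the base class `EmbedderClient` (which the embedder modules in this diff import from `graphiti_core/embedder/client.py`) is taken from the codebase; the exact methods to implement are not shown here:
```python
# graphiti_core/embedder/your_service_client.py  (hypothetical module)
from typing import TYPE_CHECKING

from .client import EmbedderClient

if TYPE_CHECKING:
    import your_package
else:
    try:
        import your_package
    except ImportError:
        raise ImportError(
            'your-package is required for YourServiceEmbedder. '
            'Install it with: pip install graphiti-core[your-service]'
        ) from None


class YourServiceEmbedder(EmbedderClient):
    """Sketch only: wire up the third-party SDK and implement the EmbedderClient interface."""

    def __init__(self, api_key: str | None = None):
        # e.g. self.client = your_package.Client(api_key=api_key)
        self.api_key = api_key
```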
### Testing
- Add comprehensive tests in the appropriate `tests/` subdirectory
- Mark integration tests with `_int` suffix if they require external services
- Include both unit tests and integration tests where applicable (a sketch of an integration test follows below)
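As a rough illustration of the `_int` convention, an integration test module for the hypothetical embedder above might be named `tests/embedder/test_your_service_client_int.py` and skip itself when the external service is not configured. The environment variable name and the `create` call are assumptions, not an existing API, and async test support via pytest-asyncio is assumed:
```python
# tests/embedder/test_your_service_client_int.py  (hypothetical integration test)
import os

import pytest

# Skip the whole module unless credentials for the external service are present.
pytestmark = pytest.mark.skipif(
    os.environ.get('YOUR_SERVICE_API_KEY') is None,
    reason='YOUR_SERVICE_API_KEY not set; skipping integration tests',
)


@pytest.mark.asyncio
async def test_create_embedding_returns_vector():
    from graphiti_core.embedder.your_service_client import YourServiceEmbedder  # hypothetical module

    embedder = YourServiceEmbedder(api_key=os.environ['YOUR_SERVICE_API_KEY'])
    # 'create' is assumed to be the EmbedderClient method that returns an embedding vector.
    vector = await embedder.create('hello world')
    assert isinstance(vector, list)
    assert len(vector) > 0
```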
# Questions?
Stuck on a contribution or have a half-formed idea? Come say hello in our [Discord server](https://discord.com/invite/W8Kw6bsgXQ). Whether you're ready to contribute or just want to learn more, we're happy to have you! It's faster than GitHub issues and you'll find both maintainers and fellow contributors ready to help.


@@ -15,7 +15,6 @@ limitations under the License.
"""
from .client import CrossEncoderClient
from .gemini_reranker_client import GeminiRerankerClient
from .openai_reranker_client import OpenAIRerankerClient
__all__ = ['CrossEncoderClient', 'GeminiRerankerClient', 'OpenAIRerankerClient']
__all__ = ['CrossEncoderClient', 'OpenAIRerankerClient']


@@ -15,8 +15,18 @@ limitations under the License.
"""
import asyncio
from typing import TYPE_CHECKING
from sentence_transformers import CrossEncoder
if TYPE_CHECKING:
    from sentence_transformers import CrossEncoder
else:
    try:
        from sentence_transformers import CrossEncoder
    except ImportError:
        raise ImportError(
            'sentence-transformers is required for BGERerankerClient. '
            'Install it with: pip install graphiti-core[sentence-transformers]'
        ) from None
from graphiti_core.cross_encoder.client import CrossEncoderClient


@@ -16,14 +16,25 @@ limitations under the License.
import logging
import re
from google import genai # type: ignore
from google.genai import types # type: ignore
from typing import TYPE_CHECKING
from ..helpers import semaphore_gather
from ..llm_client import LLMConfig, RateLimitError
from .client import CrossEncoderClient
if TYPE_CHECKING:
    from google import genai
    from google.genai import types
else:
    try:
        from google import genai
        from google.genai import types
    except ImportError:
        raise ImportError(
            'google-genai is required for GeminiRerankerClient. '
            'Install it with: pip install graphiti-core[google-genai]'
        ) from None
logger = logging.getLogger(__name__)
DEFAULT_MODEL = 'gemini-2.5-flash-lite-preview-06-17'
@@ -33,7 +44,7 @@ class GeminiRerankerClient(CrossEncoderClient):
    def __init__(
        self,
        config: LLMConfig | None = None,
        client: genai.Client | None = None,
        client: 'genai.Client | None' = None,
    ):
        """
        Initialize the GeminiRerankerClient with the provided configuration and client.
@@ -46,6 +57,7 @@ class GeminiRerankerClient(CrossEncoderClient):
            config (LLMConfig | None): The configuration for the LLM client, including API key, model, base URL, temperature, and max tokens.
            client (genai.Client | None): An optional async client instance to use. If not provided, a new genai.Client is created.
        """
        if config is None:
            config = LLMConfig()


@@ -14,7 +14,6 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
from falkordb import FalkorDB
from neo4j import Neo4jDriver
__all__ = ['Neo4jDriver', 'FalkorDB']
__all__ = ['Neo4jDriver']


@@ -16,10 +16,21 @@ limitations under the License.
import logging
from datetime import datetime
from typing import Any
from typing import TYPE_CHECKING, Any
from falkordb import Graph as FalkorGraph # type: ignore
from falkordb.asyncio import FalkorDB # type: ignore
if TYPE_CHECKING:
    from falkordb import Graph as FalkorGraph
    from falkordb.asyncio import FalkorDB
else:
    try:
        from falkordb import Graph as FalkorGraph
        from falkordb.asyncio import FalkorDB
    except ImportError:
        # If falkordb is not installed, raise an ImportError
        raise ImportError(
            'falkordb is required for FalkorDriver. '
            'Install it with: pip install graphiti-core[falkordb]'
        ) from None
from graphiti_core.driver.driver import GraphDriver, GraphDriverSession
from graphiti_core.helpers import DEFAULT_DATABASE


@@ -15,9 +15,21 @@ limitations under the License.
"""
from collections.abc import Iterable
from typing import TYPE_CHECKING
if TYPE_CHECKING:
    from google import genai
    from google.genai import types
else:
    try:
        from google import genai
        from google.genai import types
    except ImportError:
        raise ImportError(
            'google-genai is required for GeminiEmbedder. '
            'Install it with: pip install graphiti-core[google-genai]'
        ) from None
from google import genai # type: ignore
from google.genai import types # type: ignore
from pydantic import Field
from .client import EmbedderClient, EmbedderConfig


@@ -15,8 +15,19 @@ limitations under the License.
"""
from collections.abc import Iterable
from typing import TYPE_CHECKING
if TYPE_CHECKING:
    import voyageai
else:
    try:
        import voyageai
    except ImportError:
        raise ImportError(
            'voyageai is required for VoyageAIEmbedderClient. '
            'Install it with: pip install graphiti-core[voyageai]'
        ) from None
import voyageai # type: ignore
from pydantic import Field
from .client import EmbedderClient, EmbedderConfig


@@ -19,11 +19,8 @@ import logging
import os
import typing
from json import JSONDecodeError
from typing import Literal
from typing import TYPE_CHECKING, Literal
import anthropic
from anthropic import AsyncAnthropic
from anthropic.types import MessageParam, ToolChoiceParam, ToolUnionParam
from pydantic import BaseModel, ValidationError
from ..prompts.models import Message
@@ -31,6 +28,22 @@ from .client import LLMClient
from .config import DEFAULT_MAX_TOKENS, LLMConfig, ModelSize
from .errors import RateLimitError, RefusalError
if TYPE_CHECKING:
    import anthropic
    from anthropic import AsyncAnthropic
    from anthropic.types import MessageParam, ToolChoiceParam, ToolUnionParam
else:
    try:
        import anthropic
        from anthropic import AsyncAnthropic
        from anthropic.types import MessageParam, ToolChoiceParam, ToolUnionParam
    except ImportError:
        raise ImportError(
            'anthropic is required for AnthropicClient. '
            'Install it with: pip install graphiti-core[anthropic]'
        ) from None
logger = logging.getLogger(__name__)
AnthropicModel = Literal[


@@ -17,10 +17,8 @@ limitations under the License.
import json
import logging
import typing
from typing import ClassVar
from typing import TYPE_CHECKING, ClassVar
from google import genai # type: ignore
from google.genai import types # type: ignore
from pydantic import BaseModel
from ..prompts.models import Message
@@ -28,6 +26,21 @@ from .client import MULTILINGUAL_EXTRACTION_RESPONSES, LLMClient
from .config import DEFAULT_MAX_TOKENS, LLMConfig, ModelSize
from .errors import RateLimitError
if TYPE_CHECKING:
    from google import genai
    from google.genai import types
else:
    try:
        from google import genai
        from google.genai import types
    except ImportError:
        # If gemini client is not installed, raise an ImportError
        raise ImportError(
            'google-genai is required for GeminiClient. '
            'Install it with: pip install graphiti-core[google-genai]'
        ) from None
logger = logging.getLogger(__name__)
DEFAULT_MODEL = 'gemini-2.5-flash'


@@ -17,10 +17,21 @@ limitations under the License.
import json
import logging
import typing
from typing import TYPE_CHECKING
import groq
from groq import AsyncGroq
from groq.types.chat import ChatCompletionMessageParam
if TYPE_CHECKING:
    import groq
    from groq import AsyncGroq
    from groq.types.chat import ChatCompletionMessageParam
else:
    try:
        import groq
        from groq import AsyncGroq
        from groq.types.chat import ChatCompletionMessageParam
    except ImportError:
        raise ImportError(
            'groq is required for GroqClient. Install it with: pip install graphiti-core[groq]'
        ) from None
from pydantic import BaseModel
from ..prompts.models import Message


@@ -30,11 +30,14 @@ anthropic = ["anthropic>=0.49.0"]
groq = ["groq>=0.2.0"]
google-genai = ["google-genai>=1.8.0"]
falkordb = ["falkordb>=1.1.2,<2.0.0"]
voyageai = ["voyageai>=0.2.3"]
sentence-transformers = ["sentence-transformers>=3.2.1"]
dev = [
"pyright>=1.1.380",
"groq>=0.2.0",
"anthropic>=0.49.0",
"google-genai>=1.8.0",
"falkordb>=1.1.2,<2.0.0",
"ipykernel>=6.29.5",
"jupyterlab>=4.2.4",
"diskcache-stubs>=5.6.3.6.20240818",

uv.lock (generated, 3911 changes): diff suppressed because it is too large.