parent
a7ae6784c8
commit
587f1b9876
1 changed files with 49 additions and 0 deletions
49
README.md
49
README.md
|
|
@ -298,6 +298,55 @@ graphiti = Graphiti(
|
||||||
# Now you can use Graphiti with Google Gemini
|
# Now you can use Graphiti with Google Gemini
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Using Graphiti with Ollama (Local LLM)
|
||||||
|
|
||||||
|
Graphiti supports Ollama for running local LLMs and embedding models via Ollama's OpenAI-compatible API. This is ideal for privacy-focused applications or when you want to avoid API costs.
|
||||||
|
|
||||||
|
|
||||||
|
Install the required models with Ollama:
|
||||||
|
ollama pull deepseek-r1:7b # LLM
|
||||||
|
ollama pull nomic-embed-text # embeddings
|
||||||
|
|
||||||
|
```python
|
||||||
|
from graphiti_core import Graphiti
|
||||||
|
from graphiti_core.llm_client.config import LLMConfig
|
||||||
|
from graphiti_core.llm_client.openai_client import OpenAIClient
|
||||||
|
from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
|
||||||
|
from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
|
||||||
|
|
||||||
|
# Configure Ollama LLM client
|
||||||
|
llm_config = LLMConfig(
|
||||||
|
api_key="abc", # Ollama doesn't require a real API key
|
||||||
|
model="deepseek-r1:7b",
|
||||||
|
small_model="deepseek-r1:7b",
|
||||||
|
    base_url="http://localhost:11434/v1", # Ollama's default OpenAI-compatible API endpoint
|
||||||
|
)
|
||||||
|
|
||||||
|
llm_client = OpenAIClient(config=llm_config)
|
||||||
|
|
||||||
|
# Initialize Graphiti with Ollama clients
|
||||||
|
graphiti = Graphiti(
|
||||||
|
"bolt://localhost:7687",
|
||||||
|
"neo4j",
|
||||||
|
"password",
|
||||||
|
llm_client=llm_client,
|
||||||
|
embedder=OpenAIEmbedder(
|
||||||
|
config=OpenAIEmbedderConfig(
|
||||||
|
api_key="abc",
|
||||||
|
embedding_model="nomic-embed-text",
|
||||||
|
embedding_dim=768,
|
||||||
|
base_url="http://localhost:11434/v1",
|
||||||
|
)
|
||||||
|
),
|
||||||
|
cross_encoder=OpenAIRerankerClient(client=llm_client, config=llm_config),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Now you can use Graphiti with local Ollama models
|
||||||
|
```
|
||||||
|
|
||||||
|
Ensure Ollama is running (`ollama serve`) and that you have pulled the models you want to use.
|
||||||
|
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
- [Guides and API documentation](https://help.getzep.com/graphiti).
|
- [Guides and API documentation](https://help.getzep.com/graphiti).
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue