docs: Update Ollama integration to use OpenAIGenericClient (#866)
- Replace OpenAIClient with OpenAIGenericClient in Ollama documentation - Add bash code block formatting for model installation commands - Update API key placeholder from 'abc' to 'ollama' for clarity - Add comment clarifying Ollama's OpenAI-compatible endpoint Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com> Co-authored-by: Daniel Chalef <danielchalef@users.noreply.github.com>
This commit is contained in:
parent
41c3da2440
commit
d62c203147
1 changed file with 7 additions and 5 deletions
12
README.md
12
README.md
|
|
@ -431,25 +431,27 @@ The Gemini reranker uses the `gemini-2.5-flash-lite-preview-06-17` model by defa
|
||||||
Graphiti supports Ollama for running local LLMs and embedding models via Ollama's OpenAI-compatible API. This is ideal for privacy-focused applications or when you want to avoid API costs.
|
Graphiti supports Ollama for running local LLMs and embedding models via Ollama's OpenAI-compatible API. This is ideal for privacy-focused applications or when you want to avoid API costs.
|
||||||
|
|
||||||
Install the models:
|
Install the models:
|
||||||
|
```bash
|
||||||
ollama pull deepseek-r1:7b # LLM
|
ollama pull deepseek-r1:7b # LLM
|
||||||
ollama pull nomic-embed-text # embeddings
|
ollama pull nomic-embed-text # embeddings
|
||||||
|
```
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from graphiti_core import Graphiti
|
from graphiti_core import Graphiti
|
||||||
from graphiti_core.llm_client.config import LLMConfig
|
from graphiti_core.llm_client.config import LLMConfig
|
||||||
from graphiti_core.llm_client.openai_client import OpenAIClient
|
from graphiti_core.llm_client.openai_generic_client import OpenAIGenericClient
|
||||||
from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
|
from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
|
||||||
from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
|
from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
|
||||||
|
|
||||||
# Configure Ollama LLM client
|
# Configure Ollama LLM client
|
||||||
llm_config = LLMConfig(
|
llm_config = LLMConfig(
|
||||||
api_key="abc", # Ollama doesn't require a real API key
|
api_key="ollama", # Ollama doesn't require a real API key, but some placeholder is needed
|
||||||
model="deepseek-r1:7b",
|
model="deepseek-r1:7b",
|
||||||
small_model="deepseek-r1:7b",
|
small_model="deepseek-r1:7b",
|
||||||
base_url="http://localhost:11434/v1", # Ollama provides this port
|
base_url="http://localhost:11434/v1", # Ollama's OpenAI-compatible endpoint
|
||||||
)
|
)
|
||||||
|
|
||||||
llm_client = OpenAIClient(config=llm_config)
|
llm_client = OpenAIGenericClient(config=llm_config)
|
||||||
|
|
||||||
# Initialize Graphiti with Ollama clients
|
# Initialize Graphiti with Ollama clients
|
||||||
graphiti = Graphiti(
|
graphiti = Graphiti(
|
||||||
|
|
@ -459,7 +461,7 @@ graphiti = Graphiti(
|
||||||
llm_client=llm_client,
|
llm_client=llm_client,
|
||||||
embedder=OpenAIEmbedder(
|
embedder=OpenAIEmbedder(
|
||||||
config=OpenAIEmbedderConfig(
|
config=OpenAIEmbedderConfig(
|
||||||
api_key="abc",
|
api_key="ollama", # Placeholder API key
|
||||||
embedding_model="nomic-embed-text",
|
embedding_model="nomic-embed-text",
|
||||||
embedding_dim=768,
|
embedding_dim=768,
|
||||||
base_url="http://localhost:11434/v1",
|
base_url="http://localhost:11434/v1",
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue