diff --git a/README.md b/README.md
index e11e7072..005e0b0f 100644
--- a/README.md
+++ b/README.md
@@ -16,7 +16,7 @@ Graphiti
[](https://github.com/getzep/Graphiti/actions/workflows/typecheck.yml)
[](https://codespaces.new/getzep/Graphiti)
-:star: *Help us reach more developers and grow the Graphiti community. Star this repo!*
+:star: _Help us reach more developers and grow the Graphiti community. Star this repo!_
Graphiti builds dynamic, temporally aware Knowledge Graphs that represent complex, evolving relationships between
@@ -180,6 +180,7 @@ graphiti.close()
```
## Graph Service
+
The `server` directory contains an API service for interacting with the Graphiti API. It is built using FastAPI.
Please see the [server README](./server/README.md) for more information.
@@ -195,6 +196,49 @@ to enable Neo4j's parallel runtime feature for several of our search queries.
Note that this feature is not supported for Neo4j Community edition or for smaller AuraDB instances,
as such this feature is off by default.
+## Using Graphiti with Azure OpenAI
+
+Graphiti supports Azure OpenAI for both LLM inference and embeddings. To use Azure OpenAI, you'll need to configure both the LLM client and embedder with your Azure OpenAI credentials.
+
+```python
+from openai import AsyncAzureOpenAI
+from graphiti_core import Graphiti
+from graphiti_core.llm_client import OpenAIClient
+from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
+
+# Azure OpenAI configuration
+api_key = ""
+api_version = ""
+azure_endpoint = ""
+
+# Create Azure OpenAI client for LLM
+azure_openai_client = AsyncAzureOpenAI(
+ api_key=api_key,
+ api_version=api_version,
+ azure_endpoint=azure_endpoint
+)
+
+# Initialize Graphiti with Azure OpenAI clients
+graphiti = Graphiti(
+ "bolt://localhost:7687",
+ "neo4j",
+ "password",
+ llm_client=OpenAIClient(
+ client=azure_openai_client
+ ),
+ embedder=OpenAIEmbedder(
+ config=OpenAIEmbedderConfig(
+ embedding_model="text-embedding-3-small" # Use your Azure deployed embedding model name
+ ),
+ client=azure_openai_client
+ )
+)
+
+# Now you can use Graphiti with Azure OpenAI
+```
+
+Replace the placeholder values with your Azure OpenAI credentials, and set `embedding_model` to the name of the embedding model deployed in your Azure OpenAI service.
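+
+If the name of your Azure LLM deployment differs from the underlying model name, you can also pass an explicit model to the LLM client. A minimal sketch, reusing the `azure_openai_client` from the example above (the `LLMConfig` import and the `config`/`model` parameter names may differ across versions; check `graphiti_core.llm_client` in your installed release):
+
+```python
+from graphiti_core.llm_client import LLMConfig, OpenAIClient
+
+# With Azure OpenAI, the model value should be your LLM deployment name
+llm_client = OpenAIClient(
+    config=LLMConfig(model="gpt-4o"),  # replace with your Azure LLM deployment name
+    client=azure_openai_client
+)
+```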
+
## Documentation
- [Guides and API documentation](https://help.getzep.com/graphiti).
@@ -205,9 +249,9 @@ as such this feature is off by default.
Graphiti is under active development. We aim to maintain API stability while working on:
-- [ ] Supporting custom graph schemas:
- - Allow developers to provide their own defined node and edge classes when ingesting episodes
- - Enable more flexible knowledge representation tailored to specific use cases
+- [x] Supporting custom graph schemas:
+ - Allow developers to provide their own defined node and edge classes when ingesting episodes
+ - Enable more flexible knowledge representation tailored to specific use cases
- [x] Enhancing retrieval capabilities with more robust and configurable options
- [ ] Expanding test coverage to ensure reliability and catch edge cases
diff --git a/graphiti_core/embedder/openai.py b/graphiti_core/embedder/openai.py
index 6f5f86d2..45b53cc7 100644
--- a/graphiti_core/embedder/openai.py
+++ b/graphiti_core/embedder/openai.py
@@ -15,8 +15,9 @@ limitations under the License.
"""
from collections.abc import Iterable
-from openai import AsyncOpenAI
+from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai.types import EmbeddingModel
from .client import EmbedderClient, EmbedderConfig
@@ -33,13 +34,23 @@ class OpenAIEmbedderConfig(EmbedderConfig):
class OpenAIEmbedder(EmbedderClient):
"""
OpenAI Embedder Client
+
+ This client supports both AsyncOpenAI and AsyncAzureOpenAI clients.
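+
+    Example:
+        # Pass a preconfigured client (e.g. AsyncAzureOpenAI); if omitted,
+        # a default AsyncOpenAI client is created from the config.
+        embedder = OpenAIEmbedder(client=AsyncAzureOpenAI(...))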
"""
- def __init__(self, config: OpenAIEmbedderConfig | None = None):
+ def __init__(
+ self,
+ config: OpenAIEmbedderConfig | None = None,
+        client: AsyncOpenAI | AsyncAzureOpenAI | None = None,
+ ):
if config is None:
config = OpenAIEmbedderConfig()
self.config = config
- self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+
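+        # Use the injected client (e.g. AsyncAzureOpenAI) when provided;
+        # otherwise fall back to a default AsyncOpenAI client built from the config.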
+ if client is not None:
+ self.client = client
+ else:
+ self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
async def create(
self, input_data: str | list[str] | Iterable[int] | Iterable[Iterable[int]]