This commit is contained in:
Daniel Chalef 2025-04-24 15:02:16 -07:00
parent bf70d700b3
commit 30ab064f76
3 changed files with 9 additions and 11 deletions

View file

@ -151,12 +151,6 @@ For a complete working example, see the [Quickstart Example](./examples/quicksta
The example is fully documented with clear explanations of each functionality and includes a comprehensive README with setup instructions and next steps.
## Graph Service
The `server` directory contains an API service for interacting with the Graphiti API. It is built using FastAPI.
Please see the [server README](./server/README.md) for more information.
## MCP Server
The `mcp_server` directory contains a Model Context Protocol (MCP) server implementation for Graphiti. This server allows AI assistants to interact with Graphiti's knowledge graph capabilities through the MCP protocol.
@ -173,6 +167,12 @@ The MCP server can be deployed using Docker with Neo4j, making it easy to integr
For detailed setup instructions and usage examples, see the [MCP server README](./mcp_server/README.md).
## Graph Service
The `server` directory contains an API service for interacting with the Graphiti API. It is built using FastAPI.
Please see the [server README](./server/README.md) for more information.
## Optional Environment Variables
In addition to the Neo4j and OpenAI-compatible credentials, Graphiti also has a few optional environment variables.

View file

@ -19,7 +19,7 @@ import typing
from typing import ClassVar
import openai
- from openai import AsyncOpenAI
+ from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai.types.chat import ChatCompletionMessageParam
from pydantic import BaseModel
@ -61,7 +61,7 @@ class OpenAIClient(LLMClient):
self,
config: LLMConfig | None = None,
cache: bool = False,
- client: typing.Any = None,
+ client: AsyncOpenAI | AsyncAzureOpenAI | None = None,
max_tokens: int = DEFAULT_MAX_TOKENS,
):
"""

View file

@ -101,9 +101,7 @@ def label_propagation(projection: dict[str, list[Neighbor]]) -> list[list[str]]:
]
community_lst.sort(reverse=True)
- candidate_rank, community_candidate = (
-     community_lst[0] if community_lst else (0, -1)
- )
+ candidate_rank, community_candidate = community_lst[0] if community_lst else (0, -1)
if community_candidate != -1 and candidate_rank > 1:
    new_community = community_candidate
else: