wip
parent bf70d700b3
commit 30ab064f76

3 changed files with 9 additions and 11 deletions

README.md | 12

@@ -151,12 +151,6 @@ For a complete working example, see the [Quickstart Example](./examples/quicksta
 The example is fully documented with clear explanations of each functionality and includes a comprehensive README with setup instructions and next steps.
 
-## Graph Service
-
-The `server` directory contains an API service for interacting with the Graphiti API. It is built using FastAPI.
-
-Please see the [server README](./server/README.md) for more information.
-
 ## MCP Server
 
 The `mcp_server` directory contains a Model Context Protocol (MCP) server implementation for Graphiti. This server allows AI assistants to interact with Graphiti's knowledge graph capabilities through the MCP protocol.

@@ -173,6 +167,12 @@ The MCP server can be deployed using Docker with Neo4j, making it easy to integr
 For detailed setup instructions and usage examples, see the [MCP server README](./mcp_server/README.md).
 
+## Graph Service
+
+The `server` directory contains an API service for interacting with the Graphiti API. It is built using FastAPI.
+
+Please see the [server README](./server/README.md) for more information.
+
 ## Optional Environment Variables
 
 In addition to the Neo4j and OpenAi-compatible credentials, Graphiti also has a few optional environment variables.

@@ -19,7 +19,7 @@ import typing
 from typing import ClassVar
 
 import openai
-from openai import AsyncOpenAI
+from openai import AsyncAzureOpenAI, AsyncOpenAI
 from openai.types.chat import ChatCompletionMessageParam
 from pydantic import BaseModel

@@ -61,7 +61,7 @@ class OpenAIClient(LLMClient):
         self,
         config: LLMConfig | None = None,
         cache: bool = False,
-        client: typing.Any = None,
+        client: AsyncOpenAI | AsyncAzureOpenAI | None = None,
         max_tokens: int = DEFAULT_MAX_TOKENS,
     ):
         """

@@ -101,9 +101,7 @@ def label_propagation(projection: dict[str, list[Neighbor]]) -> list[list[str]]:
             ]
 
             community_lst.sort(reverse=True)
-            candidate_rank, community_candidate = (
-                community_lst[0] if community_lst else (0, -1)
-            )
+            candidate_rank, community_candidate = community_lst[0] if community_lst else (0, -1)
             if community_candidate != -1 and candidate_rank > 1:
                 new_community = community_candidate
             else:
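
The hunk above only reflows a multi-line assignment onto one line; the selection logic is unchanged. For context, a standalone sketch of that step, with hypothetical example values and a placeholder for the else-branch fallback that lies outside the hunk:

```python
# (edge count, community id) pairs for the communities of a node's neighbours;
# these example values are hypothetical.
community_lst: list[tuple[int, int]] = [(3, 7), (1, 2)]

community_lst.sort(reverse=True)
# Take the highest-ranked candidate, falling back to the (0, -1) sentinel for
# nodes with no neighbours.
candidate_rank, community_candidate = community_lst[0] if community_lst else (0, -1)

# The node adopts the candidate community only when one exists and more than one
# edge supports it; otherwise control falls through to the else-branch.
if community_candidate != -1 and candidate_rank > 1:
    new_community = community_candidate
else:
    new_community = -1  # placeholder; the real fallback is not shown in this diff

print(candidate_rank, community_candidate, new_community)  # -> 3 7 7
```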