Merge branch 'main' into feat/mcp-server-v1-final
Resolved conflicts:
- Removed old mcp_server/graphiti_mcp_server.py (moved to src/)
- Kept our uv.lock with MCP dependencies
commit 89e6e7c354
9 changed files with 154 additions and 147 deletions
.github/workflows/mcp-server-docker.yml (vendored): 73 changed lines (file deleted)
@@ -1,73 +0,0 @@
-name: Build and Push MCP Server Docker Image
-
-on:
-  push:
-    paths:
-      - "mcp_server/pyproject.toml"
-    branches:
-      - main
-  pull_request:
-    paths:
-      - "mcp_server/pyproject.toml"
-    branches:
-      - main
-  workflow_dispatch:
-    inputs:
-      push_image:
-        description: "Push image to registry (unchecked for testing)"
-        required: false
-        default: false
-        type: boolean
-
-env:
-  REGISTRY: docker.io
-  IMAGE_NAME: zepai/knowledge-graph-mcp
-
-jobs:
-  build-and-push:
-    runs-on: depot-ubuntu-24.04-small
-    environment: development
-    permissions:
-      contents: read
-      id-token: write
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Extract version from pyproject.toml
-        id: version
-        run: |
-          VERSION=$(python -c "import tomllib; print(tomllib.load(open('mcp_server/pyproject.toml', 'rb'))['project']['version'])")
-          echo "version=$VERSION" >> $GITHUB_OUTPUT
-          echo "tag=v$VERSION" >> $GITHUB_OUTPUT
-      - name: Log in to Docker Hub
-        if: github.event_name != 'pull_request' && (github.event_name != 'workflow_dispatch' || inputs.push_image)
-        uses: docker/login-action@v3
-        with:
-          registry: ${{ env.REGISTRY }}
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Set up Depot CLI
-        uses: depot/setup-action@v1
-
-      - name: Extract metadata
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
-          tags: |
-            type=ref,event=branch
-            type=ref,event=pr
-            type=raw,value=${{ steps.version.outputs.tag }}
-            type=raw,value=latest,enable={{is_default_branch}}
-
-      - name: Depot build and push image
-        uses: depot/build-push-action@v1
-        with:
-          project: v9jv1mlpwc
-          context: ./mcp_server
-          platforms: linux/amd64,linux/arm64
-          push: ${{ github.event_name != 'pull_request' && (github.event_name != 'workflow_dispatch' || inputs.push_image) }}
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
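For reference, the deleted workflow's version-extraction step is just a tomllib read followed by two step outputs. A minimal local sketch of the same logic (assumes Python 3.11+ for tomllib and that the path is resolved from the repository root; this is an illustration, not part of the workflow):

# Sketch of the "Extract version from pyproject.toml" step.
import tomllib

with open('mcp_server/pyproject.toml', 'rb') as f:
    version = tomllib.load(f)['project']['version']

# The workflow writes these as step outputs; here they are simply printed.
print(f'version={version}')
print(f'tag=v{version}')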
.github/workflows/release-mcp-server.yml (vendored): 74 changed lines (new file)
@@ -0,0 +1,74 @@
+name: Release MCP Server
+
+on:
+  push:
+    tags: ["mcp-v*.*.*"]
+
+env:
+  REGISTRY: docker.io
+  IMAGE_NAME: zepai/knowledge-graph-mcp
+
+jobs:
+  release:
+    runs-on: depot-ubuntu-24.04-small
+    permissions:
+      contents: write
+      id-token: write
+    environment:
+      name: release
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set up Python 3.11
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+
+      - name: Extract and validate version
+        id: version
+        run: |
+          TAG_VERSION=${GITHUB_REF#refs/tags/mcp-v}
+
+          if ! [[ $TAG_VERSION =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+            echo "Tag must follow semantic versioning: mcp-vX.Y.Z"
+            exit 1
+          fi
+
+          PROJECT_VERSION=$(python -c "import tomllib; print(tomllib.load(open('mcp_server/pyproject.toml', 'rb'))['project']['version'])")
+
+          if [ "$TAG_VERSION" != "$PROJECT_VERSION" ]; then
+            echo "Tag version mcp-v$TAG_VERSION does not match mcp_server/pyproject.toml version $PROJECT_VERSION"
+            exit 1
+          fi
+
+          echo "version=$PROJECT_VERSION" >> $GITHUB_OUTPUT
+
+      - name: Log in to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set up Depot CLI
+        uses: depot/setup-action@v1
+
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=raw,value=${{ steps.version.outputs.version }}
+            type=raw,value=latest
+
+      - name: Build and push Docker image
+        uses: depot/build-push-action@v1
+        with:
+          project: v9jv1mlpwc
+          context: ./mcp_server
+          platforms: linux/amd64,linux/arm64
+          push: true
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
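The release job refuses to publish unless the pushed tag matches the package version in mcp_server/pyproject.toml. A rough Python equivalent of that validation step, shown only to illustrate the check (the helper name and CLI usage are hypothetical):

# Rough Python mirror of the "Extract and validate version" step (illustrative only).
import re
import sys
import tomllib

def validate(tag: str) -> str:
    tag_version = tag.removeprefix('mcp-v')
    if not re.fullmatch(r'\d+\.\d+\.\d+', tag_version):
        sys.exit('Tag must follow semantic versioning: mcp-vX.Y.Z')
    with open('mcp_server/pyproject.toml', 'rb') as f:
        project_version = tomllib.load(f)['project']['version']
    if tag_version != project_version:
        sys.exit(f'Tag version mcp-v{tag_version} does not match pyproject version {project_version}')
    return project_version

if __name__ == '__main__':
    print(validate(sys.argv[1]))  # e.g. python validate_tag.py mcp-v1.0.0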
@@ -14,8 +14,8 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """

-import datetime
 import asyncio
+import datetime
 import logging
 from typing import TYPE_CHECKING, Any

@@ -231,17 +231,17 @@ class FalkorDriver(GraphDriver):
         """
         cloned = FalkorDriver(falkor_db=self.client, database=database)

-        return cloned
+        return cloned

     async def health_check(self) -> None:
         """Check FalkorDB connectivity by running a simple query."""
         try:
-            await self.execute_query("MATCH (n) RETURN 1 LIMIT 1")
+            await self.execute_query('MATCH (n) RETURN 1 LIMIT 1')
             return None
         except Exception as e:
-            print(f"FalkorDB health check failed: {e}")
+            print(f'FalkorDB health check failed: {e}')
             raise

     @staticmethod
     def convert_datetimes_to_strings(obj):
         if isinstance(obj, dict):
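The health_check method shown here is meant to be awaited before the server starts handling requests. A minimal usage sketch, assuming an already-constructed driver object that exposes this method (the retry policy is illustrative, not part of the library):

# Illustrative startup probe for any driver exposing the async health_check() above.
import asyncio

async def wait_until_healthy(driver, retries: int = 5, delay: float = 2.0) -> None:
    for attempt in range(1, retries + 1):
        try:
            await driver.health_check()
            return
        except Exception as exc:  # health_check re-raises after logging
            if attempt == retries:
                raise
            print(f'health check attempt {attempt} failed: {exc}; retrying in {delay}s')
            await asyncio.sleep(delay)

# asyncio.run(wait_until_healthy(driver))  # driver: a configured FalkorDriver or Neo4jDriver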
@@ -77,14 +77,12 @@ class GraphOperationsInterface(BaseModel):

     async def node_load_embeddings_bulk(
         self,
-        _cls: Any,
         driver: Any,
-        transaction: Any,
         nodes: list[Any],
         batch_size: int = 100,
-    ) -> None:
+    ) -> dict[str, list[float]]:
         """
-        Load embedding vectors for many nodes in batches. Mutates the provided node instances.
+        Load embedding vectors for many nodes in batches.
         """
         raise NotImplementedError

@@ -183,13 +181,11 @@ class GraphOperationsInterface(BaseModel):

     async def edge_load_embeddings_bulk(
         self,
-        _cls: Any,
         driver: Any,
-        transaction: Any,
         edges: list[Any],
         batch_size: int = 100,
-    ) -> None:
+    ) -> dict[str, list[float]]:
         """
-        Load embedding vectors for many edges in batches. Mutates the provided edge instances.
+        Load embedding vectors for many edges in batches
         """
         raise NotImplementedError
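After this signature change, the bulk embedding loaders return a mapping keyed by uuid instead of mutating the node or edge objects. A hypothetical implementation sketch of that contract; the query text, property names, and result handling are assumptions, not the library's actual code:

# Hypothetical sketch of the new contract: return {uuid: embedding} rather than mutate inputs.
from typing import Any

async def load_node_embeddings_bulk(
    driver: Any,
    nodes: list[Any],
    batch_size: int = 100,
) -> dict[str, list[float]]:
    embeddings: dict[str, list[float]] = {}
    for start in range(0, len(nodes), batch_size):
        batch = nodes[start : start + batch_size]
        records = await driver.execute_query(
            'MATCH (n:Entity) WHERE n.uuid IN $uuids '
            'RETURN n.uuid AS uuid, n.name_embedding AS embedding',
            uuids=[node.uuid for node in batch],
        )
        for record in records:  # assumed: an iterable of mapping-like rows
            embeddings[record['uuid']] = record['embedding']
    return embeddings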
@@ -72,12 +72,12 @@ class Neo4jDriver(GraphDriver):
         return self.client.execute_query(
             'CALL db.indexes() YIELD name DROP INDEX name',
         )

     async def health_check(self) -> None:
         """Check Neo4j connectivity by running the driver's verify_connectivity method."""
         try:
             await self.client.verify_connectivity()
             return None
         except Exception as e:
-            print(f"Neo4j health check failed: {e}")
+            print(f'Neo4j health check failed: {e}')
             raise
@@ -217,11 +217,11 @@ async def edge_fulltext_search(
     # Match the edge ids and return the values
     query = (
-        """
-        UNWIND $ids as id
-        MATCH (n:Entity)-[e:RELATES_TO]->(m:Entity)
-        WHERE e.group_id IN $group_ids
-        AND id(e)=id
+        """
+        UNWIND $ids as id
+        MATCH (n:Entity)-[e:RELATES_TO]->(m:Entity)
+        WHERE e.group_id IN $group_ids
+        AND id(e)=id
         """
         + filter_query
         + """
         AND id(e)=id
@@ -339,8 +339,8 @@ async def edge_similarity_search(
     if driver.provider == GraphProvider.NEPTUNE:
         query = (
-            """
-            MATCH (n:Entity)-[e:RELATES_TO]->(m:Entity)
+            """
+            MATCH (n:Entity)-[e:RELATES_TO]->(m:Entity)
             """
             + filter_query
             + """
             RETURN DISTINCT id(e) as id, e.fact_embedding as embedding
@@ -596,11 +596,11 @@ async def node_fulltext_search(
     # Match the edge ides and return the values
     query = (
-        """
-        UNWIND $ids as i
-        MATCH (n:Entity)
-        WHERE n.uuid=i.id
-        RETURN
+        """
+        UNWIND $ids as i
+        MATCH (n:Entity)
+        WHERE n.uuid=i.id
+        RETURN
         """
         + get_entity_node_return_query(driver.provider)
         + """
         ORDER BY i.score DESC
@@ -678,8 +678,8 @@ async def node_similarity_search(
     if driver.provider == GraphProvider.NEPTUNE:
         query = (
-            """
-            MATCH (n:Entity)
+            """
+            MATCH (n:Entity)
             """
             + filter_query
             + """
             RETURN DISTINCT id(n) as id, n.name_embedding as embedding
@@ -708,11 +708,11 @@ async def node_similarity_search(
     # Match the edge ides and return the values
     query = (
-        """
-        UNWIND $ids as i
-        MATCH (n:Entity)
-        WHERE id(n)=i.id
-        RETURN
+        """
+        UNWIND $ids as i
+        MATCH (n:Entity)
+        WHERE id(n)=i.id
+        RETURN
         """
         + get_entity_node_return_query(driver.provider)
         + """
         ORDER BY i.score DESC
@@ -733,8 +733,8 @@ async def node_similarity_search(
     else:
         query = (
-            """
-            MATCH (n:Entity)
+            """
+            MATCH (n:Entity)
             """
             + filter_query
             + """
             WITH n, """
@@ -1037,8 +1037,8 @@ async def community_similarity_search(
     if driver.provider == GraphProvider.NEPTUNE:
         query = (
-            """
-            MATCH (n:Community)
+            """
+            MATCH (n:Community)
             """
             + group_filter_query
             + """
             RETURN DISTINCT id(n) as id, n.name_embedding as embedding
@@ -1097,8 +1097,8 @@ async def community_similarity_search(

     query = (
-        """
-        MATCH (c:Community)
+        """
+        MATCH (c:Community)
         """
         + group_filter_query
         + """
         WITH c,
@@ -1240,9 +1240,9 @@ async def get_relevant_nodes(
     # FIXME: Kuzu currently does not support using variables such as `node.fulltext_query` as an input to FTS, which means `get_relevant_nodes()` won't work with Kuzu as the graph driver.
     query = (
-        """
-        UNWIND $nodes AS node
-        MATCH (n:Entity {group_id: $group_id})
+        """
+        UNWIND $nodes AS node
+        MATCH (n:Entity {group_id: $group_id})
         """
         + filter_query
         + """
         WITH node, n, """
@@ -1287,9 +1287,9 @@ async def get_relevant_nodes(
     else:
         query = (
-            """
-            UNWIND $nodes AS node
-            MATCH (n:Entity {group_id: $group_id})
+            """
+            UNWIND $nodes AS node
+            MATCH (n:Entity {group_id: $group_id})
             """
             + filter_query
             + """
             WITH node, n, """
@@ -1378,9 +1378,9 @@ async def get_relevant_edges(
     if driver.provider == GraphProvider.NEPTUNE:
         query = (
-            """
-            UNWIND $edges AS edge
-            MATCH (n:Entity {uuid: edge.source_node_uuid})-[e:RELATES_TO {group_id: edge.group_id}]-(m:Entity {uuid: edge.target_node_uuid})
+            """
+            UNWIND $edges AS edge
+            MATCH (n:Entity {uuid: edge.source_node_uuid})-[e:RELATES_TO {group_id: edge.group_id}]-(m:Entity {uuid: edge.target_node_uuid})
             """
             + filter_query
             + """
             WITH e, edge
@@ -1450,9 +1450,9 @@ async def get_relevant_edges(

     query = (
-        """
-        UNWIND $edges AS edge
-        MATCH (n:Entity {uuid: edge.source_node_uuid})-[:RELATES_TO]-(e:RelatesToNode_ {group_id: edge.group_id})-[:RELATES_TO]-(m:Entity {uuid: edge.target_node_uuid})
+        """
+        UNWIND $edges AS edge
+        MATCH (n:Entity {uuid: edge.source_node_uuid})-[:RELATES_TO]-(e:RelatesToNode_ {group_id: edge.group_id})-[:RELATES_TO]-(m:Entity {uuid: edge.target_node_uuid})
         """
         + filter_query
         + """
         WITH e, edge, n, m, """
@@ -1488,9 +1488,9 @@ async def get_relevant_edges(
     else:
         query = (
-            """
-            UNWIND $edges AS edge
-            MATCH (n:Entity {uuid: edge.source_node_uuid})-[e:RELATES_TO {group_id: edge.group_id}]-(m:Entity {uuid: edge.target_node_uuid})
+            """
+            UNWIND $edges AS edge
+            MATCH (n:Entity {uuid: edge.source_node_uuid})-[e:RELATES_TO {group_id: edge.group_id}]-(m:Entity {uuid: edge.target_node_uuid})
             """
             + filter_query
             + """
             WITH e, edge, """
@@ -1563,10 +1563,10 @@ async def get_edge_invalidation_candidates(
     if driver.provider == GraphProvider.NEPTUNE:
         query = (
-            """
-            UNWIND $edges AS edge
-            MATCH (n:Entity)-[e:RELATES_TO {group_id: edge.group_id}]->(m:Entity)
-            WHERE n.uuid IN [edge.source_node_uuid, edge.target_node_uuid] OR m.uuid IN [edge.target_node_uuid, edge.source_node_uuid]
+            """
+            UNWIND $edges AS edge
+            MATCH (n:Entity)-[e:RELATES_TO {group_id: edge.group_id}]->(m:Entity)
+            WHERE n.uuid IN [edge.source_node_uuid, edge.target_node_uuid] OR m.uuid IN [edge.target_node_uuid, edge.source_node_uuid]
             """
             + filter_query
             + """
             WITH e, edge
@@ -1636,10 +1636,10 @@ async def get_edge_invalidation_candidates(

     query = (
-        """
-        UNWIND $edges AS edge
-        MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_ {group_id: edge.group_id})-[:RELATES_TO]->(m:Entity)
-        WHERE (n.uuid IN [edge.source_node_uuid, edge.target_node_uuid] OR m.uuid IN [edge.target_node_uuid, edge.source_node_uuid])
+        """
+        UNWIND $edges AS edge
+        MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_ {group_id: edge.group_id})-[:RELATES_TO]->(m:Entity)
+        WHERE (n.uuid IN [edge.source_node_uuid, edge.target_node_uuid] OR m.uuid IN [edge.target_node_uuid, edge.source_node_uuid])
         """
         + filter_query
         + """
         WITH edge, e, n, m, """
@@ -1675,10 +1675,10 @@ async def get_edge_invalidation_candidates(
     else:
         query = (
-            """
-            UNWIND $edges AS edge
-            MATCH (n:Entity)-[e:RELATES_TO {group_id: edge.group_id}]->(m:Entity)
-            WHERE n.uuid IN [edge.source_node_uuid, edge.target_node_uuid] OR m.uuid IN [edge.target_node_uuid, edge.source_node_uuid]
+            """
+            UNWIND $edges AS edge
+            MATCH (n:Entity)-[e:RELATES_TO {group_id: edge.group_id}]->(m:Entity)
+            WHERE n.uuid IN [edge.source_node_uuid, edge.target_node_uuid] OR m.uuid IN [edge.target_node_uuid, edge.source_node_uuid]
             """
             + filter_query
             + """
             WITH edge, e, """
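The run of hunks above only touches the indentation of these concatenated Cypher fragments; the assembly pattern itself is unchanged. A stripped-down illustration of that pattern, with a stand-in filter fragment (the real fragment is built from search filters):

# Minimal illustration of the query-assembly pattern used throughout these hunks:
# a static prefix, a dynamically built filter fragment, and a static suffix.
filter_query = 'AND n.created_at IS NOT NULL'  # stand-in value for illustration

query = (
    """
    UNWIND $nodes AS node
    MATCH (n:Entity {group_id: $group_id})
    """
    + filter_query
    + """
    WITH node, n
    RETURN n.uuid AS uuid
    """
)
print(query)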
@@ -1879,7 +1879,9 @@ def maximal_marginal_relevance(
 async def get_embeddings_for_nodes(
     driver: GraphDriver, nodes: list[EntityNode]
 ) -> dict[str, list[float]]:
-    if driver.provider == GraphProvider.NEPTUNE:
+    if driver.graph_operations_interface:
+        return await driver.graph_operations_interface.node_load_embeddings_bulk(driver, nodes)
+    elif driver.provider == GraphProvider.NEPTUNE:
         query = """
             MATCH (n:Entity)
             WHERE n.uuid IN $node_uuids
@@ -1949,7 +1951,9 @@ async def get_embeddings_for_communities(
 async def get_embeddings_for_edges(
     driver: GraphDriver, edges: list[EntityEdge]
 ) -> dict[str, list[float]]:
-    if driver.provider == GraphProvider.NEPTUNE:
+    if driver.graph_operations_interface:
+        return await driver.graph_operations_interface.edge_load_embeddings_bulk(driver, edges)
+    elif driver.provider == GraphProvider.NEPTUNE:
         query = """
             MATCH (n:Entity)-[e:RELATES_TO]-(m:Entity)
             WHERE e.uuid IN $edge_uuids
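These two hunks add a dispatch step: when a driver provides a graph_operations_interface, embedding retrieval is delegated to it, and the provider-specific Cypher paths are skipped. A condensed, illustrative version of that control flow (only the attribute and method names are taken from the diff; everything else is a sketch):

# Condensed view of the new dispatch order (illustrative, not the library's code).
from typing import Any

async def fetch_edge_embeddings(driver: Any, edges: list[Any]) -> dict[str, list[float]]:
    if getattr(driver, 'graph_operations_interface', None):
        # Pluggable backend wins and returns {uuid: embedding} directly.
        return await driver.graph_operations_interface.edge_load_embeddings_bulk(driver, edges)
    # Provider-specific fallback (Neptune / default Cypher) elided; see the diff above.
    return {}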
@@ -214,12 +214,10 @@ async def add_nodes_and_edges_bulk_tx(
         edges.append(edge_data)

     if driver.graph_operations_interface:
-        await driver.graph_operations_interface.episodic_node_save_bulk(
-            None, driver, tx, episodic_nodes
-        )
+        await driver.graph_operations_interface.episodic_node_save_bulk(None, driver, tx, episodes)
         await driver.graph_operations_interface.node_save_bulk(None, driver, tx, nodes)
         await driver.graph_operations_interface.episodic_edge_save_bulk(
-            None, driver, tx, episodic_edges
+            None, driver, tx, [edge.model_dump() for edge in episodic_edges]
         )
         await driver.graph_operations_interface.edge_save_bulk(None, driver, tx, edges)

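One substantive change in this hunk: episodic_edge_save_bulk now receives plain dicts produced by Pydantic's model_dump() rather than the edge objects themselves. A tiny self-contained example of what that serialization yields (the fields here are made up for illustration):

# Minimal Pydantic v2 example of the model_dump() call used above (fields are illustrative).
from datetime import datetime, timezone
from pydantic import BaseModel

class EpisodicEdgeExample(BaseModel):
    uuid: str
    source_node_uuid: str
    target_node_uuid: str
    created_at: datetime

edge = EpisodicEdgeExample(
    uuid='edge-1',
    source_node_uuid='episode-1',
    target_node_uuid='entity-1',
    created_at=datetime.now(timezone.utc),
)
print(edge.model_dump())  # plain dict: {'uuid': 'edge-1', 'source_node_uuid': 'episode-1', ...}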
@@ -1,7 +1,7 @@
 [project]
 name = "graphiti-core"
 description = "A temporal graph building library"
-version = "0.22.0"
+version = "0.22.1pre2"
 authors = [
     { name = "Paul Paliychuk", email = "paul@getzep.com" },
     { name = "Preston Rasmussen", email = "preston@getzep.com" },
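The bump to "0.22.1pre2" marks a pre-release; under PEP 440 the "pre" spelling normalizes to an rc segment. A quick illustrative check with the packaging library (not part of the diff):

# Requires the 'packaging' package.
from packaging.version import Version

v = Version('0.22.1pre2')
print(v)                      # 0.22.1rc2  ('pre' normalizes to 'rc')
print(v.is_prerelease)        # True
print(v > Version('0.22.0'))  # True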
@@ -431,6 +431,14 @@
       "created_at": "2025-10-22T09:52:01Z",
       "repoId": 840056306,
       "pullRequestNo": 1020
-    }
+    },
+    {
+      "name": "didier-durand",
+      "id": 2927957,
+      "comment_id": 3460571645,
+      "created_at": "2025-10-29T09:31:25Z",
+      "repoId": 840056306,
+      "pullRequestNo": 1028
+    }
   ]
 }