Merge remote-tracking branch 'origin/dev'
commit de50b36322
13 changed files with 1941 additions and 100 deletions

@@ -15,7 +15,7 @@ jobs:
     uses: ./.github/workflows/reusable_python_example.yml
     with:
       example-location: ./examples/python/code_graph_example.py
-      arguments: "--repo_path ./evals"
+      arguments: "--repo_path ./cognee/tasks/graph"
     secrets:
       OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
       LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}

@@ -5,11 +5,11 @@ ARG POETRY_EXTRAS="\
     # API \
     api \
     # Storage & Databases \
-    filesystem postgres weaviate qdrant neo4j falkordb milvus kuzu \
+    filesystem postgres weaviate qdrant neo4j falkordb milvus kuzu chromadb \
     # Notebooks & Interactive Environments \
     notebook \
     # LLM & AI Frameworks \
-    langchain llama-index gemini huggingface ollama mistral groq \
+    langchain llama-index gemini huggingface ollama mistral groq anthropic \
     # Evaluation & Monitoring \
     deepeval evals posthog \
     # Graph Processing & Code Analysis \

README.md (13 lines changed)

@@ -113,7 +113,8 @@ if __name__ == '__main__':
 ```
 Example output:
 ```
-# ({'id': UUID('bc338a39-64d6-549a-acec-da60846dd90d'), 'updated_at': datetime.datetime(2024, 11, 21, 12, 23, 1, 211808, tzinfo=datetime.timezone.utc), 'name': 'natural language processing', 'description': 'An interdisciplinary subfield of computer science and information retrieval.'}, {'relationship_name': 'is_a_subfield_of', 'source_node_id': UUID('bc338a39-64d6-549a-acec-da60846dd90d'), 'target_node_id': UUID('6218dbab-eb6a-5759-a864-b3419755ffe0'), 'updated_at': datetime.datetime(2024, 11, 21, 12, 23, 15, 473137, tzinfo=datetime.timezone.utc)}, {'id': UUID('6218dbab-eb6a-5759-a864-b3419755ffe0'), 'updated_at': datetime.datetime(2024, 11, 21, 12, 23, 1, 211808, tzinfo=datetime.timezone.utc), 'name': 'computer science', 'description': 'The study of computation and information processing.'})
+Natural Language Processing (NLP) is a cross-disciplinary and interdisciplinary field that involves computer science and information retrieval. It focuses on the interaction between computers and human language, enabling machines to understand and process natural language.
+
 ```
 Graph visualization:
 <a href="https://rawcdn.githack.com/topoteretes/cognee/refs/heads/add-visualization-readme/assets/graph_visualization.html"><img src="assets/graph_visualization.png" width="100%" alt="Graph Visualization"></a>

@@ -132,10 +133,18 @@ For more advanced usage, have a look at our <a href="https://docs.cognee.ai"> do
 
 ## Demos
 
-What is AI memory:
+1. What is AI memory:
 
 [Learn about cognee](https://github.com/user-attachments/assets/8b2a0050-5ec4-424c-b417-8269971503f0)
 
+2. Simple GraphRAG demo
+
+[Simple GraphRAG demo](https://github.com/user-attachments/assets/d80b0776-4eb9-4b8e-aa22-3691e2d44b8f)
+
+3. cognee with Ollama
+
+[cognee with local models](https://github.com/user-attachments/assets/8621d3e8-ecb8-4860-afb2-5594f2ee17db)
+
 
 ## Code of Conduct

@@ -1,10 +1,11 @@
 import asyncio
 import json
 import os
+import sys
 import cognee
 from cognee.shared.logging_utils import get_logger, get_log_file_location
 import importlib.util
-from contextlib import redirect_stderr, redirect_stdout
+from contextlib import redirect_stdout
 
 # from PIL import Image as PILImage
 import mcp.types as types

@@ -90,56 +91,55 @@ async def list_tools() -> list[types.Tool]:
 @mcp.call_tool()
 async def call_tools(name: str, arguments: dict) -> list[types.TextContent]:
     try:
-        with open(os.devnull, "w") as fnull:
-            with redirect_stdout(fnull), redirect_stderr(fnull):
-                log_file = get_log_file_location()
-
-                if name == "cognify":
-                    asyncio.create_task(
-                        cognify(
-                            text=arguments["text"],
-                            graph_model_file=arguments.get("graph_model_file"),
-                            graph_model_name=arguments.get("graph_model_name"),
-                        )
-                    )
-
-                    text = (
-                        f"Background process launched due to MCP timeout limitations.\n"
-                        f"Average completion time is around 4 minutes.\n"
-                        f"For current cognify status you can check the log file at: {log_file}"
-                    )
-
-                    return [
-                        types.TextContent(
-                            type="text",
-                            text=text,
-                        )
-                    ]
-                if name == "codify":
-                    asyncio.create_task(codify(arguments.get("repo_path")))
-
-                    text = (
-                        f"Background process launched due to MCP timeout limitations.\n"
-                        f"Average completion time is around 4 minutes.\n"
-                        f"For current codify status you can check the log file at: {log_file}"
-                    )
-
-                    return [
-                        types.TextContent(
-                            type="text",
-                            text=text,
-                        )
-                    ]
-                elif name == "search":
-                    search_results = await search(arguments["search_query"], arguments["search_type"])
-
-                    return [types.TextContent(type="text", text=search_results)]
-                elif name == "prune":
-                    await prune()
-
-                    return [types.TextContent(type="text", text="Pruned")]
+        # NOTE: MCP uses stdout to communicate, we must redirect all output
+        # going to stdout ( like the print function ) to stderr.
+        with redirect_stdout(sys.stderr):
+            log_file = get_log_file_location()
+
+            if name == "cognify":
+                asyncio.create_task(
+                    cognify(
+                        text=arguments["text"],
+                        graph_model_file=arguments.get("graph_model_file"),
+                        graph_model_name=arguments.get("graph_model_name"),
+                    )
+                )
+
+                text = (
+                    f"Background process launched due to MCP timeout limitations.\n"
+                    f"Average completion time is around 4 minutes.\n"
+                    f"For current cognify status you can check the log file at: {log_file}"
+                )
+
+                return [
+                    types.TextContent(
+                        type="text",
+                        text=text,
+                    )
+                ]
+            if name == "codify":
+                asyncio.create_task(codify(arguments.get("repo_path")))
+                text = (
+                    f"Background process launched due to MCP timeout limitations.\n"
+                    f"Average completion time is around 4 minutes.\n"
+                    f"For current codify status you can check the log file at: {log_file}"
+                )
+
+                return [
+                    types.TextContent(
+                        type="text",
+                        text=text,
+                    )
+                ]
+            elif name == "search":
+                search_results = await search(
+                    arguments["search_query"], arguments["search_type"]
+                )
+                return [types.TextContent(type="text", text=search_results)]
+            elif name == "prune":
+                await prune()
+
+                return [types.TextContent(type="text", text="Pruned")]
     except Exception as e:
         logger.error(f"Error calling tool '{name}': {str(e)}")
         return [types.TextContent(type="text", text=f"Error calling tool '{name}': {str(e)}")]

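An aside on the redirect change above: MCP stdio servers speak JSON-RPC over stdout, so any stray print corrupts the protocol stream. The old code silenced output into os.devnull; the new code keeps it visible by sending it to stderr instead. A minimal, self-contained sketch of the pattern (names here are illustrative, not from this repo):

```
import sys
from contextlib import redirect_stdout

def noisy_library_call():
    # A stand-in for any dependency that prints progress to stdout.
    print("progress: 42%")

# While the block is active, sys.stdout points at sys.stderr, so the
# print above lands on stderr and the stdout channel stays clean.
with redirect_stdout(sys.stderr):
    noisy_library_call()
```
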
@@ -147,45 +147,56 @@ async def call_tools(name: str, arguments: dict) -> list[types.TextContent]:
 
 async def cognify(text: str, graph_model_file: str = None, graph_model_name: str = None) -> str:
     """Build knowledge graph from the input text"""
-    logger.info("Cognify process starting.")
-    if graph_model_file and graph_model_name:
-        graph_model = load_class(graph_model_file, graph_model_name)
-    else:
-        graph_model = KnowledgeGraph
+    # NOTE: MCP uses stdout to communicate, we must redirect all output
+    # going to stdout ( like the print function ) to stderr.
+    # As cognify is an async background job the output had to be redirected again.
+    with redirect_stdout(sys.stderr):
+        logger.info("Cognify process starting.")
+        if graph_model_file and graph_model_name:
+            graph_model = load_class(graph_model_file, graph_model_name)
+        else:
+            graph_model = KnowledgeGraph
 
-    await cognee.add(text)
+        await cognee.add(text)
 
-    try:
-        await cognee.cognify(graph_model=graph_model)
-        logger.info("Cognify process finished.")
-    except Exception as e:
-        logger.error("Cognify process failed.")
-        raise ValueError(f"Failed to cognify: {str(e)}")
+        try:
+            await cognee.cognify(graph_model=graph_model)
+            logger.info("Cognify process finished.")
+        except Exception as e:
+            logger.error("Cognify process failed.")
+            raise ValueError(f"Failed to cognify: {str(e)}")
 
 
 async def codify(repo_path: str):
-    logger.info("Codify process starting.")
-    results = []
-    async for result in run_code_graph_pipeline(repo_path, False):
-        results.append(result)
-        logger.info(result)
-    if all(results):
-        logger.info("Codify process finished succesfully.")
-    else:
-        logger.info("Codify process failed.")
+    # NOTE: MCP uses stdout to communicate, we must redirect all output
+    # going to stdout ( like the print function ) to stderr.
+    # As codify is an async background job the output had to be redirected again.
+    with redirect_stdout(sys.stderr):
+        logger.info("Codify process starting.")
+        results = []
+        async for result in run_code_graph_pipeline(repo_path, False):
+            results.append(result)
+            logger.info(result)
+        if all(results):
+            logger.info("Codify process finished succesfully.")
+        else:
+            logger.info("Codify process failed.")
 
 
 async def search(search_query: str, search_type: str) -> str:
     """Search the knowledge graph"""
-    search_results = await cognee.search(
-        query_type=SearchType[search_type.upper()], query_text=search_query
-    )
+    # NOTE: MCP uses stdout to communicate, we must redirect all output
+    # going to stdout ( like the print function ) to stderr.
+    with redirect_stdout(sys.stderr):
+        search_results = await cognee.search(
+            query_type=SearchType[search_type.upper()], query_text=search_query
+        )
 
-    if search_type.upper() == "CODE":
-        return json.dumps(search_results, cls=JSONEncoder)
-    else:
-        results = retrieved_edges_to_string(search_results)
-        return results
+        if search_type.upper() == "CODE":
+            return json.dumps(search_results, cls=JSONEncoder)
+        else:
+            results = retrieved_edges_to_string(search_results)
+            return results
 
 
 async def prune():

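The repeated `with redirect_stdout(sys.stderr)` inside cognify, codify, and search is deliberate: `contextlib.redirect_stdout` swaps `sys.stdout` globally and restores it when its block exits, and `call_tools` returns (restoring stdout) long before a job launched with `asyncio.create_task` finishes. A small sketch of why the background job must redirect again (illustrative names, not repo code):

```
import asyncio
import sys
from contextlib import redirect_stdout

async def background_job():
    # By the time this runs, the caller's with-block has already exited
    # and sys.stdout has been restored, so the job redirects again itself.
    with redirect_stdout(sys.stderr):
        print("safe: this goes to stderr")

async def handler():
    with redirect_stdout(sys.stderr):
        task = asyncio.create_task(background_job())
    # sys.stdout is restored here, while background_job may still be pending.
    await task

asyncio.run(handler())
```
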
@@ -198,7 +209,7 @@ async def main():
     try:
         from mcp.server.stdio import stdio_server
 
-        logger.info("Starting Cognee MCP server...")
+        logger.info("Cognee MCP server started...")
 
         async with stdio_server() as (read_stream, write_stream):
             await mcp.run(

@@ -215,7 +226,8 @@ async def main():
                 raise_exceptions=True,
             )
 
-        logger.info("Cognee MCP server started.")
+        logger.info("Cognee MCP server closed.")
+
     except Exception as e:
         logger.error(f"Server failed to start: {str(e)}", exc_info=True)
         raise

@@ -21,7 +21,7 @@ def get_file_metadata(file: BinaryIO) -> FileMetadata:
     file_type = guess_file_type(file)
 
     file_path = file.name
-    file_name = file_path.split("/")[-1].split(".")[0] if file_path else None
+    file_name = str(file_path).split("/")[-1].split(".")[0] if file_path else None
 
     return FileMetadata(
         name=file_name,

@@ -11,7 +11,7 @@ def classify(data: Union[str, BinaryIO], filename: str = None):
         return TextData(data)
 
     if isinstance(data, BufferedReader) or isinstance(data, SpooledTemporaryFile):
-        return BinaryData(data, data.name.split("/")[-1] if data.name else filename)
+        return BinaryData(data, str(data.name).split("/")[-1] if data.name else filename)
 
     raise IngestionError(
         message=f"Type of data sent to classify(data: Union[str, BinaryIO) not supported: {type(data)}"

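The two `str(...)` guards above fix the same failure mode: a binary stream's `.name` attribute is not always a string. On POSIX, for example, `tempfile.TemporaryFile` exposes the raw integer file descriptor as `.name`, and `int` has no `.split` method. A quick illustration:

```
import tempfile

with tempfile.TemporaryFile() as f:
    print(repr(f.name))  # e.g. 3 -- an int, not a path string
    # The unguarded code would raise AttributeError here; the guarded
    # version degrades to the stringified descriptor instead of crashing.
    name = str(f.name).split("/")[-1].split(".")[0]
    print(name)          # "3"
```
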
cognee/shared/logging_utils.py

@@ -69,7 +69,7 @@ class PlainFileHandler(logging.FileHandler):
                 logger_name = record.msg.get("logger", record.name)
 
                 # Format timestamp
-                timestamp = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+                timestamp = datetime.now().strftime(get_timestamp_format())
 
                 # Create the log entry
                 log_entry = f"{timestamp} [{record.levelname.ljust(8)}] {message}{context_str} [{logger_name}]\n"

@@ -226,7 +226,7 @@ def setup_logging(log_level=None, name=None):
         structlog.stdlib.add_logger_name,
         structlog.stdlib.add_log_level,
         structlog.stdlib.PositionalArgumentsFormatter(),
-        structlog.processors.TimeStamper(fmt="iso"),
+        structlog.processors.TimeStamper(fmt=get_timestamp_format(), utc=True),
         structlog.processors.StackInfoRenderer(),
         exception_handler,  # Add our custom exception handler
         structlog.processors.UnicodeDecoder(),

@@ -288,9 +288,18 @@ def setup_logging(log_level=None, name=None):
     stream_handler.setFormatter(console_formatter)
     stream_handler.setLevel(log_level)
 
+    # Check if we already have a log file path from the environment
+    # NOTE: environment variable must be used here as it allows us to
+    # log to a single file with a name based on a timestamp in a multiprocess setting.
+    # Without it, we would have a separate log file for every process.
+    log_file_path = os.environ.get("LOG_FILE_NAME")
+    if not log_file_path:
+        # Create a new log file name with the cognee start time
+        start_time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
+        log_file_path = os.path.join(LOGS_DIR, f"{start_time}.log")
+        os.environ["LOG_FILE_NAME"] = log_file_path
+
     # Create a file handler that uses our custom PlainFileHandler
-    current_time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
-    log_file_path = os.path.join(LOGS_DIR, f"{current_time}.log")
     file_handler = PlainFileHandler(log_file_path, encoding="utf-8")
     file_handler.setLevel(DEBUG)
 

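The environment-variable handshake above is what keeps multiprocess runs writing to one file: the first process mints a timestamped path and publishes it via `LOG_FILE_NAME`, and processes spawned afterwards inherit the variable instead of minting their own. A condensed sketch of the idea (standalone, with a hypothetical LOGS_DIR):

```
import os
from datetime import datetime

LOGS_DIR = "logs"  # hypothetical location for this sketch

def shared_log_path() -> str:
    # First caller wins; any process spawned afterwards inherits
    # LOG_FILE_NAME and appends to the same file.
    path = os.environ.get("LOG_FILE_NAME")
    if not path:
        start_time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        path = os.path.join(LOGS_DIR, f"{start_time}.log")
        os.environ["LOG_FILE_NAME"] = path
    return path
```
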
@@ -328,3 +337,23 @@ def get_log_file_location():
     for handler in root_logger.handlers:
         if isinstance(handler, logging.FileHandler):
             return handler.baseFilename
+
+
+def get_timestamp_format():
+    # NOTE: Some users have complained that Cognee crashes when trying to get microsecond value
+    # Added handler to not use microseconds if users can't access it
+    logger = structlog.get_logger()
+    try:
+        # We call datetime.now() here to test if microseconds are supported.
+        # If they are not supported a ValueError will be raised
+        datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
+        return "%Y-%m-%dT%H:%M:%S.%f"
+    except Exception as e:
+        logger.debug(f"Exception caught: {e}")
+        logger.debug(
+            "Could not use microseconds for the logging timestamp, defaulting to use hours minutes and seconds only"
+        )
+        # We call datetime.now() here to test if won't break.
+        datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
+        # We return the timestamp format without microseconds as they are not supported
+        return "%Y-%m-%dT%H:%M:%S"

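With this helper, the structlog `TimeStamper` and the plain file handler now agree on one format, and the probe degrades gracefully where `%f` is unavailable. A quick check of the behavior, assuming the module layout shown above:

```
from datetime import datetime
from cognee.shared.logging_utils import get_timestamp_format

fmt = get_timestamp_format()
# Prints e.g. 2024-11-21T12:23:01.211808 where microseconds work,
# and 2024-11-21T12:23:01 on platforms where %f raises.
print(datetime.now().strftime(fmt))
```
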
docker-compose.yml

@@ -22,19 +22,23 @@ services:
         cpus: "2.0"
         memory: 8GB
 
-  frontend:
-    container_name: frontend
-    build:
-      context: ./cognee-frontend
-      dockerfile: Dockerfile
-    volumes:
-      - ./cognee-frontend/src:/app/src
-      - ./cognee-frontend/public:/app/public
-    ports:
-      - 3000:3000
-      # - 9229:9229 # Debugging
-    networks:
-      - cognee-network
+  # NOTE: Frontend is a work in progress and is not intended to be used by users yet.
+  # If you want to use Cognee with a UI environment you can run the cognee-gui.py script or
+  # integrate the Cognee MCP Server to Cursor / Claude Desktop / Visual Studio Code ( through Cline/Roo )
+
+  # frontend:
+  #   container_name: frontend
+  #   build:
+  #     context: ./cognee-frontend
+  #     dockerfile: Dockerfile
+  #   volumes:
+  #     - ./cognee-frontend/src:/app/src
+  #     - ./cognee-frontend/public:/app/public
+  #   ports:
+  #     - 3000:3000
+  #     # - 9229:9229 # Debugging
+  #   networks:
+  #     - cognee-network
 
   neo4j:
     image: neo4j:latest

examples/python/code_graph_example.py

@@ -1,5 +1,7 @@
 import argparse
 import asyncio
+import cognee
+from cognee import SearchType
 from cognee.shared.logging_utils import get_logger, ERROR
 
 from cognee.api.v1.cognify.code_graph_pipeline import run_code_graph_pipeline

@@ -10,6 +12,13 @@ async def main(repo_path, include_docs):
     async for run_status in run_code_graph_pipeline(repo_path, include_docs=include_docs):
         run_status = run_status
 
+    # Test CODE search
+    search_results = await cognee.search(query_type=SearchType.CODE, query_text="test")
+    assert len(search_results) != 0, "The search results list is empty."
+    print("\n\nSearch results are:\n")
+    for result in search_results:
+        print(f"{result}\n")
+
     return run_status
 
 

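The example now asserts that a CODE search returns results after the pipeline runs. Driving the same flow from your own code looks roughly like this (a sketch built from the imports above; the repo path is illustrative):

```
import asyncio
import cognee
from cognee import SearchType
from cognee.api.v1.cognify.code_graph_pipeline import run_code_graph_pipeline

async def demo(repo_path: str):
    # Drain the pipeline, then query the resulting code graph.
    async for status in run_code_graph_pipeline(repo_path, include_docs=False):
        print(status)
    return await cognee.search(query_type=SearchType.CODE, query_text="test")

# asyncio.run(demo("./cognee/tasks/graph"))
```
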
notebooks/github_graph_visualization.html (new file, 128 lines)

@@ -0,0 +1,128 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <script src="https://d3js.org/d3.v5.min.js"></script>
+    <style>
+        body, html { margin: 0; padding: 0; width: 100%; height: 100%; overflow: hidden; background: linear-gradient(90deg, #101010, #1a1a2e); color: white; font-family: 'Inter', sans-serif; }
+        svg { width: 100vw; height: 100vh; display: block; }
+        .links line { stroke: rgba(255, 255, 255, 0.4); stroke-width: 2px; }
+        .nodes circle { stroke: white; stroke-width: 0.5px; filter: drop-shadow(0 0 5px rgba(255,255,255,0.3)); }
+        .node-label { font-size: 5px; font-weight: bold; fill: white; text-anchor: middle; dominant-baseline: middle; font-family: 'Inter', sans-serif; pointer-events: none; }
+        .edge-label { font-size: 3px; fill: rgba(255, 255, 255, 0.7); text-anchor: middle; dominant-baseline: middle; font-family: 'Inter', sans-serif; pointer-events: none; }
+    </style>
+</head>
+<body>
+<svg></svg>
+<script>
+    var nodes = [];
+    var links = [];
+
+    var svg = d3.select("svg"),
+        width = window.innerWidth,
+        height = window.innerHeight;
+
+    var container = svg.append("g");
+
+    var simulation = d3.forceSimulation(nodes)
+        .force("link", d3.forceLink(links).id(d => d.id).strength(0.1))
+        .force("charge", d3.forceManyBody().strength(-275))
+        .force("center", d3.forceCenter(width / 2, height / 2))
+        .force("x", d3.forceX().strength(0.1).x(width / 2))
+        .force("y", d3.forceY().strength(0.1).y(height / 2));
+
+    var link = container.append("g")
+        .attr("class", "links")
+        .selectAll("line")
+        .data(links)
+        .enter().append("line")
+        .attr("stroke-width", 2);
+
+    var edgeLabels = container.append("g")
+        .attr("class", "edge-labels")
+        .selectAll("text")
+        .data(links)
+        .enter().append("text")
+        .attr("class", "edge-label")
+        .text(d => d.relation);
+
+    var nodeGroup = container.append("g")
+        .attr("class", "nodes")
+        .selectAll("g")
+        .data(nodes)
+        .enter().append("g");
+
+    var node = nodeGroup.append("circle")
+        .attr("r", 13)
+        .attr("fill", d => d.color)
+        .call(d3.drag()
+            .on("start", dragstarted)
+            .on("drag", dragged)
+            .on("end", dragended));
+
+    nodeGroup.append("text")
+        .attr("class", "node-label")
+        .attr("dy", 4)
+        .attr("text-anchor", "middle")
+        .text(d => d.name);
+
+    node.append("title").text(d => JSON.stringify(d));
+
+    simulation.on("tick", function() {
+        link.attr("x1", d => d.source.x)
+            .attr("y1", d => d.source.y)
+            .attr("x2", d => d.target.x)
+            .attr("y2", d => d.target.y);
+
+        edgeLabels
+            .attr("x", d => (d.source.x + d.target.x) / 2)
+            .attr("y", d => (d.source.y + d.target.y) / 2 - 5);
+
+        node.attr("cx", d => d.x)
+            .attr("cy", d => d.y);
+
+        nodeGroup.select("text")
+            .attr("x", d => d.x)
+            .attr("y", d => d.y)
+            .attr("dy", 4)
+            .attr("text-anchor", "middle");
+    });
+
+    svg.call(d3.zoom().on("zoom", function() {
+        container.attr("transform", d3.event.transform);
+    }));
+
+    function dragstarted(d) {
+        if (!d3.event.active) simulation.alphaTarget(0.3).restart();
+        d.fx = d.x;
+        d.fy = d.y;
+    }
+
+    function dragged(d) {
+        d.fx = d3.event.x;
+        d.fy = d3.event.y;
+    }
+
+    function dragended(d) {
+        if (!d3.event.active) simulation.alphaTarget(0);
+        d.fx = null;
+        d.fy = null;
+    }
+
+    window.addEventListener("resize", function() {
+        width = window.innerWidth;
+        height = window.innerHeight;
+        svg.attr("width", width).attr("height", height);
+        simulation.force("center", d3.forceCenter(width / 2, height / 2));
+        simulation.alpha(1).restart();
+    });
+</script>
+
+<svg style="position: fixed; bottom: 10px; right: 10px; width: 150px; height: auto; z-index: 9999;" viewBox="0 0 158 44" fill="none" xmlns="http://www.w3.org/2000/svg">
+    <path fill-rule="evenodd" clip-rule="evenodd" d="M11.7496 4.92654C7.83308 4.92654 4.8585 7.94279 4.8585 11.3612V14.9304C4.8585 18.3488 7.83308 21.3651 11.7496 21.3651C13.6831 21.3651 15.0217 20.8121 16.9551 19.3543C18.0458 18.5499 19.5331 18.8013 20.3263 19.9072C21.1195 21.0132 20.8717 22.5213 19.781 23.3257C17.3518 25.0851 15.0217 26.2414 11.7 26.2414C5.35425 26.2414 0 21.2646 0 14.9304V11.3612C0 4.97681 5.35425 0.0502739 11.7 0.0502739C15.0217 0.0502739 17.3518 1.2065 19.781 2.96598C20.8717 3.77032 21.1195 5.27843 20.3263 6.38439C19.5331 7.49035 18.0458 7.69144 16.9551 6.93737C15.0217 5.52979 13.6831 4.92654 11.7496 4.92654ZM35.5463 4.92654C31.7289 4.92654 28.6552 8.04333 28.6552 11.8639V14.478C28.6552 18.2986 31.7289 21.4154 35.5463 21.4154C39.3141 21.4154 42.3878 18.2986 42.3878 14.478V11.8639C42.3878 8.04333 39.3141 4.92654 35.5463 4.92654ZM23.7967 11.8639C23.7967 5.32871 29.0518 0 35.5463 0C42.0408 0 47.2463 5.32871 47.2463 11.8639V14.478C47.2463 21.0132 42.0408 26.3419 35.5463 26.3419C29.0518 26.3419 23.7967 21.0635 23.7967 14.478V11.8639ZM63.3091 5.07736C59.4917 5.07736 56.418 8.19415 56.418 12.0147C56.418 15.8353 59.4917 18.9521 63.3091 18.9521C67.1265 18.9521 70.1506 15.8856 70.1506 12.0147C70.1506 8.14388 67.0769 5.07736 63.3091 5.07736ZM51.5595 11.9645C51.5595 5.42925 56.8146 0.150814 63.3091 0.150814C66.0854 0.150814 68.5642 1.10596 70.5968 2.71463L72.4311 0.904876C73.3731 -0.0502693 74.9099 -0.0502693 75.8519 0.904876C76.7938 1.86002 76.7938 3.41841 75.8519 4.37356L73.7201 6.53521C74.5629 8.19414 75.0587 10.0542 75.0587 12.0147C75.0587 18.4997 69.8532 23.8284 63.3587 23.8284C63.3091 23.8284 63.2099 23.8284 63.1603 23.8284H58.0044C57.1616 23.8284 56.4675 24.5322 56.4675 25.3868C56.4675 26.2414 57.1616 26.9452 58.0044 26.9452H64.6476H66.7794C68.5146 26.9452 70.3489 27.4479 71.7866 28.6041C73.2739 29.8106 74.2159 31.5701 74.4142 33.7317C74.7116 37.6026 72.0345 40.2166 69.8532 41.0713L63.8048 43.7859C62.5654 44.3389 61.1277 43.7859 60.6319 42.5291C60.0866 41.2723 60.6319 39.8648 61.8714 39.3118L68.0188 36.5972C68.0684 36.5972 68.118 36.5469 68.1675 36.5469C68.4154 36.4463 68.8616 36.1447 69.2087 35.6923C69.5061 35.2398 69.7044 34.7371 69.6548 34.1339C69.6053 33.229 69.2582 32.7263 68.8616 32.4247C68.4154 32.0728 67.7214 31.8214 66.8786 31.8214H58.2027C58.1531 31.8214 58.1531 31.8214 58.1035 31.8214H58.054C54.534 31.8214 51.6586 28.956 51.6586 25.3868C51.6586 23.0743 52.8485 21.0635 54.6828 19.9072C52.6997 17.7959 51.5595 15.031 51.5595 11.9645ZM90.8736 5.07736C87.0562 5.07736 83.9824 8.19415 83.9824 12.0147V23.9289C83.9824 25.2862 82.8917 26.3922 81.5532 26.3922C80.2146 26.3922 79.1239 25.2862 79.1239 23.9289V11.9645C79.1239 5.42925 84.379 0.150814 90.824 0.150814C97.2689 0.150814 102.524 5.42925 102.524 11.9645V23.8786C102.524 25.2359 101.433 26.3419 100.095 26.3419C98.7562 26.3419 97.6655 25.2359 97.6655 23.8786V11.9645C97.7647 8.14387 94.6414 5.07736 90.8736 5.07736ZM119.43 5.07736C115.513 5.07736 112.39 8.24441 112.39 12.065V14.5785C112.39 18.4494 115.513 21.5662 119.43 21.5662C120.768 21.5662 122.057 21.164 123.098 20.5105C124.238 19.8067 125.726 20.1586 126.42 21.3148C127.114 22.4711 126.767 23.9792 125.627 24.683C123.842 25.7889 121.71 26.4425 119.43 26.4425C112.885 26.4425 107.581 21.1137 107.581 14.5785V12.065C107.581 5.47952 112.935 0.201088 119.43 0.201088C125.032 0.201088 129.692 4.07194 130.931 9.3001L131.427 11.3612L121.115 15.584C119.876 16.0867 118.488 15.4834 117.942 14.2266C117.447 12.9699 118.041 11.5623 119.281 11.0596L125.478 8.54604C124.238 6.43466 122.008 5.07736 119.43 5.07736ZM146.003 5.07736C142.086 5.07736 138.963 8.24441 138.963 12.065V14.5785C138.963 18.4494 142.086 21.5662 146.003 21.5662C147.341 21.5662 148.63 21.164 149.671 20.5105C150.217 20.1586 150.663 19.8067 151.109 19.304C152.001 18.2986 153.538 18.2483 154.53 19.2034C155.521 20.1083 155.571 21.6667 154.629 22.6721C153.935 23.4262 153.092 24.13 152.2 24.683C150.415 25.7889 148.283 26.4425 146.003 26.4425C139.458 26.4425 134.154 21.1137 134.154 14.5785V12.065C134.154 5.47952 139.508 0.201088 146.003 0.201088C151.605 0.201088 156.265 4.07194 157.504 9.3001L158 11.3612L147.688 15.584C146.449 16.0867 145.061 15.4834 144.515 14.2266C144.019 12.9699 144.614 11.5623 145.854 11.0596L152.051 8.54604C150.762 6.43466 148.58 5.07736 146.003 5.07736Z" fill="white"/>
+</svg>
+</body>
+</html>

notebooks/ontology_demo.ipynb (new file, 1577 lines). File diff suppressed because it is too large.

poetry.lock (generated, 76 lines changed)

@@ -7849,6 +7849,63 @@ files = [
 [package.extras]
 dev = ["build", "flake8", "mypy", "pytest", "twine"]
 
+[[package]]
+name = "pyside6"
+version = "6.8.3"
+description = "Python bindings for the Qt cross-platform application and UI framework"
+optional = true
+python-versions = "<3.14,>=3.9"
+groups = ["main"]
+markers = "extra == \"gui\""
+files = [
+    {file = "PySide6-6.8.3-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:31f390c961b54067ae41360e5ea3b340ce0e0e5feadea2236c28226d3b37edcc"},
+    {file = "PySide6-6.8.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:8e53e2357bfbdee1fa86c48312bf637460a2c26d49e7af0b3fae2e179ccc7052"},
+    {file = "PySide6-6.8.3-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:5bf5153cab9484629315f57c56a9ad4b7d075b4dd275f828f7549abf712c590b"},
+    {file = "PySide6-6.8.3-cp39-abi3-win_amd64.whl", hash = "sha256:722dc0061d8ef6dbb8c0b99864f21e83a5b49ece1ecb2d0b890840d969e1e461"},
+]
+
+[package.dependencies]
+PySide6-Addons = "6.8.3"
+PySide6-Essentials = "6.8.3"
+shiboken6 = "6.8.3"
+
+[[package]]
+name = "pyside6-addons"
+version = "6.8.3"
+description = "Python bindings for the Qt cross-platform application and UI framework (Addons)"
+optional = true
+python-versions = "<3.14,>=3.9"
+groups = ["main"]
+markers = "extra == \"gui\""
+files = [
+    {file = "PySide6_Addons-6.8.3-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:ea46649e40b9e6ab11a0da2da054d3914bff5607a5882885e9c3bc2eef200036"},
+    {file = "PySide6_Addons-6.8.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6983d3b01fad53637bad5360930d5923509c744cc39704f9c1190eb9934e33da"},
+    {file = "PySide6_Addons-6.8.3-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:7949a844a40ee10998eb2734e2c06c4c7182dfcd4c21cc4108a6b96655ebe59f"},
+    {file = "PySide6_Addons-6.8.3-cp39-abi3-win_amd64.whl", hash = "sha256:67548f6db11f4e1b7e4b6efd9c3fc2e8d275188a7b2feac388961128572a6955"},
+]
+
+[package.dependencies]
+PySide6-Essentials = "6.8.3"
+shiboken6 = "6.8.3"
+
+[[package]]
+name = "pyside6-essentials"
+version = "6.8.3"
+description = "Python bindings for the Qt cross-platform application and UI framework (Essentials)"
+optional = true
+python-versions = "<3.14,>=3.9"
+groups = ["main"]
+markers = "extra == \"gui\""
+files = [
+    {file = "PySide6_Essentials-6.8.3-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:aa56c135db924ecfaf50088baf32f737d28027419ca5fee67c0c7141b29184e3"},
+    {file = "PySide6_Essentials-6.8.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:fd57fa0c886ef99b3844173322c0023ec77cc946a0c9a0cdfbc2ac5c511053c1"},
+    {file = "PySide6_Essentials-6.8.3-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:b4f4823f870b5bed477d6f7b6a3041839b859f70abfd703cf53208c73c2fe4cd"},
+    {file = "PySide6_Essentials-6.8.3-cp39-abi3-win_amd64.whl", hash = "sha256:3c0fae5550aff69f2166f46476c36e0ef56ce73d84829eac4559770b0c034b07"},
+]
+
+[package.dependencies]
+shiboken6 = "6.8.3"
+
 [[package]]
 name = "pysocks"
 version = "1.7.1"

@@ -9379,6 +9436,21 @@ files = [
     {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
 ]
 
+[[package]]
+name = "shiboken6"
+version = "6.8.3"
+description = "Python/C++ bindings helper module"
+optional = true
+python-versions = "<3.14,>=3.9"
+groups = ["main"]
+markers = "extra == \"gui\""
+files = [
+    {file = "shiboken6-6.8.3-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:483efc7dd53c69147b8a8ade71f7619c79ffc683efcb1dc4f4cb6c40bb23d29b"},
+    {file = "shiboken6-6.8.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:295a003466ca2cccf6660e2f2ceb5e6cef4af192a48a196a32d46b6f0c9ec5cb"},
+    {file = "shiboken6-6.8.3-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:2b1a41348102952d2a5fbf3630bddd4d44112e18058b5e4cf505e51f2812429d"},
+    {file = "shiboken6-6.8.3-cp39-abi3-win_amd64.whl", hash = "sha256:bca3a94513ce9242f7d4bbdca902072a1631888e0aa3a8711a52cc5dbe93588f"},
+]
+
 [[package]]
 name = "simplejson"
 version = "3.20.1"

@@ -11251,7 +11323,7 @@ filesystem = ["botocore"]
 gemini = []
 graphiti = ["graphiti-core"]
 groq = ["groq"]
-gui = ["qasync"]
+gui = ["pyside6", "qasync"]
 huggingface = ["transformers"]
 kuzu = ["kuzu"]
 langchain = ["langchain_text_splitters", "langsmith"]

@@ -11269,4 +11341,4 @@ weaviate = ["weaviate-client"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<=3.13"
-content-hash = "4bda223028508503b326912854c60fa4a5f60349370d26f22dd997d0dec11e01"
+content-hash = "25b759ffc908ce0b4df33344424d2043dd3126d944c6d2e9b24031bd24e1152b"

pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "cognee"
-version = "0.1.35"
+version = "0.1.36"
 description = "Cognee - is a library for enriching LLM context with a semantic layer for better understanding and reasoning."
 authors = ["Vasilije Markovic", "Boris Arzentar"]
 readme = "README.md"

@@ -82,6 +82,7 @@ gdown = {version = "^5.2.0", optional = true}
 qasync = {version = "^0.27.1", optional = true}
 graphiti-core = {version = "^0.7.0", optional = true}
 structlog = "^25.2.0"
+pyside6 = {version = "^6.8.3", optional = true}
 
 
 [tool.poetry.extras]
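
Since pyside6 ships only behind the `gui` extra, code that touches it typically guards the import. A hypothetical sketch of that pattern (not taken from the cognee codebase):

```
try:
    from PySide6.QtWidgets import QApplication  # present only with the gui extra
except ImportError as exc:
    raise RuntimeError(
        "GUI support requires the optional 'gui' extra (pyside6, qasync)."
    ) from exc
```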