refactor: deprecate coding assistance tools
This commit is contained in:
parent
5e2b3c2e59
commit
9adb54a761
6 changed files with 0 additions and 372 deletions
|
|
@@ -13,17 +13,12 @@ import uvicorn
|
|||
from src.shared import context
|
||||
from src.clients import CogneeClient
|
||||
from src.tools import (
|
||||
cognee_add_developer_rules,
|
||||
cognify,
|
||||
save_interaction,
|
||||
codify,
|
||||
search,
|
||||
get_developer_rules,
|
||||
list_data,
|
||||
delete,
|
||||
prune,
|
||||
cognify_status,
|
||||
codify_status,
|
||||
)
|
||||
|
||||
|
||||
|
|
@@ -31,17 +26,12 @@ mcp = FastMCP("Cognee")
|
|||
|
||||
logger = get_logger()
|
||||
|
||||
mcp.tool()(cognee_add_developer_rules)
|
||||
mcp.tool()(cognify)
|
||||
mcp.tool()(save_interaction)
|
||||
mcp.tool()(codify)
|
||||
mcp.tool()(search)
|
||||
mcp.tool()(get_developer_rules)
|
||||
mcp.tool()(list_data)
|
||||
mcp.tool()(delete)
|
||||
mcp.tool()(prune)
|
||||
mcp.tool()(cognify_status)
|
||||
mcp.tool()(codify_status)
|
||||
|
||||
|
||||
async def run_sse_with_cors():
|
||||
|
|
|
|||
|
|
@@ -1,79 +0,0 @@
|
|||
"""Tool for analyzing and generating code-specific knowledge graphs from repositories."""
|
||||
|
||||
import sys
|
||||
import asyncio
|
||||
from contextlib import redirect_stdout
|
||||
import mcp.types as types
|
||||
from cognee.shared.logging_utils import get_logger, get_log_file_location
|
||||
|
||||
from src.shared import context
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
|
||||
async def codify(repo_path: str) -> list:
|
||||
"""
|
||||
Analyze and generate a code-specific knowledge graph from a software repository.
|
||||
|
||||
This function launches a background task that processes the provided repository
|
||||
and builds a code knowledge graph. The function returns immediately while
|
||||
the processing continues in the background due to MCP timeout constraints.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
repo_path : str
|
||||
Path to the code repository to analyze. This can be a local file path or a
|
||||
relative path to a repository. The path should point to the root of the
|
||||
repository or a specific directory within it.
|
||||
|
||||
Returns
|
||||
-------
|
||||
list
|
||||
A list containing a single TextContent object with information about the
|
||||
background task launch and how to check its status.
|
||||
|
||||
Notes
|
||||
-----
|
||||
- The function launches a background task and returns immediately
|
||||
- The code graph generation may take significant time for larger repositories
|
||||
- Use the codify_status tool to check the progress of the operation
|
||||
- Process results are logged to the standard Cognee log file
|
||||
- All stdout is redirected to stderr to maintain MCP communication integrity
|
||||
"""
|
||||
|
||||
if context.cognee_client.use_api:
|
||||
error_msg = "❌ Codify operation is not available in API mode. Please use direct mode for code graph pipeline."
|
||||
logger.error(error_msg)
|
||||
return [types.TextContent(type="text", text=error_msg)]
|
||||
|
||||
async def codify_task(repo_path: str):
|
||||
# NOTE: MCP uses stdout to communicate, we must redirect all output
|
||||
# going to stdout ( like the print function ) to stderr.
|
||||
with redirect_stdout(sys.stderr):
|
||||
logger.info("Codify process starting.")
|
||||
from cognee.api.v1.cognify.code_graph_pipeline import run_code_graph_pipeline
|
||||
|
||||
results = []
|
||||
async for result in run_code_graph_pipeline(repo_path, False):
|
||||
results.append(result)
|
||||
logger.info(result)
|
||||
if all(results):
|
||||
logger.info("Codify process finished succesfully.")
|
||||
else:
|
||||
logger.info("Codify process failed.")
|
||||
|
||||
asyncio.create_task(codify_task(repo_path))
|
||||
|
||||
log_file = get_log_file_location()
|
||||
text = (
|
||||
f"Background process launched due to MCP timeout limitations.\n"
|
||||
f"To check current codify status use the codify_status tool\n"
|
||||
f"or you can check the log file at: {log_file}"
|
||||
)
|
||||
|
||||
return [
|
||||
types.TextContent(
|
||||
type="text",
|
||||
text=text,
|
||||
)
|
||||
]
|
||||
|
|
@@ -1,51 +0,0 @@
|
|||
"""Tool for getting the status of the codify pipeline."""
|
||||
|
||||
import sys
|
||||
from contextlib import redirect_stdout
|
||||
import mcp.types as types
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
|
||||
from src.shared import context
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
|
||||
async def codify_status():
|
||||
"""
|
||||
Get the current status of the codify pipeline.
|
||||
|
||||
This function retrieves information about current and recently completed codify operations
|
||||
in the codebase dataset. It provides details on progress, success/failure status, and statistics
|
||||
about the processed code repositories.
|
||||
|
||||
Returns
|
||||
-------
|
||||
list
|
||||
A list containing a single TextContent object with the status information as a string.
|
||||
The status includes information about active and completed jobs for the cognify_code_pipeline.
|
||||
|
||||
Notes
|
||||
-----
|
||||
- The function retrieves pipeline status specifically for the "cognify_code_pipeline" on the "codebase" dataset
|
||||
- Status information includes job progress, execution time, and completion status
|
||||
- The status is returned in string format for easy reading
|
||||
- This operation is not available in API mode
|
||||
"""
|
||||
with redirect_stdout(sys.stderr):
|
||||
try:
|
||||
from cognee.modules.data.methods.get_unique_dataset_id import get_unique_dataset_id
|
||||
from cognee.modules.users.methods import get_default_user
|
||||
|
||||
user = await get_default_user()
|
||||
status = await context.cognee_client.get_pipeline_status(
|
||||
[await get_unique_dataset_id("codebase", user)], "cognify_code_pipeline"
|
||||
)
|
||||
return [types.TextContent(type="text", text=str(status))]
|
||||
except NotImplementedError:
|
||||
error_msg = "❌ Pipeline status is not available in API mode"
|
||||
logger.error(error_msg)
|
||||
return [types.TextContent(type="text", text=error_msg)]
|
||||
except Exception as e:
|
||||
error_msg = f"❌ Failed to get codify status: {str(e)}"
|
||||
logger.error(error_msg)
|
||||
return [types.TextContent(type="text", text=error_msg)]
|
||||
|
|
@@ -1,103 +0,0 @@
|
|||
"""Tool for ingesting core developer rule files into Cognee's memory layer."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import asyncio
|
||||
from contextlib import redirect_stdout
|
||||
import mcp.types as types
|
||||
from cognee.shared.logging_utils import get_logger, get_log_file_location
|
||||
|
||||
from src.shared import context
|
||||
from .utils import load_class
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
|
||||
async def cognee_add_developer_rules(
|
||||
base_path: str = ".", graph_model_file: str = None, graph_model_name: str = None
|
||||
) -> list:
|
||||
"""
|
||||
Ingest core developer rule files into Cognee's memory layer.
|
||||
|
||||
This function loads a predefined set of developer-related configuration,
|
||||
rule, and documentation files from the base repository and assigns them
|
||||
to the special 'developer_rules' node set in Cognee. It ensures these
|
||||
foundational files are always part of the structured memory graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
base_path : str
|
||||
Root path to resolve relative file paths. Defaults to current directory.
|
||||
|
||||
graph_model_file : str, optional
|
||||
Optional path to a custom schema file for knowledge graph generation.
|
||||
|
||||
graph_model_name : str, optional
|
||||
Optional class name to use from the graph_model_file schema.
|
||||
|
||||
Returns
|
||||
-------
|
||||
list
|
||||
A message indicating how many rule files were scheduled for ingestion,
|
||||
and how to check their processing status.
|
||||
|
||||
Notes
|
||||
-----
|
||||
- Each file is processed asynchronously in the background.
|
||||
- Files are attached to the 'developer_rules' node set.
|
||||
- Missing files are skipped with a logged warning.
|
||||
"""
|
||||
|
||||
developer_rule_paths = [
|
||||
".cursorrules",
|
||||
".cursor/rules",
|
||||
".same/todos.md",
|
||||
".windsurfrules",
|
||||
".clinerules",
|
||||
"CLAUDE.md",
|
||||
".sourcegraph/memory.md",
|
||||
"AGENT.md",
|
||||
"AGENTS.md",
|
||||
]
|
||||
|
||||
async def cognify_task(file_path: str) -> None:
|
||||
with redirect_stdout(sys.stderr):
|
||||
logger.info(f"Starting cognify for: {file_path}")
|
||||
try:
|
||||
await context.cognee_client.add(file_path, node_set=["developer_rules"])
|
||||
|
||||
model = None
|
||||
if graph_model_file and graph_model_name:
|
||||
if context.cognee_client.use_api:
|
||||
logger.warning(
|
||||
"Custom graph models are not supported in API mode, ignoring."
|
||||
)
|
||||
else:
|
||||
from cognee.shared.data_models import KnowledgeGraph
|
||||
|
||||
model = load_class(graph_model_file, graph_model_name)
|
||||
|
||||
await context.cognee_client.cognify(graph_model=model)
|
||||
logger.info(f"Cognify finished for: {file_path}")
|
||||
except Exception as e:
|
||||
logger.error(f"Cognify failed for {file_path}: {str(e)}")
|
||||
raise ValueError(f"Failed to cognify: {str(e)}")
|
||||
|
||||
tasks = []
|
||||
for rel_path in developer_rule_paths:
|
||||
abs_path = os.path.join(base_path, rel_path)
|
||||
if os.path.isfile(abs_path):
|
||||
tasks.append(asyncio.create_task(cognify_task(abs_path)))
|
||||
else:
|
||||
logger.warning(f"Skipped missing developer rule file: {abs_path}")
|
||||
log_file = get_log_file_location()
|
||||
return [
|
||||
types.TextContent(
|
||||
type="text",
|
||||
text=(
|
||||
f"Started cognify for {len(tasks)} developer rule files in background.\n"
|
||||
f"All are added to the `developer_rules` node set.\n"
|
||||
f"Use `cognify_status` or check logs at {log_file} to monitor progress."
|
||||
),
|
||||
)
|
||||
]
|
||||
|
|
@@ -1,54 +0,0 @@
|
|||
"""Tool for retrieving developer rules from the knowledge graph."""
|
||||
|
||||
import sys
|
||||
from contextlib import redirect_stdout
|
||||
import mcp.types as types
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
|
||||
from src.shared import context
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
# Import coding agent rules functions
|
||||
try:
|
||||
from cognee.tasks.codingagents.coding_rule_associations import get_existing_rules
|
||||
except ModuleNotFoundError:
|
||||
from src.codingagents.coding_rule_associations import get_existing_rules
|
||||
|
||||
|
||||
async def get_developer_rules() -> list:
|
||||
"""
|
||||
Retrieve all developer rules that were generated based on previous interactions.
|
||||
|
||||
This tool queries the Cognee knowledge graph and returns a list of developer
|
||||
rules.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
None
|
||||
|
||||
Returns
|
||||
-------
|
||||
list
|
||||
A list containing a single TextContent object with the retrieved developer rules.
|
||||
The format is plain text containing the developer rules in bulletpoints.
|
||||
|
||||
Notes
|
||||
-----
|
||||
- The specific logic for fetching rules is handled internally.
|
||||
- This tool does not accept any parameters and is intended for simple rule inspection use cases.
|
||||
"""
|
||||
|
||||
async def fetch_rules_from_cognee() -> str:
|
||||
"""Collect all developer rules from Cognee"""
|
||||
with redirect_stdout(sys.stderr):
|
||||
if context.cognee_client.use_api:
|
||||
logger.warning("Developer rules retrieval is not available in API mode")
|
||||
return "Developer rules retrieval is not available in API mode"
|
||||
|
||||
developer_rules = await get_existing_rules(rules_nodeset_name="coding_agent_rules")
|
||||
return developer_rules
|
||||
|
||||
rules_text = await fetch_rules_from_cognee()
|
||||
|
||||
return [types.TextContent(type="text", text=rules_text)]
|
||||
|
|
@ -1,75 +0,0 @@
|
|||
"""Tool for transforming and saving user-agent interactions into structured knowledge."""
|
||||
|
||||
import sys
|
||||
import asyncio
|
||||
from contextlib import redirect_stdout
|
||||
import mcp.types as types
|
||||
from cognee.shared.logging_utils import get_logger, get_log_file_location
|
||||
|
||||
from src.shared import context
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
# Import coding agent rules functions
|
||||
try:
|
||||
from cognee.tasks.codingagents.coding_rule_associations import add_rule_associations
|
||||
except ModuleNotFoundError:
|
||||
from src.codingagents.coding_rule_associations import add_rule_associations
|
||||
|
||||
|
||||
async def save_interaction(data: str) -> list:
|
||||
"""
|
||||
Transform and save a user-agent interaction into structured knowledge.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
data : str
|
||||
The input string containing user queries and corresponding agent answers.
|
||||
|
||||
Returns
|
||||
-------
|
||||
list
|
||||
A list containing a single TextContent object with information about the background task launch.
|
||||
"""
|
||||
|
||||
async def save_user_agent_interaction(data: str) -> None:
|
||||
"""Build knowledge graph from the interaction data"""
|
||||
with redirect_stdout(sys.stderr):
|
||||
logger.info("Save interaction process starting.")
|
||||
|
||||
await context.cognee_client.add(data, node_set=["user_agent_interaction"])
|
||||
|
||||
try:
|
||||
await context.cognee_client.cognify()
|
||||
logger.info("Save interaction process finished.")
|
||||
|
||||
# Rule associations only work in direct mode
|
||||
if not context.cognee_client.use_api:
|
||||
logger.info("Generating associated rules from interaction data.")
|
||||
await add_rule_associations(data=data, rules_nodeset_name="coding_agent_rules")
|
||||
logger.info("Associated rules generated from interaction data.")
|
||||
else:
|
||||
logger.warning("Rule associations are not available in API mode, skipping.")
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Save interaction process failed.")
|
||||
raise ValueError(f"Failed to Save interaction: {str(e)}")
|
||||
|
||||
asyncio.create_task(
|
||||
save_user_agent_interaction(
|
||||
data=data,
|
||||
)
|
||||
)
|
||||
|
||||
log_file = get_log_file_location()
|
||||
text = (
|
||||
f"Background process launched to process the user-agent interaction.\n"
|
||||
f"To check the current status, use the cognify_status tool or check the log file at: {log_file}"
|
||||
)
|
||||
|
||||
return [
|
||||
types.TextContent(
|
||||
type="text",
|
||||
text=text,
|
||||
)
|
||||
]
|
||||
Loading…
Add table
Reference in a new issue