feat: start mcp in ui cli command (#1472)

<!-- .github/pull_request_template.md -->

## Overall UI

<img width="1908" height="881" alt="Screenshot 2025-09-25 at 22 21 35"
src="https://github.com/user-attachments/assets/a3e26827-8e1a-47d1-8ac6-346387d6407f"
/>

## MCP connection - live

<img width="1519" height="691" alt="Screenshot 2025-09-25 at 22 22 02"
src="https://github.com/user-attachments/assets/2314242f-0998-4401-99e1-cfc745d40728"
/>

## MCP connection - disconnected

<img width="1520" height="698" alt="Screenshot 2025-09-25 at 22 22 11"
src="https://github.com/user-attachments/assets/ef13bb5e-6d50-4248-8d4c-8845becb1582"
/>


## Description
<!--
Please provide a clear, human-generated description of the changes in
this PR.
DO NOT use AI-generated descriptions. We want to understand your thought
process and reasoning.
-->

## Type of Change
<!-- Please check the relevant option -->
- [ ] Bug fix (non-breaking change that fixes an issue)
- [ ] New feature (non-breaking change that adds functionality)
- [ ] Breaking change (fix or feature that would cause existing
functionality to change)
- [ ] Documentation update
- [ ] Code refactoring
- [ ] Performance improvement
- [ ] Other (please specify):

## Screenshots/Videos (if applicable)
<!-- Add screenshots or videos to help explain your changes -->

## Pre-submission Checklist
<!-- Please check all boxes that apply before submitting your PR -->
- [ ] **I have tested my changes thoroughly before submitting this PR**
- [ ] **This PR contains minimal changes necessary to address the
issue/feature**
- [ ] My code follows the project's coding standards and style
guidelines
- [ ] I have added tests that prove my fix is effective or that my
feature works
- [ ] I have added necessary documentation (if applicable)
- [ ] All new and existing tests pass
- [ ] I have searched existing PRs to ensure this change hasn't been
submitted already
- [ ] I have linked any relevant issues in the description
- [ ] My commits have clear and descriptive messages

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin.
This commit is contained in:
Vasilije 2025-09-28 15:23:10 +02:00 committed by GitHub
commit 738935e9d9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 293 additions and 157 deletions

View file

@ -2,10 +2,11 @@
import Link from "next/link"; import Link from "next/link";
import Image from "next/image"; import Image from "next/image";
import { useBoolean } from "@/utils"; import { useEffect } from "react";
import { useBoolean, fetch } from "@/utils";
import { CloseIcon, CloudIcon, CogneeIcon } from "../Icons"; import { CloseIcon, CloudIcon, CogneeIcon } from "../Icons";
import { CTAButton, GhostButton, IconButton, Modal } from "../elements"; import { CTAButton, GhostButton, IconButton, Modal, StatusDot } from "../elements";
import syncData from "@/modules/cloud/syncData"; import syncData from "@/modules/cloud/syncData";
interface HeaderProps { interface HeaderProps {
@ -23,6 +24,12 @@ export default function Header({ user }: HeaderProps) {
setFalse: closeSyncModal, setFalse: closeSyncModal,
} = useBoolean(false); } = useBoolean(false);
const {
value: isMCPConnected,
setTrue: setMCPConnected,
setFalse: setMCPDisconnected,
} = useBoolean(false);
const handleDataSyncConfirm = () => { const handleDataSyncConfirm = () => {
syncData() syncData()
.finally(() => { .finally(() => {
@ -30,6 +37,19 @@ export default function Header({ user }: HeaderProps) {
}); });
}; };
useEffect(() => {
const checkMCPConnection = () => {
fetch.checkMCPHealth()
.then(() => setMCPConnected())
.catch(() => setMCPDisconnected());
};
checkMCPConnection();
const interval = setInterval(checkMCPConnection, 30000);
return () => clearInterval(interval);
}, [setMCPConnected, setMCPDisconnected]);
return ( return (
<> <>
<header className="relative flex flex-row h-14 min-h-14 px-5 items-center justify-between w-full max-w-[1920px] mx-auto"> <header className="relative flex flex-row h-14 min-h-14 px-5 items-center justify-between w-full max-w-[1920px] mx-auto">
@ -39,6 +59,10 @@ export default function Header({ user }: HeaderProps) {
</div> </div>
<div className="flex flex-row items-center gap-2.5"> <div className="flex flex-row items-center gap-2.5">
<Link href="/mcp-status" className="!text-indigo-600 pl-4 pr-4">
<StatusDot className="mr-2" isActive={isMCPConnected} />
{ isMCPConnected ? "MCP connected" : "MCP disconnected" }
</Link>
<GhostButton onClick={openSyncModal} className="text-indigo-600 gap-3 pl-4 pr-4"> <GhostButton onClick={openSyncModal} className="text-indigo-600 gap-3 pl-4 pr-4">
<CloudIcon /> <CloudIcon />
<div>Sync</div> <div>Sync</div>

View file

@ -0,0 +1,13 @@
import React from "react";

/**
 * Small colored status indicator dot.
 *
 * Renders green when `isActive` is true, red otherwise. Extra utility
 * classes can be appended via the optional `className` prop.
 */
const StatusDot = ({ isActive, className }: { isActive: boolean, className?: string }) => {
  // Build the class list explicitly: the original template-literal approach
  // interpolated the literal string "undefined" when `className` was omitted.
  const classes = [
    "inline-block w-3 h-3 rounded-full",
    className,
    isActive ? "bg-green-500" : "bg-red-500",
  ]
    .filter(Boolean)
    .join(" ");

  return <span className={classes} />;
};

export default StatusDot;

View file

@ -8,5 +8,6 @@ export { default as IconButton } from "./IconButton";
export { default as GhostButton } from "./GhostButton"; export { default as GhostButton } from "./GhostButton";
export { default as NeutralButton } from "./NeutralButton"; export { default as NeutralButton } from "./NeutralButton";
export { default as StatusIndicator } from "./StatusIndicator"; export { default as StatusIndicator } from "./StatusIndicator";
export { default as StatusDot } from "./StatusDot";
export { default as Accordion } from "./Accordion"; export { default as Accordion } from "./Accordion";
export { default as Notebook } from "./Notebook"; export { default as Notebook } from "./Notebook";

View file

@ -9,6 +9,8 @@ const backendApiUrl = process.env.NEXT_PUBLIC_BACKEND_API_URL || "http://localho
const cloudApiUrl = process.env.NEXT_PUBLIC_CLOUD_API_URL || "http://localhost:8001"; const cloudApiUrl = process.env.NEXT_PUBLIC_CLOUD_API_URL || "http://localhost:8001";
const mcpApiUrl = process.env.NEXT_PUBLIC_MCP_API_URL || "http://localhost:8001";
let apiKey: string | null = process.env.NEXT_PUBLIC_COGWIT_API_KEY || null; let apiKey: string | null = process.env.NEXT_PUBLIC_COGWIT_API_KEY || null;
let accessToken: string | null = null; let accessToken: string | null = null;
@ -66,6 +68,10 @@ fetch.checkHealth = () => {
return global.fetch(`${backendApiUrl.replace("/api", "")}/health`); return global.fetch(`${backendApiUrl.replace("/api", "")}/health`);
}; };
// Liveness probe for the MCP server; mirrors fetch.checkHealth above.
// Strips a trailing "/api" segment (if present) from the configured MCP URL
// before hitting its /health route. Resolves on any HTTP response; rejects
// only on network failure — callers treat rejection as "disconnected".
fetch.checkMCPHealth = () => {
  return global.fetch(`${mcpApiUrl.replace("/api", "")}/health`);
};
fetch.setApiKey = (newApiKey: string) => { fetch.setApiKey = (newApiKey: string) => {
apiKey = newApiKey; apiKey = newApiKey;
}; };

View file

@ -48,27 +48,27 @@ if [ "$ENVIRONMENT" = "dev" ] || [ "$ENVIRONMENT" = "local" ]; then
if [ "$DEBUG" = "true" ]; then if [ "$DEBUG" = "true" ]; then
echo "Waiting for the debugger to attach..." echo "Waiting for the debugger to attach..."
if [ "$TRANSPORT_MODE" = "sse" ]; then if [ "$TRANSPORT_MODE" = "sse" ]; then
exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee-mcp --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration
elif [ "$TRANSPORT_MODE" = "http" ]; then elif [ "$TRANSPORT_MODE" = "http" ]; then
exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee-mcp --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration
else else
exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee --transport stdio --no-migration exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee-mcp --transport stdio --no-migration
fi fi
else else
if [ "$TRANSPORT_MODE" = "sse" ]; then if [ "$TRANSPORT_MODE" = "sse" ]; then
exec cognee --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration exec cognee-mcp --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration
elif [ "$TRANSPORT_MODE" = "http" ]; then elif [ "$TRANSPORT_MODE" = "http" ]; then
exec cognee --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration exec cognee-mcp --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration
else else
exec cognee --transport stdio --no-migration exec cognee-mcp --transport stdio --no-migration
fi fi
fi fi
else else
if [ "$TRANSPORT_MODE" = "sse" ]; then if [ "$TRANSPORT_MODE" = "sse" ]; then
exec cognee --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration exec cognee-mcp --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration
elif [ "$TRANSPORT_MODE" = "http" ]; then elif [ "$TRANSPORT_MODE" = "http" ]; then
exec cognee --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration exec cognee-mcp --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration
else else
exec cognee --transport stdio --no-migration exec cognee-mcp --transport stdio --no-migration
fi fi
fi fi

View file

@ -36,4 +36,4 @@ dev = [
allow-direct-references = true allow-direct-references = true
[project.scripts] [project.scripts]
cognee = "src:main" cognee-mcp = "src:main"

View file

@ -19,6 +19,10 @@ from cognee.api.v1.cognify.code_graph_pipeline import run_code_graph_pipeline
from cognee.modules.search.types import SearchType from cognee.modules.search.types import SearchType
from cognee.shared.data_models import KnowledgeGraph from cognee.shared.data_models import KnowledgeGraph
from cognee.modules.storage.utils import JSONEncoder from cognee.modules.storage.utils import JSONEncoder
from starlette.responses import JSONResponse
from starlette.middleware import Middleware
from starlette.middleware.cors import CORSMiddleware
import uvicorn
try: try:
@ -38,6 +42,53 @@ mcp = FastMCP("Cognee")
logger = get_logger() logger = get_logger()
async def run_sse_with_cors():
    """Custom SSE transport with CORS middleware.

    The stock ``mcp.run_sse_async()`` serves the SSE app without CORS
    headers, so the browser-based UI (served from localhost:3000) cannot
    reach it cross-origin. This wraps the SSE ASGI app with CORS for the
    local UI origin and serves it via uvicorn using the MCP server's own
    host/port/log-level settings.
    """
    sse_app = mcp.sse_app()
    sse_app.add_middleware(
        CORSMiddleware,
        # Only the local cognee UI needs access; keep the allow-list tight.
        allow_origins=["http://localhost:3000"],
        allow_credentials=True,
        # The UI only issues GETs (health poll + SSE stream).
        allow_methods=["GET"],
        allow_headers=["*"],
    )
    config = uvicorn.Config(
        sse_app,
        host=mcp.settings.host,
        port=mcp.settings.port,
        log_level=mcp.settings.log_level.lower(),
    )
    server = uvicorn.Server(config)
    await server.serve()
async def run_http_with_cors():
    """Custom HTTP transport with CORS middleware.

    Same rationale as ``run_sse_with_cors``: ``mcp.run_streamable_http_async()``
    attaches no CORS headers, so the streamable-HTTP app is wrapped here to
    allow the local browser UI origin, then served with uvicorn using the
    MCP server's configured host/port/log-level.
    """
    http_app = mcp.streamable_http_app()
    http_app.add_middleware(
        CORSMiddleware,
        # Only the local cognee UI needs access; keep the allow-list tight.
        allow_origins=["http://localhost:3000"],
        allow_credentials=True,
        allow_methods=["GET"],
        allow_headers=["*"],
    )
    config = uvicorn.Config(
        http_app,
        host=mcp.settings.host,
        port=mcp.settings.port,
        log_level=mcp.settings.log_level.lower(),
    )
    server = uvicorn.Server(config)
    await server.serve()
@mcp.custom_route("/health", methods=["GET"])
async def health_check(request):
    # Minimal liveness endpoint: always returns 200 with a static payload
    # while the server process is up. Polled by the UI's MCP status check.
    return JSONResponse({"status": "ok"})
@mcp.tool() @mcp.tool()
async def cognee_add_developer_rules( async def cognee_add_developer_rules(
base_path: str = ".", graph_model_file: str = None, graph_model_name: str = None base_path: str = ".", graph_model_file: str = None, graph_model_name: str = None
@ -975,12 +1026,12 @@ async def main():
await mcp.run_stdio_async() await mcp.run_stdio_async()
elif args.transport == "sse": elif args.transport == "sse":
logger.info(f"Running MCP server with SSE transport on {args.host}:{args.port}") logger.info(f"Running MCP server with SSE transport on {args.host}:{args.port}")
await mcp.run_sse_async() await run_sse_with_cors()
elif args.transport == "http": elif args.transport == "http":
logger.info( logger.info(
f"Running MCP server with Streamable HTTP transport on {args.host}:{args.port}{args.path}" f"Running MCP server with Streamable HTTP transport on {args.host}:{args.port}{args.path}"
) )
await mcp.run_streamable_http_async() await run_http_with_cors()
if __name__ == "__main__": if __name__ == "__main__":

View file

@ -1 +1 @@
from .ui import start_ui, stop_ui, ui from .ui import start_ui

View file

@ -1,5 +1,5 @@
import os import os
import signal import socket
import subprocess import subprocess
import threading import threading
import time import time
@ -7,7 +7,7 @@ import webbrowser
import zipfile import zipfile
import requests import requests
from pathlib import Path from pathlib import Path
from typing import Callable, Optional, Tuple from typing import Callable, Optional, Tuple, List
import tempfile import tempfile
import shutil import shutil
@ -17,6 +17,80 @@ from cognee.version import get_cognee_version
logger = get_logger() logger = get_logger()
def _stream_process_output(
process: subprocess.Popen, stream_name: str, prefix: str, color_code: str = ""
) -> threading.Thread:
"""
Stream output from a process with a prefix to identify the source.
Args:
process: The subprocess to monitor
stream_name: 'stdout' or 'stderr'
prefix: Text prefix for each line (e.g., '[BACKEND]', '[FRONTEND]')
color_code: ANSI color code for the prefix (optional)
Returns:
Thread that handles the streaming
"""
def stream_reader():
stream = getattr(process, stream_name)
if stream is None:
return
reset_code = "\033[0m" if color_code else ""
try:
for line in iter(stream.readline, b""):
if line:
line_text = line.decode("utf-8").rstrip()
if line_text:
print(f"{color_code}{prefix}{reset_code} {line_text}", flush=True)
except Exception:
pass
finally:
if stream:
stream.close()
thread = threading.Thread(target=stream_reader, daemon=True)
thread.start()
return thread
def _is_port_available(port: int) -> bool:
"""
Check if a port is available on localhost.
Returns True if the port is available, False otherwise.
"""
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.settimeout(1) # 1 second timeout
result = sock.connect_ex(("localhost", port))
return result != 0 # Port is available if connection fails
except Exception:
return False
def _check_required_ports(ports_to_check: List[Tuple[int, str]]) -> Tuple[bool, List[str]]:
    """
    Verify that every required localhost port is free.

    Args:
        ports_to_check: List of (port, service_name) tuples

    Returns:
        Tuple of (all_available, unavailable_services) where the second
        element holds human-readable "name (port N)" entries for each
        busy port, logged as errors along the way.
    """
    unavailable: List[str] = []
    for port, service_name in ports_to_check:
        if _is_port_available(port):
            continue
        logger.error(f"Port {port} is already in use for {service_name}")
        unavailable.append(f"{service_name} (port {port})")
    return not unavailable, unavailable
def normalize_version_for_comparison(version: str) -> str: def normalize_version_for_comparison(version: str) -> str:
""" """
Normalize version string for comparison. Normalize version string for comparison.
@ -327,55 +401,111 @@ def prompt_user_for_download() -> bool:
def start_ui( def start_ui(
pid_callback: Callable[[int], None], pid_callback: Callable[[int], None],
host: str = "localhost",
port: int = 3000, port: int = 3000,
open_browser: bool = True, open_browser: bool = True,
auto_download: bool = False, auto_download: bool = False,
start_backend: bool = False, start_backend: bool = False,
backend_host: str = "localhost",
backend_port: int = 8000, backend_port: int = 8000,
start_mcp: bool = False,
mcp_port: int = 8001,
) -> Optional[subprocess.Popen]: ) -> Optional[subprocess.Popen]:
""" """
Start the cognee frontend UI server, optionally with the backend API server. Start the cognee frontend UI server, optionally with the backend API server and MCP server.
This function will: This function will:
1. Optionally start the cognee backend API server 1. Optionally start the cognee backend API server
2. Find the cognee-frontend directory (development) or download it (pip install) 2. Optionally start the cognee MCP server
3. Check if Node.js and npm are available (for development mode) 3. Find the cognee-frontend directory (development) or download it (pip install)
4. Install dependencies if needed (development mode) 4. Check if Node.js and npm are available (for development mode)
5. Start the frontend server 5. Install dependencies if needed (development mode)
6. Optionally open the browser 6. Start the frontend server
7. Optionally open the browser
Args: Args:
pid_callback: Callback to notify with PID of each spawned process pid_callback: Callback to notify with PID of each spawned process
host: Host to bind the frontend server to (default: localhost)
port: Port to run the frontend server on (default: 3000) port: Port to run the frontend server on (default: 3000)
open_browser: Whether to open the browser automatically (default: True) open_browser: Whether to open the browser automatically (default: True)
auto_download: If True, download frontend without prompting (default: False) auto_download: If True, download frontend without prompting (default: False)
start_backend: If True, also start the cognee API backend server (default: False) start_backend: If True, also start the cognee API backend server (default: False)
backend_host: Host to bind the backend server to (default: localhost)
backend_port: Port to run the backend server on (default: 8000) backend_port: Port to run the backend server on (default: 8000)
start_mcp: If True, also start the cognee MCP server (default: False)
mcp_port: Port to run the MCP server on (default: 8001)
Returns: Returns:
subprocess.Popen object representing the running frontend server, or None if failed subprocess.Popen object representing the running frontend server, or None if failed
Note: If backend is started, it runs in a separate process that will be cleaned up Note: If backend and/or MCP server are started, they run in separate processes
when the frontend process is terminated. that will be cleaned up when the frontend process is terminated.
Example: Example:
>>> import cognee >>> import cognee
>>> def dummy_callback(pid): pass
>>> # Start just the frontend >>> # Start just the frontend
>>> server = cognee.start_ui() >>> server = cognee.start_ui(dummy_callback)
>>> >>>
>>> # Start both frontend and backend >>> # Start both frontend and backend
>>> server = cognee.start_ui(start_backend=True) >>> server = cognee.start_ui(dummy_callback, start_backend=True)
>>> # UI will be available at http://localhost:3000 >>> # UI will be available at http://localhost:3000
>>> # API will be available at http://localhost:8000 >>> # API will be available at http://localhost:8000
>>> # To stop both servers later: >>>
>>> # Start frontend with MCP server
>>> server = cognee.start_ui(dummy_callback, start_mcp=True)
>>> # UI will be available at http://localhost:3000
>>> # MCP server will be available at http://127.0.0.1:8001/sse
>>> # To stop all servers later:
>>> server.terminate() >>> server.terminate()
""" """
logger.info("Starting cognee UI...") logger.info("Starting cognee UI...")
ports_to_check = [(port, "Frontend UI")]
if start_backend:
ports_to_check.append((backend_port, "Backend API"))
if start_mcp:
ports_to_check.append((mcp_port, "MCP Server"))
logger.info("Checking port availability...")
all_ports_available, unavailable_services = _check_required_ports(ports_to_check)
if not all_ports_available:
error_msg = f"Cannot start cognee UI: The following services have ports already in use: {', '.join(unavailable_services)}"
logger.error(error_msg)
logger.error("Please stop the conflicting services or change the port configuration.")
return None
logger.info("✓ All required ports are available")
backend_process = None backend_process = None
if start_mcp:
logger.info("Starting Cognee MCP server with Docker...")
cwd = os.getcwd()
env_file = os.path.join(cwd, ".env")
try:
mcp_process = subprocess.Popen(
[
"docker",
"run",
"-p",
f"{mcp_port}:8000",
"--rm",
"--env-file",
env_file,
"-e",
"TRANSPORT_MODE=sse",
"cognee/cognee-mcp:daulet-dev",
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
preexec_fn=os.setsid if hasattr(os, "setsid") else None,
)
_stream_process_output(mcp_process, "stdout", "[MCP]", "\033[34m") # Blue
_stream_process_output(mcp_process, "stderr", "[MCP]", "\033[34m") # Blue
pid_callback(mcp_process.pid)
logger.info(f"✓ Cognee MCP server starting on http://127.0.0.1:{mcp_port}/sse")
except Exception as e:
logger.error(f"Failed to start MCP server with Docker: {str(e)}")
# Start backend server if requested # Start backend server if requested
if start_backend: if start_backend:
logger.info("Starting cognee backend API server...") logger.info("Starting cognee backend API server...")
@ -389,16 +519,19 @@ def start_ui(
"uvicorn", "uvicorn",
"cognee.api.client:app", "cognee.api.client:app",
"--host", "--host",
backend_host, "localhost",
"--port", "--port",
str(backend_port), str(backend_port),
], ],
# Inherit stdout/stderr from parent process to show logs stdout=subprocess.PIPE,
stdout=None, stderr=subprocess.PIPE,
stderr=None,
preexec_fn=os.setsid if hasattr(os, "setsid") else None, preexec_fn=os.setsid if hasattr(os, "setsid") else None,
) )
# Start threads to stream backend output with prefix
_stream_process_output(backend_process, "stdout", "[BACKEND]", "\033[32m") # Green
_stream_process_output(backend_process, "stderr", "[BACKEND]", "\033[32m") # Green
pid_callback(backend_process.pid) pid_callback(backend_process.pid)
# Give the backend a moment to start # Give the backend a moment to start
@ -408,7 +541,7 @@ def start_ui(
logger.error("Backend server failed to start - process exited early") logger.error("Backend server failed to start - process exited early")
return None return None
logger.info(f"✓ Backend API started at http://{backend_host}:{backend_port}") logger.info(f"✓ Backend API started at http://localhost:{backend_port}")
except Exception as e: except Exception as e:
logger.error(f"Failed to start backend server: {str(e)}") logger.error(f"Failed to start backend server: {str(e)}")
@ -453,11 +586,11 @@ def start_ui(
# Prepare environment variables # Prepare environment variables
env = os.environ.copy() env = os.environ.copy()
env["HOST"] = host env["HOST"] = "localhost"
env["PORT"] = str(port) env["PORT"] = str(port)
# Start the development server # Start the development server
logger.info(f"Starting frontend server at http://{host}:{port}") logger.info(f"Starting frontend server at http://localhost:{port}")
logger.info("This may take a moment to compile and start...") logger.info("This may take a moment to compile and start...")
try: try:
@ -468,10 +601,13 @@ def start_ui(
env=env, env=env,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, stderr=subprocess.PIPE,
text=True,
preexec_fn=os.setsid if hasattr(os, "setsid") else None, preexec_fn=os.setsid if hasattr(os, "setsid") else None,
) )
# Start threads to stream frontend output with prefix
_stream_process_output(process, "stdout", "[FRONTEND]", "\033[33m") # Yellow
_stream_process_output(process, "stderr", "[FRONTEND]", "\033[33m") # Yellow
pid_callback(process.pid) pid_callback(process.pid)
# Give it a moment to start up # Give it a moment to start up
@ -479,10 +615,7 @@ def start_ui(
# Check if process is still running # Check if process is still running
if process.poll() is not None: if process.poll() is not None:
stdout, stderr = process.communicate() logger.error("Frontend server failed to start - check the logs above for details")
logger.error("Frontend server failed to start:")
logger.error(f"stdout: {stdout}")
logger.error(f"stderr: {stderr}")
return None return None
# Open browser if requested # Open browser if requested
@ -491,7 +624,7 @@ def start_ui(
def open_browser_delayed(): def open_browser_delayed():
time.sleep(5) # Give Next.js time to fully start time.sleep(5) # Give Next.js time to fully start
try: try:
webbrowser.open(f"http://{host}:{port}") # TODO: use dashboard url? webbrowser.open(f"http://localhost:{port}")
except Exception as e: except Exception as e:
logger.warning(f"Could not open browser automatically: {e}") logger.warning(f"Could not open browser automatically: {e}")
@ -499,13 +632,9 @@ def start_ui(
browser_thread.start() browser_thread.start()
logger.info("✓ Cognee UI is starting up...") logger.info("✓ Cognee UI is starting up...")
logger.info(f"✓ Open your browser to: http://{host}:{port}") logger.info(f"✓ Open your browser to: http://localhost:{port}")
logger.info("✓ The UI will be available once Next.js finishes compiling") logger.info("✓ The UI will be available once Next.js finishes compiling")
# Store backend process reference in the frontend process for cleanup
if backend_process:
process._cognee_backend_process = backend_process
return process return process
except Exception as e: except Exception as e:
@ -523,102 +652,3 @@ def start_ui(
except (OSError, ProcessLookupError): except (OSError, ProcessLookupError):
pass pass
return None return None
def stop_ui(process: subprocess.Popen) -> bool:
    """
    Stop a running UI server process and backend process (if started), along with all their children.

    Args:
        process: The subprocess.Popen object returned by start_ui()

    Returns:
        bool: True if stopped successfully, False otherwise
    """
    if not process:
        return False

    # Tracks backend-shutdown failures only; a frontend failure falls
    # through to the outer except and returns False directly.
    success = True

    try:
        # First, stop the backend process if it exists
        # NOTE(review): relies on start_ui() having attached the backend
        # Popen as `_cognee_backend_process` on the frontend process object.
        backend_process = getattr(process, "_cognee_backend_process", None)
        if backend_process:
            logger.info("Stopping backend server...")
            try:
                backend_process.terminate()
                try:
                    backend_process.wait(timeout=5)
                    logger.info("Backend server stopped gracefully")
                except subprocess.TimeoutExpired:
                    logger.warning("Backend didn't terminate gracefully, forcing kill")
                    backend_process.kill()
                    backend_process.wait()
                    logger.info("Backend server stopped")
            except Exception as e:
                logger.error(f"Error stopping backend server: {str(e)}")
                success = False

        # Now stop the frontend process
        logger.info("Stopping frontend server...")

        # Try to terminate the process group (includes child processes like Next.js)
        # os.killpg is POSIX-only; on Windows we can only signal the main process.
        if hasattr(os, "killpg"):
            try:
                # Kill the entire process group
                os.killpg(os.getpgid(process.pid), signal.SIGTERM)
                logger.debug("Sent SIGTERM to process group")
            except (OSError, ProcessLookupError):
                # Fall back to terminating just the main process
                process.terminate()
                logger.debug("Terminated main process only")
        else:
            process.terminate()
            logger.debug("Terminated main process (Windows)")

        try:
            process.wait(timeout=10)
            logger.info("Frontend server stopped gracefully")
        except subprocess.TimeoutExpired:
            logger.warning("Frontend didn't terminate gracefully, forcing kill")
            # Force kill the process group
            if hasattr(os, "killpg"):
                try:
                    os.killpg(os.getpgid(process.pid), signal.SIGKILL)
                    logger.debug("Sent SIGKILL to process group")
                except (OSError, ProcessLookupError):
                    process.kill()
                    logger.debug("Force killed main process only")
            else:
                process.kill()
                logger.debug("Force killed main process (Windows)")
            # Reap the process after SIGKILL to avoid a zombie.
            process.wait()

        if success:
            logger.info("UI servers stopped successfully")
        return success

    except Exception as e:
        logger.error(f"Error stopping UI servers: {str(e)}")
        return False
# Convenience function similar to DuckDB's approach
def ui() -> Optional[subprocess.Popen]:
    """
    Convenient alias for start_ui() with default parameters.
    Similar to how DuckDB provides simple ui() function.
    """
    # NOTE(review): start_ui() is invoked with no arguments — confirm that
    # every start_ui parameter has a default, or this alias raises TypeError.
    return start_ui()
if __name__ == "__main__":
    # Manual smoke test: start the UI with default settings, block until
    # the operator presses Enter, then shut everything down via stop_ui().
    server = start_ui()
    if server:
        try:
            input("Press Enter to stop the server...")
        finally:
            # Always attempt cleanup, even if input() is interrupted.
            stop_ui(server)

View file

@ -204,19 +204,27 @@ def main() -> int:
nonlocal spawned_pids nonlocal spawned_pids
spawned_pids.append(pid) spawned_pids.append(pid)
frontend_port = 3000
start_backend, backend_port = True, 8000
start_mcp, mcp_port = True, 8001
server_process = start_ui( server_process = start_ui(
host="localhost",
port=3000,
open_browser=True,
start_backend=True,
auto_download=True,
pid_callback=pid_callback, pid_callback=pid_callback,
port=frontend_port,
open_browser=True,
auto_download=True,
start_backend=start_backend,
backend_port=backend_port,
start_mcp=start_mcp,
mcp_port=mcp_port,
) )
if server_process: if server_process:
fmt.success("UI server started successfully!") fmt.success("UI server started successfully!")
fmt.echo("The interface is available at: http://localhost:3000") fmt.echo(f"The interface is available at: http://localhost:{frontend_port}")
fmt.echo("The API backend is available at: http://localhost:8000") if start_backend:
fmt.echo(f"The API backend is available at: http://localhost:{backend_port}")
if start_mcp:
fmt.echo(f"The MCP server is available at: http://localhost:{mcp_port}")
fmt.note("Press Ctrl+C to stop the server...") fmt.note("Press Ctrl+C to stop the server...")
try: try:

View file

@ -29,8 +29,11 @@ async def main():
print("=" * 60) print("=" * 60)
# Start the UI server # Start the UI server
def dummy_callback(pid):
pass
server = cognee.start_ui( server = cognee.start_ui(
host="localhost", pid_callback=dummy_callback,
port=3000, port=3000,
open_browser=True, # This will automatically open your browser open_browser=True, # This will automatically open your browser
) )