feat: Add Windows compatibility and error handling improvements

- Fix Windows process termination using taskkill instead of killpg
- Add Windows-compatible npm detection and commands
- Fix Next.js SSR error with dynamic imports
- Improve cloud API error handling for local mode
- Add frontend connection retry mechanism with 5 retries
- Fix string formatting in prompt loading
- Add cognee CLI entry point alongside cognee-cli

Resolves Windows compatibility issues and improves error handling
across CLI and frontend components.
This commit is contained in:
Nikoloz Turazashvili 2025-09-25 03:51:01 +07:00
parent 300b774252
commit 1fc8e5ad08
10 changed files with 149 additions and 78 deletions

View file

@@ -3,10 +3,18 @@
import classNames from "classnames"; import classNames from "classnames";
import { MutableRefObject, useEffect, useImperativeHandle, useRef, useState, useCallback } from "react"; import { MutableRefObject, useEffect, useImperativeHandle, useRef, useState, useCallback } from "react";
import { forceCollide, forceManyBody } from "d3-force-3d"; import { forceCollide, forceManyBody } from "d3-force-3d";
import ForceGraph, { ForceGraphMethods, GraphData, LinkObject, NodeObject } from "react-force-graph-2d"; import dynamic from "next/dynamic";
import { GraphControlsAPI } from "./GraphControls"; import { GraphControlsAPI } from "./GraphControls";
import getColorForNodeType from "./getColorForNodeType"; import getColorForNodeType from "./getColorForNodeType";
// Dynamically import ForceGraph to prevent SSR issues
const ForceGraph = dynamic(() => import("react-force-graph-2d"), {
ssr: false,
loading: () => <div className="w-full h-full flex items-center justify-center">Loading graph...</div>
});
import type { ForceGraphMethods, GraphData, LinkObject, NodeObject } from "react-force-graph-2d";
interface GraphVisuzaliationProps { interface GraphVisuzaliationProps {
ref: MutableRefObject<GraphVisualizationAPI>; ref: MutableRefObject<GraphVisualizationAPI>;
data?: GraphData<NodeObject, LinkObject>; data?: GraphData<NodeObject, LinkObject>;
@@ -200,7 +208,7 @@ export default function GraphVisualization({ ref, data, graphControls, className
const graphRef = useRef<ForceGraphMethods>(); const graphRef = useRef<ForceGraphMethods>();
useEffect(() => { useEffect(() => {
if (typeof window !== "undefined" && data && graphRef.current) { if (data && graphRef.current) {
// add collision force // add collision force
graphRef.current.d3Force("collision", forceCollide(nodeSize * 1.5)); graphRef.current.d3Force("collision", forceCollide(nodeSize * 1.5));
graphRef.current.d3Force("charge", forceManyBody().strength(-10).distanceMin(10).distanceMax(50)); graphRef.current.d3Force("charge", forceManyBody().strength(-10).distanceMin(10).distanceMax(50));
@@ -216,56 +224,34 @@ export default function GraphVisualization({ ref, data, graphControls, className
return ( return (
<div ref={containerRef} className={classNames("w-full h-full", className)} id="graph-container"> <div ref={containerRef} className={classNames("w-full h-full", className)} id="graph-container">
{(data && typeof window !== "undefined") ? ( <ForceGraph
<ForceGraph ref={graphRef}
ref={graphRef} width={dimensions.width}
width={dimensions.width} height={dimensions.height}
height={dimensions.height} dagMode={graphShape as unknown as undefined}
dagMode={graphShape as unknown as undefined} dagLevelDistance={data ? 300 : 100}
dagLevelDistance={300} onDagError={handleDagError}
onDagError={handleDagError} graphData={data || {
graphData={data} nodes: [{ id: 1, label: "Add" }, { id: 2, label: "Cognify" }, { id: 3, label: "Search" }],
links: [{ source: 1, target: 2, label: "but don't forget to" }, { source: 2, target: 3, label: "and after that you can" }],
}}
nodeLabel="label" nodeLabel="label"
nodeRelSize={nodeSize} nodeRelSize={data ? nodeSize : 20}
nodeCanvasObject={renderNode} nodeCanvasObject={data ? renderNode : renderInitialNode}
nodeCanvasObjectMode={() => "replace"} nodeCanvasObjectMode={() => data ? "replace" : "after"}
nodeAutoColorBy={data ? undefined : "type"}
linkLabel="label" linkLabel="label"
linkCanvasObject={renderLink} linkCanvasObject={renderLink}
linkCanvasObjectMode={() => "after"} linkCanvasObjectMode={() => "after"}
linkDirectionalArrowLength={3.5} linkDirectionalArrowLength={3.5}
linkDirectionalArrowRelPos={1} linkDirectionalArrowRelPos={1}
onNodeClick={handleNodeClick} onNodeClick={handleNodeClick}
onBackgroundClick={handleBackgroundClick} onBackgroundClick={handleBackgroundClick}
d3VelocityDecay={0.3} d3VelocityDecay={data ? 0.3 : undefined}
/> />
) : (
<ForceGraph
ref={graphRef}
width={dimensions.width}
height={dimensions.height}
dagMode={graphShape as unknown as undefined}
dagLevelDistance={100}
graphData={{
nodes: [{ id: 1, label: "Add" }, { id: 2, label: "Cognify" }, { id: 3, label: "Search" }],
links: [{ source: 1, target: 2, label: "but don't forget to" }, { source: 2, target: 3, label: "and after that you can" }],
}}
nodeLabel="label"
nodeRelSize={20}
nodeCanvasObject={renderInitialNode}
nodeCanvasObjectMode={() => "after"}
nodeAutoColorBy="type"
linkLabel="label"
linkCanvasObject={renderLink}
linkCanvasObjectMode={() => "after"}
linkDirectionalArrowLength={3.5}
linkDirectionalArrowRelPos={1}
/>
)}
</div> </div>
); );
} }

View file

@@ -49,6 +49,13 @@ export default async function fetch(url: string, options: RequestInit = {}, useC
) )
.then((response) => handleServerErrors(response, retry, useCloud)) .then((response) => handleServerErrors(response, retry, useCloud))
.catch((error) => { .catch((error) => {
// Handle network errors more gracefully
if (error.name === 'TypeError' && error.message.includes('fetch')) {
return Promise.reject(
new Error("Backend server is not responding. Please check if the server is running.")
);
}
if (error.detail === undefined) { if (error.detail === undefined) {
return Promise.reject( return Promise.reject(
new Error("No connection to the server.") new Error("No connection to the server.")
@@ -62,8 +69,27 @@
}); });
} }
fetch.checkHealth = () => { fetch.checkHealth = async () => {
return global.fetch(`${backendApiUrl.replace("/api", "")}/health`); const maxRetries = 5;
const retryDelay = 1000; // 1 second
for (let i = 0; i < maxRetries; i++) {
try {
const response = await global.fetch(`${backendApiUrl.replace("/api", "")}/health`);
if (response.ok) {
return response;
}
} catch (error) {
// If this is the last retry, throw the error
if (i === maxRetries - 1) {
throw error;
}
// Wait before retrying
await new Promise(resolve => setTimeout(resolve, retryDelay));
}
}
throw new Error("Backend server is not responding after multiple attempts");
}; };
fetch.setApiKey = (newApiKey: string) => { fetch.setApiKey = (newApiKey: string) => {

View file

@@ -194,7 +194,7 @@ class HealthChecker:
config = get_llm_config() config = get_llm_config()
# Test actual API connection with minimal request # Test actual API connection with minimal request
LLMGateway.show_prompt("test", "test") LLMGateway.show_prompt("test", "test.txt")
response_time = int((time.time() - start_time) * 1000) response_time = int((time.time() - start_time) * 1000)
return ComponentHealth( return ComponentHealth(

View file

@@ -16,7 +16,13 @@ def get_checks_router():
api_token = request.headers.get("X-Api-Key") api_token = request.headers.get("X-Api-Key")
if api_token is None: if api_token is None:
raise CloudApiKeyMissingError() # Return a graceful response for local/self-hosted installations
return {
"status": "local_mode",
"message": "Running in local mode. Cloud features are not available without API key.",
"cloud_available": False,
"local_mode": True,
}
return await check_api_key(api_token) return await check_api_key(api_token)

View file

@@ -1,4 +1,5 @@
import os import os
import platform
import signal import signal
import subprocess import subprocess
import threading import threading
@@ -214,6 +215,7 @@ def check_node_npm() -> tuple[bool, str]:
Check if Node.js and npm are available. Check if Node.js and npm are available.
Returns (is_available, error_message) Returns (is_available, error_message)
""" """
try: try:
# Check Node.js # Check Node.js
result = subprocess.run(["node", "--version"], capture_output=True, text=True, timeout=10) result = subprocess.run(["node", "--version"], capture_output=True, text=True, timeout=10)
@@ -223,8 +225,17 @@ def check_node_npm() -> tuple[bool, str]:
node_version = result.stdout.strip() node_version = result.stdout.strip()
logger.debug(f"Found Node.js version: {node_version}") logger.debug(f"Found Node.js version: {node_version}")
# Check npm # Check npm - handle Windows PowerShell scripts
result = subprocess.run(["npm", "--version"], capture_output=True, text=True, timeout=10) if platform.system() == "Windows":
# On Windows, npm might be a PowerShell script, so we need to use shell=True
result = subprocess.run(
["npm", "--version"], capture_output=True, text=True, timeout=10, shell=True
)
else:
result = subprocess.run(
["npm", "--version"], capture_output=True, text=True, timeout=10
)
if result.returncode != 0: if result.returncode != 0:
return False, "npm is not installed or not in PATH" return False, "npm is not installed or not in PATH"
@@ -246,6 +257,7 @@ def install_frontend_dependencies(frontend_path: Path) -> bool:
Install frontend dependencies if node_modules doesn't exist. Install frontend dependencies if node_modules doesn't exist.
This is needed for both development and downloaded frontends since both use npm run dev. This is needed for both development and downloaded frontends since both use npm run dev.
""" """
node_modules = frontend_path / "node_modules" node_modules = frontend_path / "node_modules"
if node_modules.exists(): if node_modules.exists():
logger.debug("Frontend dependencies already installed") logger.debug("Frontend dependencies already installed")
@@ -254,13 +266,24 @@ def install_frontend_dependencies(frontend_path: Path) -> bool:
logger.info("Installing frontend dependencies (this may take a few minutes)...") logger.info("Installing frontend dependencies (this may take a few minutes)...")
try: try:
result = subprocess.run( # Use shell=True on Windows for npm commands
["npm", "install"], if platform.system() == "Windows":
cwd=frontend_path, result = subprocess.run(
capture_output=True, ["npm", "install"],
text=True, cwd=frontend_path,
timeout=300, # 5 minutes timeout capture_output=True,
) text=True,
timeout=300, # 5 minutes timeout
shell=True,
)
else:
result = subprocess.run(
["npm", "install"],
cwd=frontend_path,
capture_output=True,
text=True,
timeout=300, # 5 minutes timeout
)
if result.returncode == 0: if result.returncode == 0:
logger.info("Frontend dependencies installed successfully") logger.info("Frontend dependencies installed successfully")
@@ -462,15 +485,27 @@ def start_ui(
try: try:
# Create frontend in its own process group for clean termination # Create frontend in its own process group for clean termination
process = subprocess.Popen( # Use shell=True on Windows for npm commands
["npm", "run", "dev"], if platform.system() == "Windows":
cwd=frontend_path, process = subprocess.Popen(
env=env, ["npm", "run", "dev"],
stdout=subprocess.PIPE, cwd=frontend_path,
stderr=subprocess.PIPE, env=env,
text=True, stdout=subprocess.PIPE,
preexec_fn=os.setsid if hasattr(os, "setsid") else None, stderr=subprocess.PIPE,
) text=True,
shell=True,
)
else:
process = subprocess.Popen(
["npm", "run", "dev"],
cwd=frontend_path,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
preexec_fn=os.setsid if hasattr(os, "setsid") else None,
)
pid_callback(process.pid) pid_callback(process.pid)

View file

@@ -183,10 +183,20 @@ def main() -> int:
for pid in spawned_pids: for pid in spawned_pids:
try: try:
pgid = os.getpgid(pid) if hasattr(os, "killpg"):
os.killpg(pgid, signal.SIGTERM) # Unix-like systems: Use process groups
fmt.success(f"✓ Process group {pgid} (PID {pid}) terminated.") pgid = os.getpgid(pid)
except (OSError, ProcessLookupError) as e: os.killpg(pgid, signal.SIGTERM)
fmt.success(f"✓ Process group {pgid} (PID {pid}) terminated.")
else:
# Windows: Use taskkill to terminate process and its children
subprocess.run(
["taskkill", "/F", "/T", "/PID", str(pid)],
capture_output=True,
check=False,
)
fmt.success(f"✓ Process {pid} and its children terminated.")
except (OSError, ProcessLookupError, subprocess.SubprocessError) as e:
fmt.warning(f"Could not terminate process {pid}: {e}") fmt.warning(f"Could not terminate process {pid}: {e}")
sys.exit(0) sys.exit(0)

View file

@@ -26,6 +26,7 @@ def read_query_prompt(prompt_file_name: str, base_directory: str = None):
read due to an error. read due to an error.
""" """
logger = get_logger(level=ERROR) logger = get_logger(level=ERROR)
try: try:
if base_directory is None: if base_directory is None:
base_directory = get_absolute_path("./infrastructure/llm/prompts") base_directory = get_absolute_path("./infrastructure/llm/prompts")
@@ -35,8 +36,8 @@ def read_query_prompt(prompt_file_name: str, base_directory: str = None):
with open(file_path, "r", encoding="utf-8") as file: with open(file_path, "r", encoding="utf-8") as file:
return file.read() return file.read()
except FileNotFoundError: except FileNotFoundError:
logger.error(f"Error: Prompt file not found. Attempted to read: %s {file_path}") logger.error(f"Error: Prompt file not found. Attempted to read: {file_path}")
return None return None
except Exception as e: except Exception as e:
logger.error(f"An error occurred: %s {e}") logger.error(f"An error occurred: {e}")
return None return None

View file

@@ -0,0 +1 @@
Respond with: test

View file

@@ -41,7 +41,12 @@ class TestCogneeServerStart(unittest.TestCase):
def tearDownClass(cls): def tearDownClass(cls):
# Terminate the server process # Terminate the server process
if hasattr(cls, "server_process") and cls.server_process: if hasattr(cls, "server_process") and cls.server_process:
os.killpg(os.getpgid(cls.server_process.pid), signal.SIGTERM) if hasattr(os, "killpg"):
# Unix-like systems: Use process groups
os.killpg(os.getpgid(cls.server_process.pid), signal.SIGTERM)
else:
# Windows: Just terminate the main process
cls.server_process.terminate()
cls.server_process.wait() cls.server_process.wait()
def test_server_is_running(self): def test_server_is_running(self):

View file

@@ -147,6 +147,7 @@ Homepage = "https://www.cognee.ai"
Repository = "https://github.com/topoteretes/cognee" Repository = "https://github.com/topoteretes/cognee"
[project.scripts] [project.scripts]
cognee = "cognee.cli._cognee:main"
cognee-cli = "cognee.cli._cognee:main" cognee-cli = "cognee.cli._cognee:main"
[build-system] [build-system]