docs: clarify API_TOKEN requirement in README and improve JSON handling in CogneeClient

Daulet Amirkhanov 2025-10-10 16:03:16 +01:00
parent c7ca4c6782
commit d480ea23a1
2 changed files with 7 additions and 8 deletions


@@ -181,7 +181,7 @@ docker run \
 **Environment variables for API mode:**
 - `API_URL`: URL of the running Cognee API server
-- `API_TOKEN`: Authentication token (optional, if API requires auth)
+- `API_TOKEN`: Authentication token (optional, required if API has authentication enabled)
 **Note:** When running in API mode:
 - Database migrations are automatically skipped (API server handles its own DB)
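
For context, here is a minimal sketch of how a client could pick these variables up on the Python side; the os.environ wiring and the place CogneeClient is imported from are illustrative, not part of this commit:

    import os

    # Illustrative wiring only: read the API-mode settings documented above.
    # CogneeClient is the class changed in this commit (import path omitted here).
    api_url = os.environ.get("API_URL")      # e.g. "http://localhost:8000"
    api_token = os.environ.get("API_TOKEN")  # only needed when the API has auth enabled

    client = CogneeClient(api_url=api_url, api_token=api_token)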


@@ -12,6 +12,7 @@ from uuid import UUID
 from contextlib import redirect_stdout
 import httpx
 from cognee.shared.logging_utils import get_logger
+import json
 logger = get_logger()
@@ -26,7 +27,7 @@ class CogneeClient:
         Base URL of the Cognee API server (e.g., "http://localhost:8000").
         If None, uses direct cognee function calls.
     api_token : str, optional
-        Authentication token for API requests. Required if api_url is provided.
+        Authentication token for the API (optional, required if API has authentication enabled).
     """

     def __init__(self, api_url: Optional[str] = None, api_token: Optional[str] = None):
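
The diff does not show how api_token is actually attached to outgoing requests; a plausible, purely illustrative pattern with httpx would be:

    import httpx

    # Illustrative only: api_url / api_token as accepted by __init__ above.
    # Send the token as a bearer header when one is configured.
    headers = {"Authorization": f"Bearer {api_token}"} if api_token else {}
    client = httpx.AsyncClient(base_url=api_url, headers=headers)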
@@ -72,17 +73,14 @@
             Result of the add operation
         """
         if self.use_api:
-            # API mode: Make HTTP request
             endpoint = f"{self.api_url}/api/v1/add"
-            # For API mode, we need to handle file uploads differently
-            # For now, we'll assume data is text content
             files = {"data": ("data.txt", str(data), "text/plain")}
             form_data = {
                 "datasetName": dataset_name,
             }
-            if node_set:
-                form_data["node_set"] = node_set
+            if node_set is not None:
+                form_data["node_set"] = json.dumps(node_set)
             response = await self.client.post(
                 endpoint,
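
The switch to json.dumps sends node_set as a single JSON-encoded string field instead of letting httpx decide how to encode a list in the multipart body, presumably so the server can decode it with json.loads. A small standalone sketch (tag and dataset values are illustrative):

    import json

    node_set = ["docs", "api"]                # illustrative tags
    form_data = {
        "datasetName": "main_dataset",        # illustrative dataset name
        "node_set": json.dumps(node_set),     # sent as the string '["docs", "api"]'
    }
    # A server can then recover the original list with json.loads(form_data["node_set"]).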
@@ -93,7 +91,6 @@
             response.raise_for_status()
             return response.json()
         else:
-            # Direct mode: Call cognee directly
             with redirect_stdout(sys.stderr):
                 await self.cognee.add(data, dataset_name=dataset_name, node_set=node_set)
             return {"status": "success", "message": "Data added successfully"}
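
A hedged usage sketch of the add path touched above; the URL, dataset name, and tags are illustrative:

    # Must run inside an async function.
    client = CogneeClient(api_url="http://localhost:8000", api_token=None)

    # API mode posts multipart data to /api/v1/add and returns the server's JSON;
    # direct mode (api_url=None) calls cognee.add(...) and returns the success dict.
    result = await client.add(
        "Some text content",
        dataset_name="main_dataset",
        node_set=["docs"],
    )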
@@ -138,6 +135,8 @@
         # Direct mode: Call cognee directly
         with redirect_stdout(sys.stderr):
             kwargs = {}
+            if datasets:
+                kwargs["datasets"] = datasets
             if custom_prompt:
                 kwargs["custom_prompt"] = custom_prompt
             if graph_model:
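
The new "if datasets" lines extend the conditional-kwargs pattern already used for custom_prompt and graph_model: only arguments the caller actually supplied are forwarded, so the downstream cognee call keeps its own defaults. A standalone sketch of that pattern (the function name is illustrative; the call that consumes the kwargs lies outside this hunk):

    def build_optional_kwargs(datasets=None, custom_prompt=None, graph_model=None):
        # Forward only the arguments that were actually provided.
        kwargs = {}
        if datasets:
            kwargs["datasets"] = datasets
        if custom_prompt:
            kwargs["custom_prompt"] = custom_prompt
        if graph_model:
            kwargs["graph_model"] = graph_model
        return kwargs

    build_optional_kwargs(datasets=["main_dataset"])  # {'datasets': ['main_dataset']}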