Basic
diff --git a/cognee-frontend/src/modules/chat/hooks/useChat.ts b/cognee-frontend/src/modules/chat/hooks/useChat.ts
index ed5bc4d79..be6ab8e51 100644
--- a/cognee-frontend/src/modules/chat/hooks/useChat.ts
+++ b/cognee-frontend/src/modules/chat/hooks/useChat.ts
@@ -40,7 +40,7 @@ export default function useChat(dataset: Dataset) {
setTrue: disableSearchRun,
setFalse: enableSearchRun,
} = useBoolean(false);
-
+
const refreshChat = useCallback(async () => {
const data = await fetchMessages();
return setMessages(data);
diff --git a/cognee-frontend/src/modules/ingestion/useDatasets.ts b/cognee-frontend/src/modules/ingestion/useDatasets.ts
index ab8006a9f..9e9cb8fc0 100644
--- a/cognee-frontend/src/modules/ingestion/useDatasets.ts
+++ b/cognee-frontend/src/modules/ingestion/useDatasets.ts
@@ -46,7 +46,7 @@ function useDatasets(useCloud = false) {
// checkDatasetStatuses(datasets);
// }, 50000);
// }, [fetchDatasetStatuses]);
-
+
// useEffect(() => {
// return () => {
// if (statusTimeout.current !== null) {
diff --git a/cognee-frontend/src/modules/notebooks/createNotebook.ts b/cognee-frontend/src/modules/notebooks/createNotebook.ts
index b7c1279ec..270a932ad 100644
--- a/cognee-frontend/src/modules/notebooks/createNotebook.ts
+++ b/cognee-frontend/src/modules/notebooks/createNotebook.ts
@@ -7,7 +7,7 @@ export default function createNotebook(notebookName: string, instance: CogneeIns
headers: {
"Content-Type": "application/json",
},
- }).then((response: Response) =>
+ }).then((response: Response) =>
response.ok ? response.json() : Promise.reject(response)
);
}
diff --git a/cognee-frontend/src/modules/notebooks/getNotebooks.ts b/cognee-frontend/src/modules/notebooks/getNotebooks.ts
index 1951012e7..2f6afa817 100644
--- a/cognee-frontend/src/modules/notebooks/getNotebooks.ts
+++ b/cognee-frontend/src/modules/notebooks/getNotebooks.ts
@@ -6,7 +6,7 @@ export default function getNotebooks(instance: CogneeInstance) {
headers: {
"Content-Type": "application/json",
},
- }).then((response: Response) =>
+ }).then((response: Response) =>
response.ok ? response.json() : Promise.reject(response)
);
}
diff --git a/cognee-frontend/src/modules/notebooks/saveNotebook.ts b/cognee-frontend/src/modules/notebooks/saveNotebook.ts
index a99ff5be1..58a042300 100644
--- a/cognee-frontend/src/modules/notebooks/saveNotebook.ts
+++ b/cognee-frontend/src/modules/notebooks/saveNotebook.ts
@@ -7,7 +7,7 @@ export default function saveNotebook(notebookId: string, notebookData: object, i
headers: {
"Content-Type": "application/json",
},
- }).then((response: Response) =>
+ }).then((response: Response) =>
response.ok ? response.json() : Promise.reject(response)
);
}
diff --git a/cognee-frontend/src/ui/Icons/GitHubIcon.tsx b/cognee-frontend/src/ui/Icons/GitHubIcon.tsx
index 590b2c217..76b34a81f 100644
--- a/cognee-frontend/src/ui/Icons/GitHubIcon.tsx
+++ b/cognee-frontend/src/ui/Icons/GitHubIcon.tsx
@@ -7,4 +7,4 @@ export default function GitHubIcon({ width = 24, height = 24, color = 'currentCo
);
-}
\ No newline at end of file
+}
diff --git a/cognee-frontend/src/ui/Layout/Header.tsx b/cognee-frontend/src/ui/Layout/Header.tsx
index 1bc57f699..53f053f2e 100644
--- a/cognee-frontend/src/ui/Layout/Header.tsx
+++ b/cognee-frontend/src/ui/Layout/Header.tsx
@@ -46,7 +46,7 @@ export default function Header({ user }: HeaderProps) {
checkMCPConnection();
const interval = setInterval(checkMCPConnection, 30000);
-
+
return () => clearInterval(interval);
}, [setMCPConnected, setMCPDisconnected]);
diff --git a/cognee-frontend/src/ui/Partials/SearchView/SearchView.tsx b/cognee-frontend/src/ui/Partials/SearchView/SearchView.tsx
index 8b471dd51..c9a2c328a 100644
--- a/cognee-frontend/src/ui/Partials/SearchView/SearchView.tsx
+++ b/cognee-frontend/src/ui/Partials/SearchView/SearchView.tsx
@@ -90,7 +90,7 @@ export default function SearchView() {
scrollToBottom();
setSearchInputValue("");
-
+
// Pass topK to sendMessage
sendMessage(chatInput, searchType, topK)
.then(scrollToBottom)
@@ -171,4 +171,4 @@ export default function SearchView() {
);
-}
\ No newline at end of file
+}
diff --git a/cognee-frontend/src/ui/elements/Modal/index.ts b/cognee-frontend/src/ui/elements/Modal/index.ts
index 6386401d6..b4740f10c 100644
--- a/cognee-frontend/src/ui/elements/Modal/index.ts
+++ b/cognee-frontend/src/ui/elements/Modal/index.ts
@@ -1,3 +1,2 @@
export { default as Modal } from "./Modal";
export { default as useModal } from "./useModal";
-
diff --git a/cognee-frontend/src/ui/elements/Notebook/MarkdownPreview.tsx b/cognee-frontend/src/ui/elements/Notebook/MarkdownPreview.tsx
index 6ea69bfc6..9e9f82a2e 100644
--- a/cognee-frontend/src/ui/elements/Notebook/MarkdownPreview.tsx
+++ b/cognee-frontend/src/ui/elements/Notebook/MarkdownPreview.tsx
@@ -74,4 +74,3 @@ function MarkdownPreview({ content, className = "" }: MarkdownPreviewProps) {
}
export default memo(MarkdownPreview);
-
diff --git a/cognee-frontend/src/ui/elements/Notebook/Notebook.tsx b/cognee-frontend/src/ui/elements/Notebook/Notebook.tsx
index b6b935229..3ddafe728 100644
--- a/cognee-frontend/src/ui/elements/Notebook/Notebook.tsx
+++ b/cognee-frontend/src/ui/elements/Notebook/Notebook.tsx
@@ -534,7 +534,7 @@ function transformInsightsGraphData(triplets: Triplet[]) {
target: string,
label: string,
}
- } = {};
+ } = {};
for (const triplet of triplets) {
nodes[triplet[0].id] = {
diff --git a/cognee-frontend/src/ui/elements/TextArea.tsx b/cognee-frontend/src/ui/elements/TextArea.tsx
index 4e8b36457..0a901bcc7 100644
--- a/cognee-frontend/src/ui/elements/TextArea.tsx
+++ b/cognee-frontend/src/ui/elements/TextArea.tsx
@@ -34,8 +34,8 @@ export default function TextArea({
// Cache maxHeight on first calculation
if (maxHeightRef.current === null) {
const computedStyle = getComputedStyle(textarea);
- maxHeightRef.current = computedStyle.maxHeight === "none"
- ? Infinity
+ maxHeightRef.current = computedStyle.maxHeight === "none"
+ ? Infinity
: parseInt(computedStyle.maxHeight) || Infinity;
}
diff --git a/cognee-frontend/src/ui/elements/index.ts b/cognee-frontend/src/ui/elements/index.ts
index 0133f56f6..73dd661e1 100644
--- a/cognee-frontend/src/ui/elements/index.ts
+++ b/cognee-frontend/src/ui/elements/index.ts
@@ -10,4 +10,4 @@ export { default as NeutralButton } from "./NeutralButton";
export { default as StatusIndicator } from "./StatusIndicator";
export { default as StatusDot } from "./StatusDot";
export { default as Accordion } from "./Accordion";
-export { default as Notebook } from "./Notebook";
+export { default as Notebook } from "./Notebook";
diff --git a/cognee-frontend/src/utils/fetch.ts b/cognee-frontend/src/utils/fetch.ts
index 3159c8426..4e1b40e4b 100644
--- a/cognee-frontend/src/utils/fetch.ts
+++ b/cognee-frontend/src/utils/fetch.ts
@@ -57,7 +57,7 @@ export default async function fetch(url: string, options: RequestInit = {}, useC
new Error("Backend server is not responding. Please check if the server is running.")
);
}
-
+
if (error.detail === undefined) {
return Promise.reject(
new Error("No connection to the server.")
@@ -74,7 +74,7 @@ export default async function fetch(url: string, options: RequestInit = {}, useC
fetch.checkHealth = async () => {
const maxRetries = 5;
const retryDelay = 1000; // 1 second
-
+
for (let i = 0; i < maxRetries; i++) {
try {
const response = await global.fetch(`${backendApiUrl.replace("/api", "")}/health`);
@@ -90,7 +90,7 @@ fetch.checkHealth = async () => {
await new Promise(resolve => setTimeout(resolve, retryDelay));
}
}
-
+
throw new Error("Backend server is not responding after multiple attempts");
};
diff --git a/cognee-mcp/README.md b/cognee-mcp/README.md
index ccfd37928..85363b2c6 100644
--- a/cognee-mcp/README.md
+++ b/cognee-mcp/README.md
@@ -105,14 +105,14 @@ If you'd rather run cognee-mcp in a container, you have two options:
```bash
# For HTTP transport (recommended for web deployments)
docker run -e TRANSPORT_MODE=http --env-file ./.env -p 8000:8000 --rm -it cognee/cognee-mcp:main
- # For SSE transport
+ # For SSE transport
docker run -e TRANSPORT_MODE=sse --env-file ./.env -p 8000:8000 --rm -it cognee/cognee-mcp:main
# For stdio transport (default)
docker run -e TRANSPORT_MODE=stdio --env-file ./.env --rm -it cognee/cognee-mcp:main
```
-
+
**Installing optional dependencies at runtime:**
-
+
You can install optional dependencies when running the container by setting the `EXTRAS` environment variable:
```bash
# Install a single optional dependency group at runtime
@@ -122,7 +122,7 @@ If you'd rather run cognee-mcp in a container, you have two options:
--env-file ./.env \
-p 8000:8000 \
--rm -it cognee/cognee-mcp:main
-
+
# Install multiple optional dependency groups at runtime (comma-separated)
docker run \
-e TRANSPORT_MODE=sse \
@@ -131,7 +131,7 @@ If you'd rather run cognee-mcp in a container, you have two options:
-p 8000:8000 \
--rm -it cognee/cognee-mcp:main
```
-
+
**Available optional dependency groups:**
- `aws` - S3 storage support
- `postgres` / `postgres-binary` - PostgreSQL database support
@@ -160,7 +160,7 @@ If you'd rather run cognee-mcp in a container, you have two options:
# With stdio transport (default)
docker run -e TRANSPORT_MODE=stdio --env-file ./.env --rm -it cognee/cognee-mcp:main
```
-
+
**With runtime installation of optional dependencies:**
```bash
# Install optional dependencies from Docker Hub image
@@ -357,7 +357,7 @@ You can configure both transports simultaneously for testing:
"url": "http://localhost:8000/sse"
},
"cognee-http": {
- "type": "http",
+ "type": "http",
"url": "http://localhost:8000/mcp"
}
}
diff --git a/cognee-mcp/entrypoint.sh b/cognee-mcp/entrypoint.sh
index cf7d19f0a..b4df5ba00 100644
--- a/cognee-mcp/entrypoint.sh
+++ b/cognee-mcp/entrypoint.sh
@@ -7,11 +7,11 @@ echo "Environment: $ENVIRONMENT"
# Install optional dependencies if EXTRAS is set
if [ -n "$EXTRAS" ]; then
echo "Installing optional dependencies: $EXTRAS"
-
+
# Get the cognee version that's currently installed
COGNEE_VERSION=$(uv pip show cognee | grep "Version:" | awk '{print $2}')
echo "Current cognee version: $COGNEE_VERSION"
-
+
# Build the extras list for cognee
IFS=',' read -ra EXTRA_ARRAY <<< "$EXTRAS"
# Combine base extras from pyproject.toml with requested extras
@@ -28,11 +28,11 @@ if [ -n "$EXTRAS" ]; then
fi
fi
done
-
+
echo "Installing cognee with extras: $ALL_EXTRAS"
echo "Running: uv pip install 'cognee[$ALL_EXTRAS]==$COGNEE_VERSION'"
uv pip install "cognee[$ALL_EXTRAS]==$COGNEE_VERSION"
-
+
# Verify installation
echo ""
echo "✓ Optional dependencies installation completed"
@@ -93,19 +93,19 @@ if [ -n "$API_URL" ]; then
if echo "$API_URL" | grep -q "localhost" || echo "$API_URL" | grep -q "127.0.0.1"; then
echo "⚠️ Warning: API_URL contains localhost/127.0.0.1"
echo " Original: $API_URL"
-
+
# Try to use host.docker.internal (works on Mac/Windows and recent Linux with Docker Desktop)
FIXED_API_URL=$(echo "$API_URL" | sed 's/localhost/host.docker.internal/g' | sed 's/127\.0\.0\.1/host.docker.internal/g')
-
+
echo " Converted to: $FIXED_API_URL"
echo " This will work on Mac/Windows/Docker Desktop."
echo " On Linux without Docker Desktop, you may need to:"
echo " - Use --network host, OR"
echo " - Set API_URL=http://172.17.0.1:8000 (Docker bridge IP)"
-
+
API_URL="$FIXED_API_URL"
fi
-
+
API_ARGS="--api-url $API_URL"
if [ -n "$API_TOKEN" ]; then
API_ARGS="$API_ARGS --api-token $API_TOKEN"
diff --git a/cognee-starter-kit/.env.template b/cognee-starter-kit/.env.template
index 1aefd2739..d895c16ed 100644
--- a/cognee-starter-kit/.env.template
+++ b/cognee-starter-kit/.env.template
@@ -16,4 +16,4 @@ EMBEDDING_API_VERSION=""
GRAPHISTRY_USERNAME=""
-GRAPHISTRY_PASSWORD=""
\ No newline at end of file
+GRAPHISTRY_PASSWORD=""
diff --git a/cognee-starter-kit/README.md b/cognee-starter-kit/README.md
index 9cdcd5795..e47a1c270 100644
--- a/cognee-starter-kit/README.md
+++ b/cognee-starter-kit/README.md
@@ -14,7 +14,7 @@ This starter kit is deprecated. Its examples have been integrated into the `/new
# Cognee Starter Kit
Welcome to the
cognee Starter Repo! This repository is designed to help you get started quickly by providing a structured dataset and pre-built data pipelines using cognee to build powerful knowledge graphs.
-You can use this repo to ingest, process, and visualize data in minutes.
+You can use this repo to ingest, process, and visualize data in minutes.
By following this guide, you will:
@@ -80,7 +80,7 @@ Custom model uses custom pydantic model for graph extraction. This script catego
python src/pipelines/custom-model.py
```
-## Graph preview
+## Graph preview
cognee provides a visualize_graph function that will render the graph for you.
diff --git a/cognee/api/v1/search/routers/get_search_router.py b/cognee/api/v1/search/routers/get_search_router.py
index 171c03e49..1aaed7f39 100644
--- a/cognee/api/v1/search/routers/get_search_router.py
+++ b/cognee/api/v1/search/routers/get_search_router.py
@@ -8,12 +8,14 @@ from fastapi.encoders import jsonable_encoder
from cognee.modules.search.types import SearchType, SearchResult, CombinedSearchResult
from cognee.api.DTO import InDTO, OutDTO
-from cognee.modules.users.exceptions.exceptions import PermissionDeniedError
+from cognee.modules.users.exceptions.exceptions import PermissionDeniedError, UserNotFoundError
from cognee.modules.users.models import User
from cognee.modules.search.operations import get_history
from cognee.modules.users.methods import get_authenticated_user
from cognee.shared.utils import send_telemetry
from cognee import __version__ as cognee_version
+from cognee.infrastructure.databases.exceptions import DatabaseNotCreatedError
+from cognee.exceptions import CogneeValidationError
# Note: Datasets sent by name will only map to datasets owned by the request sender
@@ -138,6 +140,17 @@ def get_search_router() -> APIRouter:
)
return jsonable_encoder(results)
+ except (DatabaseNotCreatedError, UserNotFoundError, CogneeValidationError) as e:
+ # Return a clear 422 with actionable guidance instead of leaking a stacktrace
+ status_code = getattr(e, "status_code", 422)
+ return JSONResponse(
+ status_code=status_code,
+ content={
+ "error": "Search prerequisites not met",
+ "detail": str(e),
+ "hint": "Run `await cognee.add(...)` then `await cognee.cognify()` before searching.",
+ },
+ )
except PermissionDeniedError:
return []
except Exception as error:
diff --git a/cognee/api/v1/search/search.py b/cognee/api/v1/search/search.py
index 354331c57..ee7408758 100644
--- a/cognee/api/v1/search/search.py
+++ b/cognee/api/v1/search/search.py
@@ -11,6 +11,9 @@ from cognee.modules.data.methods import get_authorized_existing_datasets
from cognee.modules.data.exceptions import DatasetNotFoundError
from cognee.context_global_variables import set_session_user_context_variable
from cognee.shared.logging_utils import get_logger
+from cognee.infrastructure.databases.exceptions import DatabaseNotCreatedError
+from cognee.exceptions import CogneeValidationError
+from cognee.modules.users.exceptions.exceptions import UserNotFoundError
logger = get_logger()
@@ -176,7 +179,18 @@ async def search(
datasets = [datasets]
if user is None:
- user = await get_default_user()
+ try:
+ user = await get_default_user()
+ except (DatabaseNotCreatedError, UserNotFoundError) as error:
+ # Provide a clear, actionable message instead of surfacing low-level stacktraces
+ raise CogneeValidationError(
+ message=(
+ "Search prerequisites not met: no database/default user found. "
+ "Initialize Cognee before searching by:\n"
+ "• running `await cognee.add(...)` followed by `await cognee.cognify()`."
+ ),
+ name="SearchPreconditionError",
+ ) from error
await set_session_user_context_variable(user)
diff --git a/cognee/api/v1/sync/routers/get_sync_router.py b/cognee/api/v1/sync/routers/get_sync_router.py
index a7d466c10..fe974649a 100644
--- a/cognee/api/v1/sync/routers/get_sync_router.py
+++ b/cognee/api/v1/sync/routers/get_sync_router.py
@@ -71,7 +71,7 @@ def get_sync_router() -> APIRouter:
-H "Content-Type: application/json" \\
-H "Cookie: auth_token=your-token" \\
-d '{"dataset_ids": ["123e4567-e89b-12d3-a456-426614174000", "456e7890-e12b-34c5-d678-901234567000"]}'
-
+
# Sync all user datasets (empty request body or null dataset_ids)
curl -X POST "http://localhost:8000/api/v1/sync" \\
-H "Content-Type: application/json" \\
@@ -88,7 +88,7 @@ def get_sync_router() -> APIRouter:
- **413 Payload Too Large**: Dataset too large for current cloud plan
- **429 Too Many Requests**: Rate limit exceeded
- ## Notes
+ ## Notes
- Sync operations run in the background - you get an immediate response
- Use the returned run_id to track progress (status API coming soon)
- Large datasets are automatically chunked for efficient transfer
@@ -179,7 +179,7 @@ def get_sync_router() -> APIRouter:
```
## Example Responses
-
+
**No running syncs:**
```json
{
diff --git a/cognee/cli/commands/add_command.py b/cognee/cli/commands/add_command.py
index e05998ec5..f3eee26a8 100644
--- a/cognee/cli/commands/add_command.py
+++ b/cognee/cli/commands/add_command.py
@@ -21,7 +21,7 @@ binary streams, then stores them in a specified dataset for further processing.
Supported Input Types:
- **Text strings**: Direct text content
-- **File paths**: Local file paths (absolute paths starting with "/")
+- **File paths**: Local file paths (absolute paths starting with "/")
- **File URLs**: "file:///absolute/path" or "file://relative/path"
- **S3 paths**: "s3://bucket-name/path/to/file"
- **Lists**: Multiple files or text strings in a single call
diff --git a/cognee/cli/commands/config_command.py b/cognee/cli/commands/config_command.py
index 752db6403..a0d18ec7b 100644
--- a/cognee/cli/commands/config_command.py
+++ b/cognee/cli/commands/config_command.py
@@ -17,7 +17,7 @@ The `cognee config` command allows you to view and modify configuration settings
You can:
- View all current configuration settings
-- Get specific configuration values
+- Get specific configuration values
- Set configuration values
- Unset (reset to default) specific configuration values
- Reset all configuration to defaults
diff --git a/cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py b/cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py
index 1e16642b5..72a1fac01 100644
--- a/cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py
+++ b/cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py
@@ -290,7 +290,7 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):
query_string = f"""
CALL neptune.algo.vectors.topKByEmbeddingWithFiltering({{
topK: {limit},
- embedding: {embedding},
+ embedding: {embedding},
nodeFilter: {{ equals: {{property: '{self._COLLECTION_PREFIX}', value: '{collection_name}'}} }}
}}
)
@@ -299,7 +299,7 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):
if with_vector:
query_string += """
- WITH node, score, id(node) as node_id
+ WITH node, score, id(node) as node_id
MATCH (n)
WHERE id(n) = id(node)
CALL neptune.algo.vectors.get(n)
diff --git a/cognee/infrastructure/llm/prompts/extract_query_time.txt b/cognee/infrastructure/llm/prompts/extract_query_time.txt
index ce78c3471..b87c845a7 100644
--- a/cognee/infrastructure/llm/prompts/extract_query_time.txt
+++ b/cognee/infrastructure/llm/prompts/extract_query_time.txt
@@ -10,4 +10,4 @@ Extraction rules:
5. Current-time references ("now", "current", "today"): If the query explicitly refers to the present, set both starts_at and ends_at to now (the ingestion timestamp).
6. "Who is" and "Who was" questions: These imply a general identity or biographical inquiry without a specific temporal scope. Set both starts_at and ends_at to None.
7. Ordering rule: Always ensure the earlier date is assigned to starts_at and the later date to ends_at.
-8. No temporal information: If no valid or inferable time reference is found, set both starts_at and ends_at to None.
\ No newline at end of file
+8. No temporal information: If no valid or inferable time reference is found, set both starts_at and ends_at to None.
diff --git a/cognee/infrastructure/llm/prompts/generate_event_entity_prompt.txt b/cognee/infrastructure/llm/prompts/generate_event_entity_prompt.txt
index 7a34ef25b..a193dec8d 100644
--- a/cognee/infrastructure/llm/prompts/generate_event_entity_prompt.txt
+++ b/cognee/infrastructure/llm/prompts/generate_event_entity_prompt.txt
@@ -22,4 +22,4 @@ The `attributes` should be a list of dictionaries, each containing:
- Relationships should be technical with one or at most two words. If two words, use underscore camelcase style
- Relationships could imply general meaning like: subject, object, participant, recipient, agent, instrument, tool, source, cause, effect, purpose, manner, resource, etc.
- You can combine two words to form a relationship name: subject_role, previous_owner, etc.
-- Focus on how the entity specifically relates to the event
\ No newline at end of file
+- Focus on how the entity specifically relates to the event
diff --git a/cognee/infrastructure/llm/prompts/generate_event_graph_prompt.txt b/cognee/infrastructure/llm/prompts/generate_event_graph_prompt.txt
index c81ae6d3d..c0d64a0ea 100644
--- a/cognee/infrastructure/llm/prompts/generate_event_graph_prompt.txt
+++ b/cognee/infrastructure/llm/prompts/generate_event_graph_prompt.txt
@@ -27,4 +27,4 @@ class Event(BaseModel):
time_from: Optional[Timestamp] = None
time_to: Optional[Timestamp] = None
location: Optional[str] = None
-```
\ No newline at end of file
+```
diff --git a/cognee/infrastructure/llm/prompts/generate_graph_prompt.txt b/cognee/infrastructure/llm/prompts/generate_graph_prompt.txt
index 6392cdc33..ce3317381 100644
--- a/cognee/infrastructure/llm/prompts/generate_graph_prompt.txt
+++ b/cognee/infrastructure/llm/prompts/generate_graph_prompt.txt
@@ -19,8 +19,8 @@ The aim is to achieve simplicity and clarity in the knowledge graph.
- **Naming Convention**: Use snake_case for relationship names, e.g., `acted_in`.
# 3. Coreference Resolution
- **Maintain Entity Consistency**: When extracting entities, it's vital to ensure consistency.
- If an entity, such as "John Doe", is mentioned multiple times in the text but is referred to by different names or pronouns (e.g., "Joe", "he"),
- always use the most complete identifier for that entity throughout the knowledge graph. In this example, use "John Doe" as the Persons ID.
+ If an entity is mentioned multiple times in the text but is referred to by different names or pronouns,
+ always use the most complete identifier for that entity throughout the knowledge graph.
Remember, the knowledge graph should be coherent and easily understandable, so maintaining consistency in entity references is crucial.
# 4. Strict Compliance
Adhere to the rules strictly. Non-compliance will result in termination
diff --git a/cognee/infrastructure/llm/prompts/generate_graph_prompt_guided.txt b/cognee/infrastructure/llm/prompts/generate_graph_prompt_guided.txt
index a216b835f..b087755d3 100644
--- a/cognee/infrastructure/llm/prompts/generate_graph_prompt_guided.txt
+++ b/cognee/infrastructure/llm/prompts/generate_graph_prompt_guided.txt
@@ -22,7 +22,7 @@ You are an advanced algorithm designed to extract structured information to buil
3. **Coreference Resolution**:
- Maintain one consistent node ID for each real-world entity.
- Resolve aliases, acronyms, and pronouns to the most complete form.
- - *Example*: Always use "John Doe" even if later referred to as "Doe" or "he".
+ - *Example*: Always use the full identifier even if the entity is later referred to in a similar but slightly different way.
**Property & Data Guidelines**:
diff --git a/cognee/infrastructure/llm/prompts/generate_graph_prompt_oneshot.txt b/cognee/infrastructure/llm/prompts/generate_graph_prompt_oneshot.txt
index adc31f469..6375e6eb1 100644
--- a/cognee/infrastructure/llm/prompts/generate_graph_prompt_oneshot.txt
+++ b/cognee/infrastructure/llm/prompts/generate_graph_prompt_oneshot.txt
@@ -42,10 +42,10 @@ You are an advanced algorithm designed to extract structured information from un
- **Rule**: Resolve all aliases, acronyms, and pronouns to one canonical identifier.
> **One-Shot Example**:
-> **Input**: "John Doe is an author. Later, Doe published a book. He is well-known."
+> **Input**: "X is an author. Later, X published a book. He is well-known."
> **Output Node**:
> ```
-> John Doe (Person)
+> X (Person)
> ```
---
diff --git a/cognee/infrastructure/llm/prompts/generate_graph_prompt_simple.txt b/cognee/infrastructure/llm/prompts/generate_graph_prompt_simple.txt
index 4a166c027..177c9f34a 100644
--- a/cognee/infrastructure/llm/prompts/generate_graph_prompt_simple.txt
+++ b/cognee/infrastructure/llm/prompts/generate_graph_prompt_simple.txt
@@ -15,7 +15,7 @@ You are an advanced algorithm that extracts structured data into a knowledge gra
- Properties are key-value pairs; do not use escaped quotes.
3. **Coreference Resolution**
- - Use a single, complete identifier for each entity (e.g., always "John Doe" not "Joe" or "he").
+ - Use a single, complete identifier for each entity
4. **Relationship Labels**:
- Use descriptive, lowercase, snake_case names for edges.
diff --git a/cognee/infrastructure/llm/prompts/generate_graph_prompt_strict.txt b/cognee/infrastructure/llm/prompts/generate_graph_prompt_strict.txt
index a8191033f..08c117ee4 100644
--- a/cognee/infrastructure/llm/prompts/generate_graph_prompt_strict.txt
+++ b/cognee/infrastructure/llm/prompts/generate_graph_prompt_strict.txt
@@ -26,7 +26,7 @@ Use **basic atomic types** for node labels. Always prefer general types over spe
- Good: "Alan Turing", "Google Inc.", "World War II"
- Bad: "Entity_001", "1234", "he", "they"
- Never use numeric or autogenerated IDs.
-- Prioritize **most complete form** of entity names for consistency (e.g., always use "John Doe" instead of "John" or "he").
+- Prioritize **most complete form** of entity names for consistency
2. Dates, Numbers, and Properties
---------------------------------
diff --git a/cognee/infrastructure/llm/prompts/search_type_selector_prompt.txt b/cognee/infrastructure/llm/prompts/search_type_selector_prompt.txt
index 1a00bce7e..7804a3030 100644
--- a/cognee/infrastructure/llm/prompts/search_type_selector_prompt.txt
+++ b/cognee/infrastructure/llm/prompts/search_type_selector_prompt.txt
@@ -2,12 +2,12 @@ You are an expert query analyzer for a **GraphRAG system**. Your primary goal is
Here are the available `SearchType` tools and their specific functions:
-- **`SUMMARIES`**: The `SUMMARIES` search type retrieves summarized information from the knowledge graph.
+- **`SUMMARIES`**: The `SUMMARIES` search type retrieves summarized information from the knowledge graph.
- **Best for:**
+ **Best for:**
- - Getting concise overviews of topics
- - Summarizing large amounts of information
+ - Getting concise overviews of topics
+ - Summarizing large amounts of information
- Quick understanding of complex subjects
**Best for:**
@@ -16,7 +16,7 @@ Here are the available `SearchType` tools and their specific functions:
- Understanding relationships between concepts
- Exploring the structure of your knowledge graph
-* **`CHUNKS`**: The `CHUNKS` search type retrieves specific facts and information chunks from the knowledge graph.
+* **`CHUNKS`**: The `CHUNKS` search type retrieves specific facts and information chunks from the knowledge graph.
**Best for:**
@@ -122,4 +122,4 @@ Response: `NATURAL_LANGUAGE`
-Your response MUST be a single word, consisting of only the chosen `SearchType` name. Do not provide any explanation.
\ No newline at end of file
+Your response MUST be a single word, consisting of only the chosen `SearchType` name. Do not provide any explanation.
diff --git a/cognee/infrastructure/llm/prompts/test.txt b/cognee/infrastructure/llm/prompts/test.txt
index 529a11d86..265acebc7 100644
--- a/cognee/infrastructure/llm/prompts/test.txt
+++ b/cognee/infrastructure/llm/prompts/test.txt
@@ -1 +1 @@
-Respond with: test
\ No newline at end of file
+Respond with: test
diff --git a/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py b/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py
index dc0fd995a..1ddb9c480 100644
--- a/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py
+++ b/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py
@@ -194,6 +194,7 @@ def get_llm_client(raise_api_key_error: bool = True):
)
# Get optional local mode parameters (will be None if not set)
+ # TODO: refactor llm_config to include these parameters, currently they cannot be defined and defaults are used
model_path = getattr(llm_config, "llama_cpp_model_path", None)
n_ctx = getattr(llm_config, "llama_cpp_n_ctx", 2048)
n_gpu_layers = getattr(llm_config, "llama_cpp_n_gpu_layers", 0)
diff --git a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/copilot_conversations.json b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/copilot_conversations.json
index 9471928c2..dcab8ddc6 100644
--- a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/copilot_conversations.json
+++ b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/copilot_conversations.json
@@ -105,4 +105,3 @@
}
}
}
-
diff --git a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/guido_contributions.json b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/guido_contributions.json
index 918486b8c..6424d7a55 100644
--- a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/guido_contributions.json
+++ b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/guido_contributions.json
@@ -973,4 +973,4 @@
"python_version": null,
"pep_status": null
}
-]
\ No newline at end of file
+]
diff --git a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/my_developer_rules.md b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/my_developer_rules.md
index 18a7ba18d..751fda2bd 100644
--- a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/my_developer_rules.md
+++ b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/my_developer_rules.md
@@ -76,4 +76,4 @@ Section: Open Questions or TODOs
Create a checklist of unresolved decisions, logic that needs clarification, or tasks that are still pending.
Section: Last Updated
-Include the most recent update date and who made the update.
\ No newline at end of file
+Include the most recent update date and who made the update.
diff --git a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/pep_style_guide.md b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/pep_style_guide.md
index 8982d7e81..bc6466324 100644
--- a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/pep_style_guide.md
+++ b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/pep_style_guide.md
@@ -72,4 +72,3 @@ profile = "black"
- E501: line too long -> break with parentheses
- E225: missing whitespace around operator
- E402: module import not at top of file
-
diff --git a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/zen_principles.md b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/zen_principles.md
index bbcb4e56d..d49dc9c13 100644
--- a/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/zen_principles.md
+++ b/cognee/modules/notebooks/tutorials/python-development-with-cognee/data/zen_principles.md
@@ -72,4 +72,3 @@ Use modules/packages to separate concerns; avoid wildcard imports.
- Is this the simplest working solution?
- Are errors explicit and logged?
- Are modules/namespaces used appropriately?
-
diff --git a/cognee/modules/retrieval/__init__.py b/cognee/modules/retrieval/__init__.py
index 8b1378917..e69de29bb 100644
--- a/cognee/modules/retrieval/__init__.py
+++ b/cognee/modules/retrieval/__init__.py
@@ -1 +0,0 @@
-
diff --git a/cognee/modules/users/authentication/get_api_auth_backend.py b/cognee/modules/users/authentication/get_api_auth_backend.py
index ffb591a9d..6d39c7d8f 100644
--- a/cognee/modules/users/authentication/get_api_auth_backend.py
+++ b/cognee/modules/users/authentication/get_api_auth_backend.py
@@ -16,11 +16,8 @@ def get_api_auth_backend():
def get_jwt_strategy() -> JWTStrategy[models.UP, models.ID]:
secret = os.getenv("FASTAPI_USERS_JWT_SECRET", "super_secret")
- try:
- lifetime_seconds = int(os.getenv("JWT_LIFETIME_SECONDS", "3600"))
- except ValueError:
- lifetime_seconds = 3600
-
+ lifetime_seconds = int(os.getenv("JWT_LIFETIME_SECONDS", "3600"))
+
return APIJWTStrategy(secret, lifetime_seconds=lifetime_seconds)
auth_backend = AuthenticationBackend(
diff --git a/cognee/modules/users/authentication/get_client_auth_backend.py b/cognee/modules/users/authentication/get_client_auth_backend.py
index bf794377d..ba5dad2b3 100644
--- a/cognee/modules/users/authentication/get_client_auth_backend.py
+++ b/cognee/modules/users/authentication/get_client_auth_backend.py
@@ -18,10 +18,7 @@ def get_client_auth_backend():
from .default.default_jwt_strategy import DefaultJWTStrategy
secret = os.getenv("FASTAPI_USERS_JWT_SECRET", "super_secret")
- try:
- lifetime_seconds = int(os.getenv("JWT_LIFETIME_SECONDS", "3600"))
- except ValueError:
- lifetime_seconds = 3600
+ lifetime_seconds = int(os.getenv("JWT_LIFETIME_SECONDS", "3600"))
return DefaultJWTStrategy(secret, lifetime_seconds=lifetime_seconds)
diff --git a/cognee/shared/utils.py b/cognee/shared/utils.py
index 7c76cfa59..3b6e7ba14 100644
--- a/cognee/shared/utils.py
+++ b/cognee/shared/utils.py
@@ -8,7 +8,8 @@ import http.server
import socketserver
from threading import Thread
import pathlib
-from uuid import uuid4, uuid5, NAMESPACE_OID
+from typing import Union, Any, Dict, List
+from uuid import uuid4, uuid5, NAMESPACE_OID, UUID
from cognee.base_config import get_base_config
from cognee.shared.logging_utils import get_logger
@@ -58,7 +59,7 @@ def get_anonymous_id():
return anonymous_id
-def _sanitize_nested_properties(obj, property_names: list[str]):
+def _sanitize_nested_properties(obj: Any, property_names: list[str]) -> Any:
"""
Recursively replaces any property whose key matches one of `property_names`
(e.g., ['url', 'path']) in a nested dict or list with a uuid5 hash
@@ -78,7 +79,9 @@ def _sanitize_nested_properties(obj, property_names: list[str]):
return obj
-def send_telemetry(event_name: str, user_id, additional_properties: dict = {}):
+def send_telemetry(event_name: str, user_id: Union[str, UUID], additional_properties: dict = {}):
+ if additional_properties is None:
+ additional_properties = {}
if os.getenv("TELEMETRY_DISABLED"):
return
@@ -108,7 +111,7 @@ def send_telemetry(event_name: str, user_id, additional_properties: dict = {}):
print(f"Error sending telemetry through proxy: {response.status_code}")
-def embed_logo(p, layout_scale, logo_alpha, position):
+def embed_logo(p: Any, layout_scale: float, logo_alpha: float, position: str):
"""
Embed a logo into the graph visualization as a watermark.
"""
@@ -138,7 +141,11 @@ def embed_logo(p, layout_scale, logo_alpha, position):
def start_visualization_server(
- host="0.0.0.0", port=8001, handler_class=http.server.SimpleHTTPRequestHandler
+ host: str = "0.0.0.0",
+ port: int = 8001,
+ handler_class: type[
+ http.server.SimpleHTTPRequestHandler
+ ] = http.server.SimpleHTTPRequestHandler,
):
"""
Spin up a simple HTTP server in a background thread to serve files.
diff --git a/cognee/tasks/temporal_graph/__init__.py b/cognee/tasks/temporal_graph/__init__.py
index 8b1378917..e69de29bb 100644
--- a/cognee/tasks/temporal_graph/__init__.py
+++ b/cognee/tasks/temporal_graph/__init__.py
@@ -1 +0,0 @@
-
diff --git a/cognee/tests/test_cleanup_unused_data.py b/cognee/tests/test_cleanup_unused_data.py
index fa5c174c2..abb6d8f3e 100644
--- a/cognee/tests/test_cleanup_unused_data.py
+++ b/cognee/tests/test_cleanup_unused_data.py
@@ -46,10 +46,10 @@ async def test_textdocument_cleanup_with_sql():
# Step 1: Add and cognify a test document
dataset_name = "test_cleanup_dataset"
- test_text = """
- Machine learning is a subset of artificial intelligence that enables systems to learn
- and improve from experience without being explicitly programmed. Deep learning uses
- neural networks with multiple layers to process data.
+ test_text = """
+ Machine learning is a subset of artificial intelligence that enables systems to learn
+ and improve from experience without being explicitly programmed. Deep learning uses
+ neural networks with multiple layers to process data.
"""
await setup()
diff --git a/cognee/tests/test_delete_by_id.py b/cognee/tests/test_delete_by_id.py
index 8fc5395eb..90b2d6b42 100644
--- a/cognee/tests/test_delete_by_id.py
+++ b/cognee/tests/test_delete_by_id.py
@@ -47,20 +47,20 @@ async def main():
# Test data
text_1 = """
- Apple Inc. is an American multinational technology company that specializes in consumer electronics,
- software, and online services. Apple is the world's largest technology company by revenue and,
+ Apple Inc. is an American multinational technology company that specializes in consumer electronics,
+ software, and online services. Apple is the world's largest technology company by revenue and,
since January 2021, the world's most valuable company.
"""
text_2 = """
- Microsoft Corporation is an American multinational technology corporation which produces computer software,
- consumer electronics, personal computers, and related services. Its best known software products are the
+ Microsoft Corporation is an American multinational technology corporation which produces computer software,
+ consumer electronics, personal computers, and related services. Its best known software products are the
Microsoft Windows line of operating systems and the Microsoft Office suite.
"""
text_3 = """
- Google LLC is an American multinational technology company that specializes in Internet-related services and products,
- which include online advertising technologies, search engine, cloud computing, software, and hardware. Google has been
+ Google LLC is an American multinational technology company that specializes in Internet-related services and products,
+ which include online advertising technologies, search engine, cloud computing, software, and hardware. Google has been
referred to as the most powerful company in the world and one of the world's most valuable brands.
"""
diff --git a/deployment/helm/README.md b/deployment/helm/README.md
index b7aaa6325..3b496c54b 100644
--- a/deployment/helm/README.md
+++ b/deployment/helm/README.md
@@ -1,6 +1,7 @@
-# cognee-infra-helm
-General infrastructure setup for Cognee on Kubernetes using a Helm chart.
+# Example helm chart
+Example Helm chart for Cognee with PostgreSQL and the pgvector extension.
+It is not ready for production usage.
## Prerequisites
Before deploying the Helm chart, ensure the following prerequisites are met:
@@ -13,13 +14,22 @@ Before deploying the Helm chart, ensure the following prerequisites are met:
Clone the Repository Clone this repository to your local machine and navigate to the directory.
-## Deploy Helm Chart:
+## Example deploy Helm Chart:
```bash
- helm install cognee ./cognee-chart
+ helm upgrade --install cognee deployment/helm \
+ --namespace cognee --create-namespace \
+ --set cognee.env.LLM_API_KEY="$YOUR_KEY"
```
-**Uninstall Helm Release**:
+**Uninstall Helm Release**:
```bash
helm uninstall cognee
```
+
+## Port forwarding
+To access cognee, run
+```
+kubectl port-forward svc/cognee-service -n cognee 8000
+```
+It will be available at localhost:8000.
diff --git a/deployment/helm/docker-compose-helm.yml b/deployment/helm/docker-compose-helm.yml
index 8aaa63816..90c3ca206 100644
--- a/deployment/helm/docker-compose-helm.yml
+++ b/deployment/helm/docker-compose-helm.yml
@@ -43,4 +43,3 @@ networks:
volumes:
postgres_data:
-
diff --git a/deployment/helm/templates/cognee_deployment.yaml b/deployment/helm/templates/cognee_deployment.yaml
index f16a475ec..cf44d7301 100644
--- a/deployment/helm/templates/cognee_deployment.yaml
+++ b/deployment/helm/templates/cognee_deployment.yaml
@@ -20,12 +20,35 @@ spec:
ports:
- containerPort: {{ .Values.cognee.port }}
env:
+ - name: ENABLE_BACKEND_ACCESS_CONTROL
+ value: "false"
- name: HOST
value: {{ .Values.cognee.env.HOST }}
- name: ENVIRONMENT
value: {{ .Values.cognee.env.ENVIRONMENT }}
- name: PYTHONPATH
value: {{ .Values.cognee.env.PYTHONPATH }}
+ - name: VECTOR_DB_PROVIDER
+ value: pgvector
+ - name: DB_HOST
+ value: {{ .Release.Name }}-postgres
+ - name: DB_PORT
+ value: "{{ .Values.postgres.port }}"
+ - name: DB_NAME
+ value: {{ .Values.postgres.env.POSTGRES_DB }}
+ - name: DB_USERNAME
+ value: {{ .Values.postgres.env.POSTGRES_USER }}
+ - name: DB_PASSWORD
+ value: {{ .Values.postgres.env.POSTGRES_PASSWORD }}
+ - name: LLM_API_KEY
+ valueFrom:
+ secretKeyRef:
+ name: {{ .Release.Name }}-llm-api-key
+ key: LLM_API_KEY
+ - name: LLM_MODEL
+ value: {{ .Values.cognee.env.LLM_MODEL }}
+ - name: LLM_PROVIDER
+ value: {{ .Values.cognee.env.LLM_PROVIDER }}
resources:
limits:
cpu: {{ .Values.cognee.resources.cpu }}
diff --git a/deployment/helm/templates/cognee_service.yaml b/deployment/helm/templates/cognee_service.yaml
index 21e9e470e..b3ecbd5e3 100644
--- a/deployment/helm/templates/cognee_service.yaml
+++ b/deployment/helm/templates/cognee_service.yaml
@@ -5,7 +5,7 @@ metadata:
labels:
app: {{ .Release.Name }}-cognee
spec:
- type: NodePort
+ type: ClusterIP
ports:
- port: {{ .Values.cognee.port }}
targetPort: {{ .Values.cognee.port }}
diff --git a/deployment/helm/templates/postgres_service.yaml b/deployment/helm/templates/postgres_service.yaml
index 7a944a128..c1195fcaa 100644
--- a/deployment/helm/templates/postgres_service.yaml
+++ b/deployment/helm/templates/postgres_service.yaml
@@ -11,4 +11,3 @@ spec:
targetPort: {{ .Values.postgres.port }}
selector:
app: {{ .Release.Name }}-postgres
-
diff --git a/deployment/helm/templates/secrets.yml b/deployment/helm/templates/secrets.yml
new file mode 100644
index 000000000..1088865d2
--- /dev/null
+++ b/deployment/helm/templates/secrets.yml
@@ -0,0 +1,7 @@
+apiVersion: v1
+kind: Secret
+metadata:
+ name: {{ .Release.Name }}-llm-api-key
+type: Opaque
+data:
+ LLM_API_KEY: {{ .Values.cognee.env.LLM_API_KEY | b64enc | quote }}
diff --git a/deployment/helm/values.yaml b/deployment/helm/values.yaml
index 278312373..4a8fd4622 100644
--- a/deployment/helm/values.yaml
+++ b/deployment/helm/values.yaml
@@ -7,9 +7,11 @@ cognee:
HOST: "0.0.0.0"
ENVIRONMENT: "local"
PYTHONPATH: "."
+ LLM_MODEL: "openai/gpt-4o-mini"
+ LLM_PROVIDER: "openai"
resources:
cpu: "4.0"
- memory: "8Gi"
+ memory: "2Gi"
# Configuration for the 'postgres' database service
postgres:
@@ -19,4 +21,4 @@ postgres:
POSTGRES_USER: "cognee"
POSTGRES_PASSWORD: "cognee"
POSTGRES_DB: "cognee_db"
- storage: "8Gi"
+ storage: "2Gi"
diff --git a/evals/requirements.txt b/evals/requirements.txt
index cb11af255..97b3ea3e7 100644
--- a/evals/requirements.txt
+++ b/evals/requirements.txt
@@ -3,4 +3,4 @@ numpy==1.26.4
matplotlib==3.10.0
seaborn==0.13.2
scipy==1.11.4
-pathlib
\ No newline at end of file
+pathlib
diff --git a/examples/data/car_and_tech_companies.txt b/examples/data/car_and_tech_companies.txt
index 699f9812d..154232d86 100644
--- a/examples/data/car_and_tech_companies.txt
+++ b/examples/data/car_and_tech_companies.txt
@@ -34,4 +34,4 @@ What began as an online bookstore has grown into one of the largest e-commerce p
Meta, originally known as Facebook, revolutionized social media by connecting billions of people worldwide. Beyond its core social networking service, Meta is investing in the next generation of digital experiences through virtual and augmented reality technologies, with projects like Oculus. The company's efforts signal a commitment to evolving digital interaction and building the metaverse—a shared virtual space where users can connect and collaborate.
Each of these companies has significantly impacted the technology landscape, driving innovation and transforming everyday life through their groundbreaking products and services.
-"""
\ No newline at end of file
+"""
diff --git a/examples/database_examples/neptune_analytics_example.py b/examples/database_examples/neptune_analytics_example.py
index d98d1768c..1c6eb791f 100644
--- a/examples/database_examples/neptune_analytics_example.py
+++ b/examples/database_examples/neptune_analytics_example.py
@@ -63,10 +63,10 @@ async def main():
traversals.
"""
- sample_text_2 = """Neptune Analytics is an ideal choice for investigatory, exploratory, or data-science workloads
- that require fast iteration for data, analytical and algorithmic processing, or vector search on graph data. It
- complements Amazon Neptune Database, a popular managed graph database. To perform intensive analysis, you can load
- the data from a Neptune Database graph or snapshot into Neptune Analytics. You can also load graph data that's
+ sample_text_2 = """Neptune Analytics is an ideal choice for investigatory, exploratory, or data-science workloads
+ that require fast iteration for data, analytical and algorithmic processing, or vector search on graph data. It
+ complements Amazon Neptune Database, a popular managed graph database. To perform intensive analysis, you can load
+ the data from a Neptune Database graph or snapshot into Neptune Analytics. You can also load graph data that's
stored in Amazon S3.
"""
diff --git a/examples/low_level/product_recommendation.py b/examples/low_level/product_recommendation.py
index 782311618..142625318 100644
--- a/examples/low_level/product_recommendation.py
+++ b/examples/low_level/product_recommendation.py
@@ -165,8 +165,8 @@ async def main():
// If a stored preference exists and it does not match the new value,
// raise an error using APOC's utility procedure.
CALL apoc.util.validate(
- preference IS NOT NULL AND preference.value <> new_size,
- "Conflicting shoe size preference: existing size is " + preference.value + " and new size is " + new_size,
+ preference IS NOT NULL AND preference.value <> new_size,
+ "Conflicting shoe size preference: existing size is " + preference.value + " and new size is " + new_size,
[]
)
diff --git a/examples/python/temporal_example.py b/examples/python/temporal_example.py
index f5e7d4a9a..a647f31be 100644
--- a/examples/python/temporal_example.py
+++ b/examples/python/temporal_example.py
@@ -35,16 +35,16 @@ biography_1 = """
biography_2 = """
Arnulf Øverland Ole Peter Arnulf Øverland ( 27 April 1889 – 25 March 1968 ) was a Norwegian poet and artist . He is principally known for his poetry which served to inspire the Norwegian resistance movement during the German occupation of Norway during World War II .
-
+
Biography .
Øverland was born in Kristiansund and raised in Bergen . His parents were Peter Anton Øverland ( 1852–1906 ) and Hanna Hage ( 1854–1939 ) . The early death of his father , left the family economically stressed . He was able to attend Bergen Cathedral School and in 1904 Kristiania Cathedral School . He graduated in 1907 and for a time studied philology at University of Kristiania . Øverland published his first collection of poems ( 1911 ) .
-
+
Øverland became a communist sympathizer from the early 1920s and became a member of Mot Dag . He also served as chairman of the Norwegian Students Society 1923–28 . He changed his stand in 1937 , partly as an expression of dissent against the ongoing Moscow Trials . He was an avid opponent of Nazism and in 1936 he wrote the poem Du må ikke sove which was printed in the journal Samtiden . It ends with . ( I thought: : Something is imminent . Our era is over – Europe’s on fire! ) . Probably the most famous line of the poem is ( You mustnt endure so well the injustice that doesnt affect you yourself! )
-
+
During the German occupation of Norway from 1940 in World War II , he wrote to inspire the Norwegian resistance movement . He wrote a series of poems which were clandestinely distributed , leading to the arrest of both him and his future wife Margrete Aamot Øverland in 1941 . Arnulf Øverland was held first in the prison camp of Grini before being transferred to Sachsenhausen concentration camp in Germany . He spent a four-year imprisonment until the liberation of Norway in 1945 . His poems were later collected in Vi overlever alt and published in 1945 .
-
+
Øverland played an important role in the Norwegian language struggle in the post-war era . He became a noted supporter for the conservative written form of Norwegian called Riksmål , he was president of Riksmålsforbundet ( an organization in support of Riksmål ) from 1947 to 1956 . In addition , Øverland adhered to the traditionalist style of writing , criticising modernist poetry on several occasions . His speech Tungetale fra parnasset , published in Arbeiderbladet in 1954 , initiated the so-called Glossolalia debate .
-
+
Personal life .
In 1918 he had married the singer Hildur Arntzen ( 1888–1957 ) . Their marriage was dissolved in 1939 . In 1940 , he married Bartholine Eufemia Leganger ( 1903–1995 ) . They separated shortly after , and were officially divorced in 1945 . Øverland was married to journalist Margrete Aamot Øverland ( 1913–1978 ) during June 1945 . In 1946 , the Norwegian Parliament arranged for Arnulf and Margrete Aamot Øverland to reside at the Grotten . He lived there until his death in 1968 and she lived there for another ten years until her death in 1978 . Arnulf Øverland was buried at Vår Frelsers Gravlund in Oslo . Joseph Grimeland designed the bust of Arnulf Øverland ( bronze , 1970 ) at his grave site .
@@ -56,7 +56,7 @@ biography_2 = """
- Vi overlever alt ( 1945 )
- Sverdet bak døren ( 1956 )
- Livets minutter ( 1965 )
-
+
Awards .
- Gyldendals Endowment ( 1935 )
- Dobloug Prize ( 1951 )
diff --git a/examples/python/weighted_graph_visualization.html b/examples/python/weighted_graph_visualization.html
index 89920a780..3866d8cd6 100644
--- a/examples/python/weighted_graph_visualization.html
+++ b/examples/python/weighted_graph_visualization.html
@@ -14,7 +14,7 @@
.nodes circle { stroke: white; stroke-width: 0.5px; filter: drop-shadow(0 0 5px rgba(255,255,255,0.3)); }
.node-label { font-size: 5px; font-weight: bold; fill: white; text-anchor: middle; dominant-baseline: middle; font-family: 'Inter', sans-serif; pointer-events: none; }
.edge-label { font-size: 3px; fill: rgba(255, 255, 255, 0.7); text-anchor: middle; dominant-baseline: middle; font-family: 'Inter', sans-serif; pointer-events: none; }
-
+
.tooltip {
position: absolute;
text-align: left;
@@ -76,7 +76,7 @@
// Create tooltip content for edge
var content = "
Edge Information";
content += "Relationship: " + d.relation + "
";
-
+
// Show all weights
if (d.all_weights && Object.keys(d.all_weights).length > 0) {
content += "
Weights:";
@@ -86,23 +86,23 @@
} else if (d.weight !== null && d.weight !== undefined) {
content += "Weight: " + d.weight + "
";
}
-
+
if (d.relationship_type) {
content += "Type: " + d.relationship_type + "
";
}
-
+
// Add other edge properties
if (d.edge_info) {
Object.keys(d.edge_info).forEach(function(key) {
- if (key !== 'weight' && key !== 'weights' && key !== 'relationship_type' &&
- key !== 'source_node_id' && key !== 'target_node_id' &&
- key !== 'relationship_name' && key !== 'updated_at' &&
+ if (key !== 'weight' && key !== 'weights' && key !== 'relationship_type' &&
+ key !== 'source_node_id' && key !== 'target_node_id' &&
+ key !== 'relationship_name' && key !== 'updated_at' &&
!key.startsWith('weight_')) {
content += key + ": " + d.edge_info[key] + "
";
}
});
}
-
+
tooltip.html(content)
.style("left", (d3.event.pageX + 10) + "px")
.style("top", (d3.event.pageY - 10) + "px")
@@ -209,4 +209,3 @@