Compare commits
132 commits
main
...
cognify_ad
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8c02676b4f | ||
|
|
042c122da0 | ||
|
|
3419f9c45b | ||
|
|
911bba9a30 | ||
|
|
bdfa1704d9 | ||
|
|
12bc64562d | ||
|
|
dcfed0d433 | ||
|
|
31aed48cab | ||
|
|
1ae926c7f5 | ||
|
|
289e395d0e | ||
|
|
814c49332a | ||
|
|
c5c09b2cfa | ||
|
|
f5f5bcd76a | ||
|
|
977cb3f312 | ||
|
|
68284a7ade | ||
|
|
c9d3a3ea16 | ||
|
|
fa8006546b | ||
|
|
1d099a28a6 | ||
|
|
28939a881a | ||
|
|
21730bf2e8 | ||
|
|
03211bcbc9 | ||
|
|
6d407fb99d | ||
|
|
d55212c7c3 | ||
|
|
64b783bc04 | ||
|
|
d8ec665d76 | ||
|
|
2edd51198c | ||
|
|
6a54f3698b | ||
|
|
56c89ca73e | ||
|
|
efbcd3e00a | ||
|
|
450d9ada33 | ||
|
|
748671484e | ||
|
|
bcb5b12964 | ||
|
|
7f3aebd06d | ||
|
|
8e85cd2ae3 | ||
|
|
f08bd82b55 | ||
|
|
e14609bc4f | ||
|
|
f0022e2f07 | ||
|
|
ae0b6c5cae | ||
|
|
939aaf1b5e | ||
|
|
a500aeba80 | ||
|
|
880f7d4c5d | ||
|
|
55943630bd | ||
|
|
e644c3b86d | ||
|
|
29ec092520 | ||
|
|
f0f3a5de5f | ||
|
|
7c3d7eb718 | ||
|
|
b057cf27f5 | ||
|
|
9e473996d3 | ||
|
|
ad462d8510 | ||
|
|
569ffdead0 | ||
|
|
4d7c07e483 | ||
|
|
fac930dc59 | ||
|
|
7053ce7c84 | ||
|
|
6da557565a | ||
|
|
4636b69664 | ||
|
|
dda6e92169 | ||
|
|
c9590ef760 | ||
|
|
5582a4cd69 | ||
|
|
ef929ba442 | ||
|
|
72efd83061 | ||
|
|
046e8dcdc6 | ||
|
|
135c2c7520 | ||
|
|
1d2fd2f2ec | ||
|
|
34ee9f577d | ||
|
|
a89d5570ff | ||
|
|
f8110c4548 | ||
|
|
450320ba2c | ||
|
|
c68175d3f5 | ||
|
|
a1bf8416bd | ||
|
|
c803042280 | ||
|
|
c14f1a5fb0 | ||
|
|
342cbc9461 | ||
|
|
ea1e23a7aa | ||
|
|
7df1daee71 | ||
|
|
950195223e | ||
|
|
d47d410499 | ||
|
|
9e09d26501 | ||
|
|
f8f400dbeb | ||
|
|
b11236f592 | ||
|
|
c383253195 | ||
|
|
9aa8e543cb | ||
|
|
6466f66a76 | ||
|
|
2383843ec7 | ||
|
|
c908aefd80 | ||
|
|
4ca3baa383 | ||
|
|
70d49745d9 | ||
|
|
d7a8b29147 | ||
|
|
893fdd1588 | ||
|
|
232ac4e271 | ||
|
|
00948ec8db | ||
|
|
1361203ead | ||
|
|
472143df03 | ||
|
|
cb7a8951ff | ||
|
|
b4b55b820d | ||
|
|
d7d626698d | ||
|
|
aecdff0503 | ||
|
|
7865b4ce3e | ||
|
|
2871d68673 | ||
|
|
4e373cfee7 | ||
|
|
f1e254f357 | ||
|
|
8f5d5b9ac2 | ||
|
|
bc3d35d51e | ||
|
|
5cb1b53ddd | ||
|
|
2d0d7fa71c | ||
|
|
4a58913e55 | ||
|
|
42be438ab6 | ||
|
|
70e307a905 | ||
|
|
ce14a441af | ||
|
|
ddfa506cf8 | ||
|
|
be5e5078b3 | ||
|
|
7e3d593684 | ||
|
|
e3dbc186fd | ||
|
|
6f78462f3c | ||
|
|
4ddfdc13c8 | ||
|
|
cdaf4afba8 | ||
|
|
f825732eb2 | ||
|
|
ecdf624bda | ||
|
|
1267f6c1e7 | ||
|
|
7e8f5473a7 | ||
|
|
d8fde4c527 | ||
|
|
96d1dd772c | ||
|
|
cc52df94b7 | ||
|
|
ad9abb8b76 | ||
|
|
5d4f82fdd4 | ||
|
|
8aae9f8dd8 | ||
|
|
cd813c5732 | ||
|
|
7456567597 | ||
|
|
b29ab72c50 | ||
|
|
5cbdbf3abf | ||
|
|
cc4fab9e75 | ||
|
|
0c1e515c8f | ||
|
|
fe83a25576 |
205 changed files with 9677 additions and 14329 deletions
|
|
@ -69,3 +69,11 @@ LITELLM_LOG="ERROR"
|
|||
# Set this environment variable to disable sending telemetry data
|
||||
# TELEMETRY_DISABLED=1
|
||||
|
||||
# Set this variable to True to enforce usage of backend access control for Cognee
|
||||
# Note: This is only currently supported by the following databases:
|
||||
# Relational: SQLite, Postgres
|
||||
# Vector: LanceDB
|
||||
# Graph: KuzuDB
|
||||
#
|
||||
# It enforces LanceDB and KuzuDB use and uses them to create databases per Cognee user + dataset
|
||||
ENABLE_BACKEND_ACCESS_CONTROL=False
|
||||
|
|
|
|||
31
.github/workflows/e2e_tests.yml
vendored
31
.github/workflows/e2e_tests.yml
vendored
|
|
@ -215,3 +215,34 @@ jobs:
|
|||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
run: poetry run python ./cognee/tests/test_s3.py
|
||||
|
||||
test-parallel-databases:
|
||||
name: Test using different async databases in parallel in Cognee
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cognee Setup
|
||||
uses: ./.github/actions/cognee_setup
|
||||
with:
|
||||
python-version: '3.11.x'
|
||||
|
||||
- name: Install specific graph db dependency
|
||||
run: |
|
||||
poetry install -E kuzu
|
||||
|
||||
- name: Run parallel databases test
|
||||
env:
|
||||
ENV: 'dev'
|
||||
LLM_MODEL: ${{ secrets.LLM_MODEL }}
|
||||
LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
|
||||
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
|
||||
LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
|
||||
EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
|
||||
EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
|
||||
EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
|
||||
EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
run: poetry run python ./cognee/tests/test_parallel_databases.py
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ COPY README.md pyproject.toml uv.lock entrypoint.sh ./
|
|||
|
||||
# Install the project's dependencies using the lockfile and settings
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
uv sync --extra debug --extra api --extra postgres --extra weaviate --extra qdrant --extra neo4j --extra kuzu --extra llama-index --extra gemini --extra ollama --extra mistral --extra groq --extra anthropic --frozen --no-install-project --no-dev --no-editable
|
||||
uv sync --extra debug --extra api --extra crewai --extra postgres --extra weaviate --extra qdrant --extra neo4j --extra kuzu --extra llama-index --extra gemini --extra ollama --extra mistral --extra groq --extra anthropic --frozen --no-install-project --no-dev --no-editable
|
||||
|
||||
# Copy Alembic configuration
|
||||
COPY alembic.ini /app/alembic.ini
|
||||
|
|
@ -41,7 +41,7 @@ COPY alembic/ /app/alembic
|
|||
# Installing separately from its dependencies allows optimal layer caching
|
||||
COPY ./cognee /app/cognee
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
uv sync --extra debug --extra api --extra postgres --extra weaviate --extra qdrant --extra neo4j --extra kuzu --extra llama-index --extra gemini --extra ollama --extra mistral --extra groq --extra anthropic --frozen --no-dev --no-editable
|
||||
uv sync --extra debug --extra api --extra postgres --extra crewai --extra weaviate --extra qdrant --extra neo4j --extra kuzu --extra llama-index --extra gemini --extra ollama --extra mistral --extra groq --extra anthropic --frozen --no-dev --no-editable
|
||||
|
||||
FROM python:3.12-slim-bookworm
|
||||
|
||||
|
|
|
|||
|
|
@ -156,6 +156,15 @@ Try cognee UI out locally [here](https://docs.cognee.ai/how-to-guides/cognee-ui)
|
|||
</div>
|
||||
|
||||
|
||||
## CrewAI
|
||||
|
||||
Note1: After each restart go to `localhost:3000/auth` and login again.
|
||||
Note2: Activity is not preserved in the DB, so it will be lost after page refresh.
|
||||
|
||||
1. Start FastAPI server by running `client.py` inside `cognee/api` directory
|
||||
2. Start NextJS server by running `npm run dev` inside `cognee-frontend` directory.
|
||||
3. If you are not logged-in, app will redirect to `/auth` page. Otherwise go there manually and login (if server is restarted).
|
||||
|
||||
|
||||
## Demos
|
||||
|
||||
|
|
|
|||
|
|
@ -7,8 +7,8 @@ Create Date: 2024-10-16 22:17:18.634638
|
|||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from sqlalchemy.util import await_only
|
||||
from fastapi_users.exceptions import UserAlreadyExists
|
||||
|
||||
from cognee.modules.users.methods import create_default_user, delete_user
|
||||
|
||||
|
|
@ -21,7 +21,10 @@ depends_on: Union[str, Sequence[str], None] = "8057ae7329c2"
|
|||
|
||||
|
||||
def upgrade() -> None:
|
||||
await_only(create_default_user())
|
||||
try:
|
||||
await_only(create_default_user())
|
||||
except UserAlreadyExists:
|
||||
pass # It's fine if the default user already exists
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
|
|
|
|||
7
cognee-frontend/.prettierrc
Normal file
7
cognee-frontend/.prettierrc
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"trailingComma": "es5",
|
||||
"tabWidth": 2,
|
||||
"semi": true,
|
||||
"singleQuote": false,
|
||||
"plugins": ["prettier-plugin-tailwindcss"]
|
||||
}
|
||||
1796
cognee-frontend/package-lock.json
generated
1796
cognee-frontend/package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "cognee-frontend",
|
||||
"version": "0.1.0",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
|
|
@ -9,14 +9,21 @@
|
|||
"lint": "next lint"
|
||||
},
|
||||
"dependencies": {
|
||||
"@auth0/nextjs-auth0": "^4.6.0",
|
||||
"classnames": "^2.5.1",
|
||||
"next": "14.2.3",
|
||||
"culori": "^4.0.1",
|
||||
"d3-force-3d": "^3.0.6",
|
||||
"next": "15.3.2",
|
||||
"ohmy-ui": "^0.0.6",
|
||||
"react": "^18",
|
||||
"react-dom": "^18",
|
||||
"react-force-graph-2d": "^1.27.1",
|
||||
"tailwindcss": "^4.1.7",
|
||||
"uuid": "^9.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4.1.7",
|
||||
"@types/culori": "^4.0.0",
|
||||
"@types/node": "^20",
|
||||
"@types/react": "^18",
|
||||
"@types/react-dom": "^18",
|
||||
|
|
|
|||
5
cognee-frontend/postcss.config.mjs
Normal file
5
cognee-frontend/postcss.config.mjs
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
export default {
|
||||
plugins: {
|
||||
"@tailwindcss/postcss": {},
|
||||
}
|
||||
}
|
||||
54
cognee-frontend/src/app/(graph)/ActivityLog.tsx
Normal file
54
cognee-frontend/src/app/(graph)/ActivityLog.tsx
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
"use client";
|
||||
|
||||
import { useCallback, useImperativeHandle, useState } from "react";
|
||||
|
||||
type ActivityLog = {
|
||||
id: string;
|
||||
timestamp: number;
|
||||
activity: string;
|
||||
};
|
||||
|
||||
export interface ActivityLogAPI {
|
||||
updateActivityLog: (activityLog: ActivityLog[]) => void;
|
||||
}
|
||||
|
||||
interface ActivityLogProps {
|
||||
ref: React.RefObject<ActivityLogAPI>;
|
||||
}
|
||||
|
||||
const formatter = new Intl.DateTimeFormat("en-GB", { dateStyle: "short", timeStyle: "medium" });
|
||||
|
||||
export default function ActivityLog({ ref }: ActivityLogProps) {
|
||||
const [activityLog, updateActivityLog] = useState<ActivityLog[]>([]);
|
||||
|
||||
const handleActivityLogUpdate = useCallback(
|
||||
(newActivities: ActivityLog[]) => {
|
||||
updateActivityLog([...activityLog, ...newActivities]);
|
||||
|
||||
const activityLogContainer = document.getElementById("activityLogContainer");
|
||||
|
||||
if (activityLogContainer) {
|
||||
activityLogContainer.scrollTo({ top: 0, behavior: "smooth" });
|
||||
}
|
||||
},
|
||||
[activityLog],
|
||||
);
|
||||
|
||||
useImperativeHandle(ref, () => ({
|
||||
updateActivityLog: handleActivityLogUpdate,
|
||||
}));
|
||||
|
||||
return (
|
||||
<div className="overflow-y-auto max-h-96" id="activityLogContainer">
|
||||
<div className="flex flex-col-reverse gap-2">
|
||||
{activityLog.map((activity) => (
|
||||
<div key={activity.id} className="flex gap-2 items-top">
|
||||
<span className="flex-1/3 text-xs text-gray-300 whitespace-nowrap mt-1.5">{formatter.format(activity.timestamp)}: </span>
|
||||
<span className="flex-2/3 text-white whitespace-normal">{activity.activity}</span>
|
||||
</div>
|
||||
))}
|
||||
{!activityLog.length && <span className="text-white">No activity logged.</span>}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
90
cognee-frontend/src/app/(graph)/CogneeAddWidget.tsx
Normal file
90
cognee-frontend/src/app/(graph)/CogneeAddWidget.tsx
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
"use client";
|
||||
|
||||
import { v4 as uuid4 } from "uuid";
|
||||
import { ChangeEvent, useEffect } from "react";
|
||||
import { CTAButton, StatusIndicator } from "@/ui/elements";
|
||||
|
||||
import addData from "@/modules/ingestion/addData";
|
||||
import cognifyDataset from "@/modules/datasets/cognifyDataset";
|
||||
import useDatasets from "@/modules/ingestion/useDatasets";
|
||||
import getDatasetGraph from '@/modules/datasets/getDatasetGraph';
|
||||
|
||||
export interface NodesAndEdges {
|
||||
nodes: { id: string; label: string }[];
|
||||
links: { source: string; target: string; label: string }[];
|
||||
}
|
||||
|
||||
interface CogneeAddWidgetProps {
|
||||
onData: (data: NodesAndEdges) => void;
|
||||
}
|
||||
|
||||
export default function CogneeAddWidget({ onData }: CogneeAddWidgetProps) {
|
||||
const {
|
||||
datasets,
|
||||
addDataset,
|
||||
removeDataset,
|
||||
refreshDatasets,
|
||||
} = useDatasets();
|
||||
|
||||
useEffect(() => {
|
||||
refreshDatasets()
|
||||
.then((datasets) => {
|
||||
const dataset = datasets?.[0];
|
||||
|
||||
// For CrewAI we don't have a dataset.
|
||||
// if (dataset) {
|
||||
getDatasetGraph(dataset || { id: uuid4() })
|
||||
.then((graph) => onData({
|
||||
nodes: graph.nodes,
|
||||
links: graph.edges,
|
||||
}));
|
||||
// }
|
||||
});
|
||||
}, [onData, refreshDatasets]);
|
||||
|
||||
const handleAddFiles = (dataset: { id?: string, name?: string }, event: ChangeEvent<HTMLInputElement>) => {
|
||||
event.stopPropagation();
|
||||
|
||||
if (!event.currentTarget.files) {
|
||||
throw new Error("Error: No files added to the uploader input.");
|
||||
}
|
||||
|
||||
const files: File[] = Array.from(event.currentTarget.files);
|
||||
|
||||
return addData(dataset, files)
|
||||
.then(() => {
|
||||
const onUpdate = (data: any) => {
|
||||
onData({
|
||||
nodes: data.payload.nodes,
|
||||
links: data.payload.edges,
|
||||
});
|
||||
};
|
||||
|
||||
return cognifyDataset(dataset, onUpdate);
|
||||
});
|
||||
};
|
||||
|
||||
return null;
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-4 mb-4">
|
||||
{datasets.length ? datasets.map((dataset) => (
|
||||
<div key={dataset.id} className="flex gap-8 items-center">
|
||||
<div className="flex flex-row gap-4 items-center">
|
||||
<StatusIndicator status={dataset.status} />
|
||||
<span className="text-white">{dataset.name}</span>
|
||||
</div>
|
||||
<CTAButton type="button" className="relative">
|
||||
<input type="file" multiple onChange={handleAddFiles.bind(null, dataset)} className="absolute w-full h-full cursor-pointer opacity-0" />
|
||||
<span>+ Add Data</span>
|
||||
</CTAButton>
|
||||
</div>
|
||||
)) : (
|
||||
<CTAButton type="button" className="relative">
|
||||
<input type="file" multiple onChange={handleAddFiles.bind(null, { name: "main_dataset" })} className="absolute w-full h-full cursor-pointer opacity-0" />
|
||||
<span>+ Add Data</span>
|
||||
</CTAButton>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
115
cognee-frontend/src/app/(graph)/CrewAITrigger.tsx
Normal file
115
cognee-frontend/src/app/(graph)/CrewAITrigger.tsx
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
import { useState } from "react";
|
||||
import { fetch } from "@/utils";
|
||||
import { v4 as uuid4 } from "uuid";
|
||||
import { LoadingIndicator } from "@/ui/App";
|
||||
import { CTAButton, Input } from "@/ui/elements";
|
||||
|
||||
interface CrewAIFormPayload extends HTMLFormElement {
|
||||
username1: HTMLInputElement;
|
||||
username2: HTMLInputElement;
|
||||
}
|
||||
|
||||
interface CrewAITriggerProps {
|
||||
onData: (data: any) => void;
|
||||
onActivity: (activities: any) => void;
|
||||
}
|
||||
|
||||
export default function CrewAITrigger({ onData, onActivity }: CrewAITriggerProps) {
|
||||
const [isCrewAIRunning, setIsCrewAIRunning] = useState(false);
|
||||
|
||||
const handleRunCrewAI = (event: React.FormEvent<CrewAIFormPayload>) => {
|
||||
event.preventDefault();
|
||||
const formElements = event.currentTarget;
|
||||
|
||||
const crewAIConfig = {
|
||||
username1: formElements.username1.value,
|
||||
username2: formElements.username2.value,
|
||||
};
|
||||
|
||||
const websocket = new WebSocket("ws://localhost:8000/api/v1/crewai/subscribe");
|
||||
|
||||
let isCrewAIDone = false;
|
||||
onActivity([{ id: uuid4(), timestamp: Date.now(), activity: "Dispatching hiring crew agents" }]);
|
||||
|
||||
websocket.onmessage = (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
|
||||
if (data.status === "PipelineRunActivity") {
|
||||
onActivity([data.payload]);
|
||||
return;
|
||||
}
|
||||
|
||||
onData({
|
||||
nodes: data.payload.nodes,
|
||||
links: data.payload.edges,
|
||||
});
|
||||
|
||||
const nodes_type_map: { [key: string]: number } = {};
|
||||
|
||||
for (let i = 0; i < data.payload.nodes.length; i++) {
|
||||
const node = data.payload.nodes[i];
|
||||
if (!nodes_type_map[node.type]) {
|
||||
nodes_type_map[node.type] = 0;
|
||||
}
|
||||
nodes_type_map[node.type] += 1;
|
||||
}
|
||||
|
||||
const activityMessage = Object.entries(nodes_type_map).reduce((message, [type, count]) => {
|
||||
return `${message}\n | ${type}: ${count}`;
|
||||
}, "Graph updated:");
|
||||
|
||||
onActivity([{
|
||||
id: uuid4(),
|
||||
timestamp: Date.now(),
|
||||
activity: activityMessage,
|
||||
}]);
|
||||
|
||||
if (data.status === "PipelineRunCompleted") {
|
||||
isCrewAIDone = true;
|
||||
websocket.close();
|
||||
}
|
||||
};
|
||||
|
||||
onData(null);
|
||||
setIsCrewAIRunning(true);
|
||||
|
||||
return fetch("/v1/crewai/run", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(crewAIConfig),
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(() => {
|
||||
onActivity([{ id: uuid4(), timestamp: Date.now(), activity: "Hiring crew agents made a decision" }]);
|
||||
})
|
||||
.catch(() => {
|
||||
onActivity([{ id: uuid4(), timestamp: Date.now(), activity: "Hiring crew agents had problems while executing" }]);
|
||||
})
|
||||
.finally(() => {
|
||||
websocket.close();
|
||||
setIsCrewAIRunning(false);
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<form className="w-full flex flex-col gap-4" onSubmit={handleRunCrewAI}>
|
||||
<h1 className="text-2xl text-white">Cognee HR Crew Demo</h1>
|
||||
<div className="flex flex-row gap-2">
|
||||
<div className="flex flex-col w-full flex-1/2">
|
||||
<label className="block mb-1 text-white" htmlFor="username1">GitHub username</label>
|
||||
<Input name="username1" type="text" placeholder="Github Username" required defaultValue="hajdul88" />
|
||||
</div>
|
||||
<div className="flex flex-col w-full flex-1/2">
|
||||
<label className="block mb-1 text-white" htmlFor="username2">GitHub username</label>
|
||||
<Input name="username2" type="text" placeholder="Github Username" required defaultValue="lxobr" />
|
||||
</div>
|
||||
</div>
|
||||
<CTAButton type="submit" disabled={isCrewAIRunning} className="whitespace-nowrap">
|
||||
Start HR Crew Research
|
||||
{isCrewAIRunning && <LoadingIndicator />}
|
||||
</CTAButton>
|
||||
</form>
|
||||
);
|
||||
}
|
||||
242
cognee-frontend/src/app/(graph)/GraphControls.tsx
Normal file
242
cognee-frontend/src/app/(graph)/GraphControls.tsx
Normal file
|
|
@ -0,0 +1,242 @@
|
|||
"use client";
|
||||
|
||||
import { v4 as uuid4 } from "uuid";
|
||||
import classNames from "classnames";
|
||||
import { NodeObject, LinkObject } from "react-force-graph-2d";
|
||||
import { ChangeEvent, useEffect, useImperativeHandle, useRef, useState } from "react";
|
||||
|
||||
import { DeleteIcon } from "@/ui/Icons";
|
||||
import { FeedbackForm } from "@/ui/Partials";
|
||||
import { CTAButton, Input, NeutralButton, Select } from "@/ui/elements";
|
||||
|
||||
interface GraphControlsProps {
|
||||
data?: {
|
||||
nodes: NodeObject[];
|
||||
links: LinkObject[];
|
||||
};
|
||||
isAddNodeFormOpen: boolean;
|
||||
ref: React.RefObject<GraphControlsAPI>;
|
||||
onFitIntoView: () => void;
|
||||
onGraphShapeChange: (shape: string) => void;
|
||||
}
|
||||
|
||||
export interface GraphControlsAPI {
|
||||
setSelectedNode: (node: NodeObject | null) => void;
|
||||
getSelectedNode: () => NodeObject | null;
|
||||
}
|
||||
|
||||
type ActivityLog = {
|
||||
id: string;
|
||||
timestamp: number;
|
||||
activity: string;
|
||||
};
|
||||
|
||||
type NodeProperty = {
|
||||
id: string;
|
||||
name: string;
|
||||
value: string;
|
||||
};
|
||||
|
||||
const formatter = new Intl.DateTimeFormat("en-GB", { dateStyle: "short", timeStyle: "medium" });
|
||||
|
||||
const DEFAULT_GRAPH_SHAPE = "lr";
|
||||
|
||||
const GRAPH_SHAPES = [{
|
||||
value: "none",
|
||||
label: "None",
|
||||
}, {
|
||||
value: "td",
|
||||
label: "Top-down",
|
||||
}, {
|
||||
value: "bu",
|
||||
label: "Bottom-up",
|
||||
}, {
|
||||
value: "lr",
|
||||
label: "Left-right",
|
||||
}, {
|
||||
value: "rl",
|
||||
label: "Right-left",
|
||||
}, {
|
||||
value: "radialin",
|
||||
label: "Radial-in",
|
||||
}, {
|
||||
value: "radialout",
|
||||
label: "Radial-out",
|
||||
}];
|
||||
|
||||
export default function GraphControls({ data, isAddNodeFormOpen, onGraphShapeChange, onFitIntoView, ref }: GraphControlsProps) {
|
||||
const [selectedNode, setSelectedNode] = useState<NodeObject | null>(null);
|
||||
const [nodeProperties, setNodeProperties] = useState<NodeProperty[]>([]);
|
||||
const [newProperty, setNewProperty] = useState<NodeProperty>({
|
||||
id: uuid4(),
|
||||
name: "",
|
||||
value: "",
|
||||
});
|
||||
|
||||
const handlePropertyChange = (property: NodeProperty, property_key: string, event: ChangeEvent<HTMLInputElement>) => {
|
||||
const value = event.target.value;
|
||||
|
||||
setNodeProperties(nodeProperties.map((nodeProperty) => (nodeProperty.id === property.id ? {...nodeProperty, [property_key]: value } : nodeProperty)));
|
||||
};
|
||||
|
||||
const handlePropertyAdd = () => {
|
||||
if (newProperty.name && newProperty.value) {
|
||||
setNodeProperties([...nodeProperties, newProperty]);
|
||||
setNewProperty({ id: uuid4(), name: "", value: "" });
|
||||
} else {
|
||||
alert("Please fill in both name and value fields for the new property.");
|
||||
}
|
||||
};
|
||||
|
||||
const handlePropertyDelete = (property: NodeProperty) => {
|
||||
setNodeProperties(nodeProperties.filter((nodeProperty) => nodeProperty.id !== property.id));
|
||||
};
|
||||
|
||||
const handleNewPropertyChange = (property: NodeProperty, property_key: string, event: ChangeEvent<HTMLInputElement>) => {
|
||||
const value = event.target.value;
|
||||
|
||||
setNewProperty({...property, [property_key]: value });
|
||||
};
|
||||
|
||||
useImperativeHandle(ref, () => ({
|
||||
setSelectedNode,
|
||||
getSelectedNode: () => selectedNode,
|
||||
}));
|
||||
|
||||
const [selectedTab, setSelectedTab] = useState("nodeDetails");
|
||||
|
||||
const handleGraphShapeControl = (event: ChangeEvent<HTMLSelectElement>) => {
|
||||
setIsAuthShapeChangeEnabled(false);
|
||||
onGraphShapeChange(event.target.value);
|
||||
};
|
||||
|
||||
const [isAuthShapeChangeEnabled, setIsAuthShapeChangeEnabled] = useState(true);
|
||||
const shapeChangeTimeout = useRef<number | null>();
|
||||
|
||||
useEffect(() => {
|
||||
onGraphShapeChange(DEFAULT_GRAPH_SHAPE);
|
||||
|
||||
const graphShapesNum = GRAPH_SHAPES.length;
|
||||
|
||||
function switchShape(shapeIndex: number) {
|
||||
if (!isAuthShapeChangeEnabled || !data) {
|
||||
if (shapeChangeTimeout.current) {
|
||||
clearTimeout(shapeChangeTimeout.current);
|
||||
shapeChangeTimeout.current = null;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
shapeChangeTimeout.current = setTimeout(() => {
|
||||
const newValue = GRAPH_SHAPES[shapeIndex].value;
|
||||
onGraphShapeChange(newValue);
|
||||
const graphShapeSelectElement = document.getElementById("graph-shape-select") as HTMLSelectElement;
|
||||
graphShapeSelectElement.value = newValue;
|
||||
|
||||
switchShape((shapeIndex + 1) % graphShapesNum);
|
||||
}, 5000) as unknown as number;
|
||||
};
|
||||
|
||||
switchShape(0);
|
||||
|
||||
setTimeout(() => {
|
||||
onFitIntoView();
|
||||
}, 500);
|
||||
|
||||
return () => {
|
||||
if (shapeChangeTimeout.current) {
|
||||
clearTimeout(shapeChangeTimeout.current);
|
||||
shapeChangeTimeout.current = null;
|
||||
}
|
||||
};
|
||||
}, [data, isAuthShapeChangeEnabled, onFitIntoView, onGraphShapeChange]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="flex w-full">
|
||||
<button onClick={() => setSelectedTab("nodeDetails")} className={classNames("cursor-pointer pt-4 pb-4 align-center text-gray-300 border-b-2 w-30 flex-1/3", { "border-b-indigo-600 text-white": selectedTab === "nodeDetails" })}>
|
||||
<span className="whitespace-nowrap">Node Details</span>
|
||||
</button>
|
||||
<button onClick={() => setSelectedTab("feedback")} className={classNames("cursor-pointer pt-4 pb-4 align-center text-gray-300 border-b-2 w-30 flex-1/3", { "border-b-indigo-600 text-white": selectedTab === "feedback" })}>
|
||||
<span className="whitespace-nowrap">Feedback</span>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="pt-4">
|
||||
{selectedTab === "nodeDetails" && (
|
||||
<>
|
||||
<div className="w-full flex flex-row gap-2 items-center mb-4">
|
||||
<label className="text-gray-300 whitespace-nowrap flex-1/5">Graph Shape:</label>
|
||||
<Select defaultValue={DEFAULT_GRAPH_SHAPE} onChange={handleGraphShapeControl} id="graph-shape-select" className="flex-2/5">
|
||||
{GRAPH_SHAPES.map((shape) => (
|
||||
<option key={shape.value} value={shape.value}>{shape.label}</option>
|
||||
))}
|
||||
</Select>
|
||||
<NeutralButton onClick={onFitIntoView} className="flex-2/5 whitespace-nowrap">Fit Graph into View</NeutralButton>
|
||||
</div>
|
||||
|
||||
|
||||
{isAddNodeFormOpen ? (
|
||||
<form className="flex flex-col gap-4" onSubmit={() => {}}>
|
||||
<div className="flex flex-row gap-4 items-center">
|
||||
<span className="text-gray-300 whitespace-nowrap">Source Node ID:</span>
|
||||
<Input readOnly type="text" defaultValue={selectedNode!.id} />
|
||||
</div>
|
||||
<div className="flex flex-col gap-4 items-end">
|
||||
{nodeProperties.map((property) => (
|
||||
<div key={property.id} className="w-full flex flex-row gap-2 items-center">
|
||||
<Input className="flex-1/3" type="text" placeholder="Property name" required value={property.name} onChange={handlePropertyChange.bind(null, property, "name")} />
|
||||
<Input className="flex-2/3" type="text" placeholder="Property value" required value={property.value} onChange={handlePropertyChange.bind(null, property, "value")} />
|
||||
<button className="border-1 border-white p-2 rounded-sm" onClick={handlePropertyDelete.bind(null, property)}>
|
||||
<DeleteIcon width={16} height={18} color="white" />
|
||||
</button>
|
||||
</div>
|
||||
))}
|
||||
<div className="w-full flex flex-row gap-2 items-center">
|
||||
<Input className="flex-1/3" type="text" placeholder="Property name" required value={newProperty.name} onChange={handleNewPropertyChange.bind(null, newProperty, "name")} />
|
||||
<Input className="flex-2/3" type="text" placeholder="Property value" required value={newProperty.value} onChange={handleNewPropertyChange.bind(null, newProperty, "value")} />
|
||||
<NeutralButton type="button" className="" onClick={handlePropertyAdd}>Add</NeutralButton>
|
||||
</div>
|
||||
</div>
|
||||
<CTAButton type="submit">Add Node</CTAButton>
|
||||
</form>
|
||||
) : (
|
||||
selectedNode ? (
|
||||
<div className="flex flex-col gap-4">
|
||||
<div className="flex flex-col gap-2 overflow-y-auto max-h-96 pr-2">
|
||||
<div className="flex gap-2 items-top">
|
||||
<span className="text-gray-300">ID:</span>
|
||||
<span className="text-white">{selectedNode.id}</span>
|
||||
</div>
|
||||
<div className="flex gap-2 items-top">
|
||||
<span className="text-gray-300">Label:</span>
|
||||
<span className="text-white">{selectedNode.label}</span>
|
||||
</div>
|
||||
|
||||
{Object.entries(selectedNode.properties).map(([key, value]) => (
|
||||
<div key={key} className="flex gap-2 items-top">
|
||||
<span className="text-gray-300">{key.charAt(0).toUpperCase() + key.slice(1)}:</span>
|
||||
<span className="text-white">{typeof value === "object" ? JSON.stringify(value) : value as string}</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* <CTAButton type="button" onClick={() => {}}>Edit Node</CTAButton> */}
|
||||
</div>
|
||||
) : (
|
||||
<span className="text-white">No node selected.</span>
|
||||
)
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
{selectedTab === "feedback" && (
|
||||
<div className="flex flex-col gap-2">
|
||||
<FeedbackForm onSuccess={() => {}} />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
25
cognee-frontend/src/app/(graph)/GraphLegend.tsx
Normal file
25
cognee-frontend/src/app/(graph)/GraphLegend.tsx
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
import { NodeObject } from "react-force-graph-2d";
|
||||
import getColorForNodeType from './getColorForNodeType';
|
||||
|
||||
interface GraphLegendProps {
|
||||
data?: NodeObject[];
|
||||
}
|
||||
|
||||
export default function GraphLegend({ data }: GraphLegendProps) {
|
||||
const legend: Set<string> = new Set();
|
||||
|
||||
for (let i = 0; i < Math.min(data?.length || 0, 100); i++) {
|
||||
legend.add(data![i].type);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-1">
|
||||
{Array.from(legend).map((nodeType) => (
|
||||
<div key={nodeType} className="flex flex-row items-center gap-2">
|
||||
<span className="w-2 h-2 rounded-2xl" style={{ backgroundColor: getColorForNodeType(nodeType) }} />
|
||||
<span className="text-white">{nodeType}</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
117
cognee-frontend/src/app/(graph)/GraphView.tsx
Normal file
117
cognee-frontend/src/app/(graph)/GraphView.tsx
Normal file
|
|
@ -0,0 +1,117 @@
|
|||
"use client";
|
||||
|
||||
import { useCallback, useRef, useState, MutableRefObject } from "react";
|
||||
|
||||
import { TextLogo } from "@/ui/App";
|
||||
import { Divider } from "@/ui/Layout";
|
||||
import { Footer } from "@/ui/Partials";
|
||||
import CrewAITrigger from "./CrewAITrigger";
|
||||
import GraphVisualization, { GraphVisualizationAPI } from "./GraphVisualization";
|
||||
import CogneeAddWidget, { NodesAndEdges } from "./CogneeAddWidget";
|
||||
import GraphControls, { GraphControlsAPI } from "./GraphControls";
|
||||
import GraphLegend from "./GraphLegend";
|
||||
import ActivityLog, { ActivityLogAPI } from "./ActivityLog";
|
||||
|
||||
import { useBoolean } from "@/utils";
|
||||
|
||||
interface GraphNode {
|
||||
id: string | number;
|
||||
label: string;
|
||||
properties?: {};
|
||||
}
|
||||
|
||||
interface GraphData {
|
||||
nodes: GraphNode[];
|
||||
links: { source: string | number; target: string | number; label: string }[];
|
||||
}
|
||||
|
||||
export default function GraphView() {
|
||||
const {
|
||||
value: isAddNodeFormOpen,
|
||||
setTrue: enableAddNodeForm,
|
||||
setFalse: disableAddNodeForm,
|
||||
} = useBoolean(false);
|
||||
|
||||
const [data, updateData] = useState<GraphData>();
|
||||
|
||||
const onDataChange = useCallback((newData: NodesAndEdges) => {
|
||||
if (newData === null) {
|
||||
// Requests for resetting the data
|
||||
updateData(undefined);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!newData.nodes.length && !newData.links.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
updateData({
|
||||
nodes: newData.nodes,
|
||||
links: newData.links,
|
||||
});
|
||||
}, []);
|
||||
|
||||
const graphRef = useRef<GraphVisualizationAPI>();
|
||||
|
||||
const graphControls = useRef<GraphControlsAPI>();
|
||||
|
||||
const activityLog = useRef<ActivityLogAPI>();
|
||||
|
||||
return (
|
||||
<main className="flex flex-col h-full">
|
||||
<div className="pt-6 pr-3 pb-3 pl-6">
|
||||
<TextLogo width={86} height={24} />
|
||||
</div>
|
||||
<Divider />
|
||||
<div className="w-full h-full relative overflow-hidden">
|
||||
<GraphVisualization
|
||||
key={data?.nodes.length}
|
||||
ref={graphRef as MutableRefObject<GraphVisualizationAPI>}
|
||||
data={data}
|
||||
graphControls={graphControls as MutableRefObject<GraphControlsAPI>}
|
||||
/>
|
||||
|
||||
<div className="absolute top-2 left-2 flex flex-col gap-2">
|
||||
{/* <div className="bg-gray-500 pt-4 pr-4 pb-4 pl-4 rounded-md w-md"> */}
|
||||
<CogneeAddWidget onData={onDataChange} />
|
||||
{/* </div> */}
|
||||
<div className="bg-gray-500 pt-4 pr-4 pb-4 pl-4 rounded-md w-md">
|
||||
<CrewAITrigger onData={onDataChange} onActivity={(activities) => activityLog.current?.updateActivityLog(activities)} />
|
||||
</div>
|
||||
<div className="bg-gray-500 pt-4 pr-4 pb-4 pl-4 rounded-md w-md">
|
||||
<h2 className="text-xl text-white mb-4">Activity Log</h2>
|
||||
<ActivityLog ref={activityLog as MutableRefObject<ActivityLogAPI>} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="absolute top-2 right-2 flex flex-col gap-2 items-end">
|
||||
<div className="bg-gray-500 pt-4 pr-4 pb-4 pl-4 rounded-md w-110">
|
||||
<GraphControls
|
||||
data={data}
|
||||
ref={graphControls as MutableRefObject<GraphControlsAPI>}
|
||||
isAddNodeFormOpen={isAddNodeFormOpen}
|
||||
onFitIntoView={() => graphRef.current!.zoomToFit(1000, 50)}
|
||||
onGraphShapeChange={(shape) => graphRef.current!.setGraphShape(shape)}
|
||||
/>
|
||||
</div>
|
||||
{data?.nodes.length && (
|
||||
<div className="bg-gray-500 pt-4 pr-4 pb-4 pl-4 rounded-md w-48">
|
||||
<GraphLegend data={data?.nodes} />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<Divider />
|
||||
<div className="pl-6 pr-6">
|
||||
<Footer>
|
||||
{(data?.nodes.length || data?.links.length) && (
|
||||
<div className="flex flex-row items-center gap-6">
|
||||
<span>Nodes: {data?.nodes.length || 0}</span>
|
||||
<span>Edges: {data?.links.length || 0}</span>
|
||||
</div>
|
||||
)}
|
||||
</Footer>
|
||||
</div>
|
||||
</main>
|
||||
);
|
||||
}
|
||||
225
cognee-frontend/src/app/(graph)/GraphVisualization.tsx
Normal file
225
cognee-frontend/src/app/(graph)/GraphVisualization.tsx
Normal file
|
|
@ -0,0 +1,225 @@
|
|||
"use client";
|
||||
|
||||
import { MutableRefObject, useEffect, useImperativeHandle, useRef, useState } from "react";
|
||||
import { forceCollide, forceManyBody } from "d3-force-3d";
|
||||
import ForceGraph, { ForceGraphMethods, GraphData, LinkObject, NodeObject } from "react-force-graph-2d";
|
||||
import { GraphControlsAPI } from "./GraphControls";
|
||||
import getColorForNodeType from "./getColorForNodeType";
|
||||
|
||||
interface GraphVisuzaliationProps {
|
||||
ref: MutableRefObject<GraphVisualizationAPI>;
|
||||
data?: GraphData<NodeObject, LinkObject>;
|
||||
graphControls: MutableRefObject<GraphControlsAPI>;
|
||||
}
|
||||
|
||||
export interface GraphVisualizationAPI {
|
||||
zoomToFit: ForceGraphMethods["zoomToFit"];
|
||||
setGraphShape: (shape: string) => void;
|
||||
}
|
||||
|
||||
export default function GraphVisualization({ ref, data, graphControls }: GraphVisuzaliationProps) {
|
||||
const textSize = 6;
|
||||
const nodeSize = 15;
|
||||
const addNodeDistanceFromSourceNode = 15;
|
||||
|
||||
const handleNodeClick = (node: NodeObject) => {
|
||||
graphControls.current?.setSelectedNode(node);
|
||||
// ref.current?.d3ReheatSimulation()
|
||||
}
|
||||
|
||||
const handleBackgroundClick = (event: MouseEvent) => {
|
||||
const selectedNode = graphControls.current?.getSelectedNode();
|
||||
|
||||
if (!selectedNode) {
|
||||
return;
|
||||
}
|
||||
|
||||
graphControls.current?.setSelectedNode(null);
|
||||
|
||||
// const graphBoundingBox = document.getElementById("graph-container")?.querySelector("canvas")?.getBoundingClientRect();
|
||||
// const x = event.clientX - graphBoundingBox!.x;
|
||||
// const y = event.clientY - graphBoundingBox!.y;
|
||||
|
||||
// const graphClickCoords = graphRef.current!.screen2GraphCoords(x, y);
|
||||
|
||||
// const distanceFromAddNode = Math.sqrt(
|
||||
// Math.pow(graphClickCoords.x - (selectedNode!.x! + addNodeDistanceFromSourceNode), 2)
|
||||
// + Math.pow(graphClickCoords.y - (selectedNode!.y! + addNodeDistanceFromSourceNode), 2)
|
||||
// );
|
||||
|
||||
// if (distanceFromAddNode <= 10) {
|
||||
// enableAddNodeForm();
|
||||
// } else {
|
||||
// disableAddNodeForm();
|
||||
// graphControls.current?.setSelectedNode(null);
|
||||
// }
|
||||
};
|
||||
|
||||
function renderNode(node: NodeObject, ctx: CanvasRenderingContext2D, globalScale: number, renderType: string = "replace") {
|
||||
const selectedNode = graphControls.current?.getSelectedNode();
|
||||
|
||||
ctx.save();
|
||||
|
||||
// if (node.id === selectedNode?.id) {
|
||||
// ctx.fillStyle = "gray";
|
||||
|
||||
// ctx.beginPath();
|
||||
// ctx.arc(node.x! + addNodeDistanceFromSourceNode, node.y! + addNodeDistanceFromSourceNode, 10, 0, 2 * Math.PI);
|
||||
// ctx.fill();
|
||||
|
||||
// ctx.beginPath();
|
||||
// ctx.moveTo(node.x! + addNodeDistanceFromSourceNode - 5, node.y! + addNodeDistanceFromSourceNode)
|
||||
// ctx.lineTo(node.x! + addNodeDistanceFromSourceNode - 5 + 10, node.y! + addNodeDistanceFromSourceNode);
|
||||
// ctx.stroke();
|
||||
|
||||
// ctx.beginPath();
|
||||
// ctx.moveTo(node.x! + addNodeDistanceFromSourceNode, node.y! + addNodeDistanceFromSourceNode - 5)
|
||||
// ctx.lineTo(node.x! + addNodeDistanceFromSourceNode, node.y! + addNodeDistanceFromSourceNode - 5 + 10);
|
||||
// ctx.stroke();
|
||||
// }
|
||||
|
||||
if (renderType === "replace") {
|
||||
ctx.beginPath();
|
||||
ctx.fillStyle = getColorForNodeType(node.type);
|
||||
ctx.arc(node.x!, node.y!, nodeSize, 0, 2 * Math.PI);
|
||||
ctx.fill();
|
||||
}
|
||||
|
||||
// draw text label (with background rect)
|
||||
const textPos = {
|
||||
x: node.x!,
|
||||
y: node.y!,
|
||||
};
|
||||
|
||||
ctx.translate(textPos.x, textPos.y);
|
||||
ctx.textAlign = "center";
|
||||
ctx.textBaseline = "middle";
|
||||
ctx.fillStyle = "#333333";
|
||||
ctx.font = `${textSize}px Sans-Serif`;
|
||||
ctx.fillText(node.label, 0, 0);
|
||||
|
||||
ctx.restore();
|
||||
}
|
||||
|
||||
function renderLink(link: LinkObject, ctx: CanvasRenderingContext2D) {
|
||||
const MAX_FONT_SIZE = 4;
|
||||
const LABEL_NODE_MARGIN = nodeSize * 1.5;
|
||||
|
||||
const start = link.source;
|
||||
const end = link.target;
|
||||
|
||||
// ignore unbound links
|
||||
if (typeof start !== "object" || typeof end !== "object") return;
|
||||
|
||||
const textPos = {
|
||||
x: start.x! + (end.x! - start.x!) / 2,
|
||||
y: start.y! + (end.y! - start.y!) / 2,
|
||||
};
|
||||
|
||||
const relLink = { x: end.x! - start.x!, y: end.y! - start.y! };
|
||||
|
||||
const maxTextLength = Math.sqrt(Math.pow(relLink.x, 2) + Math.pow(relLink.y, 2)) - LABEL_NODE_MARGIN * 2;
|
||||
|
||||
let textAngle = Math.atan2(relLink.y, relLink.x);
|
||||
// maintain label vertical orientation for legibility
|
||||
if (textAngle > Math.PI / 2) textAngle = -(Math.PI - textAngle);
|
||||
if (textAngle < -Math.PI / 2) textAngle = -(-Math.PI - textAngle);
|
||||
|
||||
const label = link.label
|
||||
|
||||
// estimate fontSize to fit in link length
|
||||
ctx.font = "1px Sans-Serif";
|
||||
const fontSize = Math.min(MAX_FONT_SIZE, maxTextLength / ctx.measureText(label).width);
|
||||
ctx.font = `${fontSize}px Sans-Serif`;
|
||||
const textWidth = ctx.measureText(label).width;
|
||||
const bckgDimensions = [textWidth, fontSize].map(n => n + fontSize * 0.2); // some padding
|
||||
|
||||
// draw text label (with background rect)
|
||||
ctx.save();
|
||||
ctx.translate(textPos.x, textPos.y);
|
||||
ctx.rotate(textAngle);
|
||||
|
||||
ctx.fillStyle = "rgba(255, 255, 255, 0.8)";
|
||||
ctx.fillRect(- bckgDimensions[0] / 2, - bckgDimensions[1] / 2, bckgDimensions[0], bckgDimensions[1]);
|
||||
|
||||
ctx.textAlign = "center";
|
||||
ctx.textBaseline = "middle";
|
||||
ctx.fillStyle = "darkgrey";
|
||||
ctx.fillText(label, 0, 0);
|
||||
ctx.restore();
|
||||
}
|
||||
|
||||
function renderInitialNode(node: NodeObject, ctx: CanvasRenderingContext2D, globalScale: number) {
|
||||
renderNode(node, ctx, globalScale, "after");
|
||||
}
|
||||
|
||||
function handleDagError(loopNodeIds: (string | number)[]) {}
|
||||
|
||||
const graphRef = useRef<ForceGraphMethods>();
|
||||
|
||||
useEffect(() => {
|
||||
if (typeof window !== "undefined" && data && graphRef.current) {
|
||||
// add collision force
|
||||
graphRef.current.d3Force("collision", forceCollide(nodeSize * 1.5));
|
||||
graphRef.current.d3Force("charge", forceManyBody().strength(-1500).distanceMin(300).distanceMax(900));
|
||||
}
|
||||
}, [data, graphRef]);
|
||||
|
||||
const [graphShape, setGraphShape] = useState<string>();
|
||||
|
||||
useImperativeHandle(ref, () => ({
|
||||
zoomToFit: graphRef.current!.zoomToFit,
|
||||
setGraphShape: setGraphShape,
|
||||
}));
|
||||
|
||||
return (
|
||||
<div className="w-full h-full" id="graph-container">
|
||||
{(data && typeof window !== "undefined") ? (
|
||||
<ForceGraph
|
||||
ref={graphRef}
|
||||
dagMode={graphShape as unknown as undefined}
|
||||
dagLevelDistance={300}
|
||||
onDagError={handleDagError}
|
||||
graphData={data}
|
||||
|
||||
nodeLabel="label"
|
||||
nodeRelSize={nodeSize}
|
||||
nodeCanvasObject={renderNode}
|
||||
nodeCanvasObjectMode={() => "replace"}
|
||||
|
||||
linkLabel="label"
|
||||
linkCanvasObject={renderLink}
|
||||
linkCanvasObjectMode={() => "after"}
|
||||
linkDirectionalArrowLength={3.5}
|
||||
linkDirectionalArrowRelPos={1}
|
||||
|
||||
onNodeClick={handleNodeClick}
|
||||
onBackgroundClick={handleBackgroundClick}
|
||||
d3VelocityDecay={0.3}
|
||||
/>
|
||||
) : (
|
||||
<ForceGraph
|
||||
ref={graphRef}
|
||||
dagMode={graphShape as unknown as undefined}
|
||||
dagLevelDistance={100}
|
||||
graphData={{
|
||||
nodes: [{ id: 1, label: "Add" }, { id: 2, label: "Cognify" }, { id: 3, label: "Search" }],
|
||||
links: [{ source: 1, target: 2, label: "but don't forget to" }, { source: 2, target: 3, label: "and after that you can" }],
|
||||
}}
|
||||
|
||||
nodeLabel="label"
|
||||
nodeRelSize={20}
|
||||
nodeCanvasObject={renderInitialNode}
|
||||
nodeCanvasObjectMode={() => "after"}
|
||||
nodeAutoColorBy="type"
|
||||
|
||||
linkLabel="label"
|
||||
linkCanvasObject={renderLink}
|
||||
linkCanvasObjectMode={() => "after"}
|
||||
linkDirectionalArrowLength={3.5}
|
||||
linkDirectionalArrowRelPos={1}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
1376
cognee-frontend/src/app/(graph)/example_data.json
Normal file
1376
cognee-frontend/src/app/(graph)/example_data.json
Normal file
File diff suppressed because it is too large
Load diff
22
cognee-frontend/src/app/(graph)/getColorForNodeType.ts
Normal file
22
cognee-frontend/src/app/(graph)/getColorForNodeType.ts
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
import colors from "tailwindcss/colors";
|
||||
import { formatHex } from "culori";
|
||||
|
||||
const NODE_COLORS = {
|
||||
TextDocument: formatHex(colors.blue[500]),
|
||||
DocumentChunk: formatHex(colors.green[500]),
|
||||
TextSummary: formatHex(colors.orange[500]),
|
||||
Entity: formatHex(colors.yellow[300]),
|
||||
EntityType: formatHex(colors.purple[800]),
|
||||
NodeSet: formatHex(colors.indigo[300]),
|
||||
GitHubUser: formatHex(colors.gray[300]),
|
||||
Comment: formatHex(colors.amber[500]),
|
||||
Issue: formatHex(colors.red[500]),
|
||||
Repository: formatHex(colors.stone[400]),
|
||||
Commit: formatHex(colors.teal[500]),
|
||||
File: formatHex(colors.emerald[500]),
|
||||
FileChange: formatHex(colors.sky[500]),
|
||||
};
|
||||
|
||||
export default function getColorForNodeType(type: string) {
|
||||
return NODE_COLORS[type as keyof typeof NODE_COLORS] || colors.gray[500];
|
||||
}
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
.main {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
flex-direction: column;
|
||||
padding: 0;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.authContainer {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
padding: 24px 0;
|
||||
margin: 0 auto;
|
||||
max-width: 440px;
|
||||
width: 100%;
|
||||
}
|
||||
|
|
@ -1,29 +1,53 @@
|
|||
import { Spacer, Stack, Text } from 'ohmy-ui';
|
||||
import { TextLogo } from '@/ui/App';
|
||||
import Footer from '@/ui/Partials/Footer/Footer';
|
||||
import Link from "next/link";
|
||||
import { TextLogo } from "@/ui/App";
|
||||
import { Divider } from "@/ui/Layout";
|
||||
import Footer from "@/ui/Partials/Footer/Footer";
|
||||
|
||||
import styles from './AuthPage.module.css';
|
||||
import { Divider } from '@/ui/Layout';
|
||||
import SignInForm from '@/ui/Partials/SignInForm/SignInForm';
|
||||
import { auth0 } from "@/modules/auth/auth0";
|
||||
|
||||
import { CTAButton } from "@/ui/elements";
|
||||
|
||||
|
||||
export default async function AuthPage() {
|
||||
const session = await auth0.getSession();
|
||||
|
||||
export default function AuthPage() {
|
||||
return (
|
||||
<main className={styles.main}>
|
||||
<Spacer inset vertical="2" horizontal="2">
|
||||
<Stack orientation="horizontal" gap="between" align="center">
|
||||
<TextLogo width={158} height={44} color="white" />
|
||||
</Stack>
|
||||
</Spacer>
|
||||
<Divider />
|
||||
<div className={styles.authContainer}>
|
||||
<Stack gap="4" style={{ width: '100%' }}>
|
||||
<h1><Text size="large">Sign in</Text></h1>
|
||||
<SignInForm />
|
||||
</Stack>
|
||||
<main className="flex flex-col h-full">
|
||||
<div className="pt-6 pr-3 pb-3 pl-6">
|
||||
<TextLogo width={86} height={24} />
|
||||
</div>
|
||||
<Spacer inset horizontal="3" wrap>
|
||||
<Divider />
|
||||
<div className="flex flex-col m-auto max-w-md h-full gap-8 pb-12 pt-6">
|
||||
<h1><span className="text-xl">Welcome to cognee</span></h1>
|
||||
{session ? (
|
||||
<div className="flex flex-col gap-8">
|
||||
<span className="text-lg">Hello, {session.user.name}!</span>
|
||||
<Link href="/auth/logout">
|
||||
<CTAButton>
|
||||
Log out
|
||||
</CTAButton>
|
||||
</Link>
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex flex-row h-full gap-8">
|
||||
<Link href="/auth/login?screen_hint=signup">
|
||||
<CTAButton>
|
||||
Sign up
|
||||
</CTAButton>
|
||||
</Link>
|
||||
|
||||
<Link href="/auth/login">
|
||||
<CTAButton>
|
||||
Log in
|
||||
</CTAButton>
|
||||
</Link>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<Divider />
|
||||
<div className="pl-6 pr-6">
|
||||
<Footer />
|
||||
</Spacer>
|
||||
</div>
|
||||
</main>
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
export { default } from './AuthPage';
|
||||
export { default } from "./AuthPage";
|
||||
|
|
|
|||
14
cognee-frontend/src/app/auth/token/AuthToken.tsx
Normal file
14
cognee-frontend/src/app/auth/token/AuthToken.tsx
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
"use client";
|
||||
|
||||
import { useEffect } from "react";
|
||||
|
||||
export default function AuthToken() {
|
||||
useEffect(() => {
|
||||
async function get_token() {
|
||||
await fetch("http://localhost:3000/auth/token");
|
||||
}
|
||||
get_token();
|
||||
}, []);
|
||||
|
||||
return null;
|
||||
}
|
||||
16
cognee-frontend/src/app/auth/token/route.ts
Normal file
16
cognee-frontend/src/app/auth/token/route.ts
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
import { redirect } from "next/navigation";
|
||||
import { auth0 } from "@/modules/auth/auth0";
|
||||
|
||||
export async function GET(request: Request) {
|
||||
const accessToken = await auth0.getAccessToken();
|
||||
|
||||
if (accessToken) {
|
||||
const response = new Response();
|
||||
|
||||
response.headers.set("Set-Cookie", `${process.env.AUTH_TOKEN_COOKIE_NAME}=${accessToken.token}; Expires=${new Date(accessToken.expiresAt * 1000).toUTCString()}; Path=/; SameSite=Lax; Domain=localhost; HttpOnly`);
|
||||
|
||||
return response;
|
||||
} else {
|
||||
redirect("/auth");
|
||||
}
|
||||
}
|
||||
|
|
@ -15,23 +15,16 @@
|
|||
--textarea-default-color: #0D051C !important;
|
||||
}
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
max-width: 100vw;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
body {
|
||||
background: var(--global-background-default);
|
||||
}
|
||||
|
||||
a {
|
||||
color: inherit;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
@import "tailwindcss";
|
||||
|
|
|
|||
130
cognee-frontend/src/app/page copy.tsx
Normal file
130
cognee-frontend/src/app/page copy.tsx
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
'use client';
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import styles from "./page.module.css";
|
||||
import { GhostButton, Notification, NotificationContainer, Spacer, Stack, Text, useBoolean, useNotifications } from 'ohmy-ui';
|
||||
import useDatasets from '@/modules/ingestion/useDatasets';
|
||||
import DataView, { Data } from '@/modules/ingestion/DataView';
|
||||
import DatasetsView from '@/modules/ingestion/DatasetsView';
|
||||
import classNames from 'classnames';
|
||||
import addData from '@/modules/ingestion/addData';
|
||||
import cognifyDataset from '@/modules/datasets/cognifyDataset';
|
||||
import getDatasetData from '@/modules/datasets/getDatasetData';
|
||||
import { Footer, SettingsModal } from '@/ui/Partials';
|
||||
import { TextLogo } from '@/ui/App';
|
||||
import { SettingsIcon } from '@/ui/Icons';
|
||||
|
||||
export default function Home() {
|
||||
const {
|
||||
datasets,
|
||||
refreshDatasets,
|
||||
} = useDatasets();
|
||||
|
||||
const [datasetData, setDatasetData] = useState<Data[]>([]);
|
||||
const [selectedDataset, setSelectedDataset] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
refreshDatasets();
|
||||
}, [refreshDatasets]);
|
||||
|
||||
const openDatasetData = (dataset: { id: string }) => {
|
||||
getDatasetData(dataset)
|
||||
.then(setDatasetData)
|
||||
.then(() => setSelectedDataset(dataset.id));
|
||||
};
|
||||
|
||||
const closeDatasetData = () => {
|
||||
setDatasetData([]);
|
||||
setSelectedDataset(null);
|
||||
};
|
||||
|
||||
const { notifications, showNotification } = useNotifications();
|
||||
|
||||
const onDataAdd = useCallback((dataset: { id: string }, files: File[]) => {
|
||||
return addData(dataset, files)
|
||||
.then(() => {
|
||||
showNotification("Data added successfully. Please run \"Cognify\" when ready.", 5000);
|
||||
openDatasetData(dataset);
|
||||
});
|
||||
}, [showNotification])
|
||||
|
||||
const onDatasetCognify = useCallback((dataset: { id: string, name: string }) => {
|
||||
showNotification(`Cognification started for dataset "${dataset.name}".`, 5000);
|
||||
|
||||
return cognifyDataset(dataset)
|
||||
.then(() => {
|
||||
showNotification(`Dataset "${dataset.name}" cognified.`, 5000);
|
||||
})
|
||||
.catch(() => {
|
||||
showNotification(`Dataset "${dataset.name}" cognification failed. Please try again.`, 5000);
|
||||
});
|
||||
}, [showNotification]);
|
||||
|
||||
const onCognify = useCallback(() => {
|
||||
const dataset = datasets.find((dataset) => dataset.id === selectedDataset);
|
||||
return onDatasetCognify({
|
||||
id: dataset!.id,
|
||||
name: dataset!.name,
|
||||
});
|
||||
}, [datasets, onDatasetCognify, selectedDataset]);
|
||||
|
||||
const {
|
||||
value: isSettingsModalOpen,
|
||||
setTrue: openSettingsModal,
|
||||
setFalse: closeSettingsModal,
|
||||
} = useBoolean(false);
|
||||
|
||||
return (
|
||||
<main className={styles.main}>
|
||||
<Spacer inset vertical="2" horizontal="2">
|
||||
<Stack orientation="horizontal" gap="between" align="center">
|
||||
<TextLogo width={158} height={44} color="white" />
|
||||
<GhostButton hugContent onClick={openSettingsModal}>
|
||||
<SettingsIcon />
|
||||
</GhostButton>
|
||||
</Stack>
|
||||
</Spacer>
|
||||
<SettingsModal isOpen={isSettingsModalOpen} onClose={closeSettingsModal} />
|
||||
<Spacer inset vertical="1" horizontal="3">
|
||||
<div className={styles.data}>
|
||||
<div className={classNames(styles.datasetsView, {
|
||||
[styles.openDatasetData]: datasetData.length > 0,
|
||||
})}>
|
||||
<DatasetsView
|
||||
datasets={datasets}
|
||||
onDatasetClick={openDatasetData}
|
||||
onDatasetCognify={onDatasetCognify}
|
||||
/>
|
||||
</div>
|
||||
{datasetData.length > 0 && selectedDataset && (
|
||||
<div className={styles.dataView}>
|
||||
<DataView
|
||||
data={datasetData}
|
||||
datasetId={selectedDataset}
|
||||
onClose={closeDatasetData}
|
||||
onDataAdd={onDataAdd}
|
||||
onCognify={onCognify}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</Spacer>
|
||||
<Spacer inset horizontal="3" wrap>
|
||||
<Footer />
|
||||
</Spacer>
|
||||
<NotificationContainer gap="1" bottom right>
|
||||
{notifications.map((notification, index: number) => (
|
||||
<Notification
|
||||
key={notification.id}
|
||||
isOpen={notification.isOpen}
|
||||
style={{ top: `${index * 60}px` }}
|
||||
expireIn={notification.expireIn}
|
||||
onClose={notification.delete}
|
||||
>
|
||||
<Text nowrap>{notification.message}</Text>
|
||||
</Notification>
|
||||
))}
|
||||
</NotificationContainer>
|
||||
</main>
|
||||
);
|
||||
}
|
||||
|
|
@ -1,130 +1,3 @@
|
|||
'use client';
|
||||
export { default } from "./(graph)/GraphView";
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import styles from "./page.module.css";
|
||||
import { GhostButton, Notification, NotificationContainer, Spacer, Stack, Text, useBoolean, useNotifications } from 'ohmy-ui';
|
||||
import useDatasets from '@/modules/ingestion/useDatasets';
|
||||
import DataView, { Data } from '@/modules/ingestion/DataView';
|
||||
import DatasetsView from '@/modules/ingestion/DatasetsView';
|
||||
import classNames from 'classnames';
|
||||
import addData from '@/modules/ingestion/addData';
|
||||
import cognifyDataset from '@/modules/datasets/cognifyDataset';
|
||||
import getDatasetData from '@/modules/datasets/getDatasetData';
|
||||
import { Footer, SettingsModal } from '@/ui/Partials';
|
||||
import { TextLogo } from '@/ui/App';
|
||||
import { SettingsIcon } from '@/ui/Icons';
|
||||
|
||||
export default function Home() {
|
||||
const {
|
||||
datasets,
|
||||
refreshDatasets,
|
||||
} = useDatasets();
|
||||
|
||||
const [datasetData, setDatasetData] = useState<Data[]>([]);
|
||||
const [selectedDataset, setSelectedDataset] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
refreshDatasets();
|
||||
}, [refreshDatasets]);
|
||||
|
||||
const openDatasetData = (dataset: { id: string }) => {
|
||||
getDatasetData(dataset)
|
||||
.then(setDatasetData)
|
||||
.then(() => setSelectedDataset(dataset.id));
|
||||
};
|
||||
|
||||
const closeDatasetData = () => {
|
||||
setDatasetData([]);
|
||||
setSelectedDataset(null);
|
||||
};
|
||||
|
||||
const { notifications, showNotification } = useNotifications();
|
||||
|
||||
const onDataAdd = useCallback((dataset: { id: string }, files: File[]) => {
|
||||
return addData(dataset, files)
|
||||
.then(() => {
|
||||
showNotification("Data added successfully. Please run \"Cognify\" when ready.", 5000);
|
||||
openDatasetData(dataset);
|
||||
});
|
||||
}, [showNotification])
|
||||
|
||||
const onDatasetCognify = useCallback((dataset: { id: string, name: string }) => {
|
||||
showNotification(`Cognification started for dataset "${dataset.name}".`, 5000);
|
||||
|
||||
return cognifyDataset(dataset)
|
||||
.then(() => {
|
||||
showNotification(`Dataset "${dataset.name}" cognified.`, 5000);
|
||||
})
|
||||
.catch(() => {
|
||||
showNotification(`Dataset "${dataset.name}" cognification failed. Please try again.`, 5000);
|
||||
});
|
||||
}, [showNotification]);
|
||||
|
||||
const onCognify = useCallback(() => {
|
||||
const dataset = datasets.find((dataset) => dataset.id === selectedDataset);
|
||||
return onDatasetCognify({
|
||||
id: dataset!.id,
|
||||
name: dataset!.name,
|
||||
});
|
||||
}, [datasets, onDatasetCognify, selectedDataset]);
|
||||
|
||||
const {
|
||||
value: isSettingsModalOpen,
|
||||
setTrue: openSettingsModal,
|
||||
setFalse: closeSettingsModal,
|
||||
} = useBoolean(false);
|
||||
|
||||
return (
|
||||
<main className={styles.main}>
|
||||
<Spacer inset vertical="2" horizontal="2">
|
||||
<Stack orientation="horizontal" gap="between" align="center">
|
||||
<TextLogo width={158} height={44} color="white" />
|
||||
<GhostButton hugContent onClick={openSettingsModal}>
|
||||
<SettingsIcon />
|
||||
</GhostButton>
|
||||
</Stack>
|
||||
</Spacer>
|
||||
<SettingsModal isOpen={isSettingsModalOpen} onClose={closeSettingsModal} />
|
||||
<Spacer inset vertical="1" horizontal="3">
|
||||
<div className={styles.data}>
|
||||
<div className={classNames(styles.datasetsView, {
|
||||
[styles.openDatasetData]: datasetData.length > 0,
|
||||
})}>
|
||||
<DatasetsView
|
||||
datasets={datasets}
|
||||
onDatasetClick={openDatasetData}
|
||||
onDatasetCognify={onDatasetCognify}
|
||||
/>
|
||||
</div>
|
||||
{datasetData.length > 0 && selectedDataset && (
|
||||
<div className={styles.dataView}>
|
||||
<DataView
|
||||
data={datasetData}
|
||||
datasetId={selectedDataset}
|
||||
onClose={closeDatasetData}
|
||||
onDataAdd={onDataAdd}
|
||||
onCognify={onCognify}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</Spacer>
|
||||
<Spacer inset horizontal="3" wrap>
|
||||
<Footer />
|
||||
</Spacer>
|
||||
<NotificationContainer gap="1" bottom right>
|
||||
{notifications.map((notification, index: number) => (
|
||||
<Notification
|
||||
key={notification.id}
|
||||
isOpen={notification.isOpen}
|
||||
style={{ top: `${index * 60}px` }}
|
||||
expireIn={notification.expireIn}
|
||||
onClose={notification.delete}
|
||||
>
|
||||
<Text nowrap>{notification.message}</Text>
|
||||
</Notification>
|
||||
))}
|
||||
</NotificationContainer>
|
||||
</main>
|
||||
);
|
||||
}
|
||||
export const dynamic = "force-dynamic";
|
||||
|
|
|
|||
|
|
@ -1,8 +0,0 @@
|
|||
.files {
|
||||
width: 100%;
|
||||
padding: 4px;
|
||||
}
|
||||
|
||||
.fileSize {
|
||||
display: block;
|
||||
}
|
||||
|
|
@ -1,97 +0,0 @@
|
|||
import { useCallback, useState } from 'react';
|
||||
import { CTAButton, GhostButton, Stack, Text, TrashIcon, UploadIcon, UploadInput, useBoolean } from 'ohmy-ui';
|
||||
import { Divider } from '@/ui/Layout';
|
||||
import addData from '@/modules/ingestion/addData';
|
||||
import { LoadingIndicator } from '@/ui/App';
|
||||
import styles from './AddStep.module.css';
|
||||
import { WizardHeading } from '@/ui/Partials/Wizard';
|
||||
|
||||
interface ConfigStepProps {
|
||||
onNext: () => void;
|
||||
}
|
||||
|
||||
export default function AddStep({ onNext }: ConfigStepProps) {
|
||||
const [files, setFiles] = useState<File[]>([]);
|
||||
|
||||
const {
|
||||
value: isUploading,
|
||||
setTrue: disableUploading,
|
||||
setFalse: enableUploading,
|
||||
} = useBoolean(false);
|
||||
|
||||
const uploadFiles = useCallback(() => {
|
||||
disableUploading()
|
||||
addData({ name: 'main' }, files)
|
||||
.then(() => {
|
||||
onNext();
|
||||
})
|
||||
.finally(() => enableUploading());
|
||||
}, [disableUploading, enableUploading, files, onNext]);
|
||||
|
||||
const addFiles = useCallback((files: File[]) => {
|
||||
setFiles((existingFiles) => {
|
||||
const newFiles = files.filter((file) => !existingFiles.some((existingFile) => existingFile.name === file.name));
|
||||
|
||||
return [...existingFiles, ...newFiles]
|
||||
});
|
||||
}, []);
|
||||
|
||||
const removeFile = useCallback((file: File) => {
|
||||
setFiles((files) => files.filter((f) => f !== file));
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<Stack orientation="vertical" gap="6">
|
||||
<WizardHeading><Text light size="large">Step 2/3</Text> Add knowledge</WizardHeading>
|
||||
<Divider />
|
||||
<Text align="center">
|
||||
Cognee lets you process your personal data, books, articles or company data.
|
||||
Simply add datasets to get started.
|
||||
</Text>
|
||||
<Stack gap="1">
|
||||
<UploadInput onChange={addFiles}>
|
||||
<Stack gap="2" orientation="horizontal" align="center/center">
|
||||
<UploadIcon key={files.length} />
|
||||
<Text>Upload your data</Text>
|
||||
</Stack>
|
||||
</UploadInput>
|
||||
<Stack gap="3" className={styles.files}>
|
||||
{files.map((file, index) => (
|
||||
<Stack gap="between" orientation="horizontal" align="center/" key={index}>
|
||||
<div key={index}>
|
||||
<Text bold>{file.name}</Text>
|
||||
<Text className={styles.fileSize} size="small">
|
||||
{getBiggestUnitSize(file.size)}
|
||||
</Text>
|
||||
</div>
|
||||
<GhostButton hugContent onClick={() => removeFile(file)}>
|
||||
<TrashIcon />
|
||||
</GhostButton>
|
||||
</Stack>
|
||||
))}
|
||||
</Stack>
|
||||
</Stack>
|
||||
<Stack align="/end">
|
||||
<CTAButton disabled={isUploading || files.length === 0} onClick={uploadFiles}>
|
||||
<Stack gap="2" orientation="horizontal" align="center/center">
|
||||
<Text>Next</Text>
|
||||
{isUploading && (
|
||||
<LoadingIndicator />
|
||||
)}
|
||||
</Stack>
|
||||
</CTAButton>
|
||||
</Stack>
|
||||
</Stack>
|
||||
)
|
||||
}
|
||||
|
||||
function getBiggestUnitSize(sizeInBytes: number): string {
|
||||
const units = ['B', 'KB', 'MB', 'GB'];
|
||||
|
||||
let i = 0;
|
||||
while (sizeInBytes >= 1024 && i < units.length - 1) {
|
||||
sizeInBytes /= 1024;
|
||||
i++;
|
||||
}
|
||||
return `${sizeInBytes.toFixed(2)} ${units[i]}`;
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
export { default } from './AddStep';
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
import { useEffect, useRef } from 'react';
|
||||
import { CTAButton, Stack, Text, useBoolean } from 'ohmy-ui';
|
||||
import { Divider } from '@/ui/Layout';
|
||||
import { CognifyLoadingIndicator } from '@/ui/App';
|
||||
import { WizardHeading } from '@/ui/Partials/Wizard';
|
||||
import cognifyDataset from '@/modules/datasets/cognifyDataset';
|
||||
|
||||
interface ConfigStepProps {
|
||||
onNext: () => void;
|
||||
dataset: { name: string }
|
||||
}
|
||||
|
||||
export default function CognifyStep({ onNext, dataset }: ConfigStepProps) {
|
||||
const {
|
||||
value: isCognifyRunning,
|
||||
setFalse: stopCognifyIndicator,
|
||||
} = useBoolean(true);
|
||||
const cognifyPromise = useRef<Promise<void>>()
|
||||
|
||||
useEffect(() => {
|
||||
if (cognifyPromise.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
cognifyPromise.current = cognifyDataset(dataset)
|
||||
.then(() => {
|
||||
stopCognifyIndicator();
|
||||
});
|
||||
}, [stopCognifyIndicator, dataset]);
|
||||
|
||||
return (
|
||||
<Stack orientation="vertical" gap="6">
|
||||
<WizardHeading><Text light size="large">Step 3/3</Text> Cognify</WizardHeading>
|
||||
<Divider />
|
||||
|
||||
<Stack align="/center">
|
||||
<CognifyLoadingIndicator isLoading={isCognifyRunning} />
|
||||
</Stack>
|
||||
|
||||
<Text align="center">
|
||||
Cognee decomposes your data into facts and connects them in relevant clusters,
|
||||
so that you can navigate your knowledge better.
|
||||
</Text>
|
||||
<CTAButton disabled={isCognifyRunning} onClick={onNext}>
|
||||
<Stack gap="2" orientation="horizontal" align="center/center">
|
||||
<Text>Explore data</Text>
|
||||
</Stack>
|
||||
</CTAButton>
|
||||
</Stack>
|
||||
)
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
export { default } from './CognifyStep';
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
import { Stack, Text } from 'ohmy-ui';
|
||||
import { Divider } from '@/ui/Layout';
|
||||
import Settings from '@/ui/Partials/SettingsModal/Settings';
|
||||
import { WizardContent, WizardHeading } from '@/ui/Partials/Wizard';
|
||||
|
||||
interface ConfigStepProps {
|
||||
onNext: () => void;
|
||||
}
|
||||
|
||||
export default function ConfigStep({ onNext }: ConfigStepProps) {
|
||||
return (
|
||||
<Stack orientation="vertical" gap="6">
|
||||
<WizardHeading><Text light size="large">Step 1/3</Text> Basic configuration</WizardHeading>
|
||||
<Divider />
|
||||
<Text align="center">
|
||||
Cognee helps you process your data and create a mind-like structure you can explore.
|
||||
To get started you need an OpenAI API key.
|
||||
</Text>
|
||||
<Settings onDone={onNext} submitButtonText="Next" />
|
||||
</Stack>
|
||||
)
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
export { default } from './ConfigStep';
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
import { Explorer } from '@/ui/Partials';
|
||||
import { Spacer } from 'ohmy-ui';
|
||||
|
||||
interface ExploreStepProps {
|
||||
dataset: { name: string };
|
||||
}
|
||||
|
||||
export default function ExploreStep({ dataset }: ExploreStepProps) {
|
||||
return (
|
||||
<Spacer horizontal="3">
|
||||
<Explorer dataset={dataset} />
|
||||
</Spacer>
|
||||
)
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
export { default } from './ExploreStep';
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
.main {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
flex-direction: column;
|
||||
padding: 0;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.wizardContainer {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
padding: 24px 0;
|
||||
}
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
import { useState } from 'react';
|
||||
import { CloseIcon, GhostButton, Spacer, Stack, useBoolean } from 'ohmy-ui';
|
||||
import { TextLogo } from '@/ui/App';
|
||||
import { SettingsIcon } from '@/ui/Icons';
|
||||
import { Footer, SettingsModal } from '@/ui/Partials';
|
||||
import ConfigStep from './ConfigStep';
|
||||
import AddStep from './AddStep';
|
||||
import CognifyStep from './CognifyStep';
|
||||
import ExploreStep from './ExploreStep';
|
||||
import { WizardContent } from '@/ui/Partials/Wizard';
|
||||
|
||||
import styles from './WizardPage.module.css';
|
||||
import { Divider } from '@/ui/Layout';
|
||||
import { useSearchParams } from 'next/navigation';
|
||||
|
||||
interface WizardPageProps {
|
||||
onFinish: () => void;
|
||||
}
|
||||
|
||||
export default function WizardPage({
|
||||
onFinish,
|
||||
}: WizardPageProps) {
|
||||
const searchParams = useSearchParams()
|
||||
const presetWizardStep = searchParams.get('step') as 'config';
|
||||
const [wizardStep, setWizardStep] = useState<'config' | 'add' | 'cognify' | 'explore'>(presetWizardStep || 'config');
|
||||
const {
|
||||
value: isSettingsModalOpen,
|
||||
setTrue: openSettingsModal,
|
||||
setFalse: closeSettingsModal,
|
||||
} = useBoolean(false);
|
||||
|
||||
const dataset = { name: 'main' };
|
||||
|
||||
return (
|
||||
<main className={styles.main}>
|
||||
<Spacer inset vertical="2" horizontal="2">
|
||||
<Stack orientation="horizontal" gap="between" align="center">
|
||||
<TextLogo width={158} height={44} color="white" />
|
||||
{wizardStep === 'explore' && (
|
||||
<GhostButton hugContent onClick={onFinish}>
|
||||
<CloseIcon />
|
||||
</GhostButton>
|
||||
)}
|
||||
{wizardStep === 'add' && (
|
||||
<GhostButton hugContent onClick={openSettingsModal}>
|
||||
<SettingsIcon />
|
||||
</GhostButton>
|
||||
)}
|
||||
</Stack>
|
||||
</Spacer>
|
||||
<Divider />
|
||||
<SettingsModal isOpen={isSettingsModalOpen} onClose={closeSettingsModal} />
|
||||
<div className={styles.wizardContainer}>
|
||||
{wizardStep === 'config' && (
|
||||
<WizardContent>
|
||||
<ConfigStep onNext={() => setWizardStep('add')} />
|
||||
</WizardContent>
|
||||
)}
|
||||
|
||||
{wizardStep === 'add' && (
|
||||
<WizardContent>
|
||||
<AddStep onNext={() => setWizardStep('cognify')} />
|
||||
</WizardContent>
|
||||
)}
|
||||
|
||||
{wizardStep === 'cognify' && (
|
||||
<WizardContent>
|
||||
<CognifyStep dataset={dataset} onNext={() => setWizardStep('explore')} />
|
||||
</WizardContent>
|
||||
)}
|
||||
|
||||
{wizardStep === 'explore' && (
|
||||
<Spacer inset top="4" bottom="1" horizontal="4">
|
||||
<ExploreStep dataset={dataset} />
|
||||
</Spacer>
|
||||
)}
|
||||
</div>
|
||||
<Spacer inset horizontal="3" wrap>
|
||||
<Footer />
|
||||
</Spacer>
|
||||
</main>
|
||||
)
|
||||
}
|
||||
|
|
@ -1,18 +0,0 @@
|
|||
'use client';
|
||||
|
||||
import { Suspense, useCallback } from 'react';
|
||||
import WizardPage from './WizardPage';
|
||||
|
||||
export default function Page() {
|
||||
const finishWizard = useCallback(() => {
|
||||
window.location.href = '/';
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<Suspense>
|
||||
<WizardPage
|
||||
onFinish={finishWizard}
|
||||
/>
|
||||
</Suspense>
|
||||
);
|
||||
}
|
||||
28
cognee-frontend/src/middleware.ts
Normal file
28
cognee-frontend/src/middleware.ts
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
import { NextResponse, type NextRequest } from "next/server";
|
||||
import { auth0 } from "./modules/auth/auth0";
|
||||
|
||||
export async function middleware(request: NextRequest) {
|
||||
if (process.env.USE_AUTH0_AUTHORIZATION?.toLowerCase() === "true") {
|
||||
if (request.nextUrl.pathname === "/auth/token") {
|
||||
return NextResponse.next();
|
||||
}
|
||||
|
||||
const response: NextResponse = await auth0.middleware(request);
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
return NextResponse.next();
|
||||
}
|
||||
|
||||
export const config = {
|
||||
matcher: [
|
||||
/*
|
||||
* Match all request paths except for the ones starting with:
|
||||
* - _next/static (static files)
|
||||
* - _next/image (image optimization files)
|
||||
* - favicon.ico, sitemap.xml, robots.txt (metadata files)
|
||||
*/
|
||||
"/((?!_next/static|_next/image|favicon.ico|sitemap.xml|robots.txt).*)",
|
||||
],
|
||||
};
|
||||
8
cognee-frontend/src/modules/auth/auth0.ts
Normal file
8
cognee-frontend/src/modules/auth/auth0.ts
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
import { Auth0Client } from "@auth0/nextjs-auth0/server";
|
||||
|
||||
export const auth0 = new Auth0Client({
|
||||
authorizationParameters: {
|
||||
scope: "openid profile email",
|
||||
audience: "cognee:api",
|
||||
},
|
||||
});
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
import { fetch } from '@/utils';
|
||||
|
||||
export default function cognifyDataset(dataset: { id?: string, name?: string }) {
|
||||
export default function cognifyDataset(dataset: { id?: string, name?: string }, onUpdate = (data: []) => {}) {
|
||||
return fetch('/v1/cognify', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
|
|
@ -9,5 +9,35 @@ export default function cognifyDataset(dataset: { id?: string, name?: string })
|
|||
body: JSON.stringify({
|
||||
datasets: [dataset.id || dataset.name],
|
||||
}),
|
||||
}).then((response) => response.json());
|
||||
})
|
||||
.then((response) => response.json())
|
||||
.then((data) => {
|
||||
const websocket = new WebSocket(`ws://localhost:8000/api/v1/cognify/subscribe/${data.pipeline_run_id}`);
|
||||
|
||||
websocket.onopen = () => {
|
||||
websocket.send(JSON.stringify({
|
||||
"Authorization": `Bearer ${localStorage.getItem("access_token")}`,
|
||||
}));
|
||||
};
|
||||
|
||||
let isCognifyDone = false;
|
||||
|
||||
websocket.onmessage = (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
onUpdate(data);
|
||||
|
||||
if (data.status === "PipelineRunCompleted") {
|
||||
isCognifyDone = true;
|
||||
websocket.close();
|
||||
}
|
||||
};
|
||||
|
||||
return new Promise(async (resolve) => {
|
||||
while (!isCognifyDone) {
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
}
|
||||
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
|
|||
6
cognee-frontend/src/modules/datasets/getDatasetGraph.ts
Normal file
6
cognee-frontend/src/modules/datasets/getDatasetGraph.ts
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
import { fetch } from '@/utils';
|
||||
|
||||
export default function getDatasetGraph(dataset: { id: string }) {
|
||||
return fetch(`/v1/datasets/${dataset.id}/graph`)
|
||||
.then((response) => response.json());
|
||||
}
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
import { useState } from 'react';
|
||||
import Link from 'next/link';
|
||||
import { Explorer } from '@/ui/Partials';
|
||||
import StatusIcon from './StatusIcon';
|
||||
import StatusIcon from '@/ui/elements/StatusIndicator';
|
||||
import { LoadingIndicator } from '@/ui/App';
|
||||
import { DropdownMenu, GhostButton, Stack, Text, CTAButton, useBoolean, Modal, Spacer } from "ohmy-ui";
|
||||
import styles from "./DatasetsView.module.css";
|
||||
|
|
|
|||
|
|
@ -1,15 +0,0 @@
|
|||
export default function StatusIcon({ status }: { status: 'DATASET_PROCESSING_COMPLETED' | string }) {
|
||||
const isSuccess = status === 'DATASET_PROCESSING_COMPLETED';
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
width: '16px',
|
||||
height: '16px',
|
||||
borderRadius: '4px',
|
||||
background: isSuccess ? '#53ff24' : '#ff5024',
|
||||
}}
|
||||
title={isSuccess ? 'Dataset cognified' : 'Cognify data in order to explore it'}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
|
@ -17,11 +17,6 @@ function useDatasets() {
|
|||
const fetchDatasetStatuses = useCallback((datasets: Dataset[]) => {
|
||||
fetch(
|
||||
`/v1/datasets/status?dataset=${datasets.map(d => d.id).join('&dataset=')}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${localStorage.getItem('access_token')}`,
|
||||
},
|
||||
},
|
||||
)
|
||||
.then((response) => response.json())
|
||||
.then((statuses) => setDatasets(
|
||||
|
|
@ -42,7 +37,7 @@ function useDatasets() {
|
|||
|
||||
statusTimeout.current = setTimeout(() => {
|
||||
checkDatasetStatuses(datasets);
|
||||
}, 5000);
|
||||
}, 50000);
|
||||
}, [fetchDatasetStatuses]);
|
||||
|
||||
useEffect(() => {
|
||||
|
|
@ -73,20 +68,16 @@ function useDatasets() {
|
|||
}, []);
|
||||
|
||||
const fetchDatasets = useCallback(() => {
|
||||
fetch('/v1/datasets', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${localStorage.getItem('access_token')}`,
|
||||
},
|
||||
})
|
||||
return fetch('/v1/datasets')
|
||||
.then((response) => response.json())
|
||||
.then((datasets) => {
|
||||
setDatasets(datasets);
|
||||
|
||||
if (datasets.length > 0) {
|
||||
checkDatasetStatuses(datasets);
|
||||
} else {
|
||||
window.location.href = '/wizard';
|
||||
}
|
||||
|
||||
return datasets;
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error('Error fetching datasets:', error);
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
|
||||
.loadingIndicator {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
width: 1rem;
|
||||
height: 1rem;
|
||||
border-radius: 50%;
|
||||
border: 2px solid var(--global-color-primary);
|
||||
border: 0.18rem solid white;
|
||||
border-top-color: transparent;
|
||||
border-bottom-color: transparent;
|
||||
animation: spin 2s linear infinite;
|
||||
|
|
|
|||
7
cognee-frontend/src/ui/Icons/DeleteIcon.tsx
Normal file
7
cognee-frontend/src/ui/Icons/DeleteIcon.tsx
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
export default function DeleteIcon({ width = 12, height = 14, color = 'currentColor' }) {
|
||||
return (
|
||||
<svg width={width} height={height} viewBox="0 0 12 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3.625 1.87357H3.5C3.56875 1.87357 3.625 1.81732 3.625 1.74857V1.87357H8.375V1.74857C8.375 1.81732 8.43125 1.87357 8.5 1.87357H8.375V2.99857H9.5V1.74857C9.5 1.197 9.05156 0.748566 8.5 0.748566H3.5C2.94844 0.748566 2.5 1.197 2.5 1.74857V2.99857H3.625V1.87357ZM11.5 2.99857H0.5C0.223438 2.99857 0 3.222 0 3.49857V3.99857C0 4.06732 0.05625 4.12357 0.125 4.12357H1.06875L1.45469 12.2954C1.47969 12.8283 1.92031 13.2486 2.45313 13.2486H9.54688C10.0813 13.2486 10.5203 12.8298 10.5453 12.2954L10.9313 4.12357H11.875C11.9438 4.12357 12 4.06732 12 3.99857V3.49857C12 3.222 11.7766 2.99857 11.5 2.99857ZM9.42656 12.1236H2.57344L2.19531 4.12357H9.80469L9.42656 12.1236Z" fill={color} />
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
|
|
@ -3,7 +3,7 @@ export default function GitHubIcon({ width = 24, height = 24, color = 'currentCo
|
|||
<svg xmlns="http://www.w3.org/2000/svg" width={width} height={height} viewBox="0 0 28 28" className={className}>
|
||||
<g transform="translate(-1477 -38)">
|
||||
<rect width="28" height="28" transform="translate(1477 38)" fill={color} opacity="0" />
|
||||
<path d="M16.142,1.9A13.854,13.854,0,0,0,11.78,28.966c.641.128,1.155-.577,1.155-1.154v-1.86c-3.848.834-5.067-1.86-5.067-1.86a4.169,4.169,0,0,0-1.411-2.052c-1.283-.9.064-.834.064-.834a2.758,2.758,0,0,1,2.117,1.283c1.09,1.86,3.528,1.668,4.3,1.347a3.463,3.463,0,0,1,.321-1.86c-4.361-.77-6.735-3.335-6.735-6.8A6.863,6.863,0,0,1,8.381,10.3a3.977,3.977,0,0,1,.192-4.1,5.708,5.708,0,0,1,4.1,1.86,9.685,9.685,0,0,1,3.463-.513,10.968,10.968,0,0,1,3.463.449,5.773,5.773,0,0,1,4.1-1.8,4.169,4.169,0,0,1,.257,4.1,6.863,6.863,0,0,1,1.8,4.875c0,3.463-2.373,6.029-6.735,6.8a3.464,3.464,0,0,1,.321,1.86v3.977a1.155,1.155,0,0,0,1.219,1.155A13.918,13.918,0,0,0,16.142,1.9Z" transform="translate(1474.913 36.102)" fill="#fdfdfd"/>
|
||||
<path d="M16.142,1.9A13.854,13.854,0,0,0,11.78,28.966c.641.128,1.155-.577,1.155-1.154v-1.86c-3.848.834-5.067-1.86-5.067-1.86a4.169,4.169,0,0,0-1.411-2.052c-1.283-.9.064-.834.064-.834a2.758,2.758,0,0,1,2.117,1.283c1.09,1.86,3.528,1.668,4.3,1.347a3.463,3.463,0,0,1,.321-1.86c-4.361-.77-6.735-3.335-6.735-6.8A6.863,6.863,0,0,1,8.381,10.3a3.977,3.977,0,0,1,.192-4.1,5.708,5.708,0,0,1,4.1,1.86,9.685,9.685,0,0,1,3.463-.513,10.968,10.968,0,0,1,3.463.449,5.773,5.773,0,0,1,4.1-1.8,4.169,4.169,0,0,1,.257,4.1,6.863,6.863,0,0,1,1.8,4.875c0,3.463-2.373,6.029-6.735,6.8a3.464,3.464,0,0,1,.321,1.86v3.977a1.155,1.155,0,0,0,1.219,1.155A13.918,13.918,0,0,0,16.142,1.9Z" transform="translate(1474.913 36.102)" fill={color}/>
|
||||
</g>
|
||||
</svg>
|
||||
);
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
export { default as DeleteIcon } from './DeleteIcon';
|
||||
export { default as GithubIcon } from './GitHubIcon';
|
||||
export { default as DiscordIcon } from './DiscordIcon';
|
||||
export { default as SettingsIcon } from './SettingsIcon';
|
||||
|
|
|
|||
69
cognee-frontend/src/ui/Partials/FeedbackForm.tsx
Normal file
69
cognee-frontend/src/ui/Partials/FeedbackForm.tsx
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { LoadingIndicator } from "@/ui/App";
|
||||
import { fetch, useBoolean } from "@/utils";
|
||||
import { CTAButton, TextArea } from "@/ui/elements";
|
||||
|
||||
interface SignInFormPayload extends HTMLFormElement {
|
||||
feedback: HTMLTextAreaElement;
|
||||
}
|
||||
|
||||
interface FeedbackFormProps {
|
||||
onSuccess: () => void;
|
||||
}
|
||||
|
||||
export default function FeedbackForm({ onSuccess }: FeedbackFormProps) {
|
||||
const {
|
||||
value: isSubmittingFeedback,
|
||||
setTrue: disableFeedbackSubmit,
|
||||
setFalse: enableFeedbackSubmit,
|
||||
} = useBoolean(false);
|
||||
|
||||
const [feedbackError, setFeedbackError] = useState<string | null>(null);
|
||||
|
||||
const signIn = (event: React.FormEvent<SignInFormPayload>) => {
|
||||
event.preventDefault();
|
||||
const formElements = event.currentTarget;
|
||||
|
||||
setFeedbackError(null);
|
||||
disableFeedbackSubmit();
|
||||
|
||||
fetch("/v1/crewai/feedback", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
feedback: formElements.feedback.value,
|
||||
}),
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(() => {
|
||||
onSuccess();
|
||||
formElements.feedback.value = "";
|
||||
})
|
||||
.catch(error => setFeedbackError(error.detail))
|
||||
.finally(() => enableFeedbackSubmit());
|
||||
};
|
||||
|
||||
return (
|
||||
<form onSubmit={signIn} className="flex flex-col gap-2">
|
||||
<div className="flex flex-col gap-2">
|
||||
<div className="mb-4">
|
||||
<label className="block text-white" htmlFor="feedback">Feedback on agent's reasoning</label>
|
||||
<TextArea id="feedback" name="feedback" type="text" placeholder="Your feedback" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<CTAButton type="submit">
|
||||
<span>Submit feedback</span>
|
||||
{isSubmittingFeedback && <LoadingIndicator />}
|
||||
</CTAButton>
|
||||
|
||||
{feedbackError && (
|
||||
<span className="text-s text-white">{feedbackError}</span>
|
||||
)}
|
||||
</form>
|
||||
)
|
||||
}
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
.footer {
|
||||
padding: 24px 0;
|
||||
}
|
||||
|
||||
.leftSide {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.rightSide {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
gap: 24px;
|
||||
}
|
||||
|
|
@ -1,25 +1,25 @@
|
|||
import Link from 'next/link';
|
||||
import { Stack } from 'ohmy-ui';
|
||||
import { DiscordIcon, GithubIcon } from '@/ui/Icons';
|
||||
// import { TextLogo } from '@/ui/App';
|
||||
import styles from './Footer.module.css';
|
||||
import Link from "next/link";
|
||||
import { DiscordIcon, GithubIcon } from "@/ui/Icons";
|
||||
|
||||
export default function Footer() {
|
||||
interface FooterProps {
|
||||
children?: React.ReactNode;
|
||||
}
|
||||
|
||||
export default function Footer({ children }: FooterProps) {
|
||||
return (
|
||||
<footer className={styles.footer}>
|
||||
<Stack orientation="horizontal" gap="between">
|
||||
<div className={styles.leftSide}>
|
||||
{/* <TextLogo width={92} height={24} /> */}
|
||||
</div>
|
||||
<div className={styles.rightSide}>
|
||||
<Link target="_blank" href="https://github.com/topoteretes/cognee">
|
||||
<GithubIcon color="white" />
|
||||
</Link>
|
||||
<Link target="_blank" href="https://discord.gg/m63hxKsp4p">
|
||||
<DiscordIcon color="white" />
|
||||
</Link>
|
||||
</div>
|
||||
</Stack>
|
||||
<footer className="pt-6 pb-6 flex flex-row items-center justify-between">
|
||||
<div>
|
||||
{children}
|
||||
</div>
|
||||
|
||||
<div className="flex flex-row gap-4">
|
||||
<Link target="_blank" href="https://github.com/topoteretes/cognee">
|
||||
<GithubIcon color="black" />
|
||||
</Link>
|
||||
<Link target="_blank" href="https://discord.gg/m63hxKsp4p">
|
||||
<DiscordIcon color="black" />
|
||||
</Link>
|
||||
</div>
|
||||
</footer>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,19 +1,9 @@
|
|||
"use client";
|
||||
|
||||
import {
|
||||
CTAButton,
|
||||
FormGroup,
|
||||
FormInput,
|
||||
FormLabel,
|
||||
Input,
|
||||
Spacer,
|
||||
Stack,
|
||||
Text,
|
||||
useBoolean,
|
||||
} from 'ohmy-ui';
|
||||
import { LoadingIndicator } from '@/ui/App';
|
||||
import { fetch, handleServerErrors } from '@/utils';
|
||||
import { useState } from 'react';
|
||||
import { useState } from "react";
|
||||
import { LoadingIndicator } from "@/ui/App";
|
||||
import { fetch, useBoolean } from "@/utils";
|
||||
import { CTAButton, Input } from "@/ui/elements";
|
||||
|
||||
interface SignInFormPayload extends HTMLFormElement {
|
||||
vectorDBUrl: HTMLInputElement;
|
||||
|
|
@ -22,10 +12,10 @@ interface SignInFormPayload extends HTMLFormElement {
|
|||
}
|
||||
|
||||
const errorsMap = {
|
||||
LOGIN_BAD_CREDENTIALS: 'Invalid username or password',
|
||||
LOGIN_BAD_CREDENTIALS: "Invalid username or password",
|
||||
};
|
||||
|
||||
export default function SignInForm({ onSignInSuccess = () => window.location.href = '/', submitButtonText = 'Sign in' }) {
|
||||
export default function SignInForm({ onSignInSuccess = () => window.location.href = "/", submitButtonText = "Sign in" }) {
|
||||
const {
|
||||
value: isSigningIn,
|
||||
setTrue: disableSignIn,
|
||||
|
|
@ -46,14 +36,11 @@ export default function SignInForm({ onSignInSuccess = () => window.location.hre
|
|||
setSignInError(null);
|
||||
disableSignIn();
|
||||
|
||||
fetch('/v1/auth/login', {
|
||||
method: 'POST',
|
||||
fetch("/v1/auth/login", {
|
||||
method: "POST",
|
||||
body: authCredentials,
|
||||
})
|
||||
.then(handleServerErrors)
|
||||
.then(response => response.json())
|
||||
.then((bearer) => {
|
||||
window.localStorage.setItem('access_token', bearer.access_token);
|
||||
.then(() => {
|
||||
onSignInSuccess();
|
||||
})
|
||||
.catch(error => setSignInError(errorsMap[error.detail as keyof typeof errorsMap]))
|
||||
|
|
@ -61,36 +48,26 @@ export default function SignInForm({ onSignInSuccess = () => window.location.hre
|
|||
};
|
||||
|
||||
return (
|
||||
<form onSubmit={signIn} style={{ width: '100%' }}>
|
||||
<Stack gap="4" orientation="vertical">
|
||||
<Stack gap="4" orientation="vertical">
|
||||
<FormGroup orientation="vertical" align="center/" gap="2">
|
||||
<FormLabel>Email:</FormLabel>
|
||||
<FormInput>
|
||||
<Input defaultValue="default_user@example.com" name="email" type="email" placeholder="Your email address" />
|
||||
</FormInput>
|
||||
</FormGroup>
|
||||
<FormGroup orientation="vertical" align="center/" gap="2">
|
||||
<FormLabel>Password:</FormLabel>
|
||||
<FormInput>
|
||||
<Input defaultValue="default_password" name="password" type="password" placeholder="Your password" />
|
||||
</FormInput>
|
||||
</FormGroup>
|
||||
</Stack>
|
||||
<form onSubmit={signIn} className="flex flex-col gap-2">
|
||||
<div className="flex flex-col gap-2">
|
||||
<div className="mb-4">
|
||||
<label className="block mb-2" htmlFor="email">Email</label>
|
||||
<Input id="email" defaultValue="default_user@example.com" name="email" type="email" placeholder="Your email address" />
|
||||
</div>
|
||||
<div className="mb-4">
|
||||
<label className="block mb-2" htmlFor="password">Password</label>
|
||||
<Input id="password" defaultValue="default_password" name="password" type="password" placeholder="Your password" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Spacer top="2">
|
||||
<CTAButton type="submit">
|
||||
<Stack gap="2" orientation="horizontal" align="/center">
|
||||
{submitButtonText}
|
||||
{isSigningIn && <LoadingIndicator />}
|
||||
</Stack>
|
||||
</CTAButton>
|
||||
</Spacer>
|
||||
<CTAButton type="submit">
|
||||
{submitButtonText}
|
||||
{isSigningIn && <LoadingIndicator />}
|
||||
</CTAButton>
|
||||
|
||||
{signInError && (
|
||||
<Text>{signInError}</Text>
|
||||
)}
|
||||
</Stack>
|
||||
{signInError && (
|
||||
<span className="text-s text-white">{signInError}</span>
|
||||
)}
|
||||
</form>
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
export { default as Footer } from './Footer/Footer';
|
||||
export { default as SettingsModal } from './SettingsModal/SettingsModal';
|
||||
export { default as SearchView } from './SearchView/SearchView';
|
||||
export { default as IFrameView } from './IFrameView/IFrameView';
|
||||
export { default as Explorer } from './Explorer/Explorer';
|
||||
export { default as Footer } from "./Footer/Footer";
|
||||
export { default as SettingsModal } from "./SettingsModal/SettingsModal";
|
||||
export { default as SearchView } from "./SearchView/SearchView";
|
||||
export { default as IFrameView } from "./IFrameView/IFrameView";
|
||||
export { default as Explorer } from "./Explorer/Explorer";
|
||||
export { default as FeedbackForm } from "./FeedbackForm";
|
||||
|
|
|
|||
8
cognee-frontend/src/ui/elements/CTAButton.tsx
Normal file
8
cognee-frontend/src/ui/elements/CTAButton.tsx
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
import classNames from 'classnames';
|
||||
import { ButtonHTMLAttributes } from "react";
|
||||
|
||||
export default function CTAButton({ children, className, ...props }: ButtonHTMLAttributes<HTMLButtonElement>) {
|
||||
return (
|
||||
<button className={classNames("flex flex-row justify-center items-center gap-2 cursor-pointer rounded-md bg-indigo-600 px-3 py-2 text-sm font-semibold text-white shadow-xs hover:bg-indigo-500 focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-600", className)} {...props}>{children}</button>
|
||||
);
|
||||
}
|
||||
8
cognee-frontend/src/ui/elements/Input.tsx
Normal file
8
cognee-frontend/src/ui/elements/Input.tsx
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
import classNames from "classnames"
|
||||
import { InputHTMLAttributes } from "react"
|
||||
|
||||
export default function Input({ className, ...props }: InputHTMLAttributes<HTMLInputElement>) {
|
||||
return (
|
||||
<input className={classNames("block w-full rounded-md bg-white px-3 py-1.5 text-base text-gray-900 outline-1 -outline-offset-1 outline-gray-300 placeholder:text-gray-400 focus:outline-2 focus:-outline-offset-2 focus:outline-indigo-600 sm:text-sm/6", className)} {...props} />
|
||||
)
|
||||
}
|
||||
8
cognee-frontend/src/ui/elements/NeutralButton.tsx
Normal file
8
cognee-frontend/src/ui/elements/NeutralButton.tsx
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
import classNames from 'classnames';
|
||||
import { ButtonHTMLAttributes } from "react";
|
||||
|
||||
export default function CTAButton({ children, className, ...props }: ButtonHTMLAttributes<HTMLButtonElement>) {
|
||||
return (
|
||||
<button className={classNames("flex flex-row justify-center items-center gap-2 cursor-pointer rounded-md bg-transparent px-3 py-2 text-sm font-semibold text-white shadow-xs border-1 border-white hover:bg-gray-400 focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-600", className)} {...props}>{children}</button>
|
||||
);
|
||||
}
|
||||
10
cognee-frontend/src/ui/elements/Select.tsx
Normal file
10
cognee-frontend/src/ui/elements/Select.tsx
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
import classNames from "classnames";
|
||||
import { SelectHTMLAttributes } from "react";
|
||||
|
||||
export default function Select({ children, className, ...props }: SelectHTMLAttributes<HTMLSelectElement>) {
|
||||
return (
|
||||
<select className={classNames("block w-full appearance-none rounded-md bg-white py-1.5 pr-8 pl-3 text-base text-gray-900 outline-1 -outline-offset-1 outline-gray-300 focus:outline-2 focus:-outline-offset-2 focus:outline-indigo-600 sm:text-sm/6", className)} {...props}>
|
||||
{children}
|
||||
</select>
|
||||
);
|
||||
}
|
||||
22
cognee-frontend/src/ui/elements/StatusIndicator.tsx
Normal file
22
cognee-frontend/src/ui/elements/StatusIndicator.tsx
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
export default function StatusIndicator({ status }: { status: "DATASET_PROCESSING_COMPLETED" | string }) {
|
||||
const statusColor = {
|
||||
DATASET_PROCESSING_STARTED: "#ffd500",
|
||||
DATASET_PROCESSING_INITIATED: "#ffd500",
|
||||
DATASET_PROCESSING_COMPLETED: "#53ff24",
|
||||
DATASET_PROCESSING_ERRORED: "#ff5024",
|
||||
};
|
||||
|
||||
const isSuccess = status === "DATASET_PROCESSING_COMPLETED";
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
width: "16px",
|
||||
height: "16px",
|
||||
borderRadius: "4px",
|
||||
background: statusColor[status as keyof typeof statusColor],
|
||||
}}
|
||||
title={isSuccess ? "Dataset cognified" : "Cognify data in order to explore it"}
|
||||
/>
|
||||
);
|
||||
}
|
||||
7
cognee-frontend/src/ui/elements/TextArea.tsx
Normal file
7
cognee-frontend/src/ui/elements/TextArea.tsx
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
import { InputHTMLAttributes } from "react"
|
||||
|
||||
export default function TextArea(props: InputHTMLAttributes<HTMLTextAreaElement>) {
|
||||
return (
|
||||
<textarea className="block w-full mt-2 rounded-md bg-white px-3 py-1.5 text-base text-gray-900 outline-1 -outline-offset-1 outline-gray-300 placeholder:text-gray-400 focus:outline-2 focus:-outline-offset-2 focus:outline-indigo-600 sm:text-sm/6" {...props} />
|
||||
)
|
||||
}
|
||||
6
cognee-frontend/src/ui/elements/index.ts
Normal file
6
cognee-frontend/src/ui/elements/index.ts
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
export { default as Input } from "./Input";
|
||||
export { default as Select } from "./Select";
|
||||
export { default as TextArea } from "./TextArea";
|
||||
export { default as CTAButton } from "./CTAButton";
|
||||
export { default as NeutralButton } from "./NeutralButton";
|
||||
export { default as StatusIndicator } from "./StatusIndicator";
|
||||
|
|
@ -1,12 +1,35 @@
|
|||
import handleServerErrors from './handleServerErrors';
|
||||
import handleServerErrors from "./handleServerErrors";
|
||||
|
||||
export default function fetch(url: string, options: RequestInit = {}): Promise<Response> {
|
||||
return global.fetch('http://localhost:8000/api' + url, {
|
||||
let numberOfRetries = 0;
|
||||
|
||||
export default async function fetch(url: string, options: RequestInit = {}): Promise<Response> {
|
||||
function retry(lastError: Response) {
|
||||
if (numberOfRetries > 1) {
|
||||
return Promise.reject(lastError);
|
||||
}
|
||||
|
||||
numberOfRetries += 1;
|
||||
|
||||
return window.fetch("/auth/token")
|
||||
.then(() => {
|
||||
return fetch(url, options);
|
||||
});
|
||||
}
|
||||
|
||||
return global.fetch("http://localhost:8000/api" + url, {
|
||||
...options,
|
||||
headers: {
|
||||
...options.headers,
|
||||
'Authorization': `Bearer ${localStorage.getItem('access_token')}`,
|
||||
},
|
||||
credentials: "include",
|
||||
})
|
||||
.then(handleServerErrors);
|
||||
.then((response) => handleServerErrors(response, retry))
|
||||
.then((response) => {
|
||||
numberOfRetries = 0;
|
||||
|
||||
return response;
|
||||
})
|
||||
.catch((error) => {
|
||||
if (error.status === 401) {
|
||||
return retry(error);
|
||||
}
|
||||
return Promise.reject(error);
|
||||
});
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,8 +1,13 @@
|
|||
export default function handleServerErrors(response: Response): Promise<Response> {
|
||||
import { redirect } from "next/navigation";
|
||||
|
||||
export default function handleServerErrors(response: Response, retry?: (response: Response) => Promise<Response>): Promise<Response> {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (response.status === 401) {
|
||||
window.location.href = '/auth';
|
||||
return;
|
||||
if (retry) {
|
||||
return retry(response);
|
||||
} else {
|
||||
return redirect("/auth");
|
||||
}
|
||||
}
|
||||
if (!response.ok) {
|
||||
return response.json().then(error => reject(error));
|
||||
|
|
|
|||
|
|
@ -1,2 +1,3 @@
|
|||
export { default as fetch } from './fetch';
|
||||
export { default as handleServerErrors } from './handleServerErrors';
|
||||
export { default as fetch } from "./fetch";
|
||||
export { default as handleServerErrors } from "./handleServerErrors";
|
||||
export { default as useBoolean } from "./useBoolean";
|
||||
|
|
|
|||
14
cognee-frontend/src/utils/useBoolean.ts
Normal file
14
cognee-frontend/src/utils/useBoolean.ts
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
import { useState } from "react";
|
||||
|
||||
export default function useBoolean(initialValue: boolean) {
|
||||
const [value, setValue] = useState(initialValue);
|
||||
|
||||
const setTrue = () => setValue(true);
|
||||
const setFalse = () => setValue(false);
|
||||
|
||||
return {
|
||||
value,
|
||||
setTrue,
|
||||
setFalse,
|
||||
};
|
||||
}
|
||||
|
|
@ -1,6 +1,10 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"lib": ["dom", "dom.iterable", "esnext"],
|
||||
"lib": [
|
||||
"dom",
|
||||
"dom.iterable",
|
||||
"esnext"
|
||||
],
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"strict": true,
|
||||
|
|
@ -18,9 +22,19 @@
|
|||
}
|
||||
],
|
||||
"paths": {
|
||||
"@/*": ["./src/*"]
|
||||
}
|
||||
"@/*": [
|
||||
"./src/*"
|
||||
]
|
||||
},
|
||||
"target": "ES2017"
|
||||
},
|
||||
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
|
||||
"exclude": ["node_modules"]
|
||||
"include": [
|
||||
"next-env.d.ts",
|
||||
"**/*.ts",
|
||||
"**/*.tsx",
|
||||
".next/types/**/*.ts"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
|
|
|
|||
41
cognee-frontend/types/d3-force-3d.d.ts
vendored
Normal file
41
cognee-frontend/types/d3-force-3d.d.ts
vendored
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
declare module "d3-force-3d" {
|
||||
// Import types from d3-force if needed
|
||||
import {
|
||||
SimulationNodeDatum,
|
||||
SimulationLinkDatum,
|
||||
Force,
|
||||
Simulation,
|
||||
} from "d3-force";
|
||||
|
||||
export interface SimulationNodeDatum3D extends SimulationNodeDatum {
|
||||
x: number;
|
||||
y: number;
|
||||
z: number;
|
||||
vx: number;
|
||||
vy: number;
|
||||
vz: number;
|
||||
fx?: number | null;
|
||||
fy?: number | null;
|
||||
fz?: number | null;
|
||||
}
|
||||
|
||||
export function forceSimulation<NodeDatum extends SimulationNodeDatum3D>(
|
||||
nodes?: NodeDatum[]
|
||||
): Simulation<NodeDatum, undefined>;
|
||||
|
||||
export function forceCenter(x: number, y: number, z: number): Force<SimulationNodeDatum3D, any>;
|
||||
|
||||
export function forceManyBody(): Force<SimulationNodeDatum3D, any>;
|
||||
|
||||
export function forceLink<NodeDatum extends SimulationNodeDatum3D, Links extends SimulationLinkDatum<NodeDatum>[] = SimulationLinkDatum<NodeDatum>[]>(
|
||||
links?: Links
|
||||
): Force<NodeDatum, SimulationLinkDatum<NodeDatum>>;
|
||||
|
||||
export function forceCollide(radius?: number): Force<SimulationNodeDatum3D, any>;
|
||||
|
||||
export function forceRadial(radius: number, x?: number, y?: number, z?: number): Force<SimulationNodeDatum3D, any>;
|
||||
|
||||
export function forceX(x?: number): Force<SimulationNodeDatum3D, any>;
|
||||
export function forceY(y?: number): Force<SimulationNodeDatum3D, any>;
|
||||
export function forceZ(z?: number): Force<SimulationNodeDatum3D, any>;
|
||||
}
|
||||
|
|
@ -1,12 +1,19 @@
|
|||
"""FastAPI server for the Cognee API."""
|
||||
|
||||
import os
|
||||
|
||||
import uvicorn
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
import sentry_sdk
|
||||
from traceback import format_exc
|
||||
from fastapi import Request
|
||||
from fastapi import FastAPI, status
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
from fastapi.responses import JSONResponse, Response
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
from cognee.api.v1.permissions.routers import get_permissions_router
|
||||
from cognee.api.v1.settings.routers import get_settings_router
|
||||
from cognee.api.v1.datasets.routers import get_datasets_router
|
||||
|
|
@ -15,11 +22,9 @@ from cognee.api.v1.search.routers import get_search_router
|
|||
from cognee.api.v1.add.routers import get_add_router
|
||||
from cognee.api.v1.delete.routers import get_delete_router
|
||||
from cognee.api.v1.responses.routers import get_responses_router
|
||||
from fastapi import Request
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
from cognee.api.v1.crewai.routers import get_crewai_router
|
||||
|
||||
from cognee.exceptions import CogneeApiError
|
||||
from traceback import format_exc
|
||||
from cognee.api.v1.users.routers import (
|
||||
get_auth_router,
|
||||
get_register_router,
|
||||
|
|
@ -28,7 +33,6 @@ from cognee.api.v1.users.routers import (
|
|||
get_users_router,
|
||||
get_visualize_router,
|
||||
)
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
logger = get_logger()
|
||||
|
||||
|
|
@ -46,8 +50,9 @@ app_environment = os.getenv("ENV", "prod")
|
|||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
# from cognee.modules.data.deletion import prune_system, prune_data
|
||||
|
||||
# await prune_data()
|
||||
# await prune_system(metadata = True)
|
||||
# await prune_system(metadata=True)
|
||||
# if app_environment == "local" or app_environment == "dev":
|
||||
from cognee.infrastructure.databases.relational import get_relational_engine
|
||||
|
||||
|
|
@ -63,9 +68,10 @@ async def lifespan(app: FastAPI):
|
|||
|
||||
app = FastAPI(debug=app_environment != "prod", lifespan=lifespan)
|
||||
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"],
|
||||
allow_origins=["http://localhost:3000", "http://localhost:8000", "https://cognee.eu.auth0.com"],
|
||||
allow_credentials=True,
|
||||
allow_methods=["OPTIONS", "GET", "POST", "DELETE"],
|
||||
allow_headers=["*"],
|
||||
|
|
@ -170,6 +176,8 @@ app.include_router(get_delete_router(), prefix="/api/v1/delete", tags=["delete"]
|
|||
|
||||
app.include_router(get_responses_router(), prefix="/api/v1/responses", tags=["responses"])
|
||||
|
||||
app.include_router(get_crewai_router(), prefix="/api/v1/crewai", tags=["crewai"])
|
||||
|
||||
codegraph_routes = get_code_pipeline_router()
|
||||
if codegraph_routes:
|
||||
app.include_router(codegraph_routes, prefix="/api/v1/code-pipeline", tags=["code-pipeline"])
|
||||
|
|
@ -185,7 +193,7 @@ def start_api_server(host: str = "0.0.0.0", port: int = 8000):
|
|||
try:
|
||||
logger.info("Starting server at %s:%s", host, port)
|
||||
|
||||
uvicorn.run(app, host=host, port=port)
|
||||
uvicorn.run(app, host=host, port=port, loop="asyncio")
|
||||
except Exception as e:
|
||||
logger.exception(f"Failed to start server: {e}")
|
||||
# Here you could add any cleanup code or error recovery code.
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
from uuid import UUID
|
||||
from typing import Union, BinaryIO, List, Optional
|
||||
|
||||
from cognee.modules.pipelines import Task
|
||||
|
|
@ -11,9 +12,34 @@ async def add(
|
|||
dataset_name: str = "main_dataset",
|
||||
user: User = None,
|
||||
node_set: Optional[List[str]] = None,
|
||||
vector_db_config: dict = None,
|
||||
graph_db_config: dict = None,
|
||||
dataset_id: UUID = None,
|
||||
):
|
||||
tasks = [Task(resolve_data_directories), Task(ingest_data, dataset_name, user, node_set)]
|
||||
tasks = [
|
||||
Task(resolve_data_directories),
|
||||
Task(ingest_data, dataset_name, user, node_set, dataset_id),
|
||||
]
|
||||
|
||||
await cognee_pipeline(
|
||||
tasks=tasks, datasets=dataset_name, data=data, user=user, pipeline_name="add_pipeline"
|
||||
)
|
||||
pipeline_run_info = None
|
||||
|
||||
data_packets = {dataset_name: []}
|
||||
|
||||
async for run_info in cognee_pipeline(
|
||||
tasks=tasks,
|
||||
datasets=dataset_name,
|
||||
data=data,
|
||||
user=user,
|
||||
pipeline_name="add_pipeline",
|
||||
vector_db_config=vector_db_config,
|
||||
graph_db_config=graph_db_config,
|
||||
):
|
||||
if run_info.status == "PipelineRunYield":
|
||||
for data_yielded in run_info.payload:
|
||||
data_packets[dataset_name].append(data_yielded.id)
|
||||
pipeline_run_info = run_info
|
||||
|
||||
if hasattr(pipeline_run_info, "packets"):
|
||||
pipeline_run_info.packets = data_packets
|
||||
|
||||
return pipeline_run_info
|
||||
|
|
|
|||
|
|
@ -1,13 +1,12 @@
|
|||
from uuid import UUID
|
||||
from fastapi import Form, UploadFile, Depends
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi import APIRouter
|
||||
from typing import List, Optional
|
||||
import subprocess
|
||||
from cognee.modules.data.methods import get_dataset
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
import requests
|
||||
import subprocess
|
||||
from uuid import UUID
|
||||
from typing import List, Optional
|
||||
from fastapi import APIRouter
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi import Form, UploadFile, Depends
|
||||
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
from cognee.modules.users.models import User
|
||||
from cognee.modules.users.methods import get_authenticated_user
|
||||
|
||||
|
|
@ -17,11 +16,11 @@ logger = get_logger()
|
|||
def get_add_router() -> APIRouter:
|
||||
router = APIRouter()
|
||||
|
||||
@router.post("/", response_model=None)
|
||||
@router.post("/", response_model=dict)
|
||||
async def add(
|
||||
data: List[UploadFile],
|
||||
datasetName: str = Form(),
|
||||
datasetId: Optional[UUID] = Form(default=None),
|
||||
datasetName: Optional[str] = Form(default=None),
|
||||
user: User = Depends(get_authenticated_user),
|
||||
):
|
||||
"""This endpoint is responsible for adding data to the graph."""
|
||||
|
|
@ -30,19 +29,13 @@ def get_add_router() -> APIRouter:
|
|||
if not datasetId and not datasetName:
|
||||
raise ValueError("Either datasetId or datasetName must be provided.")
|
||||
|
||||
if datasetId and not datasetName:
|
||||
dataset = await get_dataset(user_id=user.id, dataset_id=datasetId)
|
||||
try:
|
||||
datasetName = dataset.name
|
||||
except IndexError:
|
||||
raise ValueError("No dataset found with the provided datasetName.")
|
||||
|
||||
try:
|
||||
if isinstance(data, str) and data.startswith("http"):
|
||||
if "github" in data:
|
||||
# Perform git clone if the URL is from GitHub
|
||||
repo_name = data.split("/")[-1].replace(".git", "")
|
||||
subprocess.run(["git", "clone", data, f".data/{repo_name}"], check=True)
|
||||
# TODO: Update add call with dataset info
|
||||
await cognee_add(
|
||||
"data://.data/",
|
||||
f"{repo_name}",
|
||||
|
|
@ -53,10 +46,12 @@ def get_add_router() -> APIRouter:
|
|||
response.raise_for_status()
|
||||
|
||||
file_data = await response.content()
|
||||
|
||||
# TODO: Update add call with dataset info
|
||||
return await cognee_add(file_data)
|
||||
else:
|
||||
await cognee_add(data, datasetName, user=user)
|
||||
add_run = await cognee_add(data, datasetName, user=user, dataset_id=datasetId)
|
||||
|
||||
return add_run.model_dump()
|
||||
except Exception as error:
|
||||
return JSONResponse(status_code=409, content={"error": str(error)})
|
||||
|
||||
|
|
|
|||
|
|
@ -1,23 +1,29 @@
|
|||
import asyncio
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
from typing import Union, Optional
|
||||
from pydantic import BaseModel
|
||||
from typing import Union, Optional, Any
|
||||
|
||||
from cognee.infrastructure.llm import get_max_chunk_tokens
|
||||
from cognee.modules.ontology.rdf_xml.OntologyResolver import OntologyResolver
|
||||
from cognee.modules.pipelines.tasks.task import Task
|
||||
from cognee.modules.users.models import User
|
||||
from cognee.modules.users.methods import get_default_user
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
from cognee.shared.data_models import KnowledgeGraph
|
||||
from cognee.infrastructure.llm import get_max_chunk_tokens
|
||||
from cognee.modules.users.models import User
|
||||
from cognee.modules.pipelines import cognee_pipeline
|
||||
from cognee.modules.pipelines.tasks.task import Task
|
||||
from cognee.modules.chunking.TextChunker import TextChunker
|
||||
from cognee.modules.ontology.rdf_xml.OntologyResolver import OntologyResolver
|
||||
from cognee.modules.pipelines.models.PipelineRunInfo import PipelineRunCompleted, PipelineRunStarted
|
||||
from cognee.modules.pipelines.queues.pipeline_run_info_queues import push_to_queue
|
||||
from cognee.modules.graph.operations import get_formatted_graph_data
|
||||
from cognee.modules.crewai.get_crewai_pipeline_run_id import get_crewai_pipeline_run_id
|
||||
|
||||
from cognee.tasks.documents import (
|
||||
check_permissions_on_documents,
|
||||
check_permissions_on_dataset,
|
||||
classify_documents,
|
||||
extract_chunks_from_documents,
|
||||
)
|
||||
from cognee.tasks.graph import extract_graph_from_data
|
||||
from cognee.tasks.storage import add_data_points
|
||||
from cognee.tasks.summarization import summarize_text
|
||||
from cognee.modules.chunking.TextChunker import TextChunker
|
||||
from cognee.modules.pipelines import cognee_pipeline
|
||||
|
||||
logger = get_logger("cognify")
|
||||
|
||||
|
|
@ -26,18 +32,126 @@ update_status_lock = asyncio.Lock()
|
|||
|
||||
async def cognify(
|
||||
datasets: Union[str, list[str]] = None,
|
||||
datapoints: dict[str, Any] = None,
|
||||
user: User = None,
|
||||
graph_model: BaseModel = KnowledgeGraph,
|
||||
chunker=TextChunker,
|
||||
chunk_size: int = None,
|
||||
ontology_file_path: Optional[str] = None,
|
||||
vector_db_config: dict = None,
|
||||
graph_db_config: dict = None,
|
||||
run_in_background: bool = False,
|
||||
is_stream_info_enabled: bool = False,
|
||||
pipeline_name: str = "cognify_pipeline",
|
||||
):
|
||||
tasks = await get_default_tasks(user, graph_model, chunker, chunk_size, ontology_file_path)
|
||||
|
||||
return await cognee_pipeline(
|
||||
tasks=tasks, datasets=datasets, user=user, pipeline_name="cognify_pipeline"
|
||||
tasks = await get_default_tasks(
|
||||
user=user,
|
||||
graph_model=graph_model,
|
||||
chunker=chunker,
|
||||
chunk_size=chunk_size,
|
||||
ontology_file_path=ontology_file_path,
|
||||
)
|
||||
|
||||
if not user:
|
||||
user = await get_default_user()
|
||||
|
||||
if run_in_background:
|
||||
return await run_cognify_as_background_process(
|
||||
tasks=tasks,
|
||||
user=user,
|
||||
datasets=datasets,
|
||||
datapoints=datapoints,
|
||||
pipeline_name=pipeline_name,
|
||||
vector_db_config=vector_db_config,
|
||||
graph_db_config=graph_db_config,
|
||||
)
|
||||
else:
|
||||
return await run_cognify_blocking(
|
||||
tasks=tasks,
|
||||
user=user,
|
||||
datasets=datasets,
|
||||
pipeline_name=pipeline_name,
|
||||
datapoints=datapoints,
|
||||
is_stream_info_enabled=is_stream_info_enabled,
|
||||
vector_db_config=vector_db_config,
|
||||
graph_db_config=graph_db_config,
|
||||
)
|
||||
|
||||
|
||||
async def run_cognify_blocking(
|
||||
tasks,
|
||||
user,
|
||||
datasets,
|
||||
pipeline_name,
|
||||
datapoints=None,
|
||||
is_stream_info_enabled=False,
|
||||
vector_db_config=None,
|
||||
graph_db_config=None,
|
||||
):
|
||||
pipeline_run_info = None
|
||||
|
||||
async for run_info in cognee_pipeline(
|
||||
tasks=tasks,
|
||||
datasets=datasets,
|
||||
user=user,
|
||||
pipeline_name=pipeline_name,
|
||||
datapoints=datapoints,
|
||||
vector_db_config=vector_db_config,
|
||||
graph_db_config=graph_db_config,
|
||||
):
|
||||
pipeline_run_info = run_info
|
||||
|
||||
if (
|
||||
is_stream_info_enabled
|
||||
and not isinstance(pipeline_run_info, PipelineRunStarted)
|
||||
and not isinstance(pipeline_run_info, PipelineRunCompleted)
|
||||
):
|
||||
pipeline_run_id = get_crewai_pipeline_run_id(user.id)
|
||||
pipeline_run_info.payload = await get_formatted_graph_data()
|
||||
push_to_queue(pipeline_run_id, pipeline_run_info)
|
||||
|
||||
return pipeline_run_info
|
||||
|
||||
|
||||
async def run_cognify_as_background_process(
|
||||
tasks,
|
||||
user,
|
||||
datasets,
|
||||
datapoints,
|
||||
pipeline_name,
|
||||
vector_db_config,
|
||||
graph_db_config,
|
||||
):
|
||||
pipeline_run = cognee_pipeline(
|
||||
tasks=tasks,
|
||||
user=user,
|
||||
datasets=datasets,
|
||||
pipeline_name=pipeline_name,
|
||||
datapoints=datapoints,
|
||||
vector_db_config=vector_db_config,
|
||||
graph_db_config=graph_db_config,
|
||||
)
|
||||
|
||||
pipeline_run_started_info = await anext(pipeline_run)
|
||||
|
||||
async def handle_rest_of_the_run():
|
||||
while True:
|
||||
try:
|
||||
pipeline_run_info = await anext(pipeline_run)
|
||||
|
||||
pipeline_run_info.payload = await get_formatted_graph_data()
|
||||
|
||||
push_to_queue(pipeline_run_info.pipeline_run_id, pipeline_run_info)
|
||||
|
||||
if isinstance(pipeline_run_info, PipelineRunCompleted):
|
||||
break
|
||||
except StopAsyncIteration:
|
||||
break
|
||||
|
||||
asyncio.create_task(handle_rest_of_the_run())
|
||||
|
||||
return pipeline_run_started_info
|
||||
|
||||
|
||||
async def get_default_tasks( # TODO: Find out a better way to do this (Boris's comment)
|
||||
user: User = None,
|
||||
|
|
@ -48,7 +162,7 @@ async def get_default_tasks( # TODO: Find out a better way to do this (Boris's
|
|||
) -> list[Task]:
|
||||
default_tasks = [
|
||||
Task(classify_documents),
|
||||
Task(check_permissions_on_documents, user=user, permissions=["write"]),
|
||||
Task(check_permissions_on_dataset, user=user, permissions=["write"]),
|
||||
Task(
|
||||
extract_chunks_from_documents,
|
||||
max_chunk_size=chunk_size or get_max_chunk_tokens(),
|
||||
|
|
|
|||
|
|
@ -1,15 +1,27 @@
|
|||
import asyncio
|
||||
from uuid import UUID
|
||||
from typing import List, Optional
|
||||
from pydantic import BaseModel
|
||||
from fastapi import Depends
|
||||
from fastapi import APIRouter
|
||||
from typing import List, Optional
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi import APIRouter, WebSocket, Depends, WebSocketDisconnect
|
||||
from starlette.status import WS_1000_NORMAL_CLOSURE, WS_1008_POLICY_VIOLATION
|
||||
|
||||
from cognee.modules.graph.utils import deduplicate_nodes_and_edges, get_graph_from_model
|
||||
from cognee.modules.users.models import User
|
||||
from cognee.modules.users.methods import get_authenticated_user
|
||||
from cognee.shared.data_models import KnowledgeGraph
|
||||
from cognee.modules.users.methods import get_authenticated_user
|
||||
from cognee.modules.pipelines.models.PipelineRunInfo import PipelineRunCompleted, PipelineRunInfo
|
||||
from cognee.modules.pipelines.queues.pipeline_run_info_queues import (
|
||||
get_from_queue,
|
||||
initialize_queue,
|
||||
remove_queue,
|
||||
)
|
||||
|
||||
|
||||
class CognifyPayloadDTO(BaseModel):
|
||||
datasets: List[str]
|
||||
dataset_ids: Optional[List[UUID]] = None
|
||||
graph_model: Optional[BaseModel] = KnowledgeGraph
|
||||
|
||||
|
||||
|
|
@ -22,8 +34,109 @@ def get_cognify_router() -> APIRouter:
|
|||
from cognee.api.v1.cognify import cognify as cognee_cognify
|
||||
|
||||
try:
|
||||
await cognee_cognify(payload.datasets, user, payload.graph_model)
|
||||
# Send dataset UUIDs if they are given, if not send dataset names
|
||||
datasets = payload.dataset_ids if payload.dataset_ids else payload.datasets
|
||||
cognify_run = await cognee_cognify(
|
||||
datasets, user, payload.graph_model, run_in_background=True
|
||||
)
|
||||
|
||||
return cognify_run.model_dump()
|
||||
except Exception as error:
|
||||
return JSONResponse(status_code=409, content={"error": str(error)})
|
||||
|
||||
@router.websocket("/subscribe/{pipeline_run_id}")
|
||||
async def subscribe_to_cognify_info(websocket: WebSocket, pipeline_run_id: str):
|
||||
await websocket.accept()
|
||||
|
||||
auth_message = await websocket.receive_json()
|
||||
|
||||
try:
|
||||
await get_authenticated_user(auth_message.get("Authorization"))
|
||||
except Exception:
|
||||
await websocket.close(code=WS_1008_POLICY_VIOLATION, reason="Unauthorized")
|
||||
return
|
||||
|
||||
pipeline_run_id = UUID(pipeline_run_id)
|
||||
|
||||
initialize_queue(pipeline_run_id)
|
||||
|
||||
while True:
|
||||
pipeline_run_info = get_from_queue(pipeline_run_id)
|
||||
|
||||
if not pipeline_run_info:
|
||||
await asyncio.sleep(2)
|
||||
continue
|
||||
|
||||
if not isinstance(pipeline_run_info, PipelineRunInfo):
|
||||
continue
|
||||
|
||||
try:
|
||||
await websocket.send_json(
|
||||
{
|
||||
"pipeline_run_id": str(pipeline_run_info.pipeline_run_id),
|
||||
"status": pipeline_run_info.status,
|
||||
"payload": await get_nodes_and_edges(pipeline_run_info.payload)
|
||||
if pipeline_run_info.payload
|
||||
else None,
|
||||
}
|
||||
)
|
||||
|
||||
if isinstance(pipeline_run_info, PipelineRunCompleted):
|
||||
remove_queue(pipeline_run_id)
|
||||
await websocket.close(code=WS_1000_NORMAL_CLOSURE)
|
||||
break
|
||||
except WebSocketDisconnect:
|
||||
remove_queue(pipeline_run_id)
|
||||
break
|
||||
|
||||
return router
|
||||
|
||||
|
||||
async def get_nodes_and_edges(data_points):
|
||||
nodes = []
|
||||
edges = []
|
||||
|
||||
added_nodes = {}
|
||||
added_edges = {}
|
||||
visited_properties = {}
|
||||
|
||||
results = await asyncio.gather(
|
||||
*[
|
||||
get_graph_from_model(
|
||||
data_point,
|
||||
added_nodes=added_nodes,
|
||||
added_edges=added_edges,
|
||||
visited_properties=visited_properties,
|
||||
)
|
||||
for data_point in data_points
|
||||
]
|
||||
)
|
||||
|
||||
for result_nodes, result_edges in results:
|
||||
nodes.extend(result_nodes)
|
||||
edges.extend(result_edges)
|
||||
|
||||
nodes, edges = deduplicate_nodes_and_edges(nodes, edges)
|
||||
|
||||
return {
|
||||
"nodes": list(
|
||||
map(
|
||||
lambda node: {
|
||||
"id": str(node.id),
|
||||
"label": node.name if hasattr(node, "name") else f"{node.type}_{str(node.id)}",
|
||||
"properties": {},
|
||||
},
|
||||
nodes,
|
||||
)
|
||||
),
|
||||
"edges": list(
|
||||
map(
|
||||
lambda edge: {
|
||||
"source": str(edge[0]),
|
||||
"target": str(edge[1]),
|
||||
"label": edge[2],
|
||||
},
|
||||
edges,
|
||||
)
|
||||
),
|
||||
}
|
||||
|
|
|
|||
1
cognee/api/v1/crewai/routers/__init__.py
Normal file
1
cognee/api/v1/crewai/routers/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
|||
from .get_crewai_router import get_crewai_router
|
||||
165
cognee/api/v1/crewai/routers/get_crewai_router.py
Normal file
165
cognee/api/v1/crewai/routers/get_crewai_router.py
Normal file
|
|
@ -0,0 +1,165 @@
|
|||
import os
|
||||
import asyncio
|
||||
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
|
||||
from starlette.status import WS_1000_NORMAL_CLOSURE, WS_1008_POLICY_VIOLATION
|
||||
|
||||
from cognee.api.DTO import InDTO
|
||||
from cognee.context_global_variables import set_database_global_context_variables
|
||||
from cognee.infrastructure.databases.relational import get_relational_engine
|
||||
from cognee.modules.data.deletion import prune_data, prune_system
|
||||
from cognee.modules.data.methods import get_authorized_existing_datasets, load_or_create_datasets
|
||||
from cognee.modules.data.models import Dataset
|
||||
from cognee.modules.users.models import User
|
||||
from cognee.modules.users.methods import get_authenticated_user
|
||||
from cognee.modules.users.get_user_db import get_user_db_context
|
||||
from cognee.modules.users.get_user_manager import get_user_manager_context
|
||||
from cognee.modules.users.permissions.methods import give_permission_on_dataset
|
||||
from cognee.modules.users.authentication.default.default_jwt_strategy import DefaultJWTStrategy
|
||||
from cognee.modules.users.authentication.auth0.auth0_jwt_strategy import Auth0JWTStrategy
|
||||
from cognee.modules.crewai.get_crewai_pipeline_run_id import get_crewai_pipeline_run_id
|
||||
from cognee.modules.pipelines.models import PipelineRunInfo, PipelineRunCompleted
|
||||
from cognee.modules.users.exceptions import PermissionDeniedError
|
||||
from cognee.complex_demos.crewai_demo.src.crewai_demo.main import (
|
||||
run_github_ingestion,
|
||||
run_hiring_crew,
|
||||
)
|
||||
from cognee.modules.pipelines.queues.pipeline_run_info_queues import (
|
||||
get_from_queue,
|
||||
initialize_queue,
|
||||
remove_queue,
|
||||
)
|
||||
|
||||
|
||||
class CrewAIRunPayloadDTO(InDTO):
|
||||
username1: str
|
||||
username2: str
|
||||
|
||||
|
||||
class CrewAIFeedbackPayloadDTO(InDTO):
|
||||
feedback: str
|
||||
|
||||
|
||||
def get_crewai_router() -> APIRouter:
|
||||
router = APIRouter()
|
||||
|
||||
@router.post("/run", response_model=bool)
|
||||
async def run_crewai(
|
||||
payload: CrewAIRunPayloadDTO,
|
||||
user: User = Depends(get_authenticated_user),
|
||||
):
|
||||
# Set context based database settings if necessary
|
||||
await set_database_global_context_variables("Github", user.id)
|
||||
|
||||
await prune_data(user)
|
||||
await prune_system(user)
|
||||
|
||||
try:
|
||||
existing_datasets = await get_authorized_existing_datasets(
|
||||
user=user, permission_type="write", datasets=["Github"]
|
||||
)
|
||||
except PermissionDeniedError:
|
||||
print("No datasets were found")
|
||||
existing_datasets = []
|
||||
|
||||
datasets = await load_or_create_datasets(["Github"], existing_datasets, user)
|
||||
github_dataset: Dataset = next(
|
||||
(dataset for dataset in datasets if dataset.name == "Github")
|
||||
)
|
||||
|
||||
# Give user proper permissions for dataset
|
||||
await give_permission_on_dataset(user, github_dataset.id, "read")
|
||||
await give_permission_on_dataset(user, github_dataset.id, "write")
|
||||
await give_permission_on_dataset(user, github_dataset.id, "delete")
|
||||
await give_permission_on_dataset(user, github_dataset.id, "share")
|
||||
|
||||
await run_github_ingestion(user, github_dataset, payload.username1, payload.username2)
|
||||
|
||||
applicants = {
|
||||
"applicant_1": payload.username1,
|
||||
"applicant_2": payload.username2,
|
||||
}
|
||||
|
||||
def run_crewai_in_thread():
|
||||
run_hiring_crew(user, applicants=applicants, number_of_rounds=2)
|
||||
|
||||
async def run_crewai_async():
|
||||
loop = asyncio.get_running_loop()
|
||||
await loop.run_in_executor(None, run_crewai_in_thread)
|
||||
|
||||
await run_crewai_async()
|
||||
|
||||
return True
|
||||
|
||||
@router.post("/feedback", response_model=None)
|
||||
async def send_feedback(
|
||||
payload: CrewAIFeedbackPayloadDTO,
|
||||
user: User = Depends(
|
||||
get_authenticated_user,
|
||||
),
|
||||
):
|
||||
from cognee import add, cognify
|
||||
# Set context based database settings if necessary
|
||||
dataset_name = "Github"
|
||||
await set_database_global_context_variables(dataset_name, user.id)
|
||||
|
||||
await add(payload.feedback, node_set=["final_report"], dataset_name=dataset_name)
|
||||
await cognify(datasets=dataset_name, is_stream_info_enabled=True)
|
||||
|
||||
@router.websocket("/subscribe")
|
||||
async def subscribe_to_crewai_info(websocket: WebSocket):
|
||||
await websocket.accept()
|
||||
|
||||
access_token = websocket.cookies.get(os.getenv("AUTH_TOKEN_COOKIE_NAME"))
|
||||
|
||||
try:
|
||||
secret = os.getenv("FASTAPI_USERS_JWT_SECRET", "super_secret")
|
||||
|
||||
if os.getenv("USE_AUTH0_AUTHORIZATION") == "True":
|
||||
strategy = Auth0JWTStrategy(secret, lifetime_seconds=36000)
|
||||
else:
|
||||
strategy = DefaultJWTStrategy(secret, lifetime_seconds=3600)
|
||||
|
||||
db_engine = get_relational_engine()
|
||||
|
||||
async with db_engine.get_async_session() as session:
|
||||
async with get_user_db_context(session) as user_db:
|
||||
async with get_user_manager_context(user_db) as user_manager:
|
||||
user = await get_authenticated_user(
|
||||
cookie=access_token, strategy_cookie=strategy, user_manager=user_manager
|
||||
)
|
||||
except Exception:
|
||||
await websocket.close(code=WS_1008_POLICY_VIOLATION, reason="Unauthorized")
|
||||
return
|
||||
|
||||
pipeline_run_id = get_crewai_pipeline_run_id(user.id)
|
||||
|
||||
initialize_queue(pipeline_run_id)
|
||||
|
||||
while True:
|
||||
pipeline_run_info = get_from_queue(pipeline_run_id)
|
||||
|
||||
if not pipeline_run_info:
|
||||
await asyncio.sleep(2)
|
||||
continue
|
||||
|
||||
if not isinstance(pipeline_run_info, PipelineRunInfo):
|
||||
continue
|
||||
|
||||
try:
|
||||
await websocket.send_json(
|
||||
{
|
||||
"pipeline_run_id": str(pipeline_run_info.pipeline_run_id),
|
||||
"status": pipeline_run_info.status,
|
||||
"payload": pipeline_run_info.payload if pipeline_run_info.payload else None,
|
||||
}
|
||||
)
|
||||
|
||||
if isinstance(pipeline_run_info, PipelineRunCompleted):
|
||||
remove_queue(pipeline_run_id)
|
||||
await websocket.close(code=WS_1000_NORMAL_CLOSURE)
|
||||
break
|
||||
except WebSocketDisconnect:
|
||||
remove_queue(pipeline_run_id)
|
||||
break
|
||||
|
||||
return router
|
||||
|
|
@ -1,3 +1,5 @@
|
|||
from cognee.context_global_variables import set_database_global_context_variables
|
||||
from cognee.modules.graph.operations import get_formatted_graph_data
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
from fastapi import APIRouter
|
||||
from datetime import datetime
|
||||
|
|
@ -39,15 +41,34 @@ class DataDTO(OutDTO):
|
|||
raw_data_location: str
|
||||
|
||||
|
||||
class GraphNodeDTO(OutDTO):
|
||||
id: UUID
|
||||
label: str
|
||||
properties: dict
|
||||
|
||||
|
||||
class GraphEdgeDTO(OutDTO):
|
||||
source: UUID
|
||||
target: UUID
|
||||
label: str
|
||||
|
||||
|
||||
class GraphDTO(OutDTO):
|
||||
nodes: List[GraphNodeDTO]
|
||||
edges: List[GraphEdgeDTO]
|
||||
|
||||
|
||||
def get_datasets_router() -> APIRouter:
|
||||
router = APIRouter()
|
||||
|
||||
@router.get("/", response_model=list[DatasetDTO])
|
||||
async def get_datasets(user: User = Depends(get_authenticated_user)):
|
||||
try:
|
||||
from cognee.modules.data.methods import get_datasets
|
||||
from cognee.modules.data.methods import get_authorized_existing_datasets
|
||||
|
||||
datasets = await get_datasets(user.id)
|
||||
datasets = await get_authorized_existing_datasets(
|
||||
user=user, permission_type="read", datasets=None
|
||||
)
|
||||
|
||||
return datasets
|
||||
except Exception as error:
|
||||
|
|
@ -94,24 +115,20 @@ def get_datasets_router() -> APIRouter:
|
|||
|
||||
await delete_data(data)
|
||||
|
||||
@router.get("/{dataset_id}/graph", response_model=str)
|
||||
@router.get("/{dataset_id}/graph", response_model=GraphDTO)
|
||||
async def get_dataset_graph(dataset_id: UUID, user: User = Depends(get_authenticated_user)):
|
||||
from cognee.shared.utils import render_graph
|
||||
from cognee.infrastructure.databases.graph import get_graph_engine
|
||||
|
||||
try:
|
||||
graph_client = await get_graph_engine()
|
||||
graph_url = await render_graph(graph_client.graph)
|
||||
await set_database_global_context_variables("Github", user.id)
|
||||
|
||||
return JSONResponse(
|
||||
status_code=200,
|
||||
content=str(graph_url),
|
||||
content=await get_formatted_graph_data(),
|
||||
)
|
||||
except Exception as error:
|
||||
print(error)
|
||||
return JSONResponse(
|
||||
status_code=409,
|
||||
content="Graphistry credentials are not set. Please set them in your .env file.",
|
||||
content="Error retrieving dataset graph data.",
|
||||
)
|
||||
|
||||
@router.get(
|
||||
|
|
|
|||
|
|
@ -1,66 +1,69 @@
|
|||
from uuid import UUID
|
||||
from typing import List
|
||||
|
||||
from fastapi import APIRouter
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from cognee.modules.users.models import User
|
||||
from cognee.modules.users.methods import get_authenticated_user
|
||||
|
||||
|
||||
def get_permissions_router() -> APIRouter:
|
||||
permissions_router = APIRouter()
|
||||
|
||||
@permissions_router.post("/roles/{role_id}/permissions")
|
||||
async def give_default_permission_to_role(role_id: UUID, permission_name: str):
|
||||
from cognee.modules.users.permissions.methods import (
|
||||
give_default_permission_to_role as set_default_permission_to_role,
|
||||
@permissions_router.post("/datasets/{principal_id}/")
|
||||
async def give_datasets_permission_to_principal(
|
||||
permission_name: str,
|
||||
dataset_ids: List[UUID],
|
||||
principal_id: UUID,
|
||||
user: User = Depends(get_authenticated_user),
|
||||
):
|
||||
from cognee.modules.users.permissions.methods import authorized_give_permission_on_datasets
|
||||
|
||||
await authorized_give_permission_on_datasets(
|
||||
principal_id,
|
||||
[dataset_id for dataset_id in dataset_ids],
|
||||
permission_name,
|
||||
user.id,
|
||||
)
|
||||
|
||||
await set_default_permission_to_role(role_id, permission_name)
|
||||
|
||||
return JSONResponse(status_code=200, content={"message": "Permission assigned to role"})
|
||||
|
||||
@permissions_router.post("/tenants/{tenant_id}/permissions")
|
||||
async def give_default_permission_to_tenant(tenant_id: UUID, permission_name: str):
|
||||
from cognee.modules.users.permissions.methods import (
|
||||
give_default_permission_to_tenant as set_tenant_default_permissions,
|
||||
return JSONResponse(
|
||||
status_code=200, content={"message": "Permission assigned to principal"}
|
||||
)
|
||||
|
||||
await set_tenant_default_permissions(tenant_id, permission_name)
|
||||
|
||||
return JSONResponse(status_code=200, content={"message": "Permission assigned to tenant"})
|
||||
|
||||
@permissions_router.post("/users/{user_id}/permissions")
|
||||
async def give_default_permission_to_user(user_id: UUID, permission_name: str):
|
||||
from cognee.modules.users.permissions.methods import (
|
||||
give_default_permission_to_user as set_default_permission_to_user,
|
||||
)
|
||||
|
||||
await set_default_permission_to_user(user_id, permission_name)
|
||||
|
||||
return JSONResponse(status_code=200, content={"message": "Permission assigned to user"})
|
||||
|
||||
@permissions_router.post("/roles")
|
||||
async def create_role(
|
||||
role_name: str,
|
||||
tenant_id: UUID,
|
||||
):
|
||||
async def create_role(role_name: str, user: User = Depends(get_authenticated_user)):
|
||||
from cognee.modules.users.roles.methods import create_role as create_role_method
|
||||
|
||||
await create_role_method(role_name=role_name, tenant_id=tenant_id)
|
||||
await create_role_method(role_name=role_name, owner_id=user.id)
|
||||
|
||||
return JSONResponse(status_code=200, content={"message": "Role created for tenant"})
|
||||
|
||||
@permissions_router.post("/users/{user_id}/roles")
|
||||
async def add_user_to_role(user_id: UUID, role_id: UUID):
|
||||
async def add_user_to_role(
|
||||
user_id: UUID, role_id: UUID, user: User = Depends(get_authenticated_user)
|
||||
):
|
||||
from cognee.modules.users.roles.methods import add_user_to_role as add_user_to_role_method
|
||||
|
||||
await add_user_to_role_method(user_id=user_id, role_id=role_id)
|
||||
await add_user_to_role_method(user_id=user_id, role_id=role_id, owner_id=user.id)
|
||||
|
||||
return JSONResponse(status_code=200, content={"message": "User added to role"})
|
||||
|
||||
@permissions_router.post("/users/{user_id}/tenants")
|
||||
async def add_user_to_tenant(
|
||||
user_id: UUID, tenant_id: UUID, user: User = Depends(get_authenticated_user)
|
||||
):
|
||||
from cognee.modules.users.tenants.methods import add_user_to_tenant
|
||||
|
||||
await add_user_to_tenant(user_id=user_id, tenant_id=tenant_id, owner_id=user.id)
|
||||
|
||||
return JSONResponse(status_code=200, content={"message": "User added to tenant"})
|
||||
|
||||
@permissions_router.post("/tenants")
|
||||
async def create_tenant(tenant_name: str):
|
||||
async def create_tenant(tenant_name: str, user: User = Depends(get_authenticated_user)):
|
||||
from cognee.modules.users.tenants.methods import create_tenant as create_tenant_method
|
||||
|
||||
await create_tenant_method(tenant_name=tenant_name)
|
||||
await create_tenant_method(tenant_name=tenant_name, user_id=user.id)
|
||||
|
||||
return JSONResponse(status_code=200, content={"message": "Tenant created."})
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
from uuid import UUID
|
||||
from typing import Optional, Union
|
||||
from datetime import datetime
|
||||
from fastapi import Depends, APIRouter
|
||||
from fastapi.responses import JSONResponse
|
||||
|
|
@ -9,8 +10,12 @@ from cognee.modules.search.operations import get_history
|
|||
from cognee.modules.users.methods import get_authenticated_user
|
||||
|
||||
|
||||
# Note: Datasets sent by name will only map to datasets owned by the request sender
|
||||
# To search for datasets not owned by the request sender dataset UUID is needed
|
||||
class SearchPayloadDTO(InDTO):
|
||||
search_type: SearchType
|
||||
datasets: Optional[list[str]] = None
|
||||
dataset_ids: Optional[list[UUID]] = None
|
||||
query: str
|
||||
|
||||
|
||||
|
|
@ -39,7 +44,11 @@ def get_search_router() -> APIRouter:
|
|||
|
||||
try:
|
||||
results = await cognee_search(
|
||||
query_text=payload.query, query_type=payload.search_type, user=user
|
||||
query_text=payload.query,
|
||||
query_type=payload.search_type,
|
||||
user=user,
|
||||
datasets=payload.datasets,
|
||||
dataset_ids=payload.dataset_ids,
|
||||
)
|
||||
|
||||
return results
|
||||
|
|
|
|||
|
|
@ -1,32 +1,45 @@
|
|||
from uuid import UUID
|
||||
from typing import Union, Optional, List, Type
|
||||
|
||||
from cognee.modules.users.models import User
|
||||
from cognee.infrastructure.engine.models.DataPoint import DataPoint
|
||||
from cognee.modules.search.types import SearchType
|
||||
from cognee.modules.users.exceptions import UserNotFoundError
|
||||
from cognee.modules.users.models import User
|
||||
from cognee.modules.users.methods import get_default_user
|
||||
from cognee.modules.search.methods import search as search_function
|
||||
from cognee.modules.data.methods import get_authorized_existing_datasets
|
||||
from cognee.modules.data.exceptions import DatasetNotFoundError
|
||||
|
||||
|
||||
async def search(
|
||||
query_text: str,
|
||||
query_type: SearchType = SearchType.GRAPH_COMPLETION,
|
||||
user: User = None,
|
||||
datasets: Union[list[str], str, None] = None,
|
||||
datasets: Optional[Union[list[str], str]] = None,
|
||||
dataset_ids: Optional[Union[list[UUID], UUID]] = None,
|
||||
system_prompt_path: str = "answer_simple_question.txt",
|
||||
top_k: int = 10,
|
||||
node_type: Optional[Type] = None,
|
||||
node_name: Optional[List[str]] = None,
|
||||
) -> list:
|
||||
# We use lists from now on for datasets
|
||||
if isinstance(datasets, str):
|
||||
if isinstance(datasets, UUID) or isinstance(datasets, str):
|
||||
datasets = [datasets]
|
||||
|
||||
if user is None:
|
||||
user = await get_default_user()
|
||||
|
||||
# Transform string based datasets to UUID - String based datasets can only be found for current user
|
||||
if datasets is not None and [all(isinstance(dataset, str) for dataset in datasets)]:
|
||||
datasets = await get_authorized_existing_datasets(datasets, "read", user)
|
||||
datasets = [dataset.id for dataset in datasets]
|
||||
if not datasets:
|
||||
raise DatasetNotFoundError(message="No datasets found.")
|
||||
|
||||
filtered_search_results = await search_function(
|
||||
query_text=query_text,
|
||||
query_type=query_type,
|
||||
datasets=datasets,
|
||||
dataset_ids=dataset_ids if dataset_ids else datasets,
|
||||
user=user,
|
||||
system_prompt_path=system_prompt_path,
|
||||
top_k=top_k,
|
||||
|
|
|
|||
49
cognee/complex_demos/crewai_demo/README
Normal file
49
cognee/complex_demos/crewai_demo/README
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
# CrewAI
|
||||
|
||||
This is a demo project to showcase and test how cognee and CrewAI can work together:
|
||||
|
||||
Short description:
|
||||
|
||||
We simulate the hiring process for a technical role. These are the steps of the pipeline:
|
||||
|
||||
1. First we ingest github data including:
|
||||
-commits, comments and other soft skill related information for each of the candidates.
|
||||
-source code and other technical skill related information for each of the candidates.
|
||||
|
||||
2. We hire 3 agents to make the decision using cognee's memory engine
|
||||
|
||||
1 - HR Expert Agent focusing on soft skills:
|
||||
- Analyzes the communication skills, clarity, engagement a kindness based on the commits, comments and github communication of the candidates.
|
||||
-To analyze the soft skills of the candidates, the agent performs multiple searches using cognee.search
|
||||
-The subgraph that the agent can use is limited to the "soft" nodeset subgraph
|
||||
- Scores each candidate from 0 to 1 and gives reasoning
|
||||
|
||||
2 - Technical Expert Agent focusing on technical skills:
|
||||
- Analyzes strictly code related and technical skills based on github commits and pull requests of the candidates.
|
||||
- To analyze the technical skills of the candidates, the agent performs multiple searches using cognee.search
|
||||
- The subgraph that the agent can use is limited to the "techical" nodeset subgraph
|
||||
- Scores each candidate from 0 to 1 and gives reasoning
|
||||
|
||||
3 - CEO/CTO agent who makes the final decision:
|
||||
- Given the output of the HR expert and Technical expert agents, the decision maker agent makes the final decision about the hiring procedure.
|
||||
- The agent will choose the best candidate to hire, and will give reasoning for each of the candidates (why hire/no_hire).
|
||||
|
||||
|
||||
The following tools were implemented:
|
||||
- Cognee build: cognifies the added data (Preliminary task, therefore it is not performed by agents.)
|
||||
- Cognee search: searches the cognee memory, limiting the subgraph using the nodeset subgraph retriever (Used by many agents)
|
||||
- In the case of technical and soft skills agents the tool gets instantiated with the restricted nodeset search capability
|
||||
|
||||
|
||||
The three agents are working together to simulate a hiring process, evaluating soft and technical skills, while the CEO/CTO agent
|
||||
makes the final decision (HIRE/NOHIRE) based on the outputs of the evaluation agents.
|
||||
|
||||
|
||||
## Run in UI
|
||||
|
||||
Note1: After each restart go to `localhost:3000/auth` and login again.
|
||||
Note2: Activity is not preserved in the DB, so it will be lost after page refresh.
|
||||
|
||||
1. Start FastAPI server by running `client.py` inside `cognee/api` directory
|
||||
2. Start NextJS server by running `npm run dev` inside `cognee-frontend` directory.
|
||||
3. If you are not logged-in, app will redirect to `/auth` page. Otherwise go there manually and login (if server is restarted).
|
||||
0
cognee/complex_demos/crewai_demo/__init__.py
Normal file
0
cognee/complex_demos/crewai_demo/__init__.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
User name is John Doe.
|
||||
User is an AI Engineer.
|
||||
User is interested in AI Agents.
|
||||
User is based in San Francisco, California.
|
||||
19
cognee/complex_demos/crewai_demo/pyproject.toml
Normal file
19
cognee/complex_demos/crewai_demo/pyproject.toml
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
[project]
|
||||
name = "crewai_demo"
|
||||
version = "0.1.0"
|
||||
description = "Cognee crewAI demo"
|
||||
authors = [{ name = "Laszlo Hajdu", email = "laszlo@topoteretes.com" }]
|
||||
requires-python = ">=3.10,<3.13"
|
||||
dependencies = [
|
||||
"crewai[tools]>=0.114.0,<1.0.0"
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
run_crew = "association_layer_demo.main:run"
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.crewai]
|
||||
type = "crew"
|
||||
0
cognee/complex_demos/crewai_demo/src/__init__.py
Normal file
0
cognee/complex_demos/crewai_demo/src/__init__.py
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
from .github_dev_profile import GitHubDevProfile
|
||||
from .github_dev_comments import GitHubDevComments
|
||||
from .github_dev_commits import GitHubDevCommits
|
||||
|
||||
__all__ = ["GitHubDevProfile", "GitHubDevComments", "GitHubDevCommits"]
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
soft_skills_expert_agent:
|
||||
role: >
|
||||
Focused on communication, collaboration, and documentation excellence.
|
||||
goal: >
|
||||
Evaluate README clarity, issue discussions, and community engagement to score
|
||||
communication clarity and open-source culture participation.
|
||||
backstory: >
|
||||
You are an active OSS community manager who values clear writing, inclusive
|
||||
discussion, and strong documentation. You look for evidence of empathy,
|
||||
responsiveness, and collaborative spirit.
|
||||
|
||||
technical_expert_agent:
|
||||
role: >
|
||||
Specialized in evaluating technical skills and code quality.
|
||||
goal: >
|
||||
Analyze repository metadata and commit histories to score coding diversity,
|
||||
depth of contributions, and commit quality.
|
||||
backstory: >
|
||||
You are a seasoned software architect and open-source maintainer. You deeply
|
||||
understand python code structure, language ecosystems, and best practices.
|
||||
Your mission is to objectively rate each candidate’s technical excellence.
|
||||
|
||||
decision_maker_agent:
|
||||
role: >
|
||||
CTO/CEO-level decision maker who integrates expert feedback.
|
||||
goal: >
|
||||
Read the technical and soft-skills evaluations and decide whether to hire
|
||||
each candidate, justifying the decision.
|
||||
backstory: >
|
||||
You are the company’s CTO. You balance technical requirements, team culture,
|
||||
and long-term vision. You weigh both skill scores and communication ratings
|
||||
to make a final hire/no-hire call.
|
||||
|
|
@ -0,0 +1,153 @@
|
|||
soft_skills_assessment_applicant1_task:
|
||||
description: >
|
||||
Search cognee for comments authored by '{applicant_1}'.
|
||||
Use the "search_from_cognee" tool to collect information.
|
||||
Evaluate their communication clarity, community engagement, and kindness.
|
||||
Ask multiple questions if needed to uncover diverse interactions.
|
||||
Return a complete and reasoned assessment of their soft skills.
|
||||
|
||||
--- Example Output ---
|
||||
Input:
|
||||
applicant_1: Sarah Jennings
|
||||
|
||||
Output:
|
||||
- Name: Sarah Jennings
|
||||
- communication_clarity: 0.92
|
||||
- community_engagement: 0.88
|
||||
- kindness: 0.95
|
||||
- reasoning: >
|
||||
Sarah consistently communicates with clarity and structure. In several threads, her responses broke down complex issues into actionable steps,
|
||||
showing strong explanatory skills. She uses inclusive language like “let’s”, “we should”, and frequently thanks others for their input,
|
||||
which indicates a high degree of kindness. Sarah also initiates or joins collaborative threads, offering feedback or connecting people with
|
||||
relevant documentation. Her tone is encouraging and non-defensive, even when correcting misunderstandings. These patterns were observed across
|
||||
over 8 threads involving different team members over a 3-week span.
|
||||
|
||||
expected_output: >
|
||||
- Name: {applicant_1}
|
||||
- communication_clarity (0–1)
|
||||
- community_engagement (0–1)
|
||||
- kindness (0–1)
|
||||
- reasoning: (string)
|
||||
agent: soft_skills_expert_agent
|
||||
|
||||
soft_skills_assessment_applicant2_task:
|
||||
description: >
|
||||
Search cognee for comments authored by '{applicant_2}'.
|
||||
Use the "search_from_cognee" tool to collect information.
|
||||
Evaluate their communication clarity, community engagement, and kindness.
|
||||
Ask multiple questions if needed to uncover diverse interactions.
|
||||
Return a complete and reasoned assessment of their soft skills.
|
||||
|
||||
--- Example Output ---
|
||||
Input:
|
||||
applicant_1: Sarah Jennings
|
||||
|
||||
Output:
|
||||
- Name: Sarah Jennings
|
||||
- communication_clarity: 0.92
|
||||
- community_engagement: 0.88
|
||||
- kindness: 0.95
|
||||
- reasoning: >
|
||||
Sarah consistently communicates with clarity and structure. In several threads, her responses broke down complex issues into actionable steps,
|
||||
showing strong explanatory skills. She uses inclusive language like “let’s”, “we should”, and frequently thanks others for their input,
|
||||
which indicates a high degree of kindness. Sarah also initiates or joins collaborative threads, offering feedback or connecting people with
|
||||
relevant documentation. Her tone is encouraging and non-defensive, even when correcting misunderstandings. These patterns were observed across
|
||||
over 8 threads involving different team members over a 3-week span.
|
||||
|
||||
expected_output: >
|
||||
- Name: {applicant_2}
|
||||
- communication_clarity (0–1)
|
||||
- community_engagement (0–1)
|
||||
- kindness (0–1)
|
||||
- reasoning: (string)
|
||||
agent: soft_skills_expert_agent
|
||||
|
||||
technical_assessment_applicant1_task:
|
||||
description: >
|
||||
Analyze the repository metadata and commit history associated with '{applicant_1}'.
|
||||
Use the "search_from_cognee" tool to collect information.
|
||||
Score their code_diversity, depth_of_contribution, and commit_quality.
|
||||
Base your assessment strictly on technical input—ignore soft skills.
|
||||
|
||||
--- Example Output ---
|
||||
Input:
|
||||
applicant_1: Daniel Murphy
|
||||
|
||||
Output:
|
||||
- Name: Daniel Murphy
|
||||
- code_diversity: 0.87
|
||||
- depth_of_contribution: 0.91
|
||||
- commit_quality: 0.83
|
||||
- reasoning: >
|
||||
Daniel contributed to multiple areas of the codebase including frontend UI components, backend API endpoints, test coverage,
|
||||
and CI/CD configuration. His commit history spans over 6 weeks with consistent activity and includes thoughtful messages
|
||||
(e.g., “refactor auth flow to support multi-tenant login” or “add unit tests for pricing logic edge cases”).
|
||||
His pull requests often include both implementation and tests, showing technical completeness.
|
||||
Several commits show iterative problem-solving and cleanup after peer feedback, indicating thoughtful collaboration
|
||||
and improvement over time.
|
||||
expected_output: >
|
||||
- Name: {applicant_1}
|
||||
- code_diversity (0–1)
|
||||
- depth_of_contribution (0–1)
|
||||
- commit_quality (0–1)
|
||||
- reasoning: (string)
|
||||
agent: technical_expert_agent
|
||||
|
||||
technical_assessment_applicant2_task:
|
||||
description: >
|
||||
Analyze the repository metadata and commit history associated with '{applicant_2}'.
|
||||
Use the "search_from_cognee" tool to collect information.
|
||||
Score their code_diversity, depth_of_contribution, and commit_quality.
|
||||
Base your assessment strictly on technical input—ignore soft skills.
|
||||
|
||||
--- Example Output ---
|
||||
Input:
|
||||
applicant_1: Daniel Murphy
|
||||
|
||||
Output:
|
||||
- Name: Daniel Murphy
|
||||
- code_diversity: 0.87
|
||||
- depth_of_contribution: 0.91
|
||||
- commit_quality: 0.83
|
||||
- reasoning: >
|
||||
Daniel contributed to multiple areas of the codebase including frontend UI components, backend API endpoints, test coverage,
|
||||
and CI/CD configuration. His commit history spans over 6 weeks with consistent activity and includes thoughtful messages
|
||||
(e.g., “refactor auth flow to support multi-tenant login” or “add unit tests for pricing logic edge cases”).
|
||||
His pull requests often include both implementation and tests, showing technical completeness.
|
||||
Several commits show iterative problem-solving and cleanup after peer feedback, indicating thoughtful collaboration
|
||||
and improvement over time.
|
||||
|
||||
expected_output: >
|
||||
- Name: {applicant_2}
|
||||
- code_diversity (0–1)
|
||||
- depth_of_contribution (0–1)
|
||||
- commit_quality (0–1)
|
||||
- reasoning: (string)
|
||||
agent: technical_expert_agent
|
||||
|
||||
hiring_decision_task:
|
||||
description: >
|
||||
Review the technical and soft skill assessment task outputs for candidates: -{applicant_1} and -{applicant_2},
|
||||
then decide HIRE or NO_HIRE for each candidate with a detailed reasoning.
|
||||
The people to evaluate are:
|
||||
-{applicant_1}
|
||||
-{applicant_2}
|
||||
We have to hire one of them.
|
||||
|
||||
Prepare the final output for the ingest_hiring_decision_task.
|
||||
|
||||
|
||||
expected_output: >
|
||||
A string strictly containing the following for each person:
|
||||
- Person
|
||||
- decision: "HIRE" or "NO_HIRE",
|
||||
- reasoning: (string)
|
||||
agent: decision_maker_agent
|
||||
|
||||
ingest_hiring_decision_task:
|
||||
description: >
|
||||
Take the final hiring decision from the hiring_decision_task report and ingest it into Cognee using the "ingest_report_to_cognee" tool.
|
||||
Do not re-evaluate—just save the result using the tool you have.
|
||||
expected_output: >
|
||||
- confirmation: string message confirming successful ingestion into Cognee
|
||||
agent: decision_maker_agent
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
from crewai.tools import BaseTool
|
||||
|
||||
|
||||
class CogneeBuild(BaseTool):
|
||||
name: str = "Cognee Build"
|
||||
description: str = "Creates a memory and builds a knowledge graph using cognee."
|
||||
|
||||
def _run(self, inputs) -> str:
|
||||
import cognee
|
||||
import asyncio
|
||||
|
||||
async def main():
|
||||
try:
|
||||
await cognee.prune.prune_data()
|
||||
await cognee.prune.prune_system(metadata=True)
|
||||
|
||||
for meta in inputs.values():
|
||||
text = meta["file_content"]
|
||||
node_set = meta["nodeset"]
|
||||
await cognee.add(text, node_set=node_set)
|
||||
|
||||
await cognee.cognify(is_stream_info_enabled=True)
|
||||
|
||||
return "Knowledge Graph is done."
|
||||
except Exception as e:
|
||||
return f"Error: {str(e)}"
|
||||
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
if not loop.is_running():
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
return loop.create_task(main())
|
||||
except Exception as e:
|
||||
return f"Tool execution error: {str(e)}"
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
import asyncio
|
||||
import nest_asyncio
|
||||
from crewai.tools import BaseTool
|
||||
from typing import Type
|
||||
from pydantic import BaseModel, Field, PrivateAttr
|
||||
|
||||
from cognee.modules.users.models import User
|
||||
|
||||
|
||||
class CogneeIngestionInput(BaseModel):
|
||||
text: str = Field(
|
||||
"",
|
||||
description="The text of the report The format you should follow is {'text': 'your report'}",
|
||||
)
|
||||
|
||||
|
||||
class CogneeIngestion(BaseTool):
|
||||
name: str = "ingest_report_to_cognee"
|
||||
description: str = "This tool can be used to ingest the final hiring report into cognee"
|
||||
args_schema: Type[BaseModel] = CogneeIngestionInput
|
||||
_user: User = PrivateAttr()
|
||||
_nodeset_name: str = PrivateAttr()
|
||||
|
||||
def __init__(self, user: User, nodeset_name: str, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self._user = user
|
||||
self._nodeset_name = nodeset_name
|
||||
|
||||
def _run(self, text: str) -> str:
|
||||
import cognee
|
||||
# from secrets import choice
|
||||
# from string import ascii_letters, digits
|
||||
|
||||
async def main():
|
||||
try:
|
||||
# hash6 = "".join(choice(ascii_letters + digits) for _ in range(6))
|
||||
dataset_name = "Github"
|
||||
data = await cognee.add(
|
||||
text,
|
||||
node_set=[self._nodeset_name],
|
||||
dataset_name=dataset_name,
|
||||
user=self._user,
|
||||
)
|
||||
await cognee.cognify(
|
||||
datasets=dataset_name,
|
||||
is_stream_info_enabled=True,
|
||||
datapoints=data.packets,
|
||||
user=self._user,
|
||||
pipeline_name="github_pipeline",
|
||||
)
|
||||
|
||||
return "Report ingested successfully into Cognee memory."
|
||||
except Exception as e:
|
||||
return f"Error during ingestion: {str(e)}"
|
||||
|
||||
try:
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
if not loop.is_running():
|
||||
loop = asyncio.new_event_loop()
|
||||
except RuntimeError:
|
||||
loop = asyncio.new_event_loop()
|
||||
|
||||
if not loop.is_running():
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
result = loop.run_until_complete(main())
|
||||
|
||||
return result
|
||||
except Exception as e:
|
||||
return f"Tool execution error: {str(e)}"
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
import nest_asyncio
|
||||
|
||||
from crewai.tools import BaseTool
|
||||
from typing import Type
|
||||
from pydantic import BaseModel, Field, PrivateAttr
|
||||
|
||||
from cognee.modules.engine.models import NodeSet
|
||||
from cognee import search, SearchType
|
||||
from cognee.modules.users.models import User
|
||||
|
||||
|
||||
class CogneeSearchInput(BaseModel):
|
||||
query: str = Field(
|
||||
"",
|
||||
description="The natural language question to ask the memory engine."
|
||||
"The format you should follow is {'query': 'your query'}",
|
||||
)
|
||||
|
||||
|
||||
class CogneeSearch(BaseTool):
|
||||
name: str = "search_from_cognee"
|
||||
description: str = (
|
||||
"Use this tool to search the Cognee memory graph. "
|
||||
"Provide a natural language query that describes the information you want to retrieve, "
|
||||
"such as comments authored or files changes by a specific person."
|
||||
)
|
||||
args_schema: Type[BaseModel] = CogneeSearchInput
|
||||
_user: User = PrivateAttr()
|
||||
_nodeset_name: str = PrivateAttr()
|
||||
|
||||
def __init__(self, user: User, nodeset_name: str, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self._user = user
|
||||
self._nodeset_name = nodeset_name
|
||||
|
||||
def _run(self, query: str) -> str:
|
||||
import asyncio
|
||||
# from cognee.modules.retrieval.graph_completion_retriever import GraphCompletionRetriever
|
||||
|
||||
async def main():
|
||||
try:
|
||||
print(query)
|
||||
|
||||
search_results = await search(
|
||||
query_text=query,
|
||||
query_type=SearchType.GRAPH_COMPLETION,
|
||||
user=self._user,
|
||||
datasets=["Github"],
|
||||
top_k=30,
|
||||
node_type=NodeSet,
|
||||
node_name=[self._nodeset_name],
|
||||
)
|
||||
# search_results = await GraphCompletionRetriever(
|
||||
# top_k=5,
|
||||
# node_type=NodeSet,
|
||||
# node_name=[self._nodeset_name],
|
||||
# ).get_context(query=query)
|
||||
|
||||
return search_results
|
||||
except Exception as e:
|
||||
return f"Error: {str(e)}"
|
||||
|
||||
try:
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
if not loop.is_running():
|
||||
loop = asyncio.new_event_loop()
|
||||
except RuntimeError:
|
||||
loop = asyncio.new_event_loop()
|
||||
|
||||
if not loop.is_running():
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
nest_asyncio.apply(loop)
|
||||
|
||||
result = loop.run_until_complete(main())
|
||||
|
||||
return result
|
||||
except Exception as e:
|
||||
return f"Tool execution error: {str(e)}"
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
from crewai.tools import BaseTool
|
||||
|
||||
from ..github_ingest_datapoints import cognify_github_data_from_username
|
||||
|
||||
|
||||
class GithubIngestion(BaseTool):
|
||||
name: str = "Github graph builder"
|
||||
description: str = "Ingests the github graph of a person into Cognee"
|
||||
|
||||
def _run(self, applicant_1, applicant_2) -> str:
|
||||
import asyncio
|
||||
|
||||
# import cognee
|
||||
import os
|
||||
# from cognee.low_level import setup as cognee_setup
|
||||
|
||||
async def main():
|
||||
try:
|
||||
# await cognee.prune.prune_data()
|
||||
# await cognee.prune.prune_system(metadata=True)
|
||||
# await cognee_setup()
|
||||
token = os.getenv("GITHUB_TOKEN")
|
||||
|
||||
await cognify_github_data_from_username(applicant_1, token)
|
||||
await cognify_github_data_from_username(applicant_2, token)
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
return f"Error: {str(e)}"
|
||||
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
if not loop.is_running():
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
return loop.create_task(main())
|
||||
except Exception as e:
|
||||
return f"Tool execution error: {str(e)}"
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
import time
|
||||
from uuid import uuid4
|
||||
from crewai.utilities.events import (
|
||||
CrewKickoffStartedEvent,
|
||||
CrewKickoffCompletedEvent,
|
||||
AgentExecutionStartedEvent,
|
||||
AgentExecutionCompletedEvent,
|
||||
ToolUsageStartedEvent,
|
||||
ToolUsageFinishedEvent,
|
||||
)
|
||||
from crewai.utilities.events.base_event_listener import BaseEventListener
|
||||
|
||||
from cognee.modules.pipelines.models.PipelineRunInfo import PipelineRunActivity
|
||||
from cognee.modules.pipelines.queues.pipeline_run_info_queues import push_to_queue
|
||||
|
||||
|
||||
class CrewAIListener(BaseEventListener):
|
||||
def __init__(self, pipeline_run_id):
|
||||
super().__init__()
|
||||
self.pipeline_run_id = pipeline_run_id
|
||||
|
||||
def setup_listeners(self, crewai_event_bus):
|
||||
@crewai_event_bus.on(CrewKickoffStartedEvent)
|
||||
def on_crew_started(source, event: CrewKickoffStartedEvent):
|
||||
push_to_queue(
|
||||
self.pipeline_run_id,
|
||||
PipelineRunActivity(
|
||||
pipeline_run_id=self.pipeline_run_id,
|
||||
payload={
|
||||
"id": str(uuid4()),
|
||||
"timestamp": time.time() * 1000,
|
||||
"activity": f"Crew '{event.crew_name}' has started execution",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
@crewai_event_bus.on(CrewKickoffCompletedEvent)
|
||||
def on_crew_completed(source, event: CrewKickoffCompletedEvent):
|
||||
push_to_queue(
|
||||
self.pipeline_run_id,
|
||||
PipelineRunActivity(
|
||||
pipeline_run_id=self.pipeline_run_id,
|
||||
payload={
|
||||
"id": str(uuid4()),
|
||||
"timestamp": time.time() * 1000,
|
||||
"activity": f"Crew '{event.crew_name}' has completed execution",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
@crewai_event_bus.on(AgentExecutionStartedEvent)
|
||||
def on_agent_execution_completed(source, event: AgentExecutionStartedEvent):
|
||||
push_to_queue(
|
||||
self.pipeline_run_id,
|
||||
PipelineRunActivity(
|
||||
pipeline_run_id=self.pipeline_run_id,
|
||||
payload={
|
||||
"id": str(uuid4()),
|
||||
"timestamp": time.time() * 1000,
|
||||
"activity": f"Agent '{event.agent.role}' started execution",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
@crewai_event_bus.on(AgentExecutionCompletedEvent)
|
||||
def on_agent_execution_completed(source, event: AgentExecutionCompletedEvent):
|
||||
push_to_queue(
|
||||
self.pipeline_run_id,
|
||||
PipelineRunActivity(
|
||||
pipeline_run_id=self.pipeline_run_id,
|
||||
payload={
|
||||
"id": str(uuid4()),
|
||||
"timestamp": time.time() * 1000,
|
||||
"activity": f"Agent '{event.agent.role}' completed execution",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
@crewai_event_bus.on(ToolUsageStartedEvent)
|
||||
def on_agent_execution_completed(source, event: ToolUsageStartedEvent):
|
||||
push_to_queue(
|
||||
self.pipeline_run_id,
|
||||
PipelineRunActivity(
|
||||
pipeline_run_id=self.pipeline_run_id,
|
||||
payload={
|
||||
"id": str(uuid4()),
|
||||
"timestamp": time.time() * 1000,
|
||||
"activity": f"Agent tool call ({event.tool_name}) execution started",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
@crewai_event_bus.on(ToolUsageFinishedEvent)
|
||||
def on_agent_execution_completed(source, event: ToolUsageFinishedEvent):
|
||||
push_to_queue(
|
||||
self.pipeline_run_id,
|
||||
PipelineRunActivity(
|
||||
pipeline_run_id=self.pipeline_run_id,
|
||||
payload={
|
||||
"id": str(uuid4()),
|
||||
"timestamp": time.time() * 1000,
|
||||
"activity": f"Agent tool call ({event.tool_name}) execution completed",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
from abc import ABC, abstractmethod
|
||||
import requests
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
|
||||
GITHUB_API_URL = "https://api.github.com/graphql"
|
||||
|
||||
logger = get_logger("github_comments")
|
||||
|
||||
|
||||
class GitHubCommentBase(ABC):
|
||||
"""Base class for GitHub comment providers."""
|
||||
|
||||
def __init__(self, token, username, limit=10):
|
||||
self.token = token
|
||||
self.username = username
|
||||
self.limit = limit
|
||||
|
||||
def _run_query(self, query: str) -> dict:
|
||||
"""Executes a GraphQL query against GitHub's API."""
|
||||
headers = {"Authorization": f"Bearer {self.token}"}
|
||||
response = requests.post(GITHUB_API_URL, json={"query": query}, headers=headers)
|
||||
if response.status_code != 200:
|
||||
raise Exception(f"Query failed: {response.status_code} - {response.text}")
|
||||
return response.json()["data"]
|
||||
|
||||
def get_comments(self):
|
||||
"""Template method that orchestrates the comment retrieval process."""
|
||||
try:
|
||||
query = self._build_query()
|
||||
data = self._run_query(query)
|
||||
raw_comments = self._extract_comments(data)
|
||||
return [self._format_comment(item) for item in raw_comments[: self.limit]]
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching {self._get_comment_type()} comments: {e}")
|
||||
return []
|
||||
|
||||
@abstractmethod
|
||||
def _build_query(self) -> str:
|
||||
"""Builds the GraphQL query string."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _extract_comments(self, data) -> list:
|
||||
"""Extracts the comment data from the GraphQL response."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _format_comment(self, item) -> dict:
|
||||
"""Formats a single comment."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _get_comment_type(self) -> str:
|
||||
"""Returns the type of comment this provider handles."""
|
||||
pass
|
||||
|
|
@ -0,0 +1,298 @@
|
|||
from datetime import datetime, timedelta
|
||||
from cognee.complex_demos.crewai_demo.src.crewai_demo.github_comment_base import (
|
||||
GitHubCommentBase,
|
||||
logger,
|
||||
)
|
||||
|
||||
|
||||
class IssueCommentsProvider(GitHubCommentBase):
    """Fetches a user's most recently updated issue comments via GraphQL."""

    QUERY_TEMPLATE = """
    {{
      user(login: "{username}") {{
        issueComments(first: {limit}, orderBy: {{field: UPDATED_AT, direction: DESC}}) {{
          nodes {{
            body
            createdAt
            updatedAt
            url
            issue {{
              number
              title
              url
              repository {{
                nameWithOwner
              }}
              state
            }}
          }}
        }}
      }}
    }}
    """

    def _build_query(self) -> str:
        """Render the issue-comment query for this provider's user and limit."""
        return self.QUERY_TEMPLATE.format(username=self.username, limit=self.limit)

    def _extract_comments(self, data) -> list:
        """Pull the raw issue-comment nodes out of the GraphQL response."""
        return data["user"]["issueComments"]["nodes"]

    def _format_comment(self, comment) -> dict:
        """Normalize one raw issue-comment node into the provider-common dict shape."""
        issue = comment["issue"]
        url = comment["url"]

        return {
            "repo": issue["repository"]["nameWithOwner"],
            "issue_number": issue["number"],
            # The trailing URL segment doubles as a stable comment identifier.
            "comment_id": url.rsplit("/", 1)[-1] if url else None,
            "body": comment["body"],
            "text": comment["body"],
            "created_at": comment["createdAt"],
            "updated_at": comment["updatedAt"],
            "html_url": url,
            "issue_url": issue["url"],
            "author_association": "COMMENTER",
            "issue_title": issue["title"],
            "issue_state": issue["state"],
            "login": self.username,
            "type": "issue_comment",
        }

    def _get_comment_type(self) -> str:
        """Comment-type label used by the base class in error logs."""
        return "issue"
|
||||
|
||||
class PrReviewsProvider(GitHubCommentBase):
    """Fetches a user's pull-request reviews (top-level review bodies) via GraphQL."""

    QUERY_TEMPLATE = """
    {{
      user(login: "{username}") {{
        contributionsCollection {{
          pullRequestReviewContributions(first: {fetch_limit}) {{
            nodes {{
              pullRequestReview {{
                body
                createdAt
                updatedAt
                url
                state
                pullRequest {{
                  number
                  title
                  url
                  repository {{
                    nameWithOwner
                  }}
                  state
                }}
              }}
            }}
          }}
        }}
      }}
    }}
    """

    def __init__(self, token, username, limit=10, fetch_limit=None):
        """Initialize with credentials plus an optional raw fetch limit.

        ``fetch_limit`` defaults to ``10 * limit`` because reviews with empty
        bodies are discarded after fetching, so extra raw rows are needed.
        """
        super().__init__(token, username, limit)
        self.fetch_limit = 10 * limit if fetch_limit is None else fetch_limit

    def _build_query(self) -> str:
        """Render the review-contributions query for this provider's user."""
        return self.QUERY_TEMPLATE.format(username=self.username, fetch_limit=self.fetch_limit)

    def _extract_comments(self, data) -> list:
        """Collect only the reviews that actually contain body text."""
        nodes = data["user"]["contributionsCollection"]["pullRequestReviewContributions"]["nodes"]
        reviews = []
        for node in nodes:
            review = node["pullRequestReview"]
            if review["body"]:
                reviews.append(review)
        return reviews

    def _format_comment(self, review) -> dict:
        """Normalize one PR review into the provider-common dict shape."""
        pr = review["pullRequest"]
        url = review["url"]

        return {
            "repo": pr["repository"]["nameWithOwner"],
            "issue_number": pr["number"],
            # The trailing URL segment doubles as a stable review identifier.
            "comment_id": url.rsplit("/", 1)[-1] if url else None,
            "body": review["body"],
            "text": review["body"],
            "created_at": review["createdAt"],
            "updated_at": review["updatedAt"],
            "html_url": url,
            "issue_url": pr["url"],
            "author_association": "COMMENTER",
            "issue_title": pr["title"],
            "issue_state": pr["state"],
            "login": self.username,
            "review_state": review["state"],
            "type": "pr_review",
        }

    def _get_comment_type(self) -> str:
        """Comment-type label used by the base class in error logs."""
        return "PR review"
|
||||
|
||||
class PrReviewCommentsProvider(GitHubCommentBase):
    """Provider for GitHub PR review comments (inline code comments).

    Works in two steps: first list the PRs the user has reviewed, then run a
    second query per PR to fetch the user's inline review comments on it.
    """

    PR_CONTRIBUTIONS_TEMPLATE = """
    {{
      user(login: "{username}") {{
        contributionsCollection {{
          pullRequestReviewContributions(first: {fetch_limit}) {{
            nodes {{
              pullRequestReview {{
                pullRequest {{
                  number
                  title
                  url
                  repository {{
                    nameWithOwner
                  }}
                  state
                }}
              }}
            }}
          }}
        }}
      }}
    }}
    """

    PR_COMMENTS_TEMPLATE = """
    {{
      repository(owner: "{owner}", name: "{repo}") {{
        pullRequest(number: {pr_number}) {{
          reviews(first: {reviews_limit}, author: "{username}") {{
            nodes {{
              comments(first: {comments_limit}) {{
                nodes {{
                  body
                  createdAt
                  updatedAt
                  url
                }}
              }}
            }}
          }}
        }}
      }}
    }}
    """

    def __init__(
        self,
        token,
        username,
        limit=10,
        fetch_limit=None,
        reviews_limit=None,
        comments_limit=None,
        pr_limit=None,
    ):
        """Initialize with credentials and optional per-stage limits.

        The defaults are multiples of ``limit`` so enough raw data survives
        deduplication and per-PR filtering.
        """
        super().__init__(token, username, limit)
        self.fetch_limit = fetch_limit if fetch_limit is not None else 4 * limit
        self.reviews_limit = reviews_limit if reviews_limit is not None else 2 * limit
        self.comments_limit = comments_limit if comments_limit is not None else 3 * limit
        self.pr_limit = pr_limit if pr_limit is not None else 2 * limit

    def _build_query(self) -> str:
        """Builds the first-stage query listing the user's PR review contributions."""
        return self.PR_CONTRIBUTIONS_TEMPLATE.format(
            username=self.username, fetch_limit=self.fetch_limit
        )

    def _extract_comments(self, data) -> list:
        """Extracts PR review comments using the two-step approach."""
        prs = self._get_reviewed_prs(data)
        return self._fetch_comments_for_prs(prs)

    def _get_reviewed_prs(self, data) -> list:
        """Gets a deduplicated, order-preserving list of PRs the user has reviewed."""
        contributions = data["user"]["contributionsCollection"]["pullRequestReviewContributions"][
            "nodes"
        ]
        # Track seen URLs in a set: O(1) membership instead of re-scanning the
        # accumulated list for every contribution (was O(n^2)).
        seen_urls = set()
        unique_prs = []

        for node in contributions:
            pr = node["pullRequestReview"]["pullRequest"]
            if pr["url"] not in seen_urls:
                seen_urls.add(pr["url"])
                unique_prs.append(pr)

        # Slicing already clamps to the list length; no min() needed.
        return unique_prs[: self.pr_limit]

    def _fetch_comments_for_prs(self, prs) -> list:
        """Fetches inline comments for each PR in the list, flattened."""
        all_comments = []
        for pr in prs:
            all_comments.extend(self._get_comments_for_pr(pr))
        return all_comments

    def _get_comments_for_pr(self, pr) -> list:
        """Fetches the user's inline review comments for a single PR.

        Returns an empty list (and logs) on any API/shape error so one bad PR
        does not abort the whole collection.
        """
        owner, repo = pr["repository"]["nameWithOwner"].split("/")

        pr_query = self.PR_COMMENTS_TEMPLATE.format(
            owner=owner,
            repo=repo,
            pr_number=pr["number"],
            username=self.username,
            reviews_limit=self.reviews_limit,
            comments_limit=self.comments_limit,
        )

        try:
            pr_comments = []
            pr_data = self._run_query(pr_query)
            reviews = pr_data["repository"]["pullRequest"]["reviews"]["nodes"]

            for review in reviews:
                for comment in review["comments"]["nodes"]:
                    # Stash the parent PR so _format_comment can reach it.
                    comment["_pr_data"] = pr
                    pr_comments.append(comment)

            return pr_comments
        except Exception as e:
            logger.error(f"Error fetching comments for PR #{pr['number']}: {e}")
            return []

    def _format_comment(self, comment) -> dict:
        """Normalize one inline review comment into the provider-common dict shape."""
        pr = comment["_pr_data"]
        comment_id = comment["url"].split("/")[-1] if comment["url"] else None

        return {
            "repo": pr["repository"]["nameWithOwner"],
            "issue_number": pr["number"],
            "comment_id": comment_id,
            "body": comment["body"],
            "text": comment["body"],
            "created_at": comment["createdAt"],
            "updated_at": comment["updatedAt"],
            "html_url": comment["url"],
            "issue_url": pr["url"],
            "author_association": "COMMENTER",
            "issue_title": pr["title"],
            "issue_state": pr["state"],
            "login": self.username,
            "type": "pr_review_comment",
        }

    def _get_comment_type(self) -> str:
        """Comment-type label used by the base class in error logs."""
        return "PR review comment"
|
|
@ -0,0 +1,169 @@
|
|||
from uuid import uuid5, NAMESPACE_OID
|
||||
from typing import Dict, Any, List
|
||||
|
||||
from cognee.modules.engine.models.node_set import NodeSet
|
||||
from cognee.shared.logging_utils import get_logger
|
||||
from cognee.complex_demos.crewai_demo.src.crewai_demo.github_datapoints import (
|
||||
GitHubUser,
|
||||
Repository,
|
||||
File,
|
||||
FileChange,
|
||||
Comment,
|
||||
Issue,
|
||||
Commit,
|
||||
)
|
||||
|
||||
logger = get_logger("github_datapoints")
|
||||
|
||||
|
||||
def create_github_user_datapoint(user_data, nodesets: List[NodeSet]):
    """Build a GitHubUser DataPoint from raw user data.

    Returns a list of the user plus the given node sets (so the result can be
    ingested as a batch), or None when ``user_data`` is empty.
    """
    if not user_data:
        return None

    login = user_data.get("login", "")
    # Deterministic ID: the same login always maps to the same node.
    user_id = uuid5(NAMESPACE_OID, login)

    user = GitHubUser(
        id=user_id,
        name=login,
        bio=user_data.get("bio"),
        company=user_data.get("company"),
        location=user_data.get("location"),
        public_repos=user_data.get("public_repos", 0),
        followers=user_data.get("followers", 0),
        following=user_data.get("following", 0),
        interacts_with=[],
        belongs_to_set=nodesets,
    )

    logger.debug(f"Created GitHubUser with ID: {user_id}")

    return [user] + nodesets
|
||||
|
||||
def create_repository_datapoint(repo_name: str, nodesets: List[NodeSet]) -> Repository:
    """Build a Repository DataPoint whose ID is stable for a given repo name."""
    repo_id = uuid5(NAMESPACE_OID, repo_name)

    repository = Repository(
        id=repo_id,
        name=repo_name,
        has_issue=[],
        has_commit=[],
        belongs_to_set=nodesets,
    )

    logger.debug(f"Created Repository with ID: {repo_id} for {repo_name}")
    return repository
|
||||
|
||||
def create_file_datapoint(filename: str, repo_name: str, nodesets: List[NodeSet]) -> File:
    """Creates a File DataPoint with an ID stable per (repo, filename).

    The UUID key must include the filename: keying on the repo alone would
    collapse every file of a repository onto a single DataPoint ID.
    """
    file_key = f"{repo_name}:{filename}"
    file_id = uuid5(NAMESPACE_OID, file_key)
    file = File(
        id=file_id, name=filename, filename=filename, repo=repo_name, belongs_to_set=nodesets
    )
    logger.debug(f"Created File with ID: {file_id} for {filename}")
    return file
|
||||
|
||||
def create_commit_datapoint(
    commit_data: Dict[str, Any], user: GitHubUser, nodesets: List[NodeSet]
) -> Commit:
    """Build a Commit DataPoint keyed deterministically on the commit SHA.

    NOTE(review): ``user`` is currently unused here — kept for signature
    parity with the other creators; confirm before removing.
    """
    sha = commit_data.get("commit_sha", "")
    commit_id = uuid5(NAMESPACE_OID, sha)

    commit = Commit(
        id=commit_id,
        name=sha,
        commit_sha=sha,
        text=f"Commit message:{commit_data.get('commit_message', '')}",
        commit_date=commit_data.get("commit_date", ""),
        commit_url=commit_data.get("commit_url", ""),
        author_name=commit_data.get("login", ""),
        repo=commit_data.get("repo", ""),
        has_change=[],
        belongs_to_set=nodesets,
    )

    logger.debug(f"Created Commit with ID: {commit_id} for {sha}")
    return commit
|
||||
|
||||
def create_file_change_datapoint(
    fc_data: Dict[str, Any], user: GitHubUser, file: File, nodesets: List[NodeSet]
) -> FileChange:
    """Build a FileChange DataPoint keyed on (repo, commit SHA, filename)."""
    repo = fc_data.get("repo", "")
    sha = fc_data.get("commit_sha", "")
    filename = fc_data.get("filename", "")

    # One change node per file per commit per repo.
    fc_id = uuid5(NAMESPACE_OID, f"{repo}:{sha}:{filename}")

    file_change = FileChange(
        id=fc_id,
        name=filename,
        filename=filename,
        status=fc_data.get("status", ""),
        additions=fc_data.get("additions", 0),
        deletions=fc_data.get("deletions", 0),
        changes=fc_data.get("changes", 0),
        text=fc_data.get("diff", ""),
        commit_sha=sha,
        repo=repo,
        modifies=file.filename,
        changed_by=user,
        belongs_to_set=nodesets,
    )

    logger.debug(f"Created FileChange with ID: {fc_id} for {filename}")
    return file_change
|
||||
|
||||
def create_issue_datapoint(
    issue_data: Dict[str, Any], repo_name: str, nodesets: List[NodeSet]
) -> Issue:
    """Build an Issue DataPoint keyed on (repo, issue number)."""
    number = issue_data.get("issue_number", "")
    issue_id = uuid5(NAMESPACE_OID, f"{repo_name}:{number}")

    issue = Issue(
        id=issue_id,
        name=str(issue_data.get("issue_number", 0)),
        number=issue_data.get("issue_number", 0),
        text=issue_data.get("issue_title", ""),
        state=issue_data.get("issue_state", ""),
        repository=repo_name,
        is_pr=False,
        has_comment=[],
        belongs_to_set=nodesets,
    )

    logger.debug(f"Created Issue with ID: {issue_id} for {issue_data.get('issue_title', '')}")
    return issue
|
||||
|
||||
def create_comment_datapoint(
    comment_data: Dict[str, Any], user: GitHubUser, nodesets: List[NodeSet]
) -> Comment:
    """Build a Comment DataPoint keyed on (repo, issue number, comment id),
    linked to the authoring user."""
    repo = comment_data.get("repo", "")
    issue_number = comment_data.get("issue_number", "")
    raw_comment_id = comment_data.get("comment_id", "")

    comment_key = f"{repo}:{issue_number}:{raw_comment_id}"
    comment_id = uuid5(NAMESPACE_OID, comment_key)

    comment = Comment(
        id=comment_id,
        name=str(comment_data.get("comment_id", "")),
        comment_id=str(comment_data.get("comment_id", "")),
        text=comment_data.get("body", ""),
        created_at=comment_data.get("created_at", ""),
        updated_at=comment_data.get("updated_at", ""),
        author_name=comment_data.get("login", ""),
        issue_number=comment_data.get("issue_number", 0),
        repo=comment_data.get("repo", ""),
        authored_by=user,
        belongs_to_set=nodesets,
    )

    logger.debug(f"Created Comment with ID: {comment_id}")
    return comment
|
||||
|
||||
def create_github_datapoints(github_data, nodesets: List[NodeSet]):
    """Create DataPoints from raw GitHub data.

    Currently only the user node is created; returns None for empty input.
    """
    if github_data:
        return create_github_user_datapoint(github_data["user"], nodesets)
    return None
||||
|
|
@ -0,0 +1,79 @@
|
|||
from uuid import uuid5, NAMESPACE_OID
|
||||
from typing import Optional, List
|
||||
from cognee.infrastructure.engine import DataPoint
|
||||
|
||||
|
||||
class File(DataPoint):
    """A source file inside a repository; a leaf node without child DataPoints."""

    filename: str  # file path as reported by the commit data
    name: str  # mirrors filename in the creator helpers
    repo: str  # owning repository, presumably "owner/name" — TODO confirm
    metadata: dict = {"index_fields": ["filename"]}  # index/search on the filename
||||
|
||||
|
||||
class GitHubUser(DataPoint):
    """A GitHub account profile node."""

    name: Optional[str]  # GitHub login (set from user_data["login"] by the creators)
    bio: Optional[str]
    company: Optional[str]
    location: Optional[str]
    public_repos: int
    followers: int
    following: int
    # Forward reference: Repository is declared later in this module.
    # NOTE(review): mutable [] default — assumes DataPoint (pydantic-style)
    # copies field defaults per instance; confirm.
    interacts_with: List["Repository"] = []
    metadata: dict = {"index_fields": ["name"]}  # index/search on the login
||||
|
||||
|
||||
class FileChange(DataPoint):
    """One file's modification within a single commit."""

    filename: str
    name: str  # mirrors filename
    status: str  # e.g. change status string from the GitHub API — TODO confirm values
    additions: int
    deletions: int
    changes: int
    text: str  # the diff/patch text; indexed below
    commit_sha: str
    repo: str
    modifies: str  # filename of the File node this change touches
    changed_by: GitHubUser  # author of the change
    metadata: dict = {"index_fields": ["text"]}  # index/search on the diff text
||||
|
||||
|
||||
class Comment(DataPoint):
    """A user's comment on an issue or pull request."""

    comment_id: str  # trailing segment of the comment URL (see providers)
    name: str  # mirrors comment_id
    text: str  # comment body; indexed below
    created_at: str  # ISO timestamp string from the GraphQL API
    updated_at: str
    author_name: str  # login of the commenting user
    issue_number: int
    repo: str
    authored_by: GitHubUser
    metadata: dict = {"index_fields": ["text"]}  # index/search on the body
||||
|
||||
|
||||
class Issue(DataPoint):
    """A GitHub issue (or PR, when is_pr is True) within a repository."""

    number: int
    name: str  # stringified issue number (see create_issue_datapoint)
    text: str  # issue title
    state: str
    repository: str
    is_pr: bool  # True when this node represents a pull request
    has_comment: List[Comment] = []  # comments attached to this issue
||||
|
||||
|
||||
class Commit(DataPoint):
    """A commit authored by a user, optionally linked to its file changes."""

    commit_sha: str
    name: str  # mirrors commit_sha (see create_commit_datapoint)
    text: str  # "Commit message:..." string built by the creator
    commit_date: str
    commit_url: str
    author_name: str  # login of the commit author
    repo: str
    has_change: List[FileChange] = []  # per-file changes in this commit
||||
|
||||
|
||||
class Repository(DataPoint):
    """A GitHub repository, aggregating its issues and commits."""

    name: str  # repository name key used for the deterministic UUID
    has_issue: List[Issue] = []
    has_commit: List[Commit] = []
||||
|
|
@ -0,0 +1,57 @@
|
|||
from github import Github
|
||||
from datetime import datetime
|
||||
from cognee.complex_demos.crewai_demo.src.crewai_demo.github_comment_providers import (
|
||||
IssueCommentsProvider,
|
||||
PrReviewsProvider,
|
||||
PrReviewCommentsProvider,
|
||||
)
|
||||
from cognee.complex_demos.crewai_demo.src.crewai_demo.github_comment_base import logger
|
||||
|
||||
|
||||
class GitHubDevComments:
    """Facade aggregating a developer's issue comments, PR reviews, and inline
    PR review comments into a single list."""

    def __init__(self, profile, limit=10, include_issue_details=True):
        """Store the GitHubDevProfile and default fetch parameters."""
        self.profile = profile
        self.limit = limit
        self.include_issue_details = include_issue_details

    def get_issue_comments(self):
        """Fetch and combine the user's recent comments of all three kinds.

        Returns None when the profile has no resolved user.
        """
        if not self.profile.user:
            logger.warning(f"No user found for profile {self.profile.username}")
            return None

        logger.debug(f"Fetching comments for {self.profile.username} with limit={self.limit}")

        token, username = self.profile.token, self.profile.username

        # Each provider applies its own internal fetch multipliers on top of
        # the shared base limit.
        issue_comments = IssueCommentsProvider(token, username, self.limit).get_comments()
        pr_reviews = PrReviewsProvider(token, username, self.limit).get_comments()
        pr_review_comments = PrReviewCommentsProvider(token, username, self.limit).get_comments()

        total_comments = issue_comments + pr_reviews + pr_review_comments
        logger.info(
            f"Retrieved {len(total_comments)} comments for {self.profile.username} "
            f"({len(issue_comments)} issue, {len(pr_reviews)} PR reviews, "
            f"{len(pr_review_comments)} PR review comments)"
        )

        return total_comments

    def set_limit(self, limit=None, include_issue_details=None):
        """Update fetch parameters; None keeps the current value."""
        if limit is not None:
            self.limit = limit
        if include_issue_details is not None:
            self.include_issue_details = include_issue_details
||||
|
|
@ -0,0 +1,195 @@
|
|||
from github import Github
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
class GitHubDevCommits:
    """Collects a GitHub developer's recent commits from merged pull requests."""

    def __init__(
        self,
        profile,
        days=30,
        prs_limit=10,
        commits_per_pr=5,
        include_files=False,
        skip_no_diff=False,
    ):
        """Store the GitHubDevProfile and the default search parameters."""
        self.profile = profile
        self.days = days
        self.prs_limit = prs_limit
        self.commits_per_pr = commits_per_pr
        self.include_files = include_files
        self.skip_no_diff = skip_no_diff
        # Per-file attributes copied into each file-change record.
        self.file_keys = ["filename", "status", "additions", "deletions", "changes", "diff"]

    def get_user_commits(self):
        """Return {"user": ..., "commits": [...]} or None when no user resolved."""
        if not self.profile.user:
            return None
        return {"user": self.profile.get_user_info(), "commits": self._collect_user_pr_commits()}

    def get_user_file_changes(self):
        """Return a flat list of file changes, each merged with its commit info."""
        if not self.profile.user:
            return None

        flat = []
        for commit in self._collect_user_pr_commits(include_files=True):
            if "files" not in commit:
                continue

            commit_info = {
                "repo": commit["repo"],
                "commit_sha": commit["sha"],
                "commit_message": commit["message"],
                "commit_date": commit["date"],
                "commit_url": commit["url"],
                "pr_number": commit.get("pr_number"),
                "pr_title": commit.get("pr_title"),
            }

            flat.extend(
                {**{key: file.get(key) for key in self.file_keys}, **commit_info}
                for file in commit["files"]
            )

        return flat

    def set_options(
        self, days=None, prs_limit=None, commits_per_pr=None, include_files=None, skip_no_diff=None
    ):
        """Update search parameters; None leaves the current value unchanged."""
        for attr, value in (
            ("days", days),
            ("prs_limit", prs_limit),
            ("commits_per_pr", commits_per_pr),
            ("include_files", include_files),
            ("skip_no_diff", skip_no_diff),
        ):
            if value is not None:
                setattr(self, attr, value)

    def _get_date_filter(self, days):
        """Return a " created:>=YYYY-MM-DD" search qualifier, or "" for falsy days."""
        if not days:
            return ""
        cutoff = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
        return " created:>=" + cutoff

    def _collect_user_pr_commits(self, include_files=None):
        """Collect the user's commits across their recent merged PRs, newest first."""
        if include_files is None:
            include_files = self.include_files

        prs = self._get_user_prs()
        if not prs:
            return []

        collected = []
        for pr in prs[: self.prs_limit]:
            collected.extend(self._get_commits_from_pr(pr, include_files))

        return sorted(collected, key=lambda c: c["date"], reverse=True)

    def _get_user_prs(self):
        """Search for merged PRs authored by the user; [] on error."""
        query = (
            f"author:{self.profile.username} is:pr is:merged{self._get_date_filter(self.days)}"
        )
        try:
            return list(self.profile.github.search_issues(query))
        except Exception as e:
            print(f"Error searching for PRs: {e}")
            return []

    def _get_commits_from_pr(self, pr_issue, include_files=None):
        """Extract the user's own commits (up to commits_per_pr) from one PR."""
        if include_files is None:
            include_files = self.include_files

        pr_info = self._get_pull_request_object(pr_issue)
        if not pr_info:
            return []
        repo_name, pr = pr_info

        all_commits = self._get_all_pr_commits(pr, pr_issue.number)
        if not all_commits:
            return []

        username = self.profile.username
        authored = [
            commit
            for commit in all_commits
            if commit.author and hasattr(commit.author, "login") and commit.author.login == username
        ]

        return [
            self._extract_commit_data(commit, repo_name, pr_issue, include_files)
            for commit in authored[: self.commits_per_pr]
        ]

    def _get_pull_request_object(self, pr_issue):
        """Resolve (repo_name, PullRequest) for a search result; None on error."""
        try:
            repo_name = pr_issue.repository.full_name
            pull = self.profile.github.get_repo(repo_name).get_pull(pr_issue.number)
            return (repo_name, pull)
        except Exception as e:
            print(f"Error accessing PR #{pr_issue.number}: {e}")
            return None

    def _get_all_pr_commits(self, pr, pr_number):
        """List every commit on a PR; None on error."""
        try:
            return list(pr.get_commits())
        except Exception as e:
            print(f"Error retrieving commits from PR #{pr_number}: {e}")
            return None

    def _extract_commit_data(self, commit, repo_name, pr_issue, include_files=None):
        """Build the commit record, optionally with its changed files."""
        if include_files is None:
            include_files = self.include_files

        record = {
            "repo": repo_name,
            "sha": commit.sha,
            "message": commit.commit.message,
            "date": commit.commit.author.date,
            "url": commit.html_url,
            "pr_number": pr_issue.number,
            "pr_title": pr_issue.title,
            "pr_url": pr_issue.html_url,
        }

        if include_files:
            record["files"] = self._extract_commit_files(commit)

        return record

    def _extract_commit_files(self, commit):
        """Extract per-file data (including diffs) from a commit."""
        extracted = []
        for file in commit.files:
            # Optionally drop files whose diff GitHub did not return.
            if self.skip_no_diff and not file.patch:
                continue

            entry = {key: getattr(file, key, None) for key in self.file_keys}
            if "diff" in self.file_keys:
                entry["diff"] = file.patch if file.patch else "No diff available for this file"

            extracted.append(entry)
        return extracted
||||
|
|
@ -0,0 +1,96 @@
|
|||
from github import Github
|
||||
from datetime import datetime
|
||||
import json
|
||||
import os
|
||||
from cognee.complex_demos.crewai_demo.src.crewai_demo.github_dev_comments import GitHubDevComments
|
||||
from cognee.complex_demos.crewai_demo.src.crewai_demo.github_dev_commits import GitHubDevCommits
|
||||
|
||||
|
||||
class GitHubDevProfile:
    """Entry point for a GitHub developer's profile, commits, and comments."""

    def __init__(self, username, token):
        """Resolve the user and wire up the comments/commits collaborators."""
        self.github = Github(token) if token else Github()
        self.token = token
        self.username = username
        self.user = self._get_user(username)
        # Everything below stays None when the user could not be resolved.
        self.user_info = self._extract_user_info() if self.user else None
        self.comments = GitHubDevComments(self) if self.user else None
        self.commits = GitHubDevCommits(self) if self.user else None

    def get_user_info(self):
        """Return the user info captured at construction time."""
        return self.user_info

    def get_user_repos(self, limit=None):
        """Return the user's repositories, optionally truncated to ``limit``."""
        if not self.user:
            return []
        repos = list(self.user.get_repos())
        return repos[:limit] if limit else repos

    def get_user_commits(self, days=30, prs_limit=5, commits_per_pr=3, include_files=False):
        """Fetch the user's recent commits from pull requests."""
        if not self.commits:
            return None

        self.commits.set_options(
            days=days,
            prs_limit=prs_limit,
            commits_per_pr=commits_per_pr,
            include_files=include_files,
        )
        return self.commits.get_user_commits()

    def get_user_file_changes(self, days=30, prs_limit=5, commits_per_pr=3, skip_no_diff=True):
        """Return a flat list of PR file changes with their commit context."""
        if not self.commits:
            return None

        self.commits.set_options(
            days=days,
            prs_limit=prs_limit,
            commits_per_pr=commits_per_pr,
            include_files=True,
            skip_no_diff=skip_no_diff,
        )
        return self.commits.get_user_file_changes()

    def get_issue_comments(self, limit=10, include_issue_details=True):
        """Fetch the user's recent comments across issues and PRs."""
        if not self.comments:
            return None

        self.comments.set_limit(limit=limit, include_issue_details=include_issue_details)
        return self.comments.get_issue_comments()

    def _get_user(self, username):
        """Resolve the GitHub user object; None when the API call fails."""
        try:
            return self.github.get_user(username)
        except Exception as e:
            print(f"Error connecting to GitHub API: {e}")
            return None

    def _extract_user_info(self):
        """Snapshot the basic profile attributes into a plain dict."""
        fields = (
            "login",
            "name",
            "bio",
            "company",
            "location",
            "public_repos",
            "followers",
            "following",
        )
        return {field: getattr(self.user, field) for field in fields}
||||
|
|
@ -0,0 +1,137 @@
|
|||
import json
|
||||
import asyncio
|
||||
import cognee
|
||||
from cognee.complex_demos.crewai_demo.src.crewai_demo.github_dev_profile import GitHubDevProfile
|
||||
|
||||
|
||||
def get_github_profile_data(
    username, token=None, days=30, prs_limit=5, commits_per_pr=3, issues_limit=5, max_comments=3
):
    """Fetch a user's profile info, PR commits (with files), and comments.

    Returns None when the user cannot be resolved.
    """
    profile = GitHubDevProfile(username, token or "")
    if not profile.user:
        return None

    commits_result = profile.get_user_commits(
        days=days, prs_limit=prs_limit, commits_per_pr=commits_per_pr, include_files=True
    )
    comments = profile.get_issue_comments(limit=max_comments, include_issue_details=True)

    return {
        "user": profile.get_user_info(),
        "commits": commits_result["commits"] if commits_result else [],
        "comments": comments or [],
    }
|
||||
|
||||
def get_github_file_changes(
    username, token=None, days=30, prs_limit=5, commits_per_pr=3, skip_no_diff=True
):
    """Fetch a flat list of a user's PR file changes with commit context.

    Returns None when the user cannot be resolved.
    """
    profile = GitHubDevProfile(username, token or "")
    if not profile.user:
        return None

    file_changes = profile.get_user_file_changes(
        days=days, prs_limit=prs_limit, commits_per_pr=commits_per_pr, skip_no_diff=skip_no_diff
    )

    return {"user": profile.get_user_info(), "file_changes": file_changes or []}
|
||||
|
||||
def get_github_data_for_cognee(
    username,
    token=None,
    days=30,
    prs_limit=3,
    commits_per_pr=3,
    issues_limit=3,
    max_comments=3,
    skip_no_diff=True,
):
    """Fetch GitHub data for a user, enriching file changes and comments with
    the user's profile fields for ingestion into Cognee.

    Returns None when the user cannot be resolved.
    """
    profile = GitHubDevProfile(username, token or "")
    if not profile.user:
        return None

    user_info = profile.get_user_info()

    file_changes = profile.get_user_file_changes(
        days=days, prs_limit=prs_limit, commits_per_pr=commits_per_pr, skip_no_diff=skip_no_diff
    )
    # Merge the profile fields into every file-change record.
    enriched_file_changes = [item | user_info for item in file_changes or []]

    comments = profile.get_issue_comments(limit=max_comments, include_issue_details=True)
    # Merge only the profile fields that don't collide with comment keys.
    enriched_comments = [
        comment | {k: v for k, v in user_info.items() if k not in comment}
        for comment in comments or []
    ]

    return {"user": user_info, "file_changes": enriched_file_changes, "comments": enriched_comments}
|
||||
|
||||
async def cognify_github_profile(username, token=None):
    """Ingest a user's GitHub data into Cognee under soft/technical node sets.

    Returns False when no data could be fetched, True after cognify completes.
    """
    github_data = get_github_data_for_cognee(username=username, token=token)
    if not github_data:
        return False

    # The user profile belongs to both the soft and technical graphs.
    await cognee.add(
        json.dumps(github_data["user"], default=str), node_set=["soft", "technical", username]
    )

    # Comments feed the "soft" (communication) graph.
    for comment in github_data["comments"]:
        await cognee.add(
            "Comment: " + json.dumps(comment, default=str), node_set=["soft", username]
        )

    # File changes feed the "technical" graph.
    for file_change in github_data["file_changes"]:
        await cognee.add(
            "File Change: " + json.dumps(file_change, default=str), node_set=["technical", username]
        )

    await cognee.cognify()
    return True
|
||||
|
||||
async def main(username):
    """Entry point for manually testing the Cognee ingest flow."""
    import os
    import dotenv
    from cognee.api.v1.visualize.visualize import visualize_graph

    dotenv.load_dotenv()
    token = os.getenv("GITHUB_TOKEN")

    await cognify_github_profile(username, token)
    # NOTE(review): graph visualization (visualize_graph into
    # .artifacts/github_graph.html) is currently disabled.
|
||||
|
||||
if __name__ == "__main__":
    import os
    import dotenv

    dotenv.load_dotenv()

    # NOTE(review): username is empty — fill in a GitHub login before
    # running this module directly.
    username = ""
    asyncio.run(main(username))
    # token = os.getenv("GITHUB_TOKEN")
    # github_data = get_github_data_for_cognee(username=username, token=token)
    # print(json.dumps(github_data, indent=2, default=str))
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue