<!-- .github/pull_request_template.md -->

## Description
<!-- Provide a clear description of the changes in this PR -->

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin.
Commit f77183d001 (parent 2b1c17404c), authored by Boris on 2025-07-23 15:35:21 +02:00 and committed via GitHub. 6 changed files with 1741 additions and 721 deletions.


```diff
@@ -1,12 +1,11 @@
 import asyncio
+from alembic import context
 from logging.config import fileConfig
 from sqlalchemy import pool
 from sqlalchemy.engine import Connection
 from sqlalchemy.ext.asyncio import async_engine_from_config
-from cognee.infrastructure.databases.relational import Base
-from alembic import context
-from cognee.infrastructure.databases.relational import get_relational_engine
+from cognee.infrastructure.databases.relational import get_relational_engine, Base

 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.
@@ -88,6 +87,11 @@ db_engine = get_relational_engine()
 print("Using database:", db_engine.db_uri)

 if "sqlite" in db_engine.db_uri:
+    from cognee.infrastructure.utils.run_sync import run_sync
+
+    run_sync(db_engine.create_database())
+
     config.set_section_option(
         config.config_ini_section,
         "SQLALCHEMY_DATABASE_URI",
```

File diff suppressed because it is too large.


```diff
@@ -18,6 +18,11 @@ function useDatasets() {
   const fetchDatasetStatuses = useCallback((datasets: Dataset[]) => {
     fetch(
       `/v1/datasets/status?dataset=${datasets.map(d => d.id).join('&dataset=')}`,
+      {
+        headers: {
+          "Content-Type": "application/json",
+        },
+      },
     )
       .then((response) => response.json())
       .then((statuses) => setDatasets(
@@ -69,7 +74,11 @@ function useDatasets() {
   }, []);

   const fetchDatasets = useCallback(() => {
-    return fetch('/v1/datasets')
+    return fetch('/v1/datasets', {
+      headers: {
+        "Content-Type": "application/json",
+      },
+    })
       .then((response) => response.json())
       .then((datasets) => {
         setDatasets(datasets);
```


```diff
@@ -4,7 +4,7 @@ let numberOfRetries = 0;
 const isAuth0Enabled = process.env.USE_AUTH0_AUTHORIZATION?.toLowerCase() === "true";
-const backendApiUrl = process.env.NEXT_PUBLIC_BACKEND_API_URL;
+const backendApiUrl = process.env.NEXT_PUBLIC_BACKEND_API_URL || "http://localhost:8000/api";

 export default async function fetch(url: string, options: RequestInit = {}): Promise<Response> {
   function retry(lastError: Response) {
@@ -18,7 +18,7 @@ export default async function fetch(url: string, options: RequestInit = {}): Promise<Response> {
     numberOfRetries += 1;

-    return window.fetch("/auth/token")
+    return global.fetch("/auth/token")
       .then(() => {
         return fetch(url, options);
       });
```


```diff
@@ -74,7 +74,9 @@ if CORS_ALLOWED_ORIGINS:
         origin.strip() for origin in CORS_ALLOWED_ORIGINS.split(",") if origin.strip()
     ]
 else:
-    allowed_origins = []  # Block all except explicitly set origins
+    allowed_origins = [
+        "http://localhost:3000",
+    ]  # Block all except explicitly set origins

 app.add_middleware(
     CORSMiddleware,
```
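When `CORS_ALLOWED_ORIGINS` is unset, the fallback now allows the local frontend dev server instead of blocking every origin. Here is a self-contained sketch of that behaviour, assuming the same environment variable; the extra middleware keyword arguments are typical values, not necessarily the project's exact settings.

```python
import os

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

cors_allowed_origins = os.getenv("CORS_ALLOWED_ORIGINS", "")

if cors_allowed_origins:
    # Comma-separated list of explicitly allowed origins.
    allowed_origins = [
        origin.strip() for origin in cors_allowed_origins.split(",") if origin.strip()
    ]
else:
    # Default to the local frontend dev server so a plain local setup works
    # without extra configuration; every other origin stays blocked.
    allowed_origins = ["http://localhost:3000"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=allowed_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
```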


```diff
@@ -290,7 +290,7 @@ def get_datasets_router() -> APIRouter:
         if dataset is None:
             raise DatasetNotFoundError(message=f"Dataset ({str(dataset_id)}) not found.")

-        graph_data = await get_formatted_graph_data(dataset)
+        graph_data = await get_formatted_graph_data(dataset.id, user.id)

         return graph_data
```
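The last hunk passes plain IDs (`dataset.id`, `user.id`) to `get_formatted_graph_data` rather than the dataset object itself, so the graph formatter can resolve everything it needs in its own scope. A hypothetical, self-contained illustration of that pattern follows; every name below is a placeholder, not cognee's actual implementation.

```python
import asyncio
from dataclasses import dataclass
from uuid import UUID, uuid4


@dataclass
class Dataset:
    id: UUID
    owner_id: UUID
    name: str


# A dict stands in for the database the real service would query.
FAKE_DB: dict[UUID, Dataset] = {}


async def get_formatted_graph_data(dataset_id: UUID, user_id: UUID) -> dict:
    # Loading by IDs keeps the lookup and the ownership check inside this
    # function's own scope, instead of relying on a possibly detached ORM object.
    dataset = FAKE_DB.get(dataset_id)
    if dataset is None or dataset.owner_id != user_id:
        raise LookupError(f"Dataset ({dataset_id}) not found.")
    return {"nodes": [{"id": str(dataset.id), "label": dataset.name}], "edges": []}


async def main() -> None:
    user_id = uuid4()
    dataset = Dataset(id=uuid4(), owner_id=user_id, name="demo")
    FAKE_DB[dataset.id] = dataset
    print(await get_formatted_graph_data(dataset.id, user_id))


asyncio.run(main())
```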