diff --git a/cognee/api/v1/ontologies/routers/get_ontology_router.py b/cognee/api/v1/ontologies/routers/get_ontology_router.py
index ee31c683f..d1d5920a6 100644
--- a/cognee/api/v1/ontologies/routers/get_ontology_router.py
+++ b/cognee/api/v1/ontologies/routers/get_ontology_router.py
@@ -15,9 +15,9 @@ def get_ontology_router() -> APIRouter:
@router.post("", response_model=dict)
async def upload_ontology(
- ontology_key: str = Form(...),
+ ontology_key: List[str] = Form(...),
ontology_file: List[UploadFile] = File(...),
- descriptions: Optional[str] = Form(None),
+ descriptions: Optional[List[str]] = Form(None),
user: User = Depends(get_authenticated_user),
):
"""
@@ -28,9 +28,9 @@ def get_ontology_router() -> APIRouter:
-        - Multiple files: ontology_key=["key1", "key2"], ontology_file=[file1, file2]
+        - Multiple files: repeat the field (ontology_key=key1&ontology_key=key2), ontology_file=[file1, file2]
## Request Parameters
- - **ontology_key** (str): JSON array string of user-defined identifiers for the ontologies
+ - **ontology_key** (List[str]): Repeated field (e.g. ontology_key=foo&ontology_key=bar) of user-defined identifiers
- **ontology_file** (List[UploadFile]): OWL format ontology files
- - **descriptions** (Optional[str]): JSON array string of optional descriptions
+ - **descriptions** (Optional[List[str]]): Repeated optional descriptions aligned with ontology_key
## Response
Returns metadata about uploaded ontologies including keys, filenames, sizes, and upload timestamps.
@@ -49,16 +49,8 @@ def get_ontology_router() -> APIRouter:
)
try:
- import json
-
- ontology_keys = json.loads(ontology_key)
- description_list = json.loads(descriptions) if descriptions else None
-
- if not isinstance(ontology_keys, list):
- raise ValueError("ontology_key must be a JSON array")
-
results = await ontology_service.upload_ontologies(
- ontology_keys, ontology_file, user, description_list
+ ontology_key, ontology_file, user, descriptions
)
return {
@@ -73,8 +65,8 @@ def get_ontology_router() -> APIRouter:
                     for result in results
                 ]
             }
-        except (json.JSONDecodeError, ValueError) as e:
+        except ValueError as e:
             return JSONResponse(status_code=400, content={"error": str(e)})
         except Exception as e:
             return JSONResponse(status_code=500, content={"error": str(e)})
diff --git a/cognee/tests/unit/api/Ontologies_test.py b/cognee/tests/unit/api/Ontologies_test.py
new file mode 100644
index 000000000..89a1e22b8
--- /dev/null
+++ b/cognee/tests/unit/api/Ontologies_test.py
@@ -0,0 +1,138 @@
+import io
+import json
+import tempfile
+from types import SimpleNamespace
+
+import pytest
+from fastapi import UploadFile
+
+from cognee.api.v1.ontologies.ontologies import OntologyService
+
+
+@pytest.mark.asyncio
+async def test_upload_single_ontology_creates_metadata(tmp_path, monkeypatch):
+ monkeypatch.setattr(tempfile, "gettempdir", lambda: str(tmp_path))
+ service = OntologyService()
+ user = SimpleNamespace(id="ontology-user")
+ file_content = b"Ontology content"
+ ontology_file = UploadFile(filename="animals.owl", file=io.BytesIO(file_content))
+
+ result = await service.upload_ontology(
+ ontology_key="animals",
+ file=ontology_file,
+ user=user,
+ description="Animal relationships",
+ )
+
+ assert result.ontology_key == "animals"
+ assert result.filename == "animals.owl"
+ assert result.size_bytes == len(file_content)
+ assert result.description == "Animal relationships"
+
+ user_dir = service.base_dir / user.id
+ stored_file = user_dir / "animals.owl"
+ assert stored_file.exists()
+ assert stored_file.read_bytes() == file_content
+
+ metadata = json.loads((user_dir / "metadata.json").read_text())
+ saved_metadata = metadata["animals"]
+ assert saved_metadata["filename"] == "animals.owl"
+ assert saved_metadata["size_bytes"] == len(file_content)
+ assert saved_metadata["description"] == "Animal relationships"
+ assert saved_metadata["uploaded_at"] == result.uploaded_at
+
+
+@pytest.mark.asyncio
+async def test_upload_multiple_ontologies(tmp_path, monkeypatch):
+ monkeypatch.setattr(tempfile, "gettempdir", lambda: str(tmp_path))
+ service = OntologyService()
+ user = SimpleNamespace(id="ontology-user")
+ contents = {
+ "animals": b"Animal ontology",
+ "plants": b"Plant ontology",
+ }
+ filenames = {"animals": "animals.owl", "plants": "plants.owl"}
+ descriptions = {"animals": "Animal data", "plants": "Plant data"}
+ files = [
+ UploadFile(filename=filenames[key], file=io.BytesIO(contents[key]))
+ for key in ["animals", "plants"]
+ ]
+
+ results = await service.upload_ontologies(
+ ["animals", "plants"], files, user, [descriptions["animals"], descriptions["plants"]]
+ )
+
+ assert [res.ontology_key for res in results] == ["animals", "plants"]
+ for res in results:
+ assert res.filename == filenames[res.ontology_key]
+ assert res.size_bytes == len(contents[res.ontology_key])
+ assert res.description == descriptions[res.ontology_key]
+
+ user_dir = service.base_dir / user.id
+ metadata = json.loads((user_dir / "metadata.json").read_text())
+
+ for key in ["animals", "plants"]:
+ stored_file = user_dir / f"{key}.owl"
+ assert stored_file.exists()
+ assert stored_file.read_bytes() == contents[key]
+
+ saved_metadata = metadata[key]
+ assert saved_metadata["filename"] == filenames[key]
+ assert saved_metadata["size_bytes"] == len(contents[key])
+ assert saved_metadata["description"] == descriptions[key]
+
+
+@pytest.mark.asyncio
+async def test_get_ontology_contents_returns_uploaded_data(tmp_path, monkeypatch):
+ monkeypatch.setattr(tempfile, "gettempdir", lambda: str(tmp_path))
+ service = OntologyService()
+ user = SimpleNamespace(id="ontology-user")
+ uploads = {
+ "animals": b"Animals",
+ "plants": b"Plants",
+ }
+
+ for key, content in uploads.items():
+ await service.upload_ontology(
+ ontology_key=key,
+ file=UploadFile(filename=f"{key}.owl", file=io.BytesIO(content)),
+ user=user,
+ )
+
+ contents = service.get_ontology_contents(["animals", "plants"], user)
+
+ assert contents == [uploads["animals"].decode(), uploads["plants"].decode()]
+
+
+@pytest.mark.asyncio
+async def test_list_ontologies_returns_metadata(tmp_path, monkeypatch):
+ monkeypatch.setattr(tempfile, "gettempdir", lambda: str(tmp_path))
+ service = OntologyService()
+ user = SimpleNamespace(id="ontology-user")
+
+ uploads = {
+ "animals": {
+ "content": b"Animals",
+ "description": "Animal ontology",
+ },
+ "plants": {
+ "content": b"Plants",
+ "description": "Plant ontology",
+ },
+ }
+
+ for key, payload in uploads.items():
+ await service.upload_ontology(
+ ontology_key=key,
+ file=UploadFile(filename=f"{key}.owl", file=io.BytesIO(payload["content"])),
+ user=user,
+ description=payload["description"],
+ )
+
+ metadata = service.list_ontologies(user)
+
+ for key, payload in uploads.items():
+ entry = metadata[key]
+ assert entry["filename"] == f"{key}.owl"
+ assert entry["size_bytes"] == len(payload["content"])
+ assert entry["description"] == payload["description"]