feat: replace Owlready2 with RDFLib (#981)
## Description

Replaces Owlready2 with RDFLib as the library used to load OWL ontologies and resolve ontology nodes in `OntologyResolver`.

## DCO Affirmation

I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.

Co-authored-by: Igor Ilic <igorilic03@gmail.com>
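At a high level, the change swaps Owlready2's loaded-ontology objects for an `rdflib.Graph` that is parsed from the ontology file and then queried with plain triple patterns. A minimal sketch of that pattern (not code from this PR; the file name is hypothetical):

```python
# Minimal sketch of the approach this PR adopts: parse an OWL/RDF-XML file with
# rdflib and enumerate its classes and individuals via triple patterns.
from rdflib import Graph, RDF, OWL

graph = Graph()
graph.parse("ontology.owl", format="xml")  # explicit RDF/XML; rdflib can also infer the format

# OWL classes are subjects typed as owl:Class
classes = set(graph.subjects(RDF.type, OWL.Class))

# individuals are subjects whose rdf:type is one of those classes
individuals = {s for s, _, o in graph.triples((None, RDF.type, None)) if o in classes}

print(f"{len(classes)} classes, {len(individuals)} individuals")
```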
Parent: 456f3b58c0
Commit: acdcb0e8d9
7 changed files with 283 additions and 168 deletions
.github/workflows/test_mcp.yml (vendored, 2 changes)

@@ -35,7 +35,7 @@ jobs:
 # Remove Cognee wheel that came from PyPI
 uv pip uninstall cognee
 # Install of the freshly-checked-out Cognee branch
-uv pip install --no-deps --force-reinstall -e ../
+uv pip install --force-reinstall -e ../

 - name: Run MCP test
 env:
(changed file, path not shown in this extract: module defining expand_with_nodes_and_edges)

@@ -7,7 +7,6 @@ from cognee.modules.engine.utils import (
 generate_node_id,
 generate_node_name,
 )
-from owlready2 import Thing, ThingClass
 from cognee.shared.data_models import KnowledgeGraph
 from cognee.modules.ontology.rdf_xml.OntologyResolver import OntologyResolver

@@ -76,7 +75,7 @@ def expand_with_nodes_and_edges(
 ont_node_id = generate_node_id(ontology_node_to_store.name)
 ont_node_name = generate_node_name(ontology_node_to_store.name)

-if isinstance(ontology_node_to_store, ThingClass):
+if ontology_node_to_store.category == "classes":
 ont_node_key = f"{ont_node_id}_type"
 if (ont_node_key not in added_nodes_map) and (
 ont_node_key not in added_ontology_nodes_map

@@ -88,7 +87,7 @@ def expand_with_nodes_and_edges(
 ontology_valid=True,
 )

-elif isinstance(ontology_node_to_store, Thing):
+elif ontology_node_to_store.category == "individuals":
 ont_node_key = f"{ont_node_id}_entity"
 if (ont_node_key not in added_nodes_map) and (
 ont_node_key not in added_ontology_nodes_map

@@ -157,7 +156,7 @@ def expand_with_nodes_and_edges(
 ont_node_id = generate_node_id(ontology_node_to_store.name)
 ont_node_name = generate_node_name(ontology_node_to_store.name)

-if isinstance(ontology_node_to_store, ThingClass):
+if ontology_node_to_store.category == "classes":
 ont_node_key = f"{ont_node_id}_type"
 if (ont_node_key not in added_nodes_map) and (
 ont_node_key not in added_ontology_nodes_map

@@ -169,7 +168,7 @@ def expand_with_nodes_and_edges(
 ontology_valid=True,
 )

-elif isinstance(ontology_node_to_store, Thing):
+elif ontology_node_to_store.category == "individuals":
 ont_node_key = f"{ont_node_id}_entity"
 if (ont_node_key not in added_nodes_map) and (
 ont_node_key not in added_ontology_nodes_map
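The only behavioural change in this file is the type check: nodes coming from the resolver no longer need isinstance checks against Owlready2's ThingClass/Thing, because the wrapper object now carries a category string. A small self-contained sketch of that dispatch (the stub class below is illustrative, not the real AttachedOntologyNode):

```python
# Sketch of the dispatch change: node kind is read from a `category` attribute
# instead of checking owlready2 types with isinstance().
from dataclasses import dataclass


@dataclass
class OntologyNodeStub:  # stands in for AttachedOntologyNode
    name: str
    category: str  # "classes" or "individuals"


def node_key(node: OntologyNodeStub, node_id: str) -> str | None:
    if node.category == "classes":
        return f"{node_id}_type"      # class nodes get a *_type key
    elif node.category == "individuals":
        return f"{node_id}_entity"    # individuals get an *_entity key
    return None


print(node_key(OntologyNodeStub("Car", "classes"), "car"))        # car_type
print(node_key(OntologyNodeStub("Audi", "individuals"), "audi"))  # audi_entity
```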
cognee/modules/ontology/rdf_xml/OntologyResolver.py (file header not shown in this extract; path per the import above)

@@ -3,7 +3,7 @@ import difflib
 from cognee.shared.logging_utils import get_logger
 from collections import deque
 from typing import List, Tuple, Dict, Optional, Any
-from owlready2 import get_ontology, ClassConstruct, Ontology, Thing
+from rdflib import Graph, URIRef, RDF, RDFS, OWL

 from cognee.modules.ontology.exceptions import (
 OntologyInitializationError,
@@ -14,46 +14,85 @@ from cognee.modules.ontology.exceptions import (
 logger = get_logger("OntologyAdapter")


+class AttachedOntologyNode:
+"""Lightweight wrapper to be able to parse any ontology solution and generalize cognee interface."""

+def __init__(self, uri: URIRef, category: str):
+self.uri = uri
+self.name = self._extract_name(uri)
+self.category = category

+@staticmethod
+def _extract_name(uri: URIRef) -> str:
+uri_str = str(uri)
+if "#" in uri_str:
+return uri_str.split("#")[-1]
+return uri_str.rstrip("/").split("/")[-1]

+def __repr__(self):
+return f"AttachedOntologyNode(name={self.name}, category={self.category})"


 class OntologyResolver:
-def __init__(
-self,
-ontology_file: Optional[str] = None,
-fallback_url: str = "http://example.org/empty_ontology",
-):
+def __init__(self, ontology_file: Optional[str] = None):
 self.ontology_file = ontology_file
 try:
 if ontology_file and os.path.exists(ontology_file):
-self.ontology: Ontology = get_ontology(ontology_file).load()
+self.graph = Graph()
+self.graph.parse(ontology_file)
 logger.info("Ontology loaded successfully from file: %s", ontology_file)
 else:
 logger.info(
-"Ontology file '%s' not found. Using fallback ontology at %s",
+"Ontology file '%s' not found. No owl ontology will be attached to the graph.",
 ontology_file,
-fallback_url,
 )
-self.ontology = get_ontology(fallback_url)
+self.graph = None
 self.build_lookup()
 except Exception as e:
 logger.error("Failed to load ontology", exc_info=e)
 raise OntologyInitializationError() from e

+def _uri_to_key(self, uri: URIRef) -> str:
+uri_str = str(uri)
+if "#" in uri_str:
+name = uri_str.split("#")[-1]
+else:
+name = uri_str.rstrip("/").split("/")[-1]
+return name.lower().replace(" ", "_").strip()

 def build_lookup(self):
 try:
-self.lookup: Dict[str, Dict[str, Thing]] = {
-"classes": {
-cls.name.lower().replace(" ", "_").strip(): cls
-for cls in self.ontology.classes()
-},
-"individuals": {
-ind.name.lower().replace(" ", "_").strip(): ind
-for ind in self.ontology.individuals()
-},
+classes: Dict[str, URIRef] = {}
+individuals: Dict[str, URIRef] = {}

+if not self.graph:
+self.lookup: Dict[str, Dict[str, URIRef]] = {
+"classes": classes,
+"individuals": individuals,
+}

+return None

+for cls in self.graph.subjects(RDF.type, OWL.Class):
+key = self._uri_to_key(cls)
+classes[key] = cls

+for subj, _, obj in self.graph.triples((None, RDF.type, None)):
+if obj in classes.values():
+key = self._uri_to_key(subj)
+individuals[key] = subj

+self.lookup = {
+"classes": classes,
+"individuals": individuals,
 }
 logger.info(
 "Lookup built: %d classes, %d individuals",
-len(self.lookup["classes"]),
-len(self.lookup["individuals"]),
+len(classes),
+len(individuals),
 )

+return None
 except Exception as e:
 logger.error("Failed to build lookup dictionary: %s", str(e))
 raise RuntimeError("Lookup build failed") from e
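The rewritten build_lookup above derives its keys from URI local names rather than from Owlready2 entity names. A standalone sketch of the same idea using only rdflib (the namespace and triples are illustrative, not from the PR):

```python
# Build "classes" and "individuals" lookup dicts keyed by normalized local names.
from rdflib import Graph, Namespace, RDF, OWL, URIRef

EX = Namespace("http://example.org/test#")
g = Graph()
g.add((EX.Car, RDF.type, OWL.Class))
g.add((EX.Audi, RDF.type, EX.Car))


def uri_to_key(uri: URIRef) -> str:
    # local name after '#', or the last path segment, lowercased and normalized
    text = str(uri)
    name = text.split("#")[-1] if "#" in text else text.rstrip("/").split("/")[-1]
    return name.lower().replace(" ", "_").strip()


classes = {uri_to_key(c): c for c in g.subjects(RDF.type, OWL.Class)}
class_uris = set(classes.values())
individuals = {
    uri_to_key(s): s
    for s, _, o in g.triples((None, RDF.type, None))
    if o in class_uris
}

print(classes)      # {'car': <EX.Car URIRef>}
print(individuals)  # {'audi': <EX.Audi URIRef>}
```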
@@ -77,64 +116,90 @@ class OntologyResolver:
 logger.error("Error in find_closest_match: %s", str(e))
 raise FindClosestMatchError() from e

+def _get_category(self, uri: URIRef) -> str:
+if uri in self.lookup.get("classes", {}).values():
+return "classes"
+if uri in self.lookup.get("individuals", {}).values():
+return "individuals"
+return "unknown"

 def get_subgraph(
-self, node_name: str, node_type: str = "individuals"
+self, node_name: str, node_type: str = "individuals", directed: bool = True
 ) -> Tuple[List[Any], List[Tuple[str, str, str]], Optional[Any]]:
 nodes_set = set()
 edges: List[Tuple[str, str, str]] = []
-visited_nodes = set()
+visited = set()
 queue = deque()

 try:
 closest_match = self.find_closest_match(name=node_name, category=node_type)
 if not closest_match:
 logger.info("No close match found for '%s' in category '%s'", node_name, node_type)
-return list(nodes_set), edges, None
+return [], [], None

 node = self.lookup[node_type].get(closest_match)
 if node is None:
 logger.info("Node '%s' not found in lookup.", closest_match)
-return list(nodes_set), edges, None
+return [], [], None

-logger.info("%s match was found for found for '%s' node", node.name, node_name)
+logger.info("%s match was found for found for '%s' node", node, node_name)

 queue.append(node)
-visited_nodes.add(node)
+visited.add(node)
 nodes_set.add(node)

+obj_props = set(self.graph.subjects(RDF.type, OWL.ObjectProperty))

 while queue:
-current_node = queue.popleft()
+current = queue.popleft()
+current_label = self._uri_to_key(current)

-if hasattr(current_node, "is_a"):
-for parent in current_node.is_a:
-if isinstance(parent, ClassConstruct):
-if hasattr(parent, "value") and hasattr(parent.value, "name"):
-parent = parent.value
-else:
-continue
-edges.append((current_node.name, "is_a", parent.name))
-nodes_set.add(parent)
-if parent not in visited_nodes:
-visited_nodes.add(parent)
+if node_type == "individuals":
+for parent in self.graph.objects(current, RDF.type):
+parent_label = self._uri_to_key(parent)
+edges.append((current_label, "is_a", parent_label))
+if parent not in visited:
+visited.add(parent)
 queue.append(parent)
+nodes_set.add(parent)

-for prop in self.ontology.object_properties():
-for target in prop[current_node]:
-edges.append((current_node.name, prop.name, target.name))
-nodes_set.add(target)
-if target not in visited_nodes:
-visited_nodes.add(target)
+for parent in self.graph.objects(current, RDFS.subClassOf):
+parent_label = self._uri_to_key(parent)
+edges.append((current_label, "is_a", parent_label))
+if parent not in visited:
+visited.add(parent)
+queue.append(parent)
+nodes_set.add(parent)

+for prop in obj_props:
+prop_label = self._uri_to_key(prop)
+for target in self.graph.objects(current, prop):
+target_label = self._uri_to_key(target)
+edges.append((current_label, prop_label, target_label))
+if target not in visited:
+visited.add(target)
 queue.append(target)

-for source in prop.range:
-if current_node in prop[source]:
-edges.append((source.name, prop.name, current_node.name))
-nodes_set.add(source)
-if source not in visited_nodes:
-visited_nodes.add(source)
+nodes_set.add(target)
+if not directed:
+for source in self.graph.subjects(prop, current):
+source_label = self._uri_to_key(source)
+edges.append((source_label, prop_label, current_label))
+if source not in visited:
+visited.add(source)
 queue.append(source)
+nodes_set.add(source)

-return list(nodes_set), edges, node
+rdf_nodes = [
+AttachedOntologyNode(uri=uri, category=self._get_category(uri))
+for uri in list(nodes_set)
+]
+rdf_root = (
+AttachedOntologyNode(uri=node, category=self._get_category(node))
+if node is not None
+else None
+)

+return rdf_nodes, edges, rdf_root
 except Exception as e:
 logger.error("Error in get_subgraph: %s", str(e))
 raise GetSubgraphError() from e
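get_subgraph now performs a breadth-first walk over the rdflib graph: rdf:type and rdfs:subClassOf triples become is_a edges, and owl:ObjectProperty triples become named edges. A simplified, self-contained sketch of that traversal (it merges the individual/class branches that the method above handles separately; the data is illustrative):

```python
# BFS over an rdflib Graph, following type/subclass edges and object properties.
from collections import deque
from rdflib import Graph, Namespace, RDF, RDFS, OWL

EX = Namespace("http://example.org/test#")
g = Graph()
g.add((EX.Car, RDF.type, OWL.Class))
g.add((EX.Car, RDFS.subClassOf, EX.Vehicle))
g.add((EX.Audi, RDF.type, EX.Car))
g.add((EX.owns, RDF.type, OWL.ObjectProperty))
g.add((EX.VW, EX.owns, EX.Audi))

obj_props = set(g.subjects(RDF.type, OWL.ObjectProperty))
start = EX.Audi

visited, queue, edges = {start}, deque([start]), []
while queue:
    current = queue.popleft()
    # climb the type / class hierarchy (both predicates, for brevity)
    for parent in list(g.objects(current, RDF.type)) + list(g.objects(current, RDFS.subClassOf)):
        edges.append((current, "is_a", parent))
        if parent not in visited:
            visited.add(parent)
            queue.append(parent)
    # follow outgoing object-property links
    for prop in obj_props:
        for target in g.objects(current, prop):
            edges.append((current, prop, target))
            if target not in visited:
                visited.add(target)
                queue.append(target)

for s, p, o in edges:
    print(s, p, o)
```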
(changed file, path not shown in this extract: unit tests for OntologyResolver)

@@ -1,161 +1,174 @@
 import pytest
-from owlready2 import get_ontology, Thing
-from cognee.modules.ontology.rdf_xml.OntologyResolver import OntologyResolver
+from rdflib import Graph, Namespace, RDF, OWL, RDFS
+from cognee.modules.ontology.rdf_xml.OntologyResolver import OntologyResolver, AttachedOntologyNode


 def test_ontology_adapter_initialization_success():
 """Test successful initialization of OntologyAdapter."""
-ontology = get_ontology("http://example.org/test_ontology")

 adapter = OntologyResolver()
-adapter.ontology = ontology
 adapter.build_lookup()

-assert adapter.ontology is not None
 assert isinstance(adapter.lookup, dict)


 def test_ontology_adapter_initialization_file_not_found():
 """Test OntologyAdapter initialization with nonexistent file."""
 adapter = OntologyResolver(ontology_file="nonexistent.owl")
-assert adapter.ontology.base_iri == "http://example.org/empty_ontology#"
+assert adapter.graph is None


 def test_build_lookup():
 """Test the lookup dictionary is correctly built."""
-ontology = get_ontology("http://example.org/test_ontology")
+ns = Namespace("http://example.org/test#")
+g = Graph()

-with ontology:
+g.add((ns.Car, RDF.type, OWL.Class))

-class Car(Thing):
-pass
+g.add((ns.Audi, RDF.type, ns.Car))

-Car("Audi")
+resolver = OntologyResolver()
+resolver.graph = g
+resolver.build_lookup()

-adapter = OntologyResolver()
-adapter.ontology = ontology
-adapter.build_lookup()
+lookup = resolver.lookup
+assert isinstance(lookup, dict)

-assert isinstance(adapter.lookup, dict)
-assert "car" in adapter.lookup["classes"]
-assert "audi" in adapter.lookup["individuals"]
+assert "car" in lookup["classes"]
+assert lookup["classes"]["car"] == ns.Car

+assert "audi" in lookup["individuals"]
+assert lookup["individuals"]["audi"] == ns.Audi


 def test_find_closest_match_exact():
 """Test finding exact match in lookup."""
-ontology = get_ontology("http://example.org/test_ontology")

-with ontology:
+ns = Namespace("http://example.org/test#")
+g = Graph()

-class Car(Thing):
-pass
+g.add((ns.Car, RDF.type, OWL.Class))
+g.add((ns.Audi, RDF.type, ns.Car))

-Car("Audi")

-adapter = OntologyResolver()
-adapter.ontology = ontology
-adapter.build_lookup()

-result = adapter.find_closest_match("Audi", "individuals")
+resolver = OntologyResolver()
+resolver.graph = g
+resolver.build_lookup()

+result = resolver.find_closest_match("Audi", "individuals")
 assert result is not None
 assert result == "audi"


 def test_find_closest_match_fuzzy():
-"""Test fuzzy matching for lookup."""
-ontology = get_ontology("http://example.org/test_ontology")
+"""Test fuzzy matching for lookup using the RDFlib adapter."""

-with ontology:
+ns = Namespace("http://example.org/test#")

-class Car(Thing):
-pass
+g = Graph()

-Car("Audi")
-Car("BMW")
+g.add((ns.Car, RDF.type, OWL.Class))

-adapter = OntologyResolver()
-adapter.ontology = ontology
-adapter.build_lookup()
+g.add((ns.Audi, RDF.type, ns.Car))
+g.add((ns.BMW, RDF.type, ns.Car))

-result = adapter.find_closest_match("Audii", "individuals")
+resolver = OntologyResolver()
+resolver.graph = g
+resolver.build_lookup()

+result = resolver.find_closest_match("Audii", "individuals")

 assert result == "audi"


 def test_find_closest_match_no_match():
-"""Test no match found in lookup."""
-ontology = get_ontology("http://example.org/test_ontology")
+"""Test that find_closest_match returns None when there is no match."""
+ns = Namespace("http://example.org/test#")

-adapter = OntologyResolver()
-adapter.ontology = ontology
-adapter.build_lookup()
+g = Graph()

-result = adapter.find_closest_match("Nonexistent", "individuals")
+g.add((ns.Car, RDF.type, OWL.Class))

+g.add((ns.Audi, RDF.type, ns.Car))
+g.add((ns.BMW, RDF.type, ns.Car))

+resolver = OntologyResolver()
+resolver.graph = g
+resolver.build_lookup()

+result = resolver.find_closest_match("Nonexistent", "individuals")

 assert result is None


-def test_get_subgraph_no_match():
-"""Test get_subgraph with no matching node."""
-ontology = get_ontology("http://example.org/test_ontology")
+def test_get_subgraph_no_match_rdflib():
+"""Test get_subgraph returns empty results for a non-existent node."""
+g = Graph()

-adapter = OntologyResolver()
-adapter.ontology = ontology
-adapter.build_lookup()
+resolver = OntologyResolver()
+resolver.graph = g
+resolver.build_lookup()

-nodes, relationships, start_node = adapter.get_subgraph("Nonexistent", "individuals")
+nodes, relationships, start_node = resolver.get_subgraph("Nonexistent", "individuals")

 assert nodes == []
 assert relationships == []
 assert start_node is None


-def test_get_subgraph_success():
-"""Test successful retrieval of subgraph."""
-ontology = get_ontology("http://example.org/test_ontology")
+def test_get_subgraph_success_rdflib():
+"""Test successful retrieval of subgraph using the RDFlib adapter."""

-with ontology:
+ns = Namespace("http://example.org/test#")
+g = Graph()

-class Company(Thing):
-pass
+g.add((ns.Company, RDF.type, OWL.Class))
+g.add((ns.Vehicle, RDF.type, OWL.Class))
+g.add((ns.Car, RDF.type, OWL.Class))

-class Vehicle(Thing):
-pass
+g.add((ns.Vehicle, RDFS.subClassOf, OWL.Thing))
+g.add((ns.Car, RDFS.subClassOf, ns.Vehicle))

-class Car(Vehicle):
-pass
+g.add((ns.Audi, RDF.type, ns.Car))
+g.add((ns.Porsche, RDF.type, ns.Car))
+g.add((ns.VW, RDF.type, ns.Company))

-audi = Car("Audi")
-porsche = Car("Porsche")
-vw = Company("VW")
+owns = ns.owns
+g.add((owns, RDF.type, OWL.ObjectProperty))
+g.add((ns.VW, owns, ns.Audi))
+g.add((ns.VW, owns, ns.Porsche))

-vw.owns = [audi, porsche]
+resolver = OntologyResolver()
+resolver.graph = g
+resolver.build_lookup()

-adapter = OntologyResolver()
-adapter.ontology = ontology
-adapter.build_lookup()
+nodes, relationships, start_node = resolver.get_subgraph("Audi", "individuals")

-nodes, relationships, start_node = adapter.get_subgraph("Audi", "individuals")
+uris = {n.uri for n in nodes}
+assert ns.Audi in uris
+assert ns.Car in uris
+assert ns.Vehicle in uris
+assert OWL.Thing in uris

-assert audi in nodes
-assert Car in nodes
-assert Vehicle in nodes
-assert Thing in nodes
-assert ("Audi", "is_a", "Car") in relationships
-assert ("Car", "is_a", "Vehicle") in relationships
-assert ("Vehicle", "is_a", "Thing") in relationships
+rels = set(relationships)
+assert ("audi", "is_a", "car") in rels
+assert ("car", "is_a", "vehicle") in rels
+assert ("vehicle", "is_a", "thing") in rels

+assert isinstance(start_node, AttachedOntologyNode)
+assert start_node.uri == ns.Audi


-def test_refresh_lookup():
-"""Test refreshing lookup rebuilds the dictionary."""
-ontology = get_ontology("http://example.org/test_ontology")
+def test_refresh_lookup_rdflib():
+"""Test that refresh_lookup rebuilds the lookup dict into a new object."""
+g = Graph()

-adapter = OntologyResolver()
-adapter.ontology = ontology
-adapter.build_lookup()
+resolver = OntologyResolver()
+resolver.graph = g
+resolver.build_lookup()

-original_lookup = adapter.lookup.copy()
-adapter.refresh_lookup()
+original_lookup = resolver.lookup

-assert adapter.lookup is not original_lookup
+resolver.refresh_lookup()

+assert resolver.lookup is not original_lookup
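A usage sketch mirroring the tests above: attach an in-memory rdflib graph to the resolver, build the lookup, and ask for a subgraph. The graph contents are illustrative, and the printed values are indicative only; the resolver can also be constructed from a file via OntologyResolver(ontology_file="path/to/ontology.owl") (hypothetical path).

```python
# End-to-end use of the new resolver, following the pattern of the tests above.
from rdflib import Graph, Namespace, RDF, RDFS, OWL
from cognee.modules.ontology.rdf_xml.OntologyResolver import (
    OntologyResolver,
    AttachedOntologyNode,
)

ns = Namespace("http://example.org/test#")
g = Graph()
g.add((ns.Car, RDF.type, OWL.Class))
g.add((ns.Car, RDFS.subClassOf, ns.Vehicle))
g.add((ns.Audi, RDF.type, ns.Car))

resolver = OntologyResolver()
resolver.graph = g          # inject the graph directly, as the tests do
resolver.build_lookup()

nodes, edges, root = resolver.get_subgraph("Audi", "individuals")
assert isinstance(root, AttachedOntologyNode) and root.uri == ns.Audi
print([n.name for n in nodes])  # e.g. ['Audi', 'Car', 'Vehicle']
print(edges)                    # e.g. [('audi', 'is_a', 'car'), ('car', 'is_a', 'vehicle')]
```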
poetry.lock (generated, 52 changes)

@@ -3650,6 +3650,19 @@ files = [
 [package.dependencies]
 pygments = "*"

+[[package]]
+name = "isodate"
+version = "0.7.2"
+description = "An ISO 8601 date/time/duration parser and formatter"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version < \"3.11\""
+files = [
+{file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"},
+{file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"},
+]

 [[package]]
 name = "isoduration"
 version = "20.11.0"

@@ -6829,20 +6842,6 @@ files = [
 {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"},
 ]

-[[package]]
-name = "owlready2"
-version = "0.47"
-description = "A package for ontology-oriented programming in Python: load OWL 2.0 ontologies as Python objects, modify them, save them, and perform reasoning via HermiT. Includes an optimized RDF quadstore."
-optional = false
-python-versions = ">=3.6"
-groups = ["main"]
-files = [
-{file = "owlready2-0.47.tar.gz", hash = "sha256:af7e1d2205c0b5886d2e34397ab8c10ca29ff68c3dc3702d43393966ac7f6eb0"},
-]

-[package.extras]
-test = ["flask", "gevent", "rdflib"]

 [[package]]
 name = "packaging"
 version = "24.2"

@@ -9100,6 +9099,29 @@ files = [
 [package.extras]
 all = ["numpy"]

+[[package]]
+name = "rdflib"
+version = "7.1.4"
+description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information."
+optional = false
+python-versions = "<4.0.0,>=3.8.1"
+groups = ["main"]
+files = [
+{file = "rdflib-7.1.4-py3-none-any.whl", hash = "sha256:72f4adb1990fa5241abd22ddaf36d7cafa5d91d9ff2ba13f3086d339b213d997"},
+{file = "rdflib-7.1.4.tar.gz", hash = "sha256:fed46e24f26a788e2ab8e445f7077f00edcf95abb73bcef4b86cefa8b62dd174"},
+]

+[package.dependencies]
+isodate = {version = ">=0.7.2,<1.0.0", markers = "python_version < \"3.11\""}
+pyparsing = ">=2.1.0,<4"

+[package.extras]
+berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"]
+html = ["html5rdf (>=1.2,<2)"]
+lxml = ["lxml (>=4.3,<6.0)"]
+networkx = ["networkx (>=2,<4)"]
+orjson = ["orjson (>=3.9.14,<4)"]

 [[package]]
 name = "readme-renderer"
 version = "44.0"

@@ -12037,4 +12059,4 @@ weaviate = ["weaviate-client"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<=3.13"
-content-hash = "4d5f5cfe7072a53e4d9d38e5503a9839b555add9087b8947e5eecf0f80b9cbbb"
+content-hash = "caff6dd76472193be899359bd8f74842050b0bd296239349035f2f77a656a809"
pyproject.toml

@@ -36,7 +36,7 @@ dependencies = [
 "filetype>=1.2.0",
 "aiohttp>=3.11.14",
 "aiofiles>=23.2.1",
-"owlready2>=0.47,<0.48",
+"rdflib>=7.1.4,<7.2.0",
 "graphistry>=0.33.5,<0.34",
 "pypdf>=4.1.0,<6.0.0",
 "jinja2>=3.1.3,<4",
uv.lock (generated, 32 changes)

@@ -892,7 +892,6 @@ dependencies = [
 { name = "numpy", version = "2.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
 { name = "onnxruntime" },
 { name = "openai" },
-{ name = "owlready2" },
 { name = "pandas" },
 { name = "pre-commit" },
 { name = "pydantic" },

@@ -901,6 +900,7 @@ dependencies = [
 { name = "pypdf" },
 { name = "python-dotenv" },
 { name = "python-multipart" },
+{ name = "rdflib" },
 { name = "scikit-learn" },
 { name = "sentry-sdk", extra = ["fastapi"] },
 { name = "sqlalchemy" },

@@ -1078,7 +1078,6 @@ requires-dist = [
 { name = "numpy", specifier = ">=1.26.4,<=2.1" },
 { name = "onnxruntime", specifier = "<=1.21.1" },
 { name = "openai", specifier = ">=1.59.4,<2" },
-{ name = "owlready2", specifier = ">=0.47,<0.48" },
 { name = "pandas", specifier = ">=2.2.2" },
 { name = "pgvector", marker = "extra == 'postgres'", specifier = ">=0.3.5,<0.4" },
 { name = "pgvector", marker = "extra == 'postgres-binary'", specifier = ">=0.3.5,<0.4" },

@@ -1103,6 +1102,7 @@ requires-dist = [
 { name = "python-multipart", specifier = "==0.0.20" },
 { name = "qasync", marker = "extra == 'gui'", specifier = ">=0.27.1,<0.28" },
 { name = "qdrant-client", marker = "extra == 'qdrant'", specifier = ">=1.14.2,<2" },
+{ name = "rdflib", specifier = ">=7.1.4,<7.2.0" },
 { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.9.2,<1.0.0" },
 { name = "s3fs", extras = ["boto3"], marker = "extra == 'aws'", specifier = "==2025.3.2" },
 { name = "scikit-learn", specifier = ">=1.6.1,<2" },

@@ -2784,6 +2784,15 @@ wheels = [
 { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074 },
 ]

+[[package]]
+name = "isodate"
+version = "0.7.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705 }
+wheels = [
+{ url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320 },
+]

 [[package]]
 name = "isoduration"
 version = "20.11.0"

@@ -5075,12 +5084,6 @@ wheels = [
 { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832 },
 ]

-[[package]]
-name = "owlready2"
-version = "0.47"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/6a/8b/6f0534ff7285e8d97e36a3838f88ffc8deb7cd0bc7b34f9f97d8bfdaae90/owlready2-0.47.tar.gz", hash = "sha256:af7e1d2205c0b5886d2e34397ab8c10ca29ff68c3dc3702d43393966ac7f6eb0", size = 27271190 }

 [[package]]
 name = "packaging"
 version = "24.2"

@@ -6635,6 +6638,19 @@ wheels = [
 { url = "https://files.pythonhosted.org/packages/c1/c5/c243b05a15a27b946180db0d1e4c999bef3f4221505dff9748f1f6c917be/rapidfuzz-3.13.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f219f1e3c3194d7a7de222f54450ce12bc907862ff9a8962d83061c1f923c86", size = 1553782 },
 ]

+[[package]]
+name = "rdflib"
+version = "7.1.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+{ name = "isodate", marker = "python_full_version < '3.11'" },
+{ name = "pyparsing" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e8/7e/cb2d74466bd8495051ebe2d241b1cb1d4acf9740d481126aef19ef2697f5/rdflib-7.1.4.tar.gz", hash = "sha256:fed46e24f26a788e2ab8e445f7077f00edcf95abb73bcef4b86cefa8b62dd174", size = 4692745 }
+wheels = [
+{ url = "https://files.pythonhosted.org/packages/f4/31/e9b6f04288dcd3fa60cb3179260d6dad81b92aef3063d679ac7d80a827ea/rdflib-7.1.4-py3-none-any.whl", hash = "sha256:72f4adb1990fa5241abd22ddaf36d7cafa5d91d9ff2ba13f3086d339b213d997", size = 565051 },
+]

 [[package]]
 name = "readme-renderer"
 version = "44.0"