clean up poetry

vasilije 2025-08-27 17:33:59 +02:00
parent 66673af56d
commit 10ece0638f
4 changed files with 35 additions and 24 deletions

View file

@@ -3,7 +3,10 @@ from typing import Optional, ClassVar
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from pydantic import model_validator
from baml_py import ClientRegistry
try:
    from baml_py import ClientRegistry
except ImportError:
    ClientRegistry = None
class LLMConfig(BaseSettings):
@@ -63,25 +66,27 @@ class LLMConfig(BaseSettings):
fallback_endpoint: str = ""
fallback_model: str = ""
baml_registry: ClassVar[ClientRegistry] = ClientRegistry()
baml_registry: ClassVar = None
model_config = SettingsConfigDict(env_file=".env", extra="allow")
def model_post_init(self, __context) -> None:
"""Initialize the BAML registry after the model is created."""
self.baml_registry.add_llm_client(
name=self.baml_llm_provider,
provider=self.baml_llm_provider,
options={
"model": self.baml_llm_model,
"temperature": self.baml_llm_temperature,
"api_key": self.baml_llm_api_key,
"base_url": self.baml_llm_endpoint,
"api_version": self.baml_llm_api_version,
},
)
# Sets the primary client
self.baml_registry.set_primary(self.baml_llm_provider)
if ClientRegistry is not None:
self.baml_registry = ClientRegistry()
self.baml_registry.add_llm_client(
name=self.baml_llm_provider,
provider=self.baml_llm_provider,
options={
"model": self.baml_llm_model,
"temperature": self.baml_llm_temperature,
"api_key": self.baml_llm_api_key,
"base_url": self.baml_llm_endpoint,
"api_version": self.baml_llm_api_version,
},
)
# Sets the primary client
self.baml_registry.set_primary(self.baml_llm_provider)
@model_validator(mode="after")
def ensure_env_vars_for_ollama(self) -> "LLMConfig":
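
Reviewer note: the hunk above is the standard optional-dependency pattern: the import is wrapped in try/except with a None sentinel, the annotation loosens from ClassVar[ClientRegistry] to a bare ClassVar (since ClientRegistry may be None at class-definition time), and the registry is only constructed when the import succeeded. A minimal, self-contained sketch of that pattern, with placeholder names ("optional_lib", "Registry", "Settings") rather than cognee's actual module layout:

# Sketch of the optional-import pattern used in the diff above.
# "optional_lib" and "Registry" are placeholders, not real baml_py names.
try:
    from optional_lib import Registry  # optional extra; may be absent
except ImportError:
    Registry = None  # sentinel: feature disabled without the extra

class Settings:
    registry = None  # stays None when the optional dependency is missing

    def init_registry(self) -> None:
        # Construct the client only if the import above succeeded.
        if Registry is not None:
            self.registry = Registry()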

View file

@@ -37,6 +37,9 @@ async def extract_summary(content: str, response_model: Type[BaseModel]):
"""
config = get_llm_config()
if config.baml_registry is None:
raise ImportError("BAML is not available. Please install with 'pip install cognee[baml]' to use BAML extraction features.")
# Use BAML's SummarizeContent function
summary_result = await b.SummarizeContent(
content, baml_options={"client_registry": config.baml_registry}
@@ -77,6 +80,9 @@ async def extract_code_summary(content: str):
    try:
        config = get_llm_config()
        if config.baml_registry is None:
            raise ImportError("BAML is not available. Please install with 'pip install cognee[baml]' to use BAML extraction features.")
        result = await b.SummarizeCode(
            content, baml_options={"client_registry": config.baml_registry}
        )
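
Reviewer note: both summarization helpers now fail fast with an ImportError when the registry is None. A hedged sketch of how calling code might cope with that; extract_summary is the function from the diff, while SummaryModel, the import path, and the fallback behaviour are illustrative assumptions only:

# Illustrative only: the import path, SummaryModel, and the graceful-fallback
# choice are assumptions, not part of this commit.
from pydantic import BaseModel

class SummaryModel(BaseModel):
    summary: str

async def summarize_if_possible(content: str):
    try:
        # extract_summary is the helper changed above, assumed importable here.
        return await extract_summary(content, SummaryModel)
    except ImportError:
        # BAML extra not installed; degrade gracefully instead of crashing.
        return None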

View file

@@ -16,6 +16,9 @@ async def extract_content_graph(
get_logger(level="INFO")
if config.baml_registry is None:
raise ImportError("BAML is not available. Please install with 'pip install cognee[baml]' to use BAML extraction features.")
# if response_model:
# # tb = TypeBuilder()
# # country = tb.union \

View file

@@ -28,9 +28,7 @@ dependencies = [
"nltk>=3.9.1,<4.0.0",
"numpy>=1.26.4, <=4.0.0",
"pandas>=2.2.2,<3.0.0",
# Note: Newer s3fs and boto3 versions don't work well together
# Always use compatible, pinned versions of these two dependencies
"s3fs[boto3]==2025.3.2",
"sqlalchemy>=2.0.39,<3.0.0",
"aiosqlite>=0.20.0,<1.0.0",
"tiktoken>=0.8.0,<1.0.0",
@@ -53,10 +51,10 @@ dependencies = [
"fastapi>=0.115.7,<1.0.0",
"python-multipart>=0.0.20,<1.0.0",
"fastapi-users[sqlalchemy]>=14.0.1,<15.0.0",
"dlt[sqlalchemy]>=1.9.0,<2",
"sentry-sdk[fastapi]>=2.9.0,<3",
"structlog>=25.2.0,<26",
"baml-py (>=0.201.0,<0.202.0)",
"pympler>=1.1,<2.0.0",
"onnxruntime>=1.0.0,<2.0.0",
"pylance>=0.22.0,<1.0.0",
@@ -116,14 +114,13 @@ evals = [
"plotly>=6.0.0,<7",
"gdown>=5.2.0,<6",
]
gui = [
"pyside6>=6.8.3,<7",
"qasync>=0.27.1,<0.28",
]
graphiti = ["graphiti-core>=0.7.0,<0.8"]
# Note: Newer s3fs and boto3 versions don't work well together
# Always use compatible, pinned versions of these two dependencies
aws = ["s3fs[boto3]==2025.3.2"]
dlt = ["dlt[sqlalchemy]>=1.9.0,<2"]
baml = ["baml-py (>=0.201.0,<0.202.0)"]
dev = [
"pytest>=7.4.0,<8",
"pytest-cov>=6.1.1,<7.0.0",