implement settings

This commit is contained in:
Vasilije 2024-05-25 18:50:59 +02:00
parent eb7d02833d
commit 56e64b4f71
7 changed files with 60 additions and 14 deletions

View file

@ -45,7 +45,6 @@ class Config:
qdrant_api_key: str = os.getenv("QDRANT_API_KEY", None)
sqlalchemy_logging: bool = os.getenv("SQLALCHEMY_LOGGING", True)
graph_filename = os.getenv("GRAPH_NAME", "cognee_graph.pkl")

View file

@ -50,9 +50,9 @@ class InfrastructureConfig():
def get_config(self, config_entity: str = None) -> dict:
if (config_entity is None or config_entity == "database_engine") and self.database_engine is None:
logging.debug("cf sdsds:")
db_path = os.path.join(self.system_root_directory,config.db_path)
db_path = os.path.join(self.system_root_directory,relational.db_path)
LocalStorage.ensure_directory_exists(db_path)
@ -99,9 +99,6 @@ class InfrastructureConfig():
if (config_entity is None or config_entity == "database_directory_path") and self.database_directory_path is None:
self.database_directory_path = self.system_root_directory + "/" + relational.db_path
if self.database_directory_path is None:
self.database_directory_path = self.system_root_directory + "/" + relational.db_path

View file

@ -0,0 +1,22 @@
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
class VectorConfig(BaseSettings):
    """Vector-database connection settings.

    Values are read from the environment (and a local ``.env`` file);
    unknown extra variables are tolerated rather than rejected.
    """

    vector_db_url: str = ""
    vector_db_key: str = ""
    vector_db_path: str = ""
    # NOTE(review): annotated ``object`` but defaulted to a string —
    # presumably holds either an engine name or an engine instance; confirm.
    vector_db_engine: object = ""

    model_config = SettingsConfigDict(env_file = ".env", extra = "allow")

    def to_dict(self) -> dict:
        """Return the vector-db settings as a plain dictionary."""
        field_names = (
            "vector_db_url",
            "vector_db_key",
            "vector_db_path",
            "vector_db_engine",
        )
        return {name: getattr(self, name) for name in field_names}
@lru_cache(maxsize=None)
def get_vectordb_config():
    """Build the ``VectorConfig`` once and reuse the cached instance."""
    return VectorConfig()

View file

@ -12,8 +12,8 @@ from litellm import aembedding
import litellm
litellm.set_verbose = True
config = Config()
config.load()
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import get_embedding_config
config = get_embedding_config()
class DefaultEmbeddingEngine(EmbeddingEngine):
async def embed_text(self, text: List[str]) -> List[float]:

View file

@ -0,0 +1,22 @@
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
class EmbeddingConfig(BaseSettings):
    """Embedding-model settings, read from the environment / ``.env`` file.

    Holds the model name and output dimensionality for both the OpenAI
    and the litellm embedding paths; extra env variables are tolerated.
    """

    openai_embedding_model: str = "text-embedding-3-large"
    openai_embedding_dimensions: int = 3072
    litellm_embedding_model: str = "text-embedding-3-large"
    # Fixed: was annotated ``str`` with an int default (3072). Dimensions are
    # an integer count, matching openai_embedding_dimensions above; the str
    # annotation would also make env-var overrides parse as strings.
    litellm_embedding_dimensions: int = 3072

    model_config = SettingsConfigDict(env_file = ".env", extra = "allow")

    def to_dict(self) -> dict:
        """Return all embedding settings as a plain dictionary."""
        return {
            "openai_embedding_model": self.openai_embedding_model,
            "openai_embedding_dimensions": self.openai_embedding_dimensions,
            "litellm_embedding_model": self.litellm_embedding_model,
            "litellm_embedding_dimensions": self.litellm_embedding_dimensions,
        }
@lru_cache(maxsize=None)
def get_embedding_config():
    """Return a process-wide cached ``EmbeddingConfig`` instance."""
    return EmbeddingConfig()

View file

@ -1,9 +1,13 @@
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
class LLMConfig():
llm_provider: str = None
llm_model: str = None
llm_endpoint: str = None
llm_api_key: str = None
class LLMConfig(BaseSettings):
llm_provider: str = "openai"
llm_model: str = "gpt-4o"
llm_endpoint: str = ""
llm_api_key: str = ""
model_config = SettingsConfigDict(env_file = ".env", extra = "allow")
def to_dict(self) -> dict:
return {
@ -13,4 +17,6 @@ class LLMConfig():
"apiKey": self.llm_api_key,
}
llm_config = LLMConfig()
@lru_cache(maxsize=None)
def get_llm_config():
    """Construct the ``LLMConfig`` on first call and cache it thereafter."""
    return LLMConfig()