FIXES FOR COLLAB

This commit is contained in:
Vasilije 2024-05-27 14:31:42 +02:00
parent ebabae0308
commit 5ef81956c9
6 changed files with 540 additions and 699 deletions

View file

@@ -6,6 +6,7 @@ class LLMConfig(BaseSettings):
llm_model: str = "gpt-4o"
llm_endpoint: str = ""
llm_api_key: str = ""
llm_streaming:bool = False
model_config = SettingsConfigDict(env_file = ".env", extra = "allow")

View file

@@ -18,7 +18,7 @@ def get_llm_client():
if provider == LLMProvider.OPENAI:
from .openai.adapter import OpenAIAdapter
return OpenAIAdapter(llm_config.llm_api_key, llm_config.llm_model)
return OpenAIAdapter(llm_config.llm_api_key, llm_config.llm_model, llm_config.llm_streaming)
elif provider == LLMProvider.OLLAMA:
from .generic_llm_api.adapter import GenericAPIAdapter
return GenericAPIAdapter(llm_config.llm_endpoint, llm_config.llm_api_key, llm_config.llm_model, "Ollama")

View file

@@ -32,12 +32,12 @@ class OpenAIAdapter(LLMInterface):
api_key: str
"""Adapter for OpenAI's GPT-3, GPT=4 API"""
def __init__(self, api_key: str, model:str):
def __init__(self, api_key: str, model:str, streaming:bool = False):
self.aclient = instructor.from_openai(AsyncOpenAI(api_key = api_key))
self.client = instructor.from_openai(OpenAI(api_key = api_key))
self.model = model
self.api_key = api_key
self.streaming = streaming
@retry(stop = stop_after_attempt(5))
def completions_with_backoff(self, **kwargs):
"""Wrapper around ChatCompletion.create w/ backoff"""

View file

@@ -1,7 +1,7 @@
""" Fetches the context of a given node in the graph"""
from typing import Union, Dict
from neo4j import AsyncSession
from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client
import networkx as nx
@@ -43,6 +43,7 @@ async def search_neighbour(graph: Union[nx.Graph, any], query: str,
elif graph_config.graph_engine == GraphDBType.NEO4J:
from neo4j import AsyncSession
if isinstance(graph, AsyncSession):
cypher_query = """
MATCH (target {id: $node_id})

1187
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -1,6 +1,6 @@
[tool.poetry]
name = "cognee"
version = "0.1.8"
version = "0.1.9"
description = "Cognee - is a library for enriching LLM context with a semantic layer for better understanding and reasoning."
authors = ["Vasilije Markovic", "Boris Arzentar"]
readme = "README.md"
@@ -19,7 +19,7 @@ classifiers = [
[tool.poetry.dependencies]
python = ">=3.9.0,<3.12"
openai = "1.14.3"
openai = "1.27.0"
pydantic = "^2.5.0"
python-dotenv = "1.0.1"
fastapi = "^0.109.2"
@@ -33,45 +33,46 @@ debugpy = "^1.8.0"
pyarrow = "^15.0.0"
pylint = "^3.0.3"
aiosqlite = "^0.20.0"
pandas = "^2.2.0"
pandas = "2.0.3"
greenlet = "^3.0.3"
ruff = "^0.2.2"
filetype = "^1.2.0"
nltk = "^3.8.1"
dlt = "0.4.10"
dlt = "0.4.11"
duckdb = {version = "^0.10.0", extras = ["dlt"]}
overrides = "^7.7.0"
aiofiles = "^23.2.1"
qdrant-client = "^1.9.0"
duckdb-engine = "^0.11.2"
duckdb-engine = "0.12.1"
graphistry = "^0.33.5"
tenacity = "^8.2.3"
weaviate-client = "^4.5.4"
scikit-learn = "^1.4.1.post1"
fastembed = "^0.2.5"
fastembed = "0.2.7"
pypdf = "^4.1.0"
anthropic = "^0.21.3"
neo4j = "^5.18.0"
neo4j = "5.20.0"
jinja2 = "^3.1.3"
matplotlib = "^3.8.3"
nest-asyncio = "^1.6.0"
structlog = "^24.1.0"
tiktoken = "^0.6.0"
dspy-ai = "2.4.3"
tiktoken = "0.7.0"
dspy-ai = "2.4.9"
posthog = "^3.5.0"
lancedb = "^0.6.10"
importlib-metadata = "6.8.0"
litellm = "^1.37.3"
groq = "^0.5.0"
tantivy = "^0.21.0"
lancedb = "0.8.0"
importlib-metadata = "7.1.0"
litellm = "1.38.10"
groq = "0.8.0"
tantivy = "^0.22.0"
huggingface-hub ="0.20.0"
tokenizers ="0.15.2"
transformers ="4.39.0"
python-multipart = "^0.0.9"
langfuse = "^2.32.0"
spacy = "^3.7.4"
protobuf = "<5.0.0"
langchain-community = "0.0.38"
deepeval = "^0.21.42"
falkordb = "^1.0.4"
pydantic-settings = "^2.2.1"
anthropic = "^0.26.1"
langchain-community = "0.0.38"
[tool.poetry.extras]
@@ -102,10 +103,11 @@ mkdocs-jupyter = "^0.24.6"
mkdocs-minify-plugin = "^0.8.0"
mkdocs-redirects = "^1.2.1"
[tool.poetry.group.test-docs.dependencies]
fastapi = "^0.109.2"
diskcache = "^5.6.3"
pandas = "^2.2.0"
pandas = "2.0.3"
tabulate = "^0.9.0"
@@ -117,3 +119,5 @@ ignore-init-module-imports = true
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"