try few things

This commit is contained in:
Vasilije 2024-05-25 14:42:19 +02:00
parent fa5028a072
commit 8a082ad384
2 changed files with 13 additions and 11 deletions

View file

@@ -81,13 +81,7 @@ jobs:
- name: Run tests
run: poetry run pytest tests/
# - name: Build with Poetry
# run: poetry build
#
# - name: Install Package
# run: |
# cd dist
# pip install *.whl
# - name: Download NLTK Punkt Tokenizer Models
# run: |
@@ -102,6 +96,14 @@ jobs:
ENV: 'dev'
run: poetry run python ./cognee/tests/test_library.py
- name: Build with Poetry
run: poetry build
- name: Install Package
run: |
cd dist
pip install *.whl
# - run: |
# poetry run
# if: runner.os != 'Windows'

View file

@@ -21,15 +21,15 @@ def get_llm_client():
if provider == LLMProvider.OPENAI:
from .openai.adapter import OpenAIAdapter
return OpenAIAdapter(llm_config.llm_api_key, llm_config.llm_model)
return OpenAIAdapter(config.llm_api_key, config.llm_model)
elif provider == LLMProvider.OLLAMA:
from .generic_llm_api.adapter import GenericAPIAdapter
return GenericAPIAdapter(llm_config.llm_endpoint, llm_config.llm_api_key, llm_config.llm_model, "Ollama")
return GenericAPIAdapter(config.llm_endpoint, config.llm_api_key, config.llm_model, "Ollama")
elif provider == LLMProvider.ANTHROPIC:
from .anthropic.adapter import AnthropicAdapter
return AnthropicAdapter(llm_config.llm_model)
return AnthropicAdapter(config.llm_model)
elif provider == LLMProvider.CUSTOM:
from .generic_llm_api.adapter import GenericAPIAdapter
return GenericAPIAdapter(llm_config.llm_endpoint, llm_config.llm_api_key, llm_config.llm_model, "Custom")
return GenericAPIAdapter(config.llm_endpoint, config.llm_api_key, config.llm_model, "Custom")
else:
raise ValueError(f"Unsupported LLM provider: {provider}")