From 8a082ad384e2840ac137a11720b4b581ed8de843 Mon Sep 17 00:00:00 2001
From: Vasilije <8619304+Vasilije1990@users.noreply.github.com>
Date: Sat, 25 May 2024 14:42:19 +0200
Subject: [PATCH] try a few things

---
 .github/workflows/test_common.yml           | 16 +++++++++-------
 cognee/infrastructure/llm/get_llm_client.py |  8 ++++----
 2 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/test_common.yml b/.github/workflows/test_common.yml
index d3d8dac0e..0c050202c 100644
--- a/.github/workflows/test_common.yml
+++ b/.github/workflows/test_common.yml
@@ -81,13 +81,7 @@ jobs:
     - name: Run tests
       run: poetry run pytest tests/
 
-    # - name: Build with Poetry
-    #   run: poetry build
-    #
-    # - name: Install Package
-    #   run: |
-    #     cd dist
-    #     pip install *.whl
+
 
     # - name: Download NLTK Punkt Tokenizer Models
     #   run: |
@@ -102,6 +96,14 @@
         ENV: 'dev'
       run: poetry run python ./cognee/tests/test_library.py
 
+    - name: Build with Poetry
+      run: poetry build
+
+    - name: Install Package
+      run: |
+        cd dist
+        pip install *.whl
+
     # - run: |
     #     poetry run
     #   if: runner.os != 'Windows'
diff --git a/cognee/infrastructure/llm/get_llm_client.py b/cognee/infrastructure/llm/get_llm_client.py
index 4b74a5fe7..d9714e6be 100644
--- a/cognee/infrastructure/llm/get_llm_client.py
+++ b/cognee/infrastructure/llm/get_llm_client.py
@@ -21,15 +21,15 @@ def get_llm_client():
 
     if provider == LLMProvider.OPENAI:
         from .openai.adapter import OpenAIAdapter
-        return OpenAIAdapter(llm_config.llm_api_key, llm_config.llm_model)
+        return OpenAIAdapter(config.llm_api_key, config.llm_model)
     elif provider == LLMProvider.OLLAMA:
         from .generic_llm_api.adapter import GenericAPIAdapter
-        return GenericAPIAdapter(llm_config.llm_endpoint, llm_config.llm_api_key, llm_config.llm_model, "Ollama")
+        return GenericAPIAdapter(config.llm_endpoint, config.llm_api_key, config.llm_model, "Ollama")
     elif provider == LLMProvider.ANTHROPIC:
         from .anthropic.adapter import AnthropicAdapter
-        return AnthropicAdapter(llm_config.llm_model)
+        return AnthropicAdapter(config.llm_model)
     elif provider == LLMProvider.CUSTOM:
         from .generic_llm_api.adapter import GenericAPIAdapter
-        return GenericAPIAdapter(llm_config.llm_endpoint, llm_config.llm_api_key, llm_config.llm_model, "Custom")
+        return GenericAPIAdapter(config.llm_endpoint, config.llm_api_key, config.llm_model, "Custom")
     else:
         raise ValueError(f"Unsupported LLM provider: {provider}")
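
Why the get_llm_client.py hunk matters: the hunk context implies the loaded
settings object is bound to a local named `config`, so the pre-patch reads of
`llm_config.*` would raise a NameError as soon as any provider branch ran.
Below is a minimal, self-contained sketch of the dispatch pattern the patched
function follows. The `LLMProvider` values, the stub adapter, and the
config-as-parameter shape are assumptions standing in for the real module,
which sits outside the hunk:

    from enum import Enum
    from types import SimpleNamespace


    class LLMProvider(Enum):
        # Member names come from the hunk; the string values are assumptions.
        OPENAI = "openai"
        OLLAMA = "ollama"
        ANTHROPIC = "anthropic"
        CUSTOM = "custom"


    class StubAdapter:
        """Stand-in for OpenAIAdapter / GenericAPIAdapter / AnthropicAdapter."""
        def __init__(self, *args):
            self.args = args


    def get_llm_client(config):
        # The real function takes no arguments and loads its own config;
        # passing it in here keeps the sketch runnable on its own.
        provider = LLMProvider(config.llm_provider)

        if provider == LLMProvider.OPENAI:
            # Post-patch: every branch reads the in-scope name `config`,
            # not the undefined `llm_config`.
            return StubAdapter(config.llm_api_key, config.llm_model)
        elif provider == LLMProvider.OLLAMA:
            return StubAdapter(config.llm_endpoint, config.llm_api_key, config.llm_model, "Ollama")
        elif provider == LLMProvider.ANTHROPIC:
            return StubAdapter(config.llm_model)
        elif provider == LLMProvider.CUSTOM:
            return StubAdapter(config.llm_endpoint, config.llm_api_key, config.llm_model, "Custom")
        else:
            raise ValueError(f"Unsupported LLM provider: {provider}")


    # Usage: a config carrying the fields the branches read (values invented).
    config = SimpleNamespace(
        llm_provider="ollama",
        llm_endpoint="http://localhost:11434",
        llm_api_key="not-needed-for-ollama",
        llm_model="llama3",
    )
    print(get_llm_client(config).args)

A note on the structure the patch preserves: the real function keeps each
adapter import inside its provider branch, so an optional provider SDK is only
imported when that provider is actually selected.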