diff --git a/.github/workflows/disable_independent_workflows.sh b/.github/workflows/disable_independent_workflows.sh
index 693c3092d..ff57da80d 100755
--- a/.github/workflows/disable_independent_workflows.sh
+++ b/.github/workflows/disable_independent_workflows.sh
@@ -10,7 +10,7 @@ WORKFLOWS=(
   "test_kuzu.yml"
   "test_multimetric_qa_eval_run.yaml"
   "test_graphrag_vs_rag_notebook.yml"
-  "test_gemini.yml"
+  "test_llms.yml"
   "test_multimedia_example.yaml"
   "test_deduplication.yml"
   "test_eval_framework.yml"
diff --git a/.github/workflows/test_gemini.yml b/.github/workflows/test_gemini.yml
deleted file mode 100644
index 544e15a5e..000000000
--- a/.github/workflows/test_gemini.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-name: test | gemini
-
-on:
-  workflow_call:
-
-jobs:
-  test-gemini:
-    name: Run Gemini Test
-    runs-on: ubuntu-22.04
-    steps:
-      - name: Check out repository
-        uses: actions/checkout@v4
-
-      - name: Cognee Setup
-        uses: ./.github/actions/cognee_setup
-        with:
-          python-version: '3.11.x'
-
-      - name: Run Gemini Simple Example
-        env:
-          LLM_PROVIDER: "gemini"
-          LLM_API_KEY: ${{ secrets.GEMINI_API_KEY }}
-          LLM_MODEL: "gemini/gemini-1.5-flash"
-          EMBEDDING_PROVIDER: "gemini"
-          EMBEDDING_API_KEY: ${{ secrets.GEMINI_API_KEY }}
-          EMBEDDING_MODEL: "gemini/text-embedding-004"
-          EMBEDDING_DIMENSIONS: "768"
-          EMBEDDING_MAX_TOKENS: "8076"
-        run: uv run python ./examples/python/simple_example.py
diff --git a/.github/workflows/test_llms.yml b/.github/workflows/test_llms.yml
new file mode 100644
index 000000000..5a0f947c9
--- /dev/null
+++ b/.github/workflows/test_llms.yml
@@ -0,0 +1,86 @@
+name: LLM Test Suites
+
+permissions:
+  contents: read
+
+on:
+  workflow_call:
+
+env:
+  RUNTIME__LOG_LEVEL: ERROR
+  ENV: 'dev'
+
+jobs:
+  test-gemini:
+    name: Run Gemini Test
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+
+      - name: Cognee Setup
+        uses: ./.github/actions/cognee_setup
+        with:
+          python-version: '3.11.x'
+
+      - name: Run Gemini Simple Example
+        env:
+          LLM_PROVIDER: "gemini"
+          LLM_API_KEY: ${{ secrets.GEMINI_API_KEY }}
+          LLM_MODEL: "gemini/gemini-1.5-flash"
+          EMBEDDING_PROVIDER: "gemini"
+          EMBEDDING_API_KEY: ${{ secrets.GEMINI_API_KEY }}
+          EMBEDDING_MODEL: "gemini/text-embedding-004"
+          EMBEDDING_DIMENSIONS: "768"
+          EMBEDDING_MAX_TOKENS: "8076"
+        run: uv run python ./examples/python/simple_example.py
+
+  test-fastembed:
+    name: Run Fastembed Test
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+
+      - name: Cognee Setup
+        uses: ./.github/actions/cognee_setup
+        with:
+          python-version: '3.11.x'
+
+      - name: Run Fastembed Simple Example
+        env:
+          LLM_PROVIDER: "openai"
+          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
+          LLM_MODEL: ${{ secrets.LLM_MODEL }}
+          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
+          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
+          EMBEDDING_PROVIDER: "fastembed"
+          EMBEDDING_MODEL: "sentence-transformers/all-MiniLM-L6-v2"
+          EMBEDDING_DIMENSIONS: "384"
+          EMBEDDING_MAX_TOKENS: "256"
+        run: uv run python ./examples/python/simple_example.py
+
+  test-openrouter:
+    name: Run OpenRouter Test
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+
+      - name: Cognee Setup
+        uses: ./.github/actions/cognee_setup
+        with:
+          python-version: '3.11.x'
+
+      - name: Run OpenRouter Simple Example
+        env:
+          LLM_PROVIDER: "custom"
+          LLM_API_KEY: ${{ secrets.OPENROUTER_API_KEY }}
+          LLM_MODEL: "openrouter/x-ai/grok-code-fast-1"
+          LLM_ENDPOINT: "https://openrouter.ai/api/v1"
+          EMBEDDING_PROVIDER: "openai"
+          EMBEDDING_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+          EMBEDDING_MODEL: "openai/text-embedding-3-large"
+          EMBEDDING_DIMENSIONS: "3072"
+          EMBEDDING_MAX_TOKENS: "8191"
+        run: uv run python ./examples/python/simple_example.py
\ No newline at end of file
diff --git a/.github/workflows/test_openrouter.yml b/.github/workflows/test_openrouter.yml
deleted file mode 100644
index 9c2dcdebe..000000000
--- a/.github/workflows/test_openrouter.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-name: test | openrouter
-
-on:
-  workflow_call:
-
-jobs:
-  test-openrouter:
-    name: Run OpenRouter Test
-    runs-on: ubuntu-22.04
-    steps:
-      - name: Check out repository
-        uses: actions/checkout@v4
-
-      - name: Cognee Setup
-        uses: ./.github/actions/cognee_setup
-        with:
-          python-version: '3.11.x'
-
-      - name: Run OpenRouter Simple Example
-        env:
-          LLM_PROVIDER: "custom"
-          LLM_API_KEY: ${{ secrets.OPENROUTER_API_KEY }}
-          LLM_MODEL: "openrouter/x-ai/grok-code-fast-1"
-          LLM_ENDPOINT: "https://openrouter.ai/api/v1"
-          EMBEDDING_PROVIDER: "openai"
-          EMBEDDING_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-          EMBEDDING_MODEL: "openai/text-embedding-3-large"
-          EMBEDDING_DIMENSIONS: "3072"
-          EMBEDDING_MAX_TOKENS: "8191"
-        run: uv run python ./examples/python/simple_example.py
diff --git a/.github/workflows/test_suites.yml b/.github/workflows/test_suites.yml
index 86f89249d..ff18f2962 100644
--- a/.github/workflows/test_suites.yml
+++ b/.github/workflows/test_suites.yml
@@ -115,16 +115,10 @@ jobs:
     secrets: inherit
 
   # Additional LLM tests
-  gemini-tests:
-    name: Gemini Tests
-    needs: [basic-tests, e2e-tests]
-    uses: ./.github/workflows/test_gemini.yml
-    secrets: inherit
-
-  openrouter-tests:
-    name: OpenRouter Tests
-    needs: [basic-tests, e2e-tests]
-    uses: ./.github/workflows/test_openrouter.yml
+  llm-tests:
+    name: LLM Test Suite
+    needs: [ basic-tests, e2e-tests ]
+    uses: ./.github/workflows/test_llms.yml
     secrets: inherit
 
   # Ollama tests moved to the end
@@ -138,8 +132,7 @@
       different-operating-systems-tests,
       vector-db-tests,
       example-tests,
-      gemini-tests,
-      openrouter-tests,
+      llm-tests,
       mcp-test,
       relational-db-migration-tests,
       docker-compose-test,
@@ -161,8 +154,7 @@
      example-tests,
      db-examples-tests,
      mcp-test,
-      gemini-tests,
-      openrouter-tests,
+      llm-tests,
      ollama-tests,
      relational-db-migration-tests,
      docker-compose-test,
@@ -183,8 +175,7 @@
            "${{ needs.example-tests.result }}" == "success" &&
            "${{ needs.db-examples-tests.result }}" == "success" &&
            "${{ needs.relational-db-migration-tests.result }}" == "success" &&
-            "${{ needs.gemini-tests.result }}" == "success" &&
-            "${{ needs.openrouter-tests.result }}" == "success" &&
+            "${{ needs.llm-tests.result }}" == "success" &&
            "${{ needs.docker-compose-test.result }}" == "success" &&
            "${{ needs.docker-ci-test.result }}" == "success" &&
            "${{ needs.ollama-tests.result }}" == "success" ]]; then