diff --git a/.github/workflows/test_dynamic_steps_example.yml b/.github/workflows/test_dynamic_steps_example.yml
index 0e22fa7ec..f746ed174 100644
--- a/.github/workflows/test_dynamic_steps_example.yml
+++ b/.github/workflows/test_dynamic_steps_example.yml
@@ -16,13 +16,7 @@ jobs:
     with:
       example-location: ./examples/python/dynamic_steps_example.py
     secrets:
-      LLM_MODEL: ${{ secrets.LLM_MODEL }}
-      LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
-      LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
-      LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
-      EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
-      EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
-      EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
-      EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
+      LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
       GRAPHISTRY_USERNAME: ${{ secrets.GRAPHISTRY_USERNAME }}
       GRAPHISTRY_PASSWORD: ${{ secrets.GRAPHISTRY_PASSWORD }}
diff --git a/.github/workflows/test_llama_index_cognee_integration_notebook.yml b/.github/workflows/test_llama_index_cognee_integration_notebook.yml
index 5974009d6..72408ddb6 100644
--- a/.github/workflows/test_llama_index_cognee_integration_notebook.yml
+++ b/.github/workflows/test_llama_index_cognee_integration_notebook.yml
@@ -9,20 +9,51 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

+env:
+  RUNTIME__LOG_LEVEL: ERROR
+
 jobs:
   run_notebook_test:
-    uses: ./.github/workflows/reusable_notebook.yml
-    with:
-      notebook-location: notebooks/llama_index_cognee_integration.ipynb
-    secrets:
-      #LLM_MODEL: ${{ secrets.LLM_MODEL }}
-      #LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
-      LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-      #LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
-      EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
-      EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
-      EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
-      EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
-      GRAPHISTRY_USERNAME: ${{ secrets.GRAPHISTRY_USERNAME }}
-      GRAPHISTRY_PASSWORD: ${{ secrets.GRAPHISTRY_PASSWORD }}
+    name: test
+    runs-on: ubuntu-22.04
+    defaults:
+      run:
+        shell: bash
+    steps:
+      - name: Check out
+        uses: actions/checkout@master
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.11.x'
+
+      - name: Install Poetry
+        uses: snok/install-poetry@v1.4.1
+        with:
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+          installer-parallel: true
+
+      - name: Install dependencies
+        run: |
+          pip install jupyter
+          pip install llama-index-graph-rag-cognee==0.1.2
+
+      - name: Execute Jupyter Notebook
+        env:
+          ENV: 'dev'
+          LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
+          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
+          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
+          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
+          GRAPHISTRY_USERNAME: ${{ secrets.GRAPHISTRY_USERNAME }}
+          GRAPHISTRY_PASSWORD: ${{ secrets.GRAPHISTRY_PASSWORD }}
+        run: |
+          poetry run jupyter nbconvert \
+            --to notebook \
+            --execute notebooks/llama_index_cognee_integration.ipynb \
+            --output executed_notebook.ipynb \
+            --ExecutePreprocessor.timeout=1200
diff --git a/.github/workflows/test_multimedia_example.yaml b/.github/workflows/test_multimedia_example.yaml
index 3d06959de..ea67e646a 100644
--- a/.github/workflows/test_multimedia_example.yaml
+++ b/.github/workflows/test_multimedia_example.yaml
@@ -16,7 +16,7 @@ jobs:
     with:
       example-location: ./examples/python/multimedia_example.py
     secrets:
-      LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }} # Use OpenAI until we deploy models to handle multimedia
+      LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
       OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
       GRAPHISTRY_USERNAME: ${{ secrets.GRAPHISTRY_USERNAME }}
       GRAPHISTRY_PASSWORD: ${{ secrets.GRAPHISTRY_PASSWORD }}
diff --git a/.github/workflows/test_simple_example.yml b/.github/workflows/test_simple_example.yml
index 21912414b..970d07e96 100644
--- a/.github/workflows/test_simple_example.yml
+++ b/.github/workflows/test_simple_example.yml
@@ -16,13 +16,7 @@ jobs:
     with:
       example-location: ./examples/python/simple_example.py
     secrets:
-      LLM_MODEL: ${{ secrets.LLM_MODEL }}
-      LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
-      LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
-      LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
-      EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
-      EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
-      EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
-      EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
+      LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
       GRAPHISTRY_USERNAME: ${{ secrets.GRAPHISTRY_USERNAME }}
       GRAPHISTRY_PASSWORD: ${{ secrets.GRAPHISTRY_PASSWORD }}
diff --git a/README.md b/README.md
index 07cf51859..d49d2bf1c 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@ We build for developers who need a reliable, production-ready data layer for AI

 ## What is cognee?

-Cognee implements scalable, modular ECL (Extract, Cognify, Load) pipelines that allow you to interconnect and retrieve past conversations, documents, and audio transcriptions while reducing hallucinations, developer effort, and cost.
+Cognee implements scalable, modular ECL (Extract, Cognify, Load) pipelines that allow you to interconnect and retrieve past conversations, documents, and audio transcriptions while reducing hallucinations, developer effort, and cost. Cognee merges graph and vector databases to uncover hidden relationships and new patterns in your data. You can automatically model, load and retrieve entities and objects representing your business domain and analyze their relationships, uncovering insights that neither vector stores nor graph stores alone can provide.

 Learn more about use-cases [here](https://docs.cognee.ai/use_cases)

@@ -170,7 +170,7 @@ async def main():
     print(f"Searching cognee for insights with query: '{query_text}'")
     # Query cognee for insights on the added text
     search_results = await cognee.search(
-        SearchType.INSIGHTS, query_text=query_text
+        query_text=query_text, query_type=SearchType.INSIGHTS
     )

     print("Search results:")
diff --git a/cognee/api/v1/search/search_v2.py b/cognee/api/v1/search/search_v2.py
index e187181d5..49faa0dc5 100644
--- a/cognee/api/v1/search/search_v2.py
+++ b/cognee/api/v1/search/search_v2.py
@@ -8,8 +8,8 @@ from cognee.modules.search.methods import search as search_function


 async def search(
-    query_type: SearchType,
     query_text: str,
+    query_type: SearchType = SearchType.GRAPH_COMPLETION,
     user: User = None,
     datasets: Union[list[str], str, None] = None,
 ) -> list:
diff --git a/cognee/tests/test_falkordb.py b/cognee/tests/test_falkordb.py
index 501c61af4..5f838cb20 100755
--- a/cognee/tests/test_falkordb.py
+++ b/cognee/tests/test_falkordb.py
@@ -50,19 +50,23 @@ async def main():
     random_node = (await vector_engine.search("entity.name", "AI"))[0]
     random_node_name = random_node.payload["text"]

-    search_results = await cognee.search(SearchType.INSIGHTS, query_text=random_node_name)
+    search_results = await cognee.search(
+        query_type=SearchType.INSIGHTS, query_text=random_node_name
+    )
     assert len(search_results) != 0, "The search results list is empty."
     print("\n\nExtracted sentences are:\n")
     for result in search_results:
         print(f"{result}\n")

-    search_results = await cognee.search(SearchType.CHUNKS, query_text=random_node_name)
+    search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text=random_node_name)
     assert len(search_results) != 0, "The search results list is empty."
     print("\n\nExtracted chunks are:\n")
     for result in search_results:
         print(f"{result}\n")

-    search_results = await cognee.search(SearchType.SUMMARIES, query_text=random_node_name)
+    search_results = await cognee.search(
+        query_type=SearchType.SUMMARIES, query_text=random_node_name
+    )
     assert len(search_results) != 0, "Query related summaries don't exist."
     print("\nExtracted summaries are:\n")
     for result in search_results:
diff --git a/cognee/tests/test_library.py b/cognee/tests/test_library.py
index cd78b144e..e444c4563 100755
--- a/cognee/tests/test_library.py
+++ b/cognee/tests/test_library.py
@@ -47,19 +47,23 @@ async def main():
     random_node = (await vector_engine.search("entity_name", "AI"))[0]
     random_node_name = random_node.payload["text"]

-    search_results = await cognee.search(SearchType.INSIGHTS, query_text=random_node_name)
+    search_results = await cognee.search(
+        query_type=SearchType.INSIGHTS, query_text=random_node_name
+    )
     assert len(search_results) != 0, "The search results list is empty."
     print("\n\nExtracted sentences are:\n")
     for result in search_results:
         print(f"{result}\n")

-    search_results = await cognee.search(SearchType.CHUNKS, query_text=random_node_name)
+    search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text=random_node_name)
     assert len(search_results) != 0, "The search results list is empty."
     print("\n\nExtracted chunks are:\n")
     for result in search_results:
         print(f"{result}\n")

-    search_results = await cognee.search(SearchType.SUMMARIES, query_text=random_node_name)
+    search_results = await cognee.search(
+        query_type=SearchType.SUMMARIES, query_text=random_node_name
+    )
     assert len(search_results) != 0, "Query related summaries don't exist."
print("\nExtracted summaries are:\n") for result in search_results: diff --git a/cognee/tests/test_milvus.py b/cognee/tests/test_milvus.py index bd16c04f0..37a98f621 100644 --- a/cognee/tests/test_milvus.py +++ b/cognee/tests/test_milvus.py @@ -58,19 +58,23 @@ async def main(): random_node = (await vector_engine.search("entity_name", "Quantum computer"))[0] random_node_name = random_node.payload["text"] - search_results = await cognee.search(SearchType.INSIGHTS, query_text=random_node_name) + search_results = await cognee.search( + query_type=SearchType.INSIGHTS, query_text=random_node_name + ) assert len(search_results) != 0, "The search results list is empty." print("\n\nExtracted INSIGHTS are:\n") for result in search_results: print(f"{result}\n") - search_results = await cognee.search(SearchType.CHUNKS, query_text=random_node_name) + search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text=random_node_name) assert len(search_results) != 0, "The search results list is empty." print("\n\nExtracted CHUNKS are:\n") for result in search_results: print(f"{result}\n") - search_results = await cognee.search(SearchType.SUMMARIES, query_text=random_node_name) + search_results = await cognee.search( + query_type=SearchType.SUMMARIES, query_text=random_node_name + ) assert len(search_results) != 0, "The search results list is empty." print("\nExtracted SUMMARIES are:\n") for result in search_results: diff --git a/cognee/tests/test_neo4j.py b/cognee/tests/test_neo4j.py index bf93e2c52..854873ad7 100644 --- a/cognee/tests/test_neo4j.py +++ b/cognee/tests/test_neo4j.py @@ -51,19 +51,23 @@ async def main(): random_node = (await vector_engine.search("entity_name", "Quantum computer"))[0] random_node_name = random_node.payload["text"] - search_results = await cognee.search(SearchType.INSIGHTS, query_text=random_node_name) + search_results = await cognee.search( + query_type=SearchType.INSIGHTS, query_text=random_node_name + ) assert len(search_results) != 0, "The search results list is empty." print("\n\nExtracted sentences are:\n") for result in search_results: print(f"{result}\n") - search_results = await cognee.search(SearchType.CHUNKS, query_text=random_node_name) + search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text=random_node_name) assert len(search_results) != 0, "The search results list is empty." print("\n\nExtracted chunks are:\n") for result in search_results: print(f"{result}\n") - search_results = await cognee.search(SearchType.SUMMARIES, query_text=random_node_name) + search_results = await cognee.search( + query_type=SearchType.SUMMARIES, query_text=random_node_name + ) assert len(search_results) != 0, "Query related summaries don't exist." print("\nExtracted summaries are:\n") for result in search_results: diff --git a/cognee/tests/test_pgvector.py b/cognee/tests/test_pgvector.py index 99b5ca724..78e70c71c 100644 --- a/cognee/tests/test_pgvector.py +++ b/cognee/tests/test_pgvector.py @@ -126,21 +126,25 @@ async def main(): random_node = (await vector_engine.search("entity_name", "Quantum computer"))[0] random_node_name = random_node.payload["text"] - search_results = await cognee.search(SearchType.INSIGHTS, query_text=random_node_name) + search_results = await cognee.search( + query_type=SearchType.INSIGHTS, query_text=random_node_name + ) assert len(search_results) != 0, "The search results list is empty." 
print("\n\nExtracted sentences are:\n") for result in search_results: print(f"{result}\n") search_results = await cognee.search( - SearchType.CHUNKS, query_text=random_node_name, datasets=[dataset_name_2] + query_type=SearchType.CHUNKS, query_text=random_node_name, datasets=[dataset_name_2] ) assert len(search_results) != 0, "The search results list is empty." print("\n\nExtracted chunks are:\n") for result in search_results: print(f"{result}\n") - search_results = await cognee.search(SearchType.SUMMARIES, query_text=random_node_name) + search_results = await cognee.search( + query_type=SearchType.SUMMARIES, query_text=random_node_name + ) assert len(search_results) != 0, "Query related summaries don't exist." print("\n\nExtracted summaries are:\n") for result in search_results: diff --git a/cognee/tests/test_qdrant.py b/cognee/tests/test_qdrant.py index 16adc0494..f1ba995ab 100644 --- a/cognee/tests/test_qdrant.py +++ b/cognee/tests/test_qdrant.py @@ -51,19 +51,23 @@ async def main(): random_node = (await vector_engine.search("entity_name", "Quantum computer"))[0] random_node_name = random_node.payload["text"] - search_results = await cognee.search(SearchType.INSIGHTS, query_text=random_node_name) + search_results = await cognee.search( + query_type=SearchType.INSIGHTS, query_text=random_node_name + ) assert len(search_results) != 0, "The search results list is empty." print("\n\nExtracted sentences are:\n") for result in search_results: print(f"{result}\n") - search_results = await cognee.search(SearchType.CHUNKS, query_text=random_node_name) + search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text=random_node_name) assert len(search_results) != 0, "The search results list is empty." print("\n\nExtracted chunks are:\n") for result in search_results: print(f"{result}\n") - search_results = await cognee.search(SearchType.SUMMARIES, query_text=random_node_name) + search_results = await cognee.search( + query_type=SearchType.SUMMARIES, query_text=random_node_name + ) assert len(search_results) != 0, "Query related summaries don't exist." print("\nExtracted summaries are:\n") for result in search_results: diff --git a/cognee/tests/test_weaviate.py b/cognee/tests/test_weaviate.py index 01021e6fc..6af6a73a3 100644 --- a/cognee/tests/test_weaviate.py +++ b/cognee/tests/test_weaviate.py @@ -51,19 +51,23 @@ async def main(): random_node = (await vector_engine.search("entity_name", "Quantum computer"))[0] random_node_name = random_node.payload["text"] - search_results = await cognee.search(SearchType.INSIGHTS, query_text=random_node_name) + search_results = await cognee.search( + query_text=random_node_name, query_type=SearchType.INSIGHTS + ) assert len(search_results) != 0, "The search results list is empty." print("\n\nExtracted sentences are:\n") for result in search_results: print(f"{result}\n") - search_results = await cognee.search(SearchType.CHUNKS, query_text=random_node_name) + search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text=random_node_name) assert len(search_results) != 0, "The search results list is empty." print("\n\nExtracted chunks are:\n") for result in search_results: print(f"{result}\n") - search_results = await cognee.search(SearchType.SUMMARIES, query_text=random_node_name) + search_results = await cognee.search( + query_type=SearchType.SUMMARIES, query_text=random_node_name + ) assert len(search_results) != 0, "Query related summaries don't exist." 
print("\nExtracted summaries are:\n") for result in search_results: diff --git a/evals/qa_context_provider_utils.py b/evals/qa_context_provider_utils.py index 100ceb290..6a0314278 100644 --- a/evals/qa_context_provider_utils.py +++ b/evals/qa_context_provider_utils.py @@ -76,7 +76,9 @@ async def get_context_with_cognee( search_results = [] for search_type in search_types: - raw_search_results = await cognee.search(search_type, query_text=instance["question"]) + raw_search_results = await cognee.search( + query_type=search_type, query_text=instance["question"] + ) if search_type == SearchType.INSIGHTS: res_list = [_insight_to_string(edge) for edge in raw_search_results] diff --git a/evals/simple_rag_vs_cognee_eval.py b/evals/simple_rag_vs_cognee_eval.py index ab4acbe53..c0aaa567b 100644 --- a/evals/simple_rag_vs_cognee_eval.py +++ b/evals/simple_rag_vs_cognee_eval.py @@ -94,7 +94,7 @@ async def cognify_search_base_rag(content: str, context: str): async def cognify_search_graph(content: str, context: str): from cognee.api.v1.search import search, SearchType - results = await search(SearchType.INSIGHTS, query_text="Donald Trump") + results = await search(query_type=SearchType.INSIGHTS, query_text="Donald Trump") print("results", results) return results diff --git a/examples/python/dynamic_steps_example.py b/examples/python/dynamic_steps_example.py index 75881c5be..5117dda92 100644 --- a/examples/python/dynamic_steps_example.py +++ b/examples/python/dynamic_steps_example.py @@ -186,7 +186,7 @@ async def main(enable_steps): # Step 4: Query insights if enable_steps.get("retriever"): search_results = await cognee.search( - SearchType.GRAPH_COMPLETION, query_text="Who has experience in design tools?" + query_type=SearchType.GRAPH_COMPLETION, query_text="Who has experience in design tools?" 
         )

         print(search_results)
diff --git a/examples/python/multimedia_example.py b/examples/python/multimedia_example.py
index 5b7a581fc..2b7899554 100644
--- a/examples/python/multimedia_example.py
+++ b/examples/python/multimedia_example.py
@@ -37,7 +37,7 @@ async def main():

     # Query cognee for summaries of the data in the multimedia files
     search_results = await cognee.search(
-        SearchType.SUMMARIES,
+        query_type=SearchType.SUMMARIES,
         query_text="What is in the multimedia files?",
     )

diff --git a/examples/python/simple_example.py b/examples/python/simple_example.py
index 27441bb45..f4063596e 100644
--- a/examples/python/simple_example.py
+++ b/examples/python/simple_example.py
@@ -51,7 +51,7 @@ async def main():
     query_text = "Tell me about NLP"
     print(f"Searching cognee for insights with query: '{query_text}'")
     # Query cognee for insights on the added text
-    search_results = await cognee.search(SearchType.INSIGHTS, query_text=query_text)
+    search_results = await cognee.search(query_type=SearchType.INSIGHTS, query_text=query_text)

     print("Search results:")
     # Display results
diff --git a/modal_deployment.py b/modal_deployment.py
index 28e36d8ef..3975a1ef2 100644
--- a/modal_deployment.py
+++ b/modal_deployment.py
@@ -27,7 +27,7 @@ async def entry(text: str, query: str):
     await cognee.prune.prune_system(metadata=True)
     await cognee.add(text)
     await cognee.cognify()
-    search_results = await cognee.search(SearchType.GRAPH_COMPLETION, query_text=query)
+    search_results = await cognee.search(query_type=SearchType.GRAPH_COMPLETION, query_text=query)

     return {
         "text": text,
diff --git a/notebooks/cognee_demo.ipynb b/notebooks/cognee_demo.ipynb
index a90c02958..9ee3670a1 100644
--- a/notebooks/cognee_demo.ipynb
+++ b/notebooks/cognee_demo.ipynb
@@ -830,7 +830,7 @@
    "node = (await vector_engine.search(\"entity_name\", \"sarah.nguyen@example.com\"))[0]\n",
    "node_name = node.payload[\"text\"]\n",
    "\n",
-   "search_results = await cognee.search(SearchType.SUMMARIES, query_text = node_name)\n",
+   "search_results = await cognee.search(query_type=SearchType.SUMMARIES, query_text = node_name)\n",
    "print(\"\\n\\Extracted summaries are:\\n\")\n",
    "for result in search_results:\n",
    "    print(f\"{result}\\n\")"
@@ -851,7 +851,7 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "search_results = await cognee.search(SearchType.CHUNKS, query_text = node_name)\n",
+   "search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text = node_name)\n",
    "print(\"\\n\\nExtracted chunks are:\\n\")\n",
    "for result in search_results:\n",
    "    print(f\"{result}\\n\")"
@@ -872,7 +872,7 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "search_results = await cognee.search(SearchType.INSIGHTS, query_text = node_name)\n",
+   "search_results = await cognee.search(query_type=SearchType.INSIGHTS, query_text = node_name)\n",
    "print(\"\\n\\nExtracted sentences are:\\n\")\n",
    "for result in search_results:\n",
    "    print(f\"{result}\\n\")"
diff --git a/notebooks/cognee_llama_index.ipynb b/notebooks/cognee_llama_index.ipynb
index 82db4d4ae..4889d1411 100644
--- a/notebooks/cognee_llama_index.ipynb
+++ b/notebooks/cognee_llama_index.ipynb
@@ -179,7 +179,7 @@
    "\n",
    "# Query cognee for summaries\n",
    "search_results = await cognee.search(\n",
-   "    SearchType.SUMMARIES, query_text=\"What are the main news discussed in the document?\"\n",
+   "    query_type=SearchType.SUMMARIES, query_text=\"What are the main news discussed in the document?\"\n",
    ")\n",
    "# Display search results\n",
    "print(\"\\n Summary of main news discussed:\\n\")\n",
diff --git a/notebooks/cognee_multimedia_demo.ipynb b/notebooks/cognee_multimedia_demo.ipynb
index d373b5bd9..9dde1aec1 100644
--- a/notebooks/cognee_multimedia_demo.ipynb
+++ b/notebooks/cognee_multimedia_demo.ipynb
@@ -137,7 +137,7 @@
    "\n",
    "# Query cognee for summaries of the data in the multimedia files\n",
    "search_results = await cognee.search(\n",
-   "    SearchType.SUMMARIES,\n",
+   "    query_type=SearchType.SUMMARIES,\n",
    "    query_text=\"What is in the multimedia files?\",\n",
    ")\n",
    "\n",
diff --git a/notebooks/hr_demo.ipynb b/notebooks/hr_demo.ipynb
index 63df77775..f186c6f3d 100644
--- a/notebooks/hr_demo.ipynb
+++ b/notebooks/hr_demo.ipynb
@@ -566,7 +566,7 @@
    "node = (await vector_engine.search(\"entity_name\", \"sarah.nguyen@example.com\"))[0]\n",
    "node_name = node.payload[\"text\"]\n",
    "\n",
-   "search_results = await cognee.search(SearchType.SUMMARIES, query_text=node_name)\n",
+   "search_results = await cognee.search(query_type=SearchType.SUMMARIES, query_text=node_name)\n",
    "print(\"\\n\\Extracted summaries are:\\n\")\n",
    "for result in search_results:\n",
    "    print(f\"{result}\\n\")"
@@ -587,7 +587,7 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "search_results = await cognee.search(SearchType.CHUNKS, query_text=node_name)\n",
+   "search_results = await cognee.search(query_type=SearchType.CHUNKS, query_text=node_name)\n",
    "print(\"\\n\\nExtracted chunks are:\\n\")\n",
    "for result in search_results:\n",
    "    print(f\"{result}\\n\")"
@@ -608,7 +608,7 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "search_results = await cognee.search(SearchType.INSIGHTS, query_text=node_name)\n",
+   "search_results = await cognee.search(query_type=SearchType.INSIGHTS, query_text=node_name)\n",
    "print(\"\\n\\nExtracted sentences are:\\n\")\n",
    "for result in search_results:\n",
    "    print(f\"{result}\\n\")"
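
Every hunk above follows from the signature change in cognee/api/v1/search/search_v2.py: query_type becomes an optional keyword argument defaulting to SearchType.GRAPH_COMPLETION, and callers now pass it by name. The following is a minimal sketch of the call pattern this diff migrates to; the sample text and query strings are placeholders, not taken from the repository.

import asyncio

import cognee
from cognee.api.v1.search import SearchType


async def main():
    # Placeholder content for illustration only.
    await cognee.add("Natural language processing (NLP) is a subfield of computer science.")
    await cognee.cognify()

    # query_type now defaults to SearchType.GRAPH_COMPLETION, so it can be omitted.
    default_results = await cognee.search(query_text="Tell me about NLP")

    # Other modes are selected by passing query_type as a keyword argument.
    insight_results = await cognee.search(
        query_text="Tell me about NLP", query_type=SearchType.INSIGHTS
    )

    print(default_results)
    print(insight_results)


if __name__ == "__main__":
    asyncio.run(main())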