Merge pull request #449 from topoteretes/feature/cog-186-run-cognee-on-windows

Feature/cog 186 run cognee on windows
This commit is contained in:
Vasilije 2025-01-17 14:16:37 +01:00 committed by GitHub
commit ffa3c2daa0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 62 additions and 27 deletions

View file

@@ -85,7 +85,7 @@ import os
os.environ["LLM_API_KEY"] = "YOUR OPENAI_API_KEY"
```
or
or
```
import cognee
cognee.config.set_llm_api_key("YOUR_OPENAI_API_KEY")
@@ -115,7 +115,7 @@ DB_PORT=5432
DB_NAME=cognee_db
DB_USERNAME=cognee
DB_PASSWORD=cognee
```
```
### Simple example
@@ -140,14 +140,14 @@ async def main():
Natural language processing (NLP) is an interdisciplinary
subfield of computer science and information retrieval.
"""
print("Adding text to cognee:")
print(text.strip())
print(text.strip())
# Add the text, and make it available for cognify
await cognee.add(text)
print("Text added successfully.\n")
print("Running cognify to create knowledge graph...\n")
print("Cognify process steps:")
print("1. Classifying the document: Determining the type and category of the input text.")
@@ -156,19 +156,19 @@ async def main():
print("4. Adding data points: Storing the extracted chunks for processing.")
print("5. Generating knowledge graph: Extracting entities and relationships to form a knowledge graph.")
print("6. Summarizing text: Creating concise summaries of the content for quick insights.\n")
# Use LLMs and cognee to create knowledge graph
await cognee.cognify()
print("Cognify process complete.\n")
query_text = 'Tell me about NLP'
print(f"Searching cognee for insights with query: '{query_text}'")
# Query cognee for insights on the added text
search_results = await cognee.search(
SearchType.INSIGHTS, query_text=query_text
)
print("Search results:")
# Display results
for result_text in search_results:
@@ -212,7 +212,7 @@ Cognee supports a variety of tools and services for different operations:
- **Language Models (LLMs)**: You can use either Anyscale or Ollama as your LLM provider.
- **Graph Stores**: In addition to NetworkX, Neo4j is also supported for graph storage.
- **User management**: Create individual user graphs and manage permissions
## Demo
@@ -258,13 +258,13 @@ pip install cognee
| Name | Type | Current state | Known Issues |
|----------|--------------------|-------------------|--------------|
| Qdrant | Vector | Stable ✅ | |
| Weaviate | Vector | Stable ✅ | |
| LanceDB | Vector | Stable ✅ | |
| Neo4j | Graph | Stable ✅ | |
| NetworkX | Graph | Stable ✅ | |
| FalkorDB | Vector/Graph | Unstable ❌ | |
| PGVector | Vector | Stable ✅ | |
| Milvus | Vector | Stable ✅ | |
| Name | Type | Current state (Mac/Linux) | Known Issues | Current state (Windows) | Known Issues |
|----------|--------------------|---------------------------|--------------|-------------------------|--------------|
| Qdrant | Vector | Stable ✅ | | Unstable ❌ | |
| Weaviate | Vector | Stable ✅ | | Unstable ❌ | |
| LanceDB | Vector | Stable ✅ | | Stable ✅ | |
| Neo4j | Graph | Stable ✅ | | Stable ✅ | |
| NetworkX | Graph | Stable ✅ | | Stable ✅ | |
| FalkorDB | Vector/Graph | Stable ✅ | | Unstable ❌ | |
| PGVector | Vector | Stable ✅ | | Unstable ❌ | |
| Milvus | Vector | Stable ✅ | | Unstable ❌ | |

View file

@@ -152,7 +152,9 @@ class LanceDBAdapter(VectorDBInterface):
connection = await self.get_connection()
collection = await connection.open_table(collection_name)
results = await collection.vector_search(query_vector).to_pandas()
collection_size = await collection.count_rows()
results = await collection.vector_search(query_vector).limit(collection_size).to_pandas()
result_values = list(results.to_dict("index").values())
@@ -250,9 +252,16 @@ class LanceDBAdapter(VectorDBInterface):
)
async def prune(self):
# Clean up the database if it was set up as temporary
connection = await self.get_connection()
collection_names = await connection.table_names()
for collection_name in collection_names:
collection = await connection.open_table(collection_name)
await collection.delete("id IS NOT NULL")
await connection.drop_table(collection_name)
if self.url.startswith("/"):
LocalStorage.remove_all(self.url) # Remove the temporary directory and files inside
LocalStorage.remove_all(self.url)
def get_data_point_schema(self, model_type):
return copy_model(

View file

@@ -30,4 +30,4 @@ if [ "$ENVIRONMENT" = "dev" ]; then
else
gunicorn -w 3 -k uvicorn.workers.UvicornWorker -t 30000 --bind=0.0.0.0:8000 --log-level error cognee.api.client:app
# python ./cognee/api/client.py
fi
fi

View file

@@ -204,4 +204,9 @@ if __name__ == "__main__":
"retriever": retrieve,
}
asyncio.run(main(steps_to_enable))
# Drive main() on an explicitly created event loop (instead of asyncio.run)
# so async generators can be flushed before teardown — needed on Windows.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
    loop.run_until_complete(main(steps_to_enable))
finally:
    # Shut down any pending async generators, then release the loop's
    # resources — without close() the selector/loop is leaked and CPython
    # emits a ResourceWarning at interpreter exit.
    loop.run_until_complete(loop.shutdown_asyncgens())
    loop.close()

View file

@@ -69,4 +69,9 @@ async def main():
if __name__ == "__main__":
setup_logging(logging.ERROR)
asyncio.run(main())
# Drive main() on an explicitly created event loop (instead of asyncio.run)
# so async generators can be flushed before teardown — needed on Windows.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
    loop.run_until_complete(main())
finally:
    # Shut down any pending async generators, then release the loop's
    # resources — without close() the selector/loop is leaked and CPython
    # emits a ResourceWarning at interpreter exit.
    loop.run_until_complete(loop.shutdown_asyncgens())
    loop.close()

View file

@@ -1,9 +1,11 @@
import os
import asyncio
import pathlib
import logging
import cognee
from cognee.api.v1.search import SearchType
from cognee.shared.utils import setup_logging
# Prerequisites:
# 1. Copy `.env.template` and rename it to `.env`.
@@ -45,4 +47,10 @@ async def main():
if __name__ == "__main__":
asyncio.run(main())
setup_logging(logging.ERROR)
# Drive main() on an explicitly created event loop (instead of asyncio.run)
# so async generators can be flushed before teardown — needed on Windows.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
    loop.run_until_complete(main())
finally:
    # Shut down any pending async generators, then release the loop's
    # resources — without close() the selector/loop is leaked and CPython
    # emits a ResourceWarning at interpreter exit.
    loop.run_until_complete(loop.shutdown_asyncgens())
    loop.close()

View file

@@ -1,6 +1,8 @@
import asyncio
import cognee
import logging
from cognee.api.v1.search import SearchType
from cognee.shared.utils import setup_logging
# Prerequisites:
# 1. Copy `.env.template` and rename it to `.env`.
@@ -66,4 +68,10 @@ async def main():
if __name__ == "__main__":
asyncio.run(main())
setup_logging(logging.ERROR)
# Drive main() on an explicitly created event loop (instead of asyncio.run)
# so async generators can be flushed before teardown — needed on Windows.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
    loop.run_until_complete(main())
finally:
    # Shut down any pending async generators, then release the loop's
    # resources — without close() the selector/loop is leaked and CPython
    # emits a ResourceWarning at interpreter exit.
    loop.run_until_complete(loop.shutdown_asyncgens())
    loop.close()