fix: change API client according to api implementation
parent e3dbdd79b9
commit d315248bfa
7 changed files with 44 additions and 62 deletions

README.md | 24
@@ -55,29 +55,29 @@ poetry add "cognee[weaviate]"

### Setup

Create `.env` file in your project in order to store environment variables such as API keys.
Create `.env` file in your project root directory in order to store environment variables such as API keys.

Note: Don't push `.env` file to git repo as it will expose those keys to others.

If cognee is installed with Weaviate as a vector database provider, add Weaviate environment variables.
If cognee is installed with Weaviate as a vector database provider, add Weaviate environment variables:
```
WEAVIATE_URL = {YOUR_WEAVIATE_URL}
WEAVIATE_API_KEY = {YOUR_WEAVIATE_API_KEY}
```

Otherwise if cognee is installed with a default (Qdrant) vector database provider, add Qdrant environment variables.
Otherwise if cognee is installed with a default (Qdrant) vector database provider, add Qdrant environment variables:
```
QDRANT_URL = {YOUR_QDRANT_URL}
QDRANT_API_KEY = {YOUR_QDRANT_API_KEY}
```

Add OpenAI API Key environment variable
Add OpenAI API Key environment variable:
```
OPENAI_API_KEY = {YOUR_OPENAI_API_KEY}
```

Cognee stores data and system files inside the library directory, which is lost if the library folder is removed.
You can change the directories where cognee will store data and system files by calling config functions.
You can change the directories where cognee will store data and system files by calling config functions:
```
import cognee
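# A hedged continuation sketch: the config block above is cut off at the hunk
# boundary, and the next hunk header only confirms the
# cognee.config.data_root_directory(...) call. system_root_directory is an
# assumed, analogous setter and may not exist in the library.
import os

absolute_path_to_directory = os.path.abspath(".cognee")  # hypothetical location
cognee.config.data_root_directory(absolute_path_to_directory)
# cognee.config.system_root_directory(absolute_path_to_directory)  # assumed call, verify against the library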
@@ -88,7 +88,7 @@ cognee.config.data_root_directory(absolute_path_to_directory)

### Run

Add a new piece of information to storage
Add a new piece of information to the storage:
```
import cognee

@@ -132,12 +132,12 @@ cognee.add("data://{absolute_path_to_directory}", dataset_name)
# This will add just directory 2024 under reports.
```

Use LLMs and cognee to create graphs
Use LLMs and cognee to create graphs:
```
cognee.cognify(dataset_name)
```

Render the graph with our util function
Render the graph with our util function:

```
from cognee.utils import render_graph

@@ -147,13 +147,9 @@ graph_url = await render_graph(graph)
print(graph_url)
```

Query the graph for a piece of information
Query the graph for a piece of information:
```
query_params = {
    SearchType.SIMILARITY: {'query': 'your search query here'}
}

search_results = cognee.search(graph, query_params)
search_results = cognee.search('SIMILARITY', "query_search")

print(search_results)
```
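In the updated README snippet the graph object is gone and `cognee.search` takes a search type plus the query. Note that elsewhere in this commit the second argument appears as a params dict (`{'query': ...}`) rather than a bare string, so the exact payload shape is not settled by this diff; a minimal sketch of the README-style call:

```
# Hedged sketch of the new call shape shown in the README diff above; whether
# the second argument should be a plain string or a params dict is ambiguous
# within this commit.
import cognee

search_results = cognee.search('SIMILARITY', "your search query here")
print(search_results)
```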
@@ -1,12 +1,7 @@
""" FastAPI server for the Cognee API. """

import os
import json
from uuid import UUID

import uvicorn
from fastapi import Depends

import logging

# Set up logging

@@ -23,7 +18,7 @@ config = Config()
config.load()

from typing import Dict, Any, List, Union, BinaryIO
from fastapi import FastAPI, BackgroundTasks, HTTPException
from fastapi import FastAPI
from fastapi.responses import JSONResponse
from pydantic import BaseModel

@@ -69,12 +64,11 @@ class SearchPayload(BaseModel):
@app.post("/add", response_model=dict)
async def add(payload: AddPayload):
    """ This endpoint is responsible for adding data to the graph."""
    from v1.add.add_standalone import add_standalone
    from v1.add.add import add

    try:
        await add_standalone(
        await add(
            payload.data,
            payload.dataset_id,
            payload.dataset_name,
        )
    except Exception as error:

@@ -89,9 +83,7 @@ async def cognify(payload: CognifyPayload):
    from v1.cognify.cognify import cognify

    try:
        await cognify(
            payload.datasets,
        )
        await cognify(payload.datasets)
    except Exception as error:
        return JSONResponse(
            status_code = 409,

@@ -105,9 +97,8 @@ async def search(payload: SearchPayload):
    from v1.search.search import search

    try:
        await search(
            payload.query_params,
        )
        search_type = 'SIMILARITY'
        await search(search_type, payload.query_params)
    except Exception as error:
        return JSONResponse(
            status_code = 409,
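Taken together, the server changes swap `add_standalone` for `add`, collapse the `cognify` call, and pin the search type to `'SIMILARITY'` before delegating to the new two-argument `search`. A minimal client sketch against these endpoints, assuming the server runs on uvicorn's default host/port and that the payload field names mirror the attribute access shown above (the full Pydantic models sit outside these hunks):

```
# Hedged client sketch: only the endpoint paths and payload attribute names
# come from this diff; host, port, and concrete field values are assumptions.
import requests

BASE_URL = "http://localhost:8000"  # assumed uvicorn default

# /add -- fields inferred from payload.data / payload.dataset_id / payload.dataset_name
requests.post(f"{BASE_URL}/add", json={
    "data": "data://{absolute_path_to_directory}",  # placeholder path, replace locally
    "dataset_id": None,
    "dataset_name": "example_dataset",
})

# /cognify -- field inferred from payload.datasets
requests.post(f"{BASE_URL}/cognify", json={"datasets": ["example_dataset"]})

# /search -- field inferred from payload.query_params; the server itself now
# fixes search_type to 'SIMILARITY'
response = requests.post(f"{BASE_URL}/search", json={"query_params": {"query": "your search query here"}})
print(response.status_code, response.text)
```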
@@ -1,6 +1,6 @@
""" This module contains the search function that is used to search for nodes in the graph."""
import asyncio
from enum import Enum, auto
from enum import Enum
from typing import Dict, Any, Callable, List
from pydantic import BaseModel, field_validator
from cognee.modules.search.graph.search_adjacent import search_adjacent

@@ -8,7 +8,8 @@ from cognee.modules.search.vector.search_similarity import search_similarity
from cognee.modules.search.graph.search_categories import search_categories
from cognee.modules.search.graph.search_neighbour import search_neighbour
from cognee.modules.search.graph.search_summary import search_summary

from cognee.shared.data_models import GraphDBType
from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client

class SearchType(Enum):
    ADJACENT = 'ADJACENT'

@@ -35,12 +36,16 @@ class SearchParameters(BaseModel):
        return value


async def search(graph, search_type: str, params: Dict[str, Any]) -> List:
    search_params = SearchParameters(search_type=search_type, params=params)
    return await specific_search(graph, [search_params])
async def search(search_type: str, params: Dict[str, Any]) -> List:
    search_params = SearchParameters(search_type = search_type, params = params)
    return await specific_search([search_params])


async def specific_search(graph, query_params: List[SearchParameters]) -> List:
async def specific_search(query_params: List[SearchParameters]) -> List:
    graph_client = get_graph_client(GraphDBType.NETWORKX)
    await graph_client.load_graph_from_file()
    graph = graph_client.graph

    search_functions: Dict[SearchType, Callable] = {
        SearchType.ADJACENT: search_adjacent,
        SearchType.SIMILARITY: search_similarity,

@@ -71,23 +76,16 @@ async def specific_search(graph, query_params: List[SearchParameters]) -> List:


if __name__ == "__main__":
    from cognee.shared.data_models import GraphDBType
    from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client
    graph_client = get_graph_client(GraphDBType.NETWORKX)


    async def main(graph_client):
        await graph_client.load_graph_from_file()
        graph = graph_client.graph
    async def main():
        # Assuming 'graph' is your graph object, obtained from somewhere
        search_type = 'CATEGORIES'
        params = {'query': 'Ministarstvo', 'other_param': {"node_id": "LLM_LAYER_SUMMARY:DOCUMENT:881ecb36-2819-54c3-8147-ed80293084d6"}}

        results = await search(graph, search_type, params)
        results = await search(search_type, params)
        print(results)

    # Run the async main function
    asyncio.run(main(graph_client=graph_client))
    asyncio.run(main())
    # if __name__ == "__main__":
    #     import asyncio
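With this change callers stop passing a graph object: `specific_search` obtains it from `get_graph_client(GraphDBType.NETWORKX)` and then dispatches on `SearchType` through the `search_functions` dict. A small self-contained sketch of that dispatch pattern with a stub handler; how the real code forwards `graph` and `params` to each handler lies outside these hunks, so the keyword-style call below is an assumption:

```
# Hedged sketch of the SearchType -> handler dispatch; the stub handler reuses
# the search_similarity signature visible elsewhere in this commit.
import asyncio
from enum import Enum
from typing import Any, Callable, Dict

class SearchType(Enum):
    ADJACENT = 'ADJACENT'
    SIMILARITY = 'SIMILARITY'

async def search_similarity(query: str, graph, other_param: str = None):
    # Stand-in for the real handler, which queries the vector store.
    return [f"match for {query!r}"]

search_functions: Dict[SearchType, Callable] = {
    SearchType.SIMILARITY: search_similarity,
}

async def dispatch(search_type: str, graph, params: Dict[str, Any]):
    handler = search_functions[SearchType(search_type)]
    return await handler(graph=graph, **params)  # keyword forwarding is assumed

print(asyncio.run(dispatch('SIMILARITY', graph=None, params={'query': 'Ministarstvo'})))
```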
@@ -13,7 +13,7 @@ async def search_similarity(query: str, graph, other_param: str = None):
    for id in unique_layer_uuids:
        vector_engine = infrastructure_config.get_config()["vector_engine"]

        result = await vector_engine.search(id, query, 10)
        result = await vector_engine.search(id, query_text = query, limit = 10)

        if result:
            result_ = [ result_.id for result_ in result]
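The similarity search now calls the vector engine with keyword arguments (`query_text`, `limit`) instead of positional ones. A tiny illustration of the call shape with a stub engine; the real engine object comes from `infrastructure_config.get_config()["vector_engine"]` and its return type is not visible in this hunk:

```
# Hedged illustration only: VectorEngineStub is invented for this sketch and
# merely mirrors the keyword-based call used above.
import asyncio

class VectorEngineStub:
    async def search(self, collection_id, query_text: str, limit: int = 10):
        # A real engine would run a similarity query against collection_id.
        return []

async def demo():
    engine = VectorEngineStub()
    return await engine.search("layer-uuid", query_text = "your query", limit = 10)

print(asyncio.run(demo()))
```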
@@ -28,29 +28,29 @@ We leverage Neo4j to do the heavy lifting and dlt to load the data, and we've bu

### Setup

Create `.env` file in your project in order to store environment variables such as API keys.
Create `.env` file in your project root directory in order to store environment variables such as API keys.

Note: Don't push `.env` file to git repo as it will expose those keys to others.

If cognee is installed with Weaviate as a vector database provider, add Weaviate environment variables.
If cognee is installed with Weaviate as a vector database provider, add Weaviate environment variables:
```
WEAVIATE_URL = {YOUR_WEAVIATE_URL}
WEAVIATE_API_KEY = {YOUR_WEAVIATE_API_KEY}
```

Otherwise if cognee is installed with a default (Qdrant) vector database provider, add Qdrant environment variables.
Otherwise if cognee is installed with a default (Qdrant) vector database provider, add Qdrant environment variables:
```
QDRANT_URL = {YOUR_QDRANT_URL}
QDRANT_API_KEY = {YOUR_QDRANT_API_KEY}
```

Add OpenAI API Key environment variable
Add OpenAI API Key environment variable:
```
OPENAI_API_KEY = {YOUR_OPENAI_API_KEY}
```

Cognee stores data and system files inside the library directory, which is lost if the library folder is removed.
You can change the directories where cognee will store data and system files by calling config functions.
You can change the directories where cognee will store data and system files by calling config functions:
```
import cognee

@@ -61,7 +61,7 @@ cognee.config.data_root_directory(absolute_path_to_directory)

### Run

Add a new piece of information to storage
Add a new piece of information to the storage:
```
import cognee

@@ -105,12 +105,12 @@ cognee.add("data://{absolute_path_to_directory}", dataset_name)
# This will add just directory 2024 under reports.
```

Use LLMs and cognee to create graphs
Use LLMs and cognee to create graphs:
```
cognee.cognify(dataset_name)
```

Render the graph with our util function
Render the graph with our util function:

```
from cognee.utils import render_graph

@@ -120,13 +120,9 @@ graph_url = await render_graph(graph)
print(graph_url)
```

Query the graph for a piece of information
Query the graph for a piece of information:
```
query_params = {
    SearchType.SIMILARITY: {'query': 'your search query here'}
}

search_results = cognee.search(graph, query_params)
search_results = cognee.search('SIMILARITY', "query_search")

print(search_results)
```
@@ -316,7 +316,7 @@
    "search_type = 'SIMILARITY'\n",
    "params = {'query': 'Ministarstvo', 'other_param': {\"node_id\": \"LLM_LAYER_SUMMARY:DOCUMENT:881ecb36-2819-54c3-8147-ed80293084d6\"}}\n",
    "\n",
    "results = await search(graph, search_type, params)\n",
    "results = await search(search_type, params)\n",
    "\n",
    "for result in results[0]:\n",
    "    print(result)"
@@ -14,7 +14,8 @@ classifiers = [
    "Topic :: Software Development :: Libraries",
    "Operating System :: MacOS :: MacOS X",
    "Operating System :: POSIX :: Linux",
    "Operating System :: Microsoft :: Windows",]
    "Operating System :: Microsoft :: Windows"
]

[tool.poetry.dependencies]
python = "~3.11"