Fix linter issues

vasilije 2025-01-05 19:21:09 +01:00
parent 60c8fd103b
commit 649fcf2ba8
9 changed files with 31 additions and 35 deletions

View file

@@ -4,7 +4,7 @@ from logging.config import fileConfig
 from sqlalchemy import pool
 from sqlalchemy.engine import Connection
 from sqlalchemy.ext.asyncio import async_engine_from_config
+from cognee.infrastructure.databases.relational import Base
 from alembic import context
 # this is the Alembic Config object, which provides
@@ -20,7 +20,6 @@ if config.config_file_name is not None:
 # for 'autogenerate' support
 # from myapp import mymodel
 # target_metadata = mymodel.Base.metadata
-from cognee.infrastructure.databases.relational import Base
 target_metadata = Base.metadata
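
The hunk hoists the Base import to the top of Alembic's env.py, which reads as a fix for E402 ("module level import not at top of file"). A generic sketch of the rule, with placeholder names rather than the repo's:

import logging  # OK: imports lead the module

configured = True  # first executable statement

# Placing an import below this point, e.g.
#   from myapp.models import Base
# would trigger E402; the fix is hoisting it above all executable
# statements, exactly as this hunk does.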

View file

@@ -7,9 +7,25 @@ import sentry_sdk
 from fastapi import FastAPI, status
 from fastapi.responses import JSONResponse, Response
 from fastapi.middleware.cors import CORSMiddleware
+from cognee.api.v1.permissions.routers import get_permissions_router
+from cognee.api.v1.settings.routers import get_settings_router
+from cognee.api.v1.datasets.routers import get_datasets_router
+from cognee.api.v1.cognify.routers import get_cognify_router
+from cognee.api.v1.search.routers import get_search_router
+from cognee.api.v1.add.routers import get_add_router
+from fastapi import Request
+from fastapi.encoders import jsonable_encoder
+from fastapi.exceptions import RequestValidationError
 from cognee.exceptions import CogneeApiError
 from traceback import format_exc
+from cognee.api.v1.users.routers import (
+    get_auth_router,
+    get_register_router,
+    get_reset_password_router,
+    get_verify_router,
+    get_users_router,
+)
+from contextlib import asynccontextmanager

 # Set up logging
 logging.basicConfig(
@@ -25,7 +41,6 @@ if os.getenv("ENV", "prod") == "prod":
         profiles_sample_rate=1.0,
     )
-from contextlib import asynccontextmanager

 app_environment = os.getenv("ENV", "prod")
@@ -58,23 +73,6 @@ app.add_middleware(
     allow_headers=["*"],
 )
-from cognee.api.v1.users.routers import (
-    get_auth_router,
-    get_register_router,
-    get_reset_password_router,
-    get_verify_router,
-    get_users_router,
-)
-from cognee.api.v1.permissions.routers import get_permissions_router
-from cognee.api.v1.settings.routers import get_settings_router
-from cognee.api.v1.datasets.routers import get_datasets_router
-from cognee.api.v1.cognify.routers import get_cognify_router
-from cognee.api.v1.search.routers import get_search_router
-from cognee.api.v1.add.routers import get_add_router
-from fastapi import Request
-from fastapi.encoders import jsonable_encoder
-from fastapi.exceptions import RequestValidationError

 @app.exception_handler(RequestValidationError)
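
The router imports consolidated at the top of the module feed app.include_router(...) registrations further down. A minimal, self-contained sketch of that factory pattern (hypothetical get_example_router, not the repo's actual wiring):

from fastapi import APIRouter, FastAPI

def get_example_router() -> APIRouter:
    # Hypothetical factory mirroring the get_*_router helpers above.
    router = APIRouter()

    @router.get("/ping")
    def ping():
        return {"status": "ok"}

    return router

app = FastAPI()
app.include_router(get_example_router(), prefix="/api/v1/example")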

View file

@@ -21,12 +21,12 @@ from cognee.tasks.repo_processor import (
 )
 from cognee.tasks.repo_processor.get_source_code_chunks import get_source_code_chunks
 from cognee.tasks.storage import add_data_points
+from cognee.tasks.summarization import summarize_code, summarize_text

 monitoring = get_base_config().monitoring_tool
 if monitoring == MonitoringTool.LANGFUSE:
     from langfuse.decorators import observe
-from cognee.tasks.summarization import summarize_code, summarize_text

 logger = logging.getLogger("code_graph_pipeline")
 update_status_lock = asyncio.Lock()
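
The summarization import moves above the conditional block, so only the optional Langfuse dependency stays inside the if. When a decorator like observe is imported conditionally, a common companion pattern is a no-op fallback so decorated functions still work without the package; a sketch under that assumption, not the repo's code:

try:
    from langfuse.decorators import observe
except ImportError:
    def observe(*args, **kwargs):
        # No-op stand-in: @observe(as_type="...") returns the function unchanged.
        def decorator(func):
            return func
        return decorator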

View file

@@ -217,7 +217,7 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface):
     async def retrieve(self, data_point_ids: list[UUID]):
         result = self.query(
-            f"MATCH (node) WHERE node.id IN $node_ids RETURN node",
+            "MATCH (node) WHERE node.id IN $node_ids RETURN node",
             {
                 "node_ids": [str(data_point) for data_point in data_point_ids],
             },
@@ -343,7 +343,7 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface):
     async def delete_data_points(self, collection_name: str, data_point_ids: list[UUID]):
         return self.query(
-            f"MATCH (node) WHERE node.id IN $node_ids DETACH DELETE node",
+            "MATCH (node) WHERE node.id IN $node_ids DETACH DELETE node",
             {
                 "node_ids": [str(data_point) for data_point in data_point_ids],
             },
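
Both query strings carried an f-prefix with no placeholders, which linters flag as F541; the $node_ids token is a Cypher-style query parameter bound by the driver at execution time, not a Python interpolation. A quick check that dropping the prefix changes nothing:

prefixed = f"MATCH (node) WHERE node.id IN $node_ids RETURN node"  # F541: no placeholders
plain = "MATCH (node) WHERE node.id IN $node_ids RETURN node"
assert prefixed == plain  # identical strings; the f-prefix was dead weight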

View file

@@ -42,7 +42,7 @@ class OpenAIAdapter(LLMInterface):
         self.endpoint = endpoint
         self.api_version = api_version
         self.streaming = streaming
-        base_config = get_base_config()

     @observe(as_type="generation")
     async def acreate_structured_output(
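
The deleted base_config binding was assigned but never read, the pattern reported as F841 ("local variable assigned but never used"). A generic illustration (hypothetical load_config, not this adapter's code):

def connect(endpoint: str) -> dict:
    # config = load_config()  <- bound but never read: F841.
    # If the call has no needed side effects, delete the whole statement,
    # as this hunk does; otherwise keep the bare call without the binding.
    return {"endpoint": endpoint}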

View file

@@ -51,7 +51,7 @@ def remove_stop_words(text: str):
     stop_words = set(stopwords.words("english"))
     text = text.split()
-    text = [word for word in text if not word in stop_words]
+    text = [word for word in text if word not in stop_words]
     return " ".join(text)

View file

@@ -1,8 +1,7 @@
-import logging
-
-logger = logging.getLogger("task:repo_processor")
-
 from .enrich_dependency_graph import enrich_dependency_graph
 from .expand_dependency_graph import expand_dependency_graph
 from .get_non_code_files import get_data_list_for_user, get_non_py_files
 from .get_repo_file_dependencies import get_repo_file_dependencies
+import logging
+
+logger = logging.getLogger("task:repo_processor")

View file

@@ -33,14 +33,14 @@ print(dataset.goldens)
 print(dataset)

-import pytest
-from deepeval import assert_test
+# import pytest
+# from deepeval import assert_test
 from deepeval.metrics import AnswerRelevancyMetric

 answer_relevancy_metric = AnswerRelevancyMetric(threshold=0.5)

-from deepeval import evaluate
+# from deepeval import evaluate
 # evaluate(dataset, [answer_relevancy_metric])
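
Commenting these imports rather than deleting them matches the already-commented evaluate call: the unused-import warning (F401) is silenced while the breadcrumb for re-enabling the test remains. Generic shape of the rule, with stdlib stand-ins:

import json  # used below: no warning
# import csv  <- if this line were active but csv never used, F401 would flag it

print(json.dumps({"threshold": 0.5}))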

View file

@@ -33,7 +33,7 @@ def benchmark_function(func: Callable, *args, num_runs: int = 5) -> Dict[str, An
         start_time = time.perf_counter()
         start_cpu_time = process.cpu_times()
-        result = func(*args)
+        func(*args)
         end_cpu_time = process.cpu_times()
         end_time = time.perf_counter()
@@ -45,7 +45,7 @@ def benchmark_function(func: Callable, *args, num_runs: int = 5) -> Dict[str, An
         )
         current, peak = tracemalloc.get_traced_memory()
         final_memory = process.memory_info().rss
-        memory_used = final_memory - initial_memory

         # Store results
         execution_times.append(execution_time)
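
The removed result and memory_used bindings were never read (F841); the func(*args) call itself must stay, since it is the thing being measured. A condensed, runnable sketch of the same measurement pattern using only the standard library (psutil's CPU and RSS readings omitted):

import time
import tracemalloc

def bench_once(func, *args):
    tracemalloc.start()
    start = time.perf_counter()
    func(*args)  # return value intentionally discarded, as in the fixed code
    elapsed = time.perf_counter() - start
    _, peak = tracemalloc.get_traced_memory()
    tracemalloc.stop()
    return {"seconds": elapsed, "peak_bytes": peak}

print(bench_once(sum, range(100_000)))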