Added logger suppression and database configuration logs

This commit is contained in:
vasilije 2025-07-01 13:35:25 +02:00
parent d14e5149c1
commit 41f8eaf28d
5 changed files with 67 additions and 1 deletion

View file

@ -102,7 +102,7 @@ handlers =
qualname = sqlalchemy.engine qualname = sqlalchemy.engine
[logger_alembic] [logger_alembic]
level = INFO level = WARN
handlers = handlers =
qualname = alembic qualname = alembic

View file

@ -18,6 +18,11 @@ from cognee.modules.search.types import SearchType
from cognee.shared.data_models import KnowledgeGraph from cognee.shared.data_models import KnowledgeGraph
from cognee.modules.storage.utils import JSONEncoder from cognee.modules.storage.utils import JSONEncoder
# Import database configuration functions for logging
from cognee.infrastructure.databases.relational.config import get_relational_config
from cognee.infrastructure.databases.vector.config import get_vectordb_config
from cognee.infrastructure.databases.graph.config import get_graph_config
try: try:
from codingagents.coding_rule_associations import ( from codingagents.coding_rule_associations import (
add_rule_associations, add_rule_associations,
@ -539,6 +544,39 @@ def load_class(model_file, model_name):
return model_class return model_class
def log_database_configuration():
    """Best-effort logging of the active database configuration.

    Emits one INFO line per configured backend (relational, vector, graph)
    plus provider-specific connection details. Never raises: any failure to
    read a configuration is downgraded to a single WARNING so that server
    startup is not blocked by diagnostics.
    """
    try:
        _log_relational_database()
        _log_vector_database()
        _log_graph_database()
    except Exception as e:
        # Diagnostics only — a missing/broken config must not abort startup.
        logger.warning(f"Could not retrieve database configuration: {str(e)}")


def _log_relational_database():
    # Relational backend: provider name, then postgres- or sqlite-specific details.
    relational_config = get_relational_config()
    logger.info(f"Relational database: {relational_config.db_provider}")
    if relational_config.db_provider == "postgres":
        logger.info(f"Postgres host: {relational_config.db_host}:{relational_config.db_port}")
        logger.info(f"Postgres database: {relational_config.db_name}")
    elif relational_config.db_provider == "sqlite":
        logger.info(f"SQLite path: {relational_config.db_path}")
        logger.info(f"SQLite database: {relational_config.db_name}")


def _log_vector_database():
    # Vector backend: lancedb is file-backed (path), the others are URL-addressed.
    vector_config = get_vectordb_config()
    logger.info(f"Vector database: {vector_config.vector_db_provider}")
    if vector_config.vector_db_provider == "lancedb":
        logger.info(f"Vector database path: {vector_config.vector_db_url}")
    elif vector_config.vector_db_provider in ("qdrant", "weaviate", "pgvector"):
        logger.info(f"Vector database URL: {vector_config.vector_db_url}")


def _log_graph_database():
    # Graph backend: kuzu is file-backed (path), neo4j/falkordb are URL-addressed.
    graph_config = get_graph_config()
    logger.info(f"Graph database: {graph_config.graph_database_provider}")
    if graph_config.graph_database_provider == "kuzu":
        logger.info(f"Graph database path: {graph_config.graph_file_path}")
    elif graph_config.graph_database_provider in ("neo4j", "falkordb"):
        logger.info(f"Graph database URL: {graph_config.graph_database_url}")
async def main(): async def main():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
@ -551,6 +589,9 @@ async def main():
args = parser.parse_args() args = parser.parse_args()
# Log database configurations
log_database_configuration()
logger.info(f"Starting MCP server with transport: {args.transport}") logger.info(f"Starting MCP server with transport: {args.transport}")
if args.transport == "stdio": if args.transport == "stdio":
await mcp.run_stdio_async() await mcp.run_stdio_async()

View file

@ -1,4 +1,5 @@
import litellm import litellm
import logging
from pydantic import BaseModel from pydantic import BaseModel
from typing import Type, Optional from typing import Type, Optional
from litellm import acompletion, JSONSchemaValidationError from litellm import acompletion, JSONSchemaValidationError
@ -13,6 +14,13 @@ from cognee.infrastructure.llm.rate_limiter import (
sleep_and_retry_async, sleep_and_retry_async,
) )
# Silence Litellm's own verbose/debug chatter so it does not flood application logs.
litellm.set_verbose = False
# Raise both Litellm logger name variants to CRITICAL so ERROR-level noise from the
# library is suppressed (stdlib logging is used here because this configures an
# external library's loggers, not this project's).
# NOTE(review): this same suppression block is duplicated in the other LLM adapter
# modules — consider extracting a shared helper.
logging.getLogger("LiteLLM").setLevel(logging.CRITICAL)
logging.getLogger("litellm").setLevel(logging.CRITICAL)
logger = get_logger() logger = get_logger()
observe = get_observe() observe = get_observe()

View file

@ -1,5 +1,6 @@
"""Adapter for Generic API LLM provider API""" """Adapter for Generic API LLM provider API"""
import logging
from typing import Type from typing import Type
from pydantic import BaseModel from pydantic import BaseModel
@ -7,8 +8,15 @@ import instructor
from cognee.infrastructure.llm.llm_interface import LLMInterface from cognee.infrastructure.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.config import get_llm_config from cognee.infrastructure.llm.config import get_llm_config
from cognee.infrastructure.llm.rate_limiter import rate_limit_async, sleep_and_retry_async from cognee.infrastructure.llm.rate_limiter import rate_limit_async, sleep_and_retry_async
from cognee.shared.logging_utils import get_logger
import litellm import litellm
# Silence Litellm's own verbose/debug chatter so it does not flood application logs.
litellm.set_verbose = False
# Raise both Litellm logger name variants to CRITICAL so ERROR-level noise from the
# library is suppressed (stdlib logging is used here because this configures an
# external library's loggers, not this project's).
# NOTE(review): this same suppression block is duplicated in the other LLM adapter
# modules — consider extracting a shared helper.
logging.getLogger("LiteLLM").setLevel(logging.CRITICAL)
logging.getLogger("litellm").setLevel(logging.CRITICAL)
class GenericAPIAdapter(LLMInterface): class GenericAPIAdapter(LLMInterface):
""" """

View file

@ -16,6 +16,15 @@ from cognee.infrastructure.llm.rate_limiter import (
sleep_and_retry_sync, sleep_and_retry_sync,
) )
from cognee.modules.observability.get_observe import get_observe from cognee.modules.observability.get_observe import get_observe
from cognee.shared.logging_utils import get_logger
import logging
# Silence Litellm's own verbose/debug chatter so it does not flood application logs.
litellm.set_verbose = False
# Raise both Litellm logger name variants to CRITICAL so ERROR-level noise from the
# library is suppressed (stdlib logging configures the external library's loggers).
# NOTE(review): this same suppression block is duplicated in the other LLM adapter
# modules — consider extracting a shared helper.
logging.getLogger("LiteLLM").setLevel(logging.CRITICAL)
logging.getLogger("litellm").setLevel(logging.CRITICAL)
observe = get_observe() observe = get_observe()