better looking example

This commit is contained in:
Alexander Belikov 2025-11-13 17:15:39 +01:00
parent fc0a417775
commit 3f33d30c33

View file

@@ -1,17 +1,83 @@
import os
import asyncio
import argparse
import logging
import logging.config
import json
from pathlib import Path
from lightrag import LightRAG
from lightrag.llm.openai import gpt_4o_mini_complete, openai_embed
from lightrag.kg.shared_storage import initialize_pipeline_status
from lightrag.utils import setup_logger
import os
import asyncio
import json
from pathlib import Path
from lightrag.utils import logger, set_verbose_debug
# Directory where LightRAG stores the TigerGraph demo's working files.
WORKING_DIR = "./tigergraph_test_dir"

# Create the working directory on first run; later runs reuse it.
if not Path(WORKING_DIR).exists():
    os.mkdir(WORKING_DIR)

# Baseline logger setup; configure_logging() installs the full handler config.
setup_logger("lightrag", level="INFO")
def configure_logging():
    """Set up console and rotating-file logging for the demo.

    Clears any handlers left over from previous configuration, then installs
    a stderr console handler and a size-rotating file handler on the
    "lightrag" logger. Rotation and verbosity are controlled through the
    LOG_DIR, LOG_MAX_BYTES, LOG_BACKUP_COUNT and VERBOSE_DEBUG environment
    variables.
    """
    # Wipe stale handlers/filters so dictConfig starts from a clean slate.
    for name in ("uvicorn", "uvicorn.access", "uvicorn.error", "lightrag"):
        existing = logging.getLogger(name)
        existing.handlers = []
        existing.filters = []

    # Resolve the log file location; LOG_DIR overrides the current directory.
    log_dir = os.getenv("LOG_DIR", os.getcwd())
    log_file_path = os.path.abspath(
        os.path.join(log_dir, "lightrag_tigergraph_demo.log")
    )

    print(f"\nLightRAG TigerGraph demo log file: {log_file_path}\n")
    os.makedirs(os.path.dirname(log_file_path), exist_ok=True)

    # Rotation policy: 10 MB per file and 5 backups unless overridden.
    log_max_bytes = int(os.getenv("LOG_MAX_BYTES", 10485760))
    log_backup_count = int(os.getenv("LOG_BACKUP_COUNT", 5))

    formatters = {
        "default": {
            "format": "%(levelname)s: %(message)s",
        },
        "detailed": {
            "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        },
    }
    handlers = {
        "console": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
        "file": {
            "formatter": "detailed",
            "class": "logging.handlers.RotatingFileHandler",
            "filename": log_file_path,
            "maxBytes": log_max_bytes,
            "backupCount": log_backup_count,
            "encoding": "utf-8",
        },
    }
    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": formatters,
            "handlers": handlers,
            "loggers": {
                "lightrag": {
                    "handlers": ["console", "file"],
                    "level": "INFO",
                    "propagate": False,
                },
            },
        }
    )

    # Pin the package logger to INFO, then optionally enable verbose debug.
    logger.setLevel(logging.INFO)
    set_verbose_debug(os.getenv("VERBOSE_DEBUG", "false").lower() == "true")
def load_json_texts(json_path: str | Path) -> list[str]:
@@ -66,7 +132,7 @@ async def initialize_rag():
return rag
async def test_ingestion():
async def test_ingestion(json_file=None):
"""Test document ingestion into TigerGraph"""
print("=" * 60)
print("Initializing LightRAG with TigerGraph...")
@@ -92,8 +158,8 @@ async def test_ingestion():
track_id = await rag.ainsert(input=doc, file_paths=f"test_doc_{i}.txt")
print(f" ✓ Document inserted with track_id: {track_id}")
# Test JSON ingestion if JSON file exists
json_test_file = Path("test_data.json")
# Test JSON ingestion if JSON file is provided or exists
json_test_file = Path(json_file) if json_file else Path("test_data.json")
if json_test_file.exists():
print("\n" + "=" * 60)
print("Ingesting JSON file...")
@@ -118,6 +184,7 @@ async def test_ingestion():
)
print(" Create a test_data.json file with format:")
print(' [{"text": "Your text here"}, {"text": "Another text"}]')
print(" Or use --json-file parameter to specify a JSON file")
print("\n" + "=" * 60)
print("Verifying ingestion...")
@@ -159,4 +226,18 @@ async def test_ingestion():
if __name__ == "__main__":
    # Build the CLI: a single optional flag pointing at a JSON ingest file.
    arg_parser = argparse.ArgumentParser(
        description="LightRAG TigerGraph demo",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    arg_parser.add_argument(
        "--json-file",
        type=str,
        default=None,
        help='Path to JSON file with texts to ingest (format: [{"text": "..."}, ...]). Defaults to test_data.json if not specified.',
    )
    cli_args = arg_parser.parse_args()

    # Logging must be configured before any demo code runs.
    configure_logging()
    asyncio.run(test_ingestion(json_file=cli_args.json_file))