Remove redundant tokenizer checks

This commit is contained in:
yangdx 2025-07-23 10:19:45 +08:00
parent ce9dac9bcf
commit 2d41e5313a

View file

@@ -2020,7 +2020,6 @@ async def _build_query_context(
     # Unified token control system - Apply precise token limits to entities and relations
     tokenizer = text_chunks_db.global_config.get("tokenizer")
-    if tokenizer:
     # Get new token limits from query_param (with fallback to global_config)
     max_entity_tokens = getattr(
         query_param,
@@ -2145,9 +2144,9 @@ async def _build_query_context(
     if chunks:
         all_chunks.extend(chunks)

-    # Apply token processing to chunks if tokenizer is available
+    # Apply token processing to chunks
     text_units_context = []
-    if tokenizer and all_chunks:
+    if all_chunks:
         # Calculate dynamic token limit for text chunks
         entities_str = json.dumps(entities_context, ensure_ascii=False)
         relations_str = json.dumps(relations_context, ensure_ascii=False)