diff --git a/lightrag/base.py b/lightrag/base.py
index 9671f1b7..b89e114d 100644
--- a/lightrag/base.py
+++ b/lightrag/base.py
@@ -233,8 +233,12 @@ class BaseVectorStorage(StorageNameSpace, ABC):
             return self.embedding_func.get_model_identifier()
         elif 'embedding_func' in self.global_config:
             original_embedding_func = self.global_config['embedding_func']
-            if hasattr(original_embedding_func, 'get_model_identifier'):
+            if original_embedding_func is not None and hasattr(original_embedding_func, 'get_model_identifier'):
                 return original_embedding_func.get_model_identifier()
+            else:
+                # Debug: log why we couldn't get model identifier
+                from lightrag.utils import logger
+                logger.debug(f"Could not get model_identifier: embedding_func is {type(original_embedding_func)}, has method={hasattr(original_embedding_func, 'get_model_identifier') if original_embedding_func else False}")
 
         # Fallback: no model identifier available
         return ""
diff --git a/lightrag/lightrag.py b/lightrag/lightrag.py
index 8a638759..9fd5a4b3 100644
--- a/lightrag/lightrag.py
+++ b/lightrag/lightrag.py
@@ -518,14 +518,10 @@ class LightRAG:
                 f"max_total_tokens({self.summary_max_tokens}) should greater than summary_length_recommended({self.summary_length_recommended})"
             )
 
-        # Fix global_config now
-        global_config = asdict(self)
-
-        _print_config = ",\n ".join([f"{k} = {v}" for k, v in global_config.items()])
-        logger.debug(f"LightRAG init with param:\n {_print_config}\n")
-
         # Init Embedding
-        # Step 1: Capture max_token_size before applying decorator (decorator strips dataclass attributes)
+        # Step 1: Capture embedding_func and max_token_size before applying decorator
+        # (decorator strips dataclass attributes, and asdict() converts EmbeddingFunc to dict)
+        original_embedding_func = self.embedding_func
         embedding_max_token_size = None
         if self.embedding_func and hasattr(self.embedding_func, "max_token_size"):
             embedding_max_token_size = self.embedding_func.max_token_size
@@ -534,6 +530,14 @@
             )
 
         self.embedding_token_limit = embedding_max_token_size
+        # Fix global_config now
+        global_config = asdict(self)
+        # Restore original EmbeddingFunc object (asdict converts it to dict)
+        global_config['embedding_func'] = original_embedding_func
+
+        _print_config = ",\n ".join([f"{k} = {v}" for k, v in global_config.items()])
+        logger.debug(f"LightRAG init with param:\n {_print_config}\n")
+
         # Step 2: Apply priority wrapper decorator
         self.embedding_func = priority_limit_async_func_call(
             self.embedding_func_max_async,