Modernize type hints and remove Python 3.8 compatibility code

• Use collections.abc.AsyncIterator only
• Remove sys.version_info checks
• Use union syntax for None types
• Simplify string emptiness checks
This commit is contained in:
yangdx 2025-10-02 23:15:42 +08:00
parent cec784f60e
commit 112349ed5b
2 changed files with 15 additions and 27 deletions

View file

@@ -1,11 +1,6 @@
-import sys
-
-if sys.version_info < (3, 9):
-    from typing import AsyncIterator
-else:
-    from collections.abc import AsyncIterator
+from collections.abc import AsyncIterator

-import pipmaster as pm
+import pipmaster as pm  # Pipmaster for dynamic library install

 # install specific modules
 if not pm.is_installed("ollama"):

View file

@@ -1,14 +1,10 @@
 from ..utils import verbose_debug, VERBOSE_DEBUG
-import sys
 import os
 import logging
-
-if sys.version_info < (3, 9):
-    from typing import AsyncIterator
-else:
-    from collections.abc import AsyncIterator
+from collections.abc import AsyncIterator

-import pipmaster as pm
+import pipmaster as pm  # Pipmaster for dynamic library install

 # install specific modules
 if not pm.is_installed("openai"):
     pm.install("openai")
@@ -54,7 +50,7 @@ class InvalidResponseError(Exception):
 def create_openai_async_client(
     api_key: str | None = None,
     base_url: str | None = None,
-    client_configs: dict[str, Any] = None,
+    client_configs: dict[str, Any] | None = None,
 ) -> AsyncOpenAI:
     """Create an AsyncOpenAI client with the given configuration.
@ -119,7 +115,7 @@ async def openai_complete_if_cache(
) -> str: ) -> str:
"""Complete a prompt using OpenAI's API with caching support and Chain of Thought (COT) integration. """Complete a prompt using OpenAI's API with caching support and Chain of Thought (COT) integration.
This function supports automatic integration of reasoning content (思维链) from models that provide This function supports automatic integration of reasoning content from models that provide
Chain of Thought capabilities. The reasoning content is seamlessly integrated into the response Chain of Thought capabilities. The reasoning content is seamlessly integrated into the response
using <think>...</think> tags. using <think>...</think> tags.
@@ -264,19 +260,16 @@ async def openai_complete_if_cache(
             delta = chunk.choices[0].delta
             content = getattr(delta, "content", None)
-            reasoning_content = getattr(delta, "reasoning_content", None)
+            reasoning_content = getattr(delta, "reasoning_content", "")

             # Handle COT logic for streaming (only if enabled)
             if enable_cot:
-                if content is not None and content != "":
+                if content:
                     # Regular content is present
                     if not initial_content_seen:
                         initial_content_seen = True
                         # If both content and reasoning_content are present initially, don't start COT
-                        if (
-                            reasoning_content is not None
-                            and reasoning_content != ""
-                        ):
+                        if reasoning_content:
                             cot_active = False
                             cot_started = False
@@ -290,7 +283,7 @@ async def openai_complete_if_cache(
                     content = safe_unicode_decode(content.encode("utf-8"))
                 yield content
-            elif reasoning_content is not None and reasoning_content != "":
+            elif reasoning_content:
                 # Only reasoning content is present
                 if not initial_content_seen and not cot_started:
                     # Start COT if we haven't seen initial content yet
@@ -308,7 +301,7 @@ async def openai_complete_if_cache(
                 yield reasoning_content
             else:
                 # COT disabled, only process regular content
-                if content is not None and content != "":
+                if content:
                     if r"\u" in content:
                         content = safe_unicode_decode(content.encode("utf-8"))
                     yield content
@@ -415,7 +408,7 @@ async def openai_complete_if_cache(
         message = response.choices[0].message
         content = getattr(message, "content", None)
-        reasoning_content = getattr(message, "reasoning_content", None)
+        reasoning_content = getattr(message, "reasoning_content", "")

         # Handle COT logic for non-streaming responses (only if enabled)
         final_content = ""
@@ -582,9 +575,9 @@ async def nvidia_openai_complete(
 async def openai_embed(
     texts: list[str],
     model: str = "text-embedding-3-small",
-    base_url: str = None,
-    api_key: str = None,
-    client_configs: dict[str, Any] = None,
+    base_url: str | None = None,
+    api_key: str | None = None,
+    client_configs: dict[str, Any] | None = None,
 ) -> np.ndarray:
     """Generate embeddings for a list of texts using OpenAI's API.