Merge PR #949: fix(provider): filter empty text content blocks causing API 400
This commit is contained in:
@@ -39,6 +39,46 @@ class LLMProvider(ABC):
|
|||||||
def __init__(self, api_key: str | None = None, api_base: str | None = None):
|
def __init__(self, api_key: str | None = None, api_base: str | None = None):
|
||||||
self.api_key = api_key
|
self.api_key = api_key
|
||||||
self.api_base = api_base
|
self.api_base = api_base
|
||||||
|
|
||||||
|
@staticmethod
def _sanitize_empty_content(messages: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Replace empty text content that causes provider 400 errors.

    Empty content can appear when MCP tools return nothing. Most providers
    reject empty-string content, empty content lists, or empty text blocks
    in list content.

    Args:
        messages: Chat messages in provider wire format (``role``/``content``
            dicts, optionally with ``tool_calls``).

    Returns:
        A new list in the same order. Messages that needed no change are the
        original dicts; sanitized messages are shallow copies, so the input
        list and its untouched entries are never mutated.
    """
    result: list[dict[str, Any]] = []
    for msg in messages:
        content = msg.get("content")

        # Case 1: content is the empty string.
        if isinstance(content, str) and not content:
            clean = dict(msg)
            # Assistant messages that carry tool_calls may legally have
            # null content; anything else gets a non-empty placeholder.
            clean["content"] = (
                None
                if (msg.get("role") == "assistant" and msg.get("tool_calls"))
                else "(empty)"
            )
            result.append(clean)
            continue

        # Case 2: content is a list of blocks — drop empty text blocks.
        if isinstance(content, list):
            filtered = [
                item for item in content
                if not (
                    isinstance(item, dict)
                    and item.get("type") in ("text", "input_text", "output_text")
                    and not item.get("text")
                )
            ]
            # Fix: also sanitize an already-empty list (`content == []`),
            # which previously fell through untouched and still triggers
            # empty-content 400s — it is the same defect class this helper
            # exists to repair.
            if not content or len(filtered) != len(content):
                clean = dict(msg)
                if filtered:
                    clean["content"] = filtered
                elif msg.get("role") == "assistant" and msg.get("tool_calls"):
                    clean["content"] = None
                else:
                    clean["content"] = "(empty)"
                result.append(clean)
                continue

        # Anything else (non-empty string, None, unknown types) passes through.
        result.append(msg)
    return result
@abstractmethod
|
@abstractmethod
|
||||||
async def chat(
|
async def chat(
|
||||||
|
|||||||
@@ -19,8 +19,12 @@ class CustomProvider(LLMProvider):
|
|||||||
|
|
||||||
async def chat(self, messages: list[dict[str, Any]], tools: list[dict[str, Any]] | None = None,
|
async def chat(self, messages: list[dict[str, Any]], tools: list[dict[str, Any]] | None = None,
|
||||||
model: str | None = None, max_tokens: int = 4096, temperature: float = 0.7) -> LLMResponse:
|
model: str | None = None, max_tokens: int = 4096, temperature: float = 0.7) -> LLMResponse:
|
||||||
kwargs: dict[str, Any] = {"model": model or self.default_model, "messages": messages,
|
kwargs: dict[str, Any] = {
|
||||||
"max_tokens": max(1, max_tokens), "temperature": temperature}
|
"model": model or self.default_model,
|
||||||
|
"messages": self._sanitize_empty_content(messages),
|
||||||
|
"max_tokens": max(1, max_tokens),
|
||||||
|
"temperature": temperature,
|
||||||
|
}
|
||||||
if tools:
|
if tools:
|
||||||
kwargs.update(tools=tools, tool_choice="auto")
|
kwargs.update(tools=tools, tool_choice="auto")
|
||||||
try:
|
try:
|
||||||
@@ -45,3 +49,4 @@ class CustomProvider(LLMProvider):
|
|||||||
|
|
||||||
def get_default_model(self) -> str:
    """Name of the model used when a call does not specify one."""
    return self.default_model
||||||
|
|
||||||
|
|||||||
@@ -196,7 +196,7 @@ class LiteLLMProvider(LLMProvider):
|
|||||||
|
|
||||||
kwargs: dict[str, Any] = {
|
kwargs: dict[str, Any] = {
|
||||||
"model": model,
|
"model": model,
|
||||||
"messages": self._sanitize_messages(messages),
|
"messages": self._sanitize_messages(self._sanitize_empty_content(messages)),
|
||||||
"max_tokens": max_tokens,
|
"max_tokens": max_tokens,
|
||||||
"temperature": temperature,
|
"temperature": temperature,
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user