diff --git a/nanobot/providers/base.py b/nanobot/providers/base.py index c69c38b..eb1599a 100644 --- a/nanobot/providers/base.py +++ b/nanobot/providers/base.py @@ -39,6 +39,46 @@ class LLMProvider(ABC): def __init__(self, api_key: str | None = None, api_base: str | None = None): self.api_key = api_key self.api_base = api_base + + @staticmethod + def _sanitize_empty_content(messages: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Replace empty text content that causes provider 400 errors. + + Empty content can appear when MCP tools return nothing. Most providers + reject empty-string content or empty text blocks in list content. + """ + result: list[dict[str, Any]] = [] + for msg in messages: + content = msg.get("content") + + if isinstance(content, str) and not content: + clean = dict(msg) + clean["content"] = None if (msg.get("role") == "assistant" and msg.get("tool_calls")) else "(empty)" + result.append(clean) + continue + + if isinstance(content, list): + filtered = [ + item for item in content + if not ( + isinstance(item, dict) + and item.get("type") in ("text", "input_text", "output_text") + and not item.get("text") + ) + ] + if len(filtered) != len(content): + clean = dict(msg) + if filtered: + clean["content"] = filtered + elif msg.get("role") == "assistant" and msg.get("tool_calls"): + clean["content"] = None + else: + clean["content"] = "(empty)" + result.append(clean) + continue + + result.append(msg) + return result @abstractmethod async def chat( diff --git a/nanobot/providers/custom_provider.py b/nanobot/providers/custom_provider.py index 4022d9e..a578d14 100644 --- a/nanobot/providers/custom_provider.py +++ b/nanobot/providers/custom_provider.py @@ -19,8 +19,12 @@ class CustomProvider(LLMProvider): async def chat(self, messages: list[dict[str, Any]], tools: list[dict[str, Any]] | None = None, model: str | None = None, max_tokens: int = 4096, temperature: float = 0.7) -> LLMResponse: - kwargs: dict[str, Any] = {"model": model or 
def get_default_model(self) -> str:
    """Return the model identifier used when a call does not specify one.

    Returns:
        The provider's configured default model name (``self.default_model``,
        set at construction — not shown in this hunk, so confirm against the
        full class definition).
    """
    return self.default_model