From 15529c668e51c623ab860509b12346e3dfe956d6 Mon Sep 17 00:00:00 2001 From: Re-bin Date: Sun, 1 Mar 2026 12:53:18 +0000 Subject: [PATCH] fix(web): sanitize proxy logs and polish search key hint --- nanobot/agent/tools/web.py | 27 ++++++++------------------- 1 file changed, 8 insertions(+), 19 deletions(-) diff --git a/nanobot/agent/tools/web.py b/nanobot/agent/tools/web.py index 0d2135d..0d8f4d1 100644 --- a/nanobot/agent/tools/web.py +++ b/nanobot/agent/tools/web.py @@ -71,17 +71,14 @@ class WebSearchTool(Tool): async def execute(self, query: str, count: int | None = None, **kwargs: Any) -> str: if not self.api_key: return ( - "Error: Brave Search API key not configured. " - "Set it in ~/.nanobot/config.json under tools.web.search.apiKey " - "(or export BRAIVE_API_KEY), then restart the gateway." + "Error: Brave Search API key not configured. Set it in " + "~/.nanobot/config.json under tools.web.search.apiKey " + "(or export BRAVE_API_KEY), then restart the gateway." ) try: n = min(max(count or self.max_results, 1), 10) - if self.proxy: - logger.info("WebSearch: using proxy {} for query: {}", self.proxy, query[:50]) - else: - logger.debug("WebSearch: direct connection for query: {}", query[:50]) + logger.debug("WebSearch: {}", "proxy enabled" if self.proxy else "direct connection") async with httpx.AsyncClient(proxy=self.proxy) as client: r = await client.get( "https://api.search.brave.com/res/v1/web/search", @@ -91,12 +88,12 @@ class WebSearchTool(Tool): ) r.raise_for_status() - results = r.json().get("web", {}).get("results", []) + results = r.json().get("web", {}).get("results", [])[:n] if not results: return f"No results for: {query}" lines = [f"Results for: {query}\n"] - for i, item in enumerate(results[:n], 1): + for i, item in enumerate(results, 1): lines.append(f"{i}. 
{item.get('title', '')}\n    {item.get('url', '')}") if desc := item.get("description"): lines.append(f"   {desc}") @@ -132,17 +129,12 @@ class WebFetchTool(Tool): from readability import Document max_chars = maxChars or self.max_chars - - # Validate URL before fetching is_valid, error_msg = _validate_url(url) if not is_valid: return json.dumps({"error": f"URL validation failed: {error_msg}", "url": url}, ensure_ascii=False) try: - if self.proxy: - logger.info("WebFetch: using proxy {} for {}", self.proxy, url) - else: - logger.debug("WebFetch: direct connection for {}", url) + logger.debug("WebFetch: {}", "proxy enabled" if self.proxy else "direct connection") async with httpx.AsyncClient( follow_redirects=True, max_redirects=MAX_REDIRECTS, @@ -154,10 +146,8 @@ ctype = r.headers.get("content-type", "") - # JSON if "application/json" in ctype: text, extractor = json.dumps(r.json(), indent=2, ensure_ascii=False), "json" - # HTML elif "text/html" in ctype or r.text[:256].lower().startswith(("<!doctype", "<html")): text, extractor = Document(r.text).summary(), "readability" truncated = len(text) > max_chars - if truncated: - text = text[:max_chars] + if truncated: text = text[:max_chars] return json.dumps({"url": url, "finalUrl": str(r.url), "status": r.status_code, "extractor": extractor, "truncated": truncated, "length": len(text), "text": text}, ensure_ascii=False)