fix(web): sanitize proxy logs and polish search key hint
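Proxy URLs handed to httpx can embed credentials (http://user:pass@host:port), so logging the configured proxy verbatim at info level copies those secrets into the log files. The change below drops the value entirely and only records, at debug level, whether a proxy is in use. A minimal standalone sketch of the idea, assuming a loguru-style logger (the proxy URL and query are made up):

    from urllib.parse import urlsplit

    from loguru import logger

    proxy = "http://alice:s3cret@proxy.internal:8080"  # hypothetical proxy URL with embedded credentials
    query = "example search terms"

    # Before: the raw proxy value, credentials included, ends up in the logs.
    logger.info("WebSearch: using proxy {} for query: {}", proxy, query[:50])

    # After: record only the fact that a proxy is in use, and only at debug level.
    logger.debug("WebSearch: {}", "proxy enabled" if proxy else "direct connection")

    # If the proxy host is ever needed for debugging, strip the credentials first.
    logger.debug("WebSearch: proxy host {}", urlsplit(proxy).hostname)

The same pattern is applied to WebFetchTool below; the other half of the commit rewraps the "API key not configured" hint and fixes the BRAIVE_API_KEY typo in it.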
@@ -71,17 +71,14 @@ class WebSearchTool(Tool):
     async def execute(self, query: str, count: int | None = None, **kwargs: Any) -> str:
         if not self.api_key:
             return (
-                "Error: Brave Search API key not configured. "
-                "Set it in ~/.nanobot/config.json under tools.web.search.apiKey "
-                "(or export BRAIVE_API_KEY), then restart the gateway."
+                "Error: Brave Search API key not configured. Set it in "
+                "~/.nanobot/config.json under tools.web.search.apiKey "
+                "(or export BRAVE_API_KEY), then restart the gateway."
             )

         try:
             n = min(max(count or self.max_results, 1), 10)
-            if self.proxy:
-                logger.info("WebSearch: using proxy {} for query: {}", self.proxy, query[:50])
-            else:
-                logger.debug("WebSearch: direct connection for query: {}", query[:50])
+            logger.debug("WebSearch: {}", "proxy enabled" if self.proxy else "direct connection")
             async with httpx.AsyncClient(proxy=self.proxy) as client:
                 r = await client.get(
                     "https://api.search.brave.com/res/v1/web/search",
@@ -91,12 +88,12 @@ class WebSearchTool(Tool):
                 )
                 r.raise_for_status()

-            results = r.json().get("web", {}).get("results", [])
+            results = r.json().get("web", {}).get("results", [])[:n]
             if not results:
                 return f"No results for: {query}"

             lines = [f"Results for: {query}\n"]
-            for i, item in enumerate(results[:n], 1):
+            for i, item in enumerate(results, 1):
                 lines.append(f"{i}. {item.get('title', '')}\n {item.get('url', '')}")
                 if desc := item.get("description"):
                     lines.append(f" {desc}")
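An aside on the [:n] change above: slicing where results is assigned keeps the empty-results check, the numbering, and the formatting loop all working on the same truncated list, instead of slicing again inside the loop. A standalone sketch with made-up data:

    query = "httpx proxies"
    n = 2  # stand-in for the clamped result count

    # Fake items standing in for r.json()["web"]["results"]
    raw = [
        {"title": "HTTPX", "url": "https://www.python-httpx.org/", "description": "A next generation HTTP client for Python."},
        {"title": "Proxies - HTTPX", "url": "https://www.python-httpx.org/advanced/proxies/"},
        {"title": "Something else", "url": "https://example.com/"},
    ]
    results = raw[:n]  # sliced once at assignment, as in the new code

    lines = [f"Results for: {query}\n"]
    for i, item in enumerate(results, 1):
        lines.append(f"{i}. {item.get('title', '')}\n {item.get('url', '')}")
        if desc := item.get("description"):
            lines.append(f" {desc}")
    print("\n".join(lines))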
@@ -132,17 +129,12 @@ class WebFetchTool(Tool):
         from readability import Document

         max_chars = maxChars or self.max_chars

         # Validate URL before fetching
         is_valid, error_msg = _validate_url(url)
         if not is_valid:
             return json.dumps({"error": f"URL validation failed: {error_msg}", "url": url}, ensure_ascii=False)

         try:
-            if self.proxy:
-                logger.info("WebFetch: using proxy {} for {}", self.proxy, url)
-            else:
-                logger.debug("WebFetch: direct connection for {}", url)
+            logger.debug("WebFetch: {}", "proxy enabled" if self.proxy else "direct connection")
             async with httpx.AsyncClient(
                 follow_redirects=True,
                 max_redirects=MAX_REDIRECTS,
@@ -154,10 +146,8 @@ class WebFetchTool(Tool):

             ctype = r.headers.get("content-type", "")

             # JSON
             if "application/json" in ctype:
                 text, extractor = json.dumps(r.json(), indent=2, ensure_ascii=False), "json"
             # HTML
             elif "text/html" in ctype or r.text[:256].lower().startswith(("<!doctype", "<html")):
                 doc = Document(r.text)
                 content = self._to_markdown(doc.summary()) if extractMode == "markdown" else _strip_tags(doc.summary())
@@ -167,8 +157,7 @@ class WebFetchTool(Tool):
             text, extractor = r.text, "raw"

             truncated = len(text) > max_chars
-            if truncated:
-                text = text[:max_chars]
+            if truncated: text = text[:max_chars]

             return json.dumps({"url": url, "finalUrl": str(r.url), "status": r.status_code,
                                "extractor": extractor, "truncated": truncated, "length": len(text), "text": text}, ensure_ascii=False)
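For completeness, a sketch of how a caller might read the JSON payload that WebFetchTool.execute returns; the field names come from the hunks above, while the sample values and the call site are made up:

    import json

    # Hypothetical return value shaped like the json.dumps(...) call in the last hunk.
    raw = json.dumps({
        "url": "https://example.com",
        "finalUrl": "https://example.com/",
        "status": 200,
        "extractor": "raw",
        "truncated": True,
        "length": 50000,
        "text": "...",
    }, ensure_ascii=False)

    payload = json.loads(raw)
    if "error" in payload:
        print(f"fetch failed: {payload['error']}")
    elif payload["truncated"]:
        # length counts the truncated text, not the full document
        print(f"{payload['finalUrl']}: {payload['length']} chars (truncated)")
    else:
        print(f"{payload['finalUrl']}: {payload['length']} chars")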