// claw/tests/browser_script_skill_tool_test.rs
mod common;
use std::collections::HashMap;
use std::fs;
use std::panic::AssertUnwindSafe;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use std::time::{SystemTime, UNIX_EPOCH};
use common::MockTransport;
use futures_util::FutureExt;
use serde_json::json;
use sgclaw::browser::{BrowserBackend, PipeBrowserBackend};
use sgclaw::compat::browser_script_skill_tool::{
execute_browser_script_tool, BrowserScriptSkillTool,
};
use sgclaw::pipe::{Action, AgentMessage, BrowserMessage, BrowserPipeTool, Timing};
use sgclaw::security::MacPolicy;
use zeroclaw::skills::SkillTool;
use zeroclaw::tools::Tool;
/// MAC policy shared by most tests in this file: `www.zhihu.com` is the only
/// allowed domain, and a fixed list of pipe actions (including `eval`) is
/// permitted with nothing explicitly blocked.
fn test_policy() -> MacPolicy {
    let raw = r#"{
"version": "1.0",
"domains": { "allowed": ["www.zhihu.com"] },
"pipe_actions": {
"allowed": ["click", "type", "navigate", "getText", "eval"],
"blocked": []
}
}"#;
    MacPolicy::from_json_str(raw).unwrap()
}
#[tokio::test]
async fn execute_browser_script_tool_runs_packaged_script_with_expected_domain() {
    // Happy path: the helper wraps a packaged script and dispatches it to the
    // browser backend as a single Eval command.
    let skill_dir = unique_temp_dir("sgclaw-browser-script-helper");
    let scripts_dir = skill_dir.join("scripts");
    fs::create_dir_all(&scripts_dir).unwrap();
    // Packaged script body; `args` is expected to be injected by the wrapper.
    fs::write(
        scripts_dir.join("extract_hotlist.js"),
        "return { wrapped_args: args, source: \"packaged script\" };\n",
    )
    .unwrap();
    // Transport pre-loaded with one successful response; its `text` payload is
    // what the helper should surface as the tool output.
    let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
        seq: 1,
        success: true,
        data: json!({
            "text": {
                "sheet_name": "知乎热榜",
                "rows": [[1, "标题", "10条"]]
            }
        }),
        aom_snapshot: vec![],
        timing: Timing {
            queue_ms: 1,
            exec_ms: 5,
        },
    }]));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        test_policy(),
        vec![1, 2, 3, 4, 5, 6, 7, 8],
    )
    .with_response_timeout(Duration::from_secs(1));
    let mut tool_args = HashMap::new();
    tool_args.insert("top_n".to_string(), "How many rows to extract".to_string());
    let skill_tool = SkillTool {
        name: "extract_hotlist".to_string(),
        description: "Extract structured hotlist rows".to_string(),
        kind: "browser_script".to_string(),
        command: "scripts/extract_hotlist.js".to_string(),
        args: tool_args,
    };
    // Mixed-case URL with scheme/path/query: the helper should normalize it
    // down to the bare lowercase host for both the security envelope and the
    // args injected into the script.
    let result = execute_browser_script_tool(
        &skill_tool,
        &skill_dir,
        &PipeBrowserBackend::from_inner(browser_tool),
        json!({
            "expected_domain": "https://WWW.ZHIHU.COM/hot?foo=bar",
            "top_n": "10"
        }),
    )
    .await
    .unwrap();
    let sent = transport.sent_messages();
    assert!(result.success);
    // The tool output is exactly the mocked response's `text` payload.
    assert_eq!(
        serde_json::from_str::<serde_json::Value>(&result.output).unwrap(),
        json!({
            "sheet_name": "知乎热榜",
            "rows": [[1, "标题", "10条"]]
        })
    );
    // The one command sent must be an Eval whose wrapped script embeds the
    // normalized domain, the caller-supplied args, and the packaged body.
    assert!(matches!(
        &sent[0],
        AgentMessage::Command {
            action,
            params,
            security,
            ..
        } if action == &Action::Eval
            && security.expected_domain == "www.zhihu.com"
            && params["script"].as_str().unwrap().contains("\"expected_domain\":\"www.zhihu.com\"")
            && params["script"].as_str().unwrap().contains("\"top_n\":\"10\"")
            && params["script"].as_str().unwrap().contains("source: \"packaged script\"")
    ));
}
#[tokio::test]
async fn execute_browser_script_tool_rejects_non_browser_script_tool_kind() {
    // A SkillTool whose `kind` is not "browser_script" must be rejected
    // before anything is sent over the transport.
    let skill_dir = unique_temp_dir("sgclaw-browser-script-helper-invalid-kind");
    let scripts_dir = skill_dir.join("scripts");
    fs::create_dir_all(&scripts_dir).unwrap();
    // The script exists on disk but should never be read or executed.
    fs::write(scripts_dir.join("extract_hotlist.js"), "return 'unused';\n").unwrap();
    // No queued responses: the helper must fail before it needs one.
    let transport = Arc::new(MockTransport::new(vec![]));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        test_policy(),
        vec![1, 2, 3, 4, 5, 6, 7, 8],
    )
    .with_response_timeout(Duration::from_secs(1));
    let mut tool_args = HashMap::new();
    tool_args.insert("top_n".to_string(), "How many rows to extract".to_string());
    let skill_tool = SkillTool {
        name: "extract_hotlist".to_string(),
        description: "Extract structured hotlist rows".to_string(),
        // Deliberately wrong kind; the helper only accepts "browser_script".
        kind: "shell".to_string(),
        command: "scripts/extract_hotlist.js".to_string(),
        args: tool_args,
    };
    let result = execute_browser_script_tool(
        &skill_tool,
        &skill_dir,
        &PipeBrowserBackend::from_inner(browser_tool),
        json!({
            "expected_domain": "www.zhihu.com",
            "top_n": "10"
        }),
    )
    .await
    .unwrap();
    // Validation failure is reported as an unsuccessful result (not an Err),
    // with a message naming the offending kind.
    assert!(!result.success);
    assert_eq!(
        result.error.as_deref(),
        Some("browser script tool kind must be browser_script, got shell")
    );
    // Nothing reached the browser: the check happens before dispatch.
    assert!(transport.sent_messages().is_empty());
}
#[tokio::test]
async fn execute_browser_script_tool_rejects_missing_expected_domain() {
    // A whitespace-only `expected_domain` counts as empty and is rejected
    // before any browser command is issued.
    let skill_dir = unique_temp_dir("sgclaw-browser-script-helper-invalid-domain");
    let scripts_dir = skill_dir.join("scripts");
    fs::create_dir_all(&scripts_dir).unwrap();
    // Script is present but must never be executed.
    fs::write(scripts_dir.join("extract_hotlist.js"), "return 'unused';\n").unwrap();
    // No queued responses: the helper must fail during input validation.
    let transport = Arc::new(MockTransport::new(vec![]));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        test_policy(),
        vec![1, 2, 3, 4, 5, 6, 7, 8],
    )
    .with_response_timeout(Duration::from_secs(1));
    let mut tool_args = HashMap::new();
    tool_args.insert("top_n".to_string(), "How many rows to extract".to_string());
    let skill_tool = SkillTool {
        name: "extract_hotlist".to_string(),
        description: "Extract structured hotlist rows".to_string(),
        kind: "browser_script".to_string(),
        command: "scripts/extract_hotlist.js".to_string(),
        args: tool_args,
    };
    let result = execute_browser_script_tool(
        &skill_tool,
        &skill_dir,
        &PipeBrowserBackend::from_inner(browser_tool),
        json!({
            // Single space: non-absent but effectively empty.
            "expected_domain": " ",
            "top_n": "10"
        }),
    )
    .await
    .unwrap();
    assert!(!result.success);
    // Error message echoes back the rejected value for diagnosis.
    assert_eq!(
        result.error.as_deref(),
        Some("expected_domain must be a non-empty string, got \" \"")
    );
    // Validation happens before any transport traffic.
    assert!(transport.sent_messages().is_empty());
}
#[tokio::test]
async fn execute_browser_script_tool_handles_multibyte_wrapped_script_preview_without_panicking() {
    // Regression test: the wrapped-script preview truncates around byte 500;
    // if that byte falls inside a multibyte UTF-8 character, a naive byte
    // slice (`&s[..500]`) would panic. Construct a script whose byte 500 is
    // guaranteed to land mid-character.
    let skill_dir = unique_temp_dir("sgclaw-browser-script-helper-utf8-preview");
    let scripts_dir = skill_dir.join("scripts");
    fs::create_dir_all(&scripts_dir).unwrap();
    let args_json = json!({
        "expected_domain": "www.zhihu.com"
    });
    // NOTE(review): assumes the production wrapper prefix is exactly
    // `(function() {\nconst args = <json>;\n` — confirm against the helper
    // implementation if this test ever stops exercising the boundary.
    let prefix_len = format!("(function() {{\nconst args = {};\n", args_json).len();
    let comment_prefix = "//";
    // Pick 0..3 ASCII padding bytes so that byte offset 500 of the wrapped
    // script does NOT align with a 3-byte character boundary, i.e. it lands
    // inside one of the 3-byte CJK characters appended below.
    let ascii_padding = (0..3)
        .find(|pad| (500usize - (prefix_len + comment_prefix.len() + *pad)) % 3 != 0)
        .expect("should find a padding value that forces byte 500 off a char boundary");
    // BUG FIX: the filler must be a multibyte (3-byte UTF-8) character.
    // Previously this was `"".repeat(220)`, which produces an empty string,
    // leaving the script pure ASCII and making the regression test vacuous.
    // 220 repeats of a 3-byte char give 660 bytes — comfortably past byte 500.
    let script_body = format!(
        "{}{}{}\nreturn {{ ok: true }};\n",
        comment_prefix,
        "a".repeat(ascii_padding),
        "汉".repeat(220)
    );
    fs::write(scripts_dir.join("utf8_preview.js"), script_body).unwrap();
    // One successful mocked response so the call can complete normally.
    let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
        seq: 1,
        success: true,
        data: json!({
            "text": {
                "ok": true
            }
        }),
        aom_snapshot: vec![],
        timing: Timing {
            queue_ms: 1,
            exec_ms: 5,
        },
    }]));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        test_policy(),
        vec![1, 2, 3, 4, 5, 6, 7, 8],
    )
    .with_response_timeout(Duration::from_secs(1));
    let skill_tool = SkillTool {
        name: "utf8_preview".to_string(),
        description: "Regression for UTF-8 safe wrapped script preview logging".to_string(),
        kind: "browser_script".to_string(),
        command: "scripts/utf8_preview.js".to_string(),
        args: HashMap::new(),
    };
    // catch_unwind so a panic inside preview logging fails the test with a
    // clear message instead of aborting the tokio test harness.
    let result = AssertUnwindSafe(execute_browser_script_tool(
        &skill_tool,
        &skill_dir,
        &PipeBrowserBackend::from_inner(browser_tool),
        json!({
            "expected_domain": "www.zhihu.com"
        }),
    ))
    .catch_unwind()
    .await;
    assert!(
        result.is_ok(),
        "wrapped script preview should not panic on multibyte UTF-8 content"
    );
    let tool_result = result.unwrap().unwrap();
    assert!(tool_result.success);
}
#[tokio::test]
async fn browser_script_skill_tool_executes_packaged_script_via_eval() {
    // Same flow as the helper test, but driven through the
    // BrowserScriptSkillTool wrapper and the generic Tool::execute interface.
    let skill_dir = unique_temp_dir("sgclaw-browser-script-skill");
    let scripts_dir = skill_dir.join("scripts");
    fs::create_dir_all(&scripts_dir).unwrap();
    // Packaged script reads `top_n` from the injected `args` object.
    fs::write(
        scripts_dir.join("extract_hotlist.js"),
        r#"
const topN = Number(args.top_n || 10);
return {
sheet_name: "知乎热榜",
rows: [[1, "标题", `${topN}条`]]
};
"#,
    )
    .unwrap();
    // One successful response whose `text` payload becomes the tool output.
    let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
        seq: 1,
        success: true,
        data: json!({
            "text": {
                "sheet_name": "知乎热榜",
                "rows": [[1, "标题", "10条"]]
            }
        }),
        aom_snapshot: vec![],
        timing: Timing {
            queue_ms: 1,
            exec_ms: 5,
        },
    }]));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        test_policy(),
        vec![1, 2, 3, 4, 5, 6, 7, 8],
    )
    .with_response_timeout(Duration::from_secs(1));
    // Exercise the trait-object backend path used by the skill tool.
    let backend: Arc<dyn BrowserBackend> = Arc::new(PipeBrowserBackend::from_inner(browser_tool));
    let mut args = HashMap::new();
    args.insert("top_n".to_string(), "How many rows to extract".to_string());
    let skill_tool = SkillTool {
        name: "extract_hotlist".to_string(),
        description: "Extract structured hotlist rows".to_string(),
        kind: "browser_script".to_string(),
        command: "scripts/extract_hotlist.js".to_string(),
        args,
    };
    let tool =
        BrowserScriptSkillTool::new("zhihu-hotlist", &skill_tool, &skill_dir, backend).unwrap();
    // Full URL input; the tool should reduce it to the bare host.
    let result = tool
        .execute(json!({
            "expected_domain": "https://www.zhihu.com/hot",
            "top_n": "10"
        }))
        .await
        .unwrap();
    let sent = transport.sent_messages();
    assert!(result.success);
    assert_eq!(
        serde_json::from_str::<serde_json::Value>(&result.output).unwrap(),
        json!({
            "sheet_name": "知乎热榜",
            "rows": [[1, "标题", "10条"]]
        })
    );
    // Command must be an Eval carrying normalized domain, args, and the
    // packaged script body (spot-checked by its `return {` line).
    assert!(matches!(
        &sent[0],
        AgentMessage::Command {
            action,
            params,
            security,
            ..
        } if action == &Action::Eval
            && security.expected_domain == "www.zhihu.com"
            && params["script"].as_str().unwrap().contains("\"expected_domain\":\"www.zhihu.com\"")
            && params["script"].as_str().unwrap().contains("\"top_n\":\"10\"")
            && params["script"].as_str().unwrap().contains("return {")
    ));
}
#[tokio::test]
async fn browser_script_skill_tool_executes_script_directly_under_skill_root() {
    // Variant: the script lives directly in the skill root rather than in a
    // `scripts/` subdirectory, and `command` is a bare file name.
    let skill_root = unique_temp_dir("sgclaw-browser-script-direct-root");
    let script_name = "extract_hotlist_direct.js";
    let script_path = skill_root.join(script_name);
    fs::write(
        &script_path,
        r#"
return {
sheet_name: "知乎热榜",
rows: [[1, "标题", args.top_n]]
};
"#,
    )
    .unwrap();
    // One successful response; its `text` payload is the expected output.
    let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
        seq: 1,
        success: true,
        data: json!({
            "text": {
                "sheet_name": "知乎热榜",
                "rows": [[1, "标题", "10条"]]
            }
        }),
        aom_snapshot: vec![],
        timing: Timing {
            queue_ms: 1,
            exec_ms: 5,
        },
    }]));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        test_policy(),
        vec![1, 2, 3, 4, 5, 6, 7, 8],
    )
    .with_response_timeout(Duration::from_secs(1));
    let backend: Arc<dyn BrowserBackend> = Arc::new(PipeBrowserBackend::from_inner(browser_tool));
    let mut args = HashMap::new();
    args.insert("top_n".to_string(), "How many rows to extract".to_string());
    let skill_tool = SkillTool {
        name: "extract_hotlist".to_string(),
        description: "Extract structured hotlist rows".to_string(),
        kind: "browser_script".to_string(),
        // Bare file name: resolved relative to the skill root itself.
        command: script_name.to_string(),
        args,
    };
    let tool =
        BrowserScriptSkillTool::new("zhihu-hotlist", &skill_tool, &skill_root, backend).unwrap();
    // Multibyte arg value ("10条") must round-trip through the injected args.
    let result = tool
        .execute(json!({
            "expected_domain": "https://www.zhihu.com/hot",
            "top_n": "10条"
        }))
        .await
        .unwrap();
    let sent = transport.sent_messages();
    assert!(result.success);
    assert_eq!(
        serde_json::from_str::<serde_json::Value>(&result.output).unwrap(),
        json!({
            "sheet_name": "知乎热榜",
            "rows": [[1, "标题", "10条"]]
        })
    );
    // Eval command must embed the normalized domain, the multibyte arg, and
    // the literal script source.
    assert!(matches!(
        &sent[0],
        AgentMessage::Command {
            action,
            params,
            security,
            ..
        } if action == &Action::Eval
            && security.expected_domain == "www.zhihu.com"
            && params["script"].as_str().unwrap().contains("\"expected_domain\":\"www.zhihu.com\"")
            && params["script"].as_str().unwrap().contains("\"top_n\":\"10条\"")
            && params["script"].as_str().unwrap().contains("rows: [[1, \"标题\", args.top_n]]")
    ));
}
#[tokio::test]
async fn browser_script_helper_executes_packaged_script_via_eval() {
    // Helper-level happy path with a different packaged script
    // (fault-details) and a `period` argument instead of `top_n`.
    let skill_dir = unique_temp_dir("sgclaw-browser-script-helper-fault-details");
    let scripts_dir = skill_dir.join("scripts");
    fs::create_dir_all(&scripts_dir).unwrap();
    fs::write(
        scripts_dir.join("collect_fault_details.js"),
        r#"
return {
sheet_name: "故障明细",
rows: [[args.period, "已完成"]]
};
"#,
    )
    .unwrap();
    // One successful response whose `text` payload is the expected output.
    let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
        seq: 1,
        success: true,
        data: json!({
            "text": {
                "sheet_name": "故障明细",
                "rows": [["2026-04", "已完成"]]
            }
        }),
        aom_snapshot: vec![],
        timing: Timing {
            queue_ms: 1,
            exec_ms: 5,
        },
    }]));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        test_policy(),
        vec![1, 2, 3, 4, 5, 6, 7, 8],
    )
    .with_response_timeout(Duration::from_secs(1));
    // Concrete backend (no trait object) to cover that call path too.
    let backend = PipeBrowserBackend::from_inner(browser_tool);
    let mut args = HashMap::new();
    args.insert("period".to_string(), "Target report period".to_string());
    let skill_tool = SkillTool {
        name: "collect_fault_details".to_string(),
        description: "Collect fault detail rows".to_string(),
        kind: "browser_script".to_string(),
        command: "scripts/collect_fault_details.js".to_string(),
        args,
    };
    let result = execute_browser_script_tool(
        &skill_tool,
        &skill_dir,
        &backend,
        json!({
            "expected_domain": "https://www.zhihu.com/hot",
            "period": "2026-04"
        }),
    )
    .await
    .unwrap();
    let sent = transport.sent_messages();
    assert!(result.success);
    assert_eq!(
        serde_json::from_str::<serde_json::Value>(&result.output).unwrap(),
        json!({
            "sheet_name": "故障明细",
            "rows": [["2026-04", "已完成"]]
        })
    );
    // Eval command carries the normalized domain and the `period` argument.
    assert!(matches!(
        &sent[0],
        AgentMessage::Command {
            action,
            params,
            security,
            ..
        } if action == &Action::Eval
            && security.expected_domain == "www.zhihu.com"
            && params["script"].as_str().unwrap().contains("\"expected_domain\":\"www.zhihu.com\"")
            && params["script"].as_str().unwrap().contains("\"period\":\"2026-04\"")
            && params["script"].as_str().unwrap().contains("sheet_name")
    ));
}
#[tokio::test]
async fn browser_script_helper_requires_expected_domain() {
    // Omitting `expected_domain` entirely (vs. the whitespace-only case above)
    // must also fail validation before any browser traffic.
    let skill_dir = unique_temp_dir("sgclaw-browser-script-helper-missing-domain");
    let scripts_dir = skill_dir.join("scripts");
    fs::create_dir_all(&scripts_dir).unwrap();
    fs::write(
        scripts_dir.join("collect_fault_details.js"),
        "return { ok: true };\n",
    )
    .unwrap();
    // No queued responses: the call must not reach the transport.
    let transport = Arc::new(MockTransport::new(vec![]));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        test_policy(),
        vec![1, 2, 3, 4, 5, 6, 7, 8],
    )
    .with_response_timeout(Duration::from_secs(1));
    let backend = PipeBrowserBackend::from_inner(browser_tool);
    let mut args = HashMap::new();
    args.insert("period".to_string(), "Target report period".to_string());
    let skill_tool = SkillTool {
        name: "collect_fault_details".to_string(),
        description: "Collect fault detail rows".to_string(),
        kind: "browser_script".to_string(),
        command: "scripts/collect_fault_details.js".to_string(),
        args,
    };
    let result = execute_browser_script_tool(
        &skill_tool,
        &skill_dir,
        &backend,
        json!({
            // No "expected_domain" key at all.
            "period": "2026-04"
        }),
    )
    .await
    .unwrap();
    assert!(!result.success);
    assert_eq!(
        result.error.as_deref(),
        Some("missing required field expected_domain")
    );
    assert!(transport.sent_messages().is_empty());
}
#[tokio::test]
async fn execute_browser_script_tool_preserves_structured_report_artifact_payload() {
    // A deeply nested "report-artifact" payload (sections, counts, partial
    // status, downstream export/report-log results) must pass through the
    // helper byte-for-byte — no field dropped, renamed, or flattened.
    let skill_dir = unique_temp_dir("sgclaw-browser-script-helper-report-artifact");
    let scripts_dir = skill_dir.join("scripts");
    fs::create_dir_all(&scripts_dir).unwrap();
    fs::write(
        scripts_dir.join("collect_fault_details.js"),
        r#"
return {
type: "report-artifact",
report_name: "fault-details-report",
period: args.period,
selected_range: {
start: "2026-03-08 16:00:00",
end: "2026-03-09 16:00:00"
},
columns: ["qxdbh"],
rows: [{ qxdbh: "QX-1" }],
sections: [{ name: "summary-sheet", columns: ["index"], rows: [{ index: 1 }] }],
counts: { detail_rows: 1, summary_rows: 1 },
status: "partial",
partial_reasons: ["report_log_failed"],
downstream: {
export: { attempted: true, success: true, path: "http://localhost/export.xlsx" },
report_log: { attempted: true, success: false, error: "500" }
}
};
"#,
    )
    .unwrap();
    // Mocked response mirrors the script's artifact structure, including a
    // "partial" status with a failed downstream report_log.
    let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
        seq: 1,
        success: true,
        data: json!({
            "text": {
                "type": "report-artifact",
                "report_name": "fault-details-report",
                "period": "2026-03",
                "selected_range": {
                    "start": "2026-03-08 16:00:00",
                    "end": "2026-03-09 16:00:00"
                },
                "columns": ["qxdbh"],
                "rows": [{ "qxdbh": "QX-1" }],
                "sections": [{ "name": "summary-sheet", "columns": ["index"], "rows": [{ "index": 1 }] }],
                "counts": { "detail_rows": 1, "summary_rows": 1 },
                "status": "partial",
                "partial_reasons": ["report_log_failed"],
                "downstream": {
                    "export": { "attempted": true, "success": true, "path": "http://localhost/export.xlsx" },
                    "report_log": { "attempted": true, "success": false, "error": "500" }
                }
            }
        }),
        aom_snapshot: vec![],
        timing: Timing {
            queue_ms: 1,
            exec_ms: 5,
        },
    }]));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        test_policy(),
        vec![1, 2, 3, 4, 5, 6, 7, 8],
    )
    .with_response_timeout(Duration::from_secs(1));
    let backend = PipeBrowserBackend::from_inner(browser_tool);
    let mut tool_args = HashMap::new();
    tool_args.insert(
        "period".to_string(),
        "YYYY-MM period to collect".to_string(),
    );
    let skill_tool = SkillTool {
        name: "collect_fault_details".to_string(),
        description: "Collect structured fault details".to_string(),
        kind: "browser_script".to_string(),
        command: "scripts/collect_fault_details.js".to_string(),
        args: tool_args,
    };
    // URL with trailing slash: should still normalize to the bare host.
    let result = execute_browser_script_tool(
        &skill_tool,
        &skill_dir,
        &backend,
        json!({
            "expected_domain": "https://www.zhihu.com/",
            "period": "2026-03"
        }),
    )
    .await
    .unwrap();
    assert!(result.success);
    // The full artifact must survive the round trip unchanged.
    assert_eq!(
        serde_json::from_str::<serde_json::Value>(&result.output).unwrap(),
        json!({
            "type": "report-artifact",
            "report_name": "fault-details-report",
            "period": "2026-03",
            "selected_range": {
                "start": "2026-03-08 16:00:00",
                "end": "2026-03-09 16:00:00"
            },
            "columns": ["qxdbh"],
            "rows": [{ "qxdbh": "QX-1" }],
            "sections": [{ "name": "summary-sheet", "columns": ["index"], "rows": [{ "index": 1 }] }],
            "counts": { "detail_rows": 1, "summary_rows": 1 },
            "status": "partial",
            "partial_reasons": ["report_log_failed"],
            "downstream": {
                "export": { "attempted": true, "success": true, "path": "http://localhost/export.xlsx" },
                "report_log": { "attempted": true, "success": false, "error": "500" }
            }
        })
    );
}
#[tokio::test]
async fn execute_browser_script_tool_awaits_async_script() {
    // A script that returns a Promise must be awaited by the wrapper; the
    // resolved value becomes the tool output.
    let skill_dir = unique_temp_dir("sgclaw-browser-script-async");
    let scripts_dir = skill_dir.join("scripts");
    fs::create_dir_all(&scripts_dir).unwrap();
    // Async script returning a Promise.
    fs::write(
        scripts_dir.join("async_extract.js"),
        "return (async function() { return { async: true, args: args }; })();\n",
    )
    .unwrap();
    let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
        seq: 1,
        success: true,
        data: json!({
            "text": {
                "async": true,
                "args": { "expected_domain": "example.com" }
            }
        }),
        aom_snapshot: vec![],
        timing: Timing {
            queue_ms: 1,
            exec_ms: 5,
        },
    }]));
    // Custom policy: this test targets example.com, which the shared
    // test_policy() does not allow.
    let policy_json = MacPolicy::from_json_str(
        r#"{
"version": "1.0",
"domains": { "allowed": ["www.zhihu.com", "example.com"] },
"pipe_actions": {
"allowed": ["click", "type", "navigate", "getText", "eval"],
"blocked": []
}
}"#,
    )
    .unwrap();
    let browser_tool =
        BrowserPipeTool::new(transport.clone(), policy_json, vec![1, 2, 3, 4, 5, 6, 7, 8])
            .with_response_timeout(Duration::from_secs(1));
    let skill_tool = SkillTool {
        name: "async_extract".to_string(),
        description: "Extract data asynchronously".to_string(),
        kind: "browser_script".to_string(),
        command: "scripts/async_extract.js".to_string(),
        args: HashMap::new(),
    };
    let result = execute_browser_script_tool(
        &skill_tool,
        &skill_dir,
        &PipeBrowserBackend::from_inner(browser_tool),
        json!({
            "expected_domain": "example.com"
        }),
    )
    .await
    .unwrap();
    assert!(result.success);
    // The resolved Promise payload, not a pending-Promise placeholder.
    let output = serde_json::from_str::<serde_json::Value>(&result.output).unwrap();
    assert_eq!(output["async"], true);
}
/// Creates a fresh, empty directory under the system temp dir and returns it.
///
/// Uniqueness combines the wall-clock nanos with the process id and a
/// process-wide atomic counter. Nanos alone can repeat when the OS clock is
/// coarse (e.g. ~100ns or 1ms ticks) and two tests request the same prefix
/// back-to-back, or when test binaries run concurrently — and because
/// `create_dir_all` succeeds on an existing directory, a collision would
/// silently hand two tests the same directory.
fn unique_temp_dir(prefix: &str) -> PathBuf {
    use std::sync::atomic::{AtomicU64, Ordering};

    // Monotonic per-process sequence; ordering is irrelevant, only uniqueness.
    static COUNTER: AtomicU64 = AtomicU64::new(0);
    let nanos = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock set before UNIX_EPOCH")
        .as_nanos();
    let seq = COUNTER.fetch_add(1, Ordering::Relaxed);
    let pid = std::process::id();
    let path = std::env::temp_dir().join(format!("{prefix}-{pid}-{nanos}-{seq}"));
    fs::create_dir_all(&path).unwrap();
    path
}