feat: add config-owned direct submit runtime

Keep browser-attached workflows on the configured direct-skill path and align the Zhihu export/browser regression contracts with the current ws merge state.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
木炎
2026-04-11 15:45:42 +08:00
29 changed files with 5218 additions and 585 deletions

View File

@@ -32,6 +32,174 @@ fn test_policy() -> MacPolicy {
.unwrap()
}
// Happy path for `execute_browser_script_tool`: a `browser_script` tool whose
// command points at a packaged script under the skill's scripts/ directory is
// read from disk, handed the caller-supplied args, and dispatched as an
// `Eval` command; the response's `text` payload becomes the tool output.
#[tokio::test]
async fn execute_browser_script_tool_runs_packaged_script_with_expected_domain() {
// Materialize a throwaway skill directory with the packaged script under
// scripts/; its body is asserted to appear inside the Eval payload below.
let skill_dir = unique_temp_dir("sgclaw-browser-script-helper");
let scripts_dir = skill_dir.join("scripts");
fs::create_dir_all(&scripts_dir).unwrap();
fs::write(
scripts_dir.join("extract_hotlist.js"),
"return { wrapped_args: args, source: \"packaged script\" };\n",
)
.unwrap();
// Mock transport pre-loaded with a single successful response whose `text`
// field carries the structured rows the helper is expected to surface.
let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
seq: 1,
success: true,
data: json!({
"text": {
"sheet_name": "知乎热榜",
"rows": [[1, "标题", "10条"]]
}
}),
aom_snapshot: vec![],
timing: Timing {
queue_ms: 1,
exec_ms: 5,
},
}]));
let browser_tool = BrowserPipeTool::new(
transport.clone(),
test_policy(),
vec![1, 2, 3, 4, 5, 6, 7, 8],
)
.with_response_timeout(Duration::from_secs(1));
// The tool declares one documented argument; the description here is
// metadata only — the runtime value comes from the invocation JSON below.
let mut tool_args = HashMap::new();
tool_args.insert("top_n".to_string(), "How many rows to extract".to_string());
let skill_tool = SkillTool {
name: "extract_hotlist".to_string(),
description: "Extract structured hotlist rows".to_string(),
kind: "browser_script".to_string(),
command: "scripts/extract_hotlist.js".to_string(),
args: tool_args,
};
// expected_domain is deliberately a full, upper-cased URL with a query
// string, to exercise normalization down to a bare lowercase host.
let result = execute_browser_script_tool(
&skill_tool,
&skill_dir,
&PipeBrowserBackend::from_inner(browser_tool),
json!({
"expected_domain": "https://WWW.ZHIHU.COM/hot?foo=bar",
"top_n": "10"
}),
)
.await
.unwrap();
let sent = transport.sent_messages();
assert!(result.success);
// The helper surfaces exactly the response's `text` payload, re-serialized.
assert_eq!(
serde_json::from_str::<serde_json::Value>(&result.output).unwrap(),
json!({
"sheet_name": "知乎热榜",
"rows": [[1, "标题", "10条"]]
})
);
// The outgoing command must be an Eval whose security block carries the
// normalized host and whose script embeds both the injected args (note:
// expected_domain is stripped out of them) and the packaged script body.
assert!(matches!(
&sent[0],
AgentMessage::Command {
action,
params,
security,
..
} if action == &Action::Eval
&& security.expected_domain == "www.zhihu.com"
&& params["script"].as_str().unwrap().contains("const args = {\"top_n\":\"10\"};")
&& params["script"].as_str().unwrap().contains("source: \"packaged script\"")
));
}
// A tool declared with any kind other than `browser_script` must be refused
// before anything reaches the transport; the error message names the
// offending kind.
#[tokio::test]
async fn execute_browser_script_tool_rejects_non_browser_script_tool_kind() {
    // Script exists on disk, but it should never be read for a `shell` tool.
    let root = unique_temp_dir("sgclaw-browser-script-helper-invalid-kind");
    let scripts = root.join("scripts");
    fs::create_dir_all(&scripts).unwrap();
    fs::write(scripts.join("extract_hotlist.js"), "return 'unused';\n").unwrap();

    // No canned responses: the mock must stay untouched.
    let mock = Arc::new(MockTransport::new(vec![]));
    let pipe_tool =
        BrowserPipeTool::new(mock.clone(), test_policy(), vec![1, 2, 3, 4, 5, 6, 7, 8])
            .with_response_timeout(Duration::from_secs(1));

    let mut descriptions = HashMap::new();
    descriptions.insert("top_n".to_string(), "How many rows to extract".to_string());
    let tool = SkillTool {
        name: "extract_hotlist".to_string(),
        description: "Extract structured hotlist rows".to_string(),
        kind: "shell".to_string(),
        command: "scripts/extract_hotlist.js".to_string(),
        args: descriptions,
    };

    let outcome = execute_browser_script_tool(
        &tool,
        &root,
        &PipeBrowserBackend::from_inner(pipe_tool),
        json!({ "expected_domain": "www.zhihu.com", "top_n": "10" }),
    )
    .await
    .unwrap();

    assert!(!outcome.success);
    assert_eq!(
        outcome.error.as_deref(),
        Some("browser script tool kind must be browser_script, got shell")
    );
    // Rejection happens before dispatch, so the transport saw no command.
    assert!(mock.sent_messages().is_empty());
}
// A whitespace-only `expected_domain` counts as missing and must be rejected
// up front, quoting the offending value; the transport never sees a command.
#[tokio::test]
async fn execute_browser_script_tool_rejects_missing_expected_domain() {
    let root = unique_temp_dir("sgclaw-browser-script-helper-invalid-domain");
    let scripts = root.join("scripts");
    fs::create_dir_all(&scripts).unwrap();
    fs::write(scripts.join("extract_hotlist.js"), "return 'unused';\n").unwrap();

    // No canned responses: validation should fail before any send.
    let mock = Arc::new(MockTransport::new(vec![]));
    let pipe_tool =
        BrowserPipeTool::new(mock.clone(), test_policy(), vec![1, 2, 3, 4, 5, 6, 7, 8])
            .with_response_timeout(Duration::from_secs(1));

    let mut descriptions = HashMap::new();
    descriptions.insert("top_n".to_string(), "How many rows to extract".to_string());
    let tool = SkillTool {
        name: "extract_hotlist".to_string(),
        description: "Extract structured hotlist rows".to_string(),
        kind: "browser_script".to_string(),
        command: "scripts/extract_hotlist.js".to_string(),
        args: descriptions,
    };

    let outcome = execute_browser_script_tool(
        &tool,
        &root,
        &PipeBrowserBackend::from_inner(pipe_tool),
        json!({ "expected_domain": " ", "top_n": "10" }),
    )
    .await
    .unwrap();

    assert!(!outcome.success);
    assert_eq!(
        outcome.error.as_deref(),
        Some("expected_domain must be a non-empty string, got \" \"")
    );
    assert!(mock.sent_messages().is_empty());
}
#[tokio::test]
async fn browser_script_skill_tool_executes_packaged_script_via_eval() {
let skill_dir = unique_temp_dir("sgclaw-browser-script-skill");
@@ -115,9 +283,91 @@ return {
));
}
// `BrowserScriptSkillTool` must also resolve a script that lives directly in
// the skill root (no scripts/ subdirectory): the tool's command is just the
// bare file name, and the file sits next to the skill manifest.
#[tokio::test]
async fn browser_script_skill_tool_executes_script_directly_under_skill_root() {
let skill_root = unique_temp_dir("sgclaw-browser-script-direct-root");
let script_name = "extract_hotlist_direct.js";
let script_path = skill_root.join(script_name);
// The script body references `args.top_n`, which the assertions below use to
// prove the on-disk source made it into the Eval payload unchanged.
fs::write(
&script_path,
r#"
return {
sheet_name: "知乎热榜",
rows: [[1, "标题", args.top_n]]
};
"#,
)
.unwrap();
// One canned success whose `text` field is what the tool should emit.
let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
seq: 1,
success: true,
data: json!({
"text": {
"sheet_name": "知乎热榜",
"rows": [[1, "标题", "10条"]]
}
}),
aom_snapshot: vec![],
timing: Timing {
queue_ms: 1,
exec_ms: 5,
},
}]));
let browser_tool = BrowserPipeTool::new(
transport.clone(),
test_policy(),
vec![1, 2, 3, 4, 5, 6, 7, 8],
)
.with_response_timeout(Duration::from_secs(1));
// The skill tool is exercised through the trait-object backend here, unlike
// the helper tests above which pass the concrete backend by reference.
let backend: Arc<dyn BrowserBackend> = Arc::new(PipeBrowserBackend::from_inner(browser_tool));
let mut args = HashMap::new();
args.insert("top_n".to_string(), "How many rows to extract".to_string());
let skill_tool = SkillTool {
name: "extract_hotlist".to_string(),
description: "Extract structured hotlist rows".to_string(),
kind: "browser_script".to_string(),
// Bare file name — resolved against skill_root, not a scripts/ subdir.
command: script_name.to_string(),
args,
};
let tool = BrowserScriptSkillTool::new("zhihu-hotlist", &skill_tool, &skill_root, backend)
.unwrap();
let result = tool
.execute(json!({
"expected_domain": "https://www.zhihu.com/hot",
"top_n": "10条"
}))
.await
.unwrap();
let sent = transport.sent_messages();
assert!(result.success);
// Output is exactly the canned response's `text` payload, re-serialized.
assert_eq!(
serde_json::from_str::<serde_json::Value>(&result.output).unwrap(),
json!({
"sheet_name": "知乎热榜",
"rows": [[1, "标题", "10条"]]
})
);
// The Eval command must carry the host-only expected domain, the injected
// (non-ASCII) arg value, and the verbatim on-disk script body.
assert!(matches!(
&sent[0],
AgentMessage::Command {
action,
params,
security,
..
} if action == &Action::Eval
&& security.expected_domain == "www.zhihu.com"
&& params["script"].as_str().unwrap().contains("const args = {\"top_n\":\"10条\"};")
&& params["script"].as_str().unwrap().contains("rows: [[1, \"标题\", args.top_n]]")
));
}
#[tokio::test]
async fn browser_script_helper_executes_packaged_script_via_eval() {
let skill_dir = unique_temp_dir("sgclaw-browser-script-helper");
let skill_dir = unique_temp_dir("sgclaw-browser-script-helper-fault-details");
let scripts_dir = skill_dir.join("scripts");
fs::create_dir_all(&scripts_dir).unwrap();
fs::write(
@@ -152,7 +402,7 @@ return {
vec![1, 2, 3, 4, 5, 6, 7, 8],
)
.with_response_timeout(Duration::from_secs(1));
let backend: Arc<dyn BrowserBackend> = Arc::new(PipeBrowserBackend::from_inner(browser_tool));
let backend = PipeBrowserBackend::from_inner(browser_tool);
let mut args = HashMap::new();
args.insert("period".to_string(), "Target report period".to_string());
@@ -164,10 +414,15 @@ return {
args,
};
let result = execute_browser_script_tool(&skill_tool, &skill_dir, backend, json!({
"expected_domain": "https://www.zhihu.com/hot",
"period": "2026-04"
}))
let result = execute_browser_script_tool(
&skill_tool,
&skill_dir,
&backend,
json!({
"expected_domain": "https://www.zhihu.com/hot",
"period": "2026-04"
}),
)
.await
.unwrap();
@@ -208,7 +463,7 @@ async fn browser_script_helper_requires_expected_domain() {
vec![1, 2, 3, 4, 5, 6, 7, 8],
)
.with_response_timeout(Duration::from_secs(1));
let backend: Arc<dyn BrowserBackend> = Arc::new(PipeBrowserBackend::from_inner(browser_tool));
let backend = PipeBrowserBackend::from_inner(browser_tool);
let mut args = HashMap::new();
args.insert("period".to_string(), "Target report period".to_string());
@@ -220,9 +475,14 @@ async fn browser_script_helper_requires_expected_domain() {
args,
};
let result = execute_browser_script_tool(&skill_tool, &skill_dir, backend, json!({
"period": "2026-04"
}))
let result = execute_browser_script_tool(
&skill_tool,
&skill_dir,
&backend,
json!({
"period": "2026-04"
}),
)
.await
.unwrap();
@@ -234,6 +494,122 @@ async fn browser_script_helper_requires_expected_domain() {
assert!(transport.sent_messages().is_empty());
}
// A deeply nested "report-artifact" object returned by the browser side must
// pass through `execute_browser_script_tool` verbatim: every field of the
// response's `text` payload — sections, counts, partial status, downstream
// export/report_log results — reappears unchanged in `result.output`.
#[tokio::test]
async fn execute_browser_script_tool_preserves_structured_report_artifact_payload() {
let skill_dir = unique_temp_dir("sgclaw-browser-script-helper-report-artifact");
let scripts_dir = skill_dir.join("scripts");
fs::create_dir_all(&scripts_dir).unwrap();
// The packaged script mirrors the artifact shape; only the mock response
// below actually determines what the helper returns in this test.
fs::write(
scripts_dir.join("collect_fault_details.js"),
r#"
return {
type: "report-artifact",
report_name: "fault-details-report",
period: args.period,
selected_range: {
start: "2026-03-08 16:00:00",
end: "2026-03-09 16:00:00"
},
columns: ["qxdbh"],
rows: [{ qxdbh: "QX-1" }],
sections: [{ name: "summary-sheet", columns: ["index"], rows: [{ index: 1 }] }],
counts: { detail_rows: 1, summary_rows: 1 },
status: "partial",
partial_reasons: ["report_log_failed"],
downstream: {
export: { attempted: true, success: true, path: "http://localhost/export.xlsx" },
report_log: { attempted: true, success: false, error: "500" }
}
};
"#,
)
.unwrap();
// Canned response: a partial-status artifact with a failed report_log leg,
// to prove failure details inside the payload are not stripped or rewritten.
let transport = Arc::new(MockTransport::new(vec![BrowserMessage::Response {
seq: 1,
success: true,
data: json!({
"text": {
"type": "report-artifact",
"report_name": "fault-details-report",
"period": "2026-03",
"selected_range": {
"start": "2026-03-08 16:00:00",
"end": "2026-03-09 16:00:00"
},
"columns": ["qxdbh"],
"rows": [{ "qxdbh": "QX-1" }],
"sections": [{ "name": "summary-sheet", "columns": ["index"], "rows": [{ "index": 1 }] }],
"counts": { "detail_rows": 1, "summary_rows": 1 },
"status": "partial",
"partial_reasons": ["report_log_failed"],
"downstream": {
"export": { "attempted": true, "success": true, "path": "http://localhost/export.xlsx" },
"report_log": { "attempted": true, "success": false, "error": "500" }
}
}
}),
aom_snapshot: vec![],
timing: Timing {
queue_ms: 1,
exec_ms: 5,
},
}]));
let browser_tool = BrowserPipeTool::new(
transport.clone(),
test_policy(),
vec![1, 2, 3, 4, 5, 6, 7, 8],
)
.with_response_timeout(Duration::from_secs(1));
let backend = PipeBrowserBackend::from_inner(browser_tool);
let mut tool_args = HashMap::new();
tool_args.insert("period".to_string(), "YYYY-MM period to collect".to_string());
let skill_tool = SkillTool {
name: "collect_fault_details".to_string(),
description: "Collect structured fault details".to_string(),
kind: "browser_script".to_string(),
command: "scripts/collect_fault_details.js".to_string(),
args: tool_args,
};
let result = execute_browser_script_tool(
&skill_tool,
&skill_dir,
&backend,
json!({
"expected_domain": "https://www.zhihu.com/",
"period": "2026-03"
}),
)
.await
.unwrap();
assert!(result.success);
// Note: despite overall `success: true`, the nested artifact still reports
// `status: "partial"` — the helper must not conflate the two.
assert_eq!(
serde_json::from_str::<serde_json::Value>(&result.output).unwrap(),
json!({
"type": "report-artifact",
"report_name": "fault-details-report",
"period": "2026-03",
"selected_range": {
"start": "2026-03-08 16:00:00",
"end": "2026-03-09 16:00:00"
},
"columns": ["qxdbh"],
"rows": [{ "qxdbh": "QX-1" }],
"sections": [{ "name": "summary-sheet", "columns": ["index"], "rows": [{ "index": 1 }] }],
"counts": { "detail_rows": 1, "summary_rows": 1 },
"status": "partial",
"partial_reasons": ["report_log_failed"],
"downstream": {
"export": { "attempted": true, "success": true, "path": "http://localhost/export.xlsx" },
"report_log": { "attempted": true, "success": false, "error": "500" }
}
})
);
}
fn unique_temp_dir(prefix: &str) -> PathBuf {
let nanos = SystemTime::now()
.duration_since(UNIX_EPOCH)