Keep browser-attached workflows on the configured direct-skill path and align the Zhihu export/browser regression contracts with the current ws merge state. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2440 lines
85 KiB
Rust
2440 lines
85 KiB
Rust
use std::fs;
|
||
use std::path::Path;
|
||
use std::sync::Arc;
|
||
use std::thread;
|
||
use std::time::Duration;
|
||
|
||
use regex::Regex;
|
||
use serde_json::{json, Value};
|
||
use zeroclaw::tools::Tool;
|
||
|
||
use crate::browser::{BrowserBackend, PipeBrowserBackend};
|
||
use crate::compat::artifact_open::{open_exported_xlsx, open_local_dashboard, PostExportOpen};
|
||
use crate::compat::openxml_office_tool::OpenXmlOfficeTool;
|
||
use crate::compat::runtime::CompatTaskContext;
|
||
use crate::compat::screen_html_export_tool::ScreenHtmlExportTool;
|
||
use crate::config::SgClawSettings;
|
||
use crate::pipe::{
|
||
Action, AgentMessage, BrowserPipeTool, ConversationMessage, PipeError, Transport,
|
||
};
|
||
|
||
/// Zhihu main-site domain used to validate browser actions.
const ZHIHU_DOMAIN: &str = "www.zhihu.com";
/// Zhihu column/article editor domain (the write UI lives on zhuanlan).
const ZHIHU_EDITOR_DOMAIN: &str = "zhuanlan.zhihu.com";
/// Hotlist landing page.
const ZHIHU_HOT_URL: &str = "https://www.zhihu.com/hot";
/// Creator-center entry page (gateway to the article editor).
const ZHIHU_CREATOR_URL: &str = "https://www.zhihu.com/creator";
/// Direct article-editor URL.
const ZHIHU_EDITOR_URL: &str = "https://zhuanlan.zhihu.com/write";
/// Hotlist readiness poll: up to 10 attempts at 500 ms (~5 s window).
const HOTLIST_READY_POLL_ATTEMPTS: usize = 10;
const HOTLIST_READY_POLL_INTERVAL: Duration = Duration::from_millis(500);
/// Editor readiness poll: up to 12 attempts at 500 ms (~6 s window).
const EDITOR_READY_POLL_ATTEMPTS: usize = 12;
const EDITOR_READY_POLL_INTERVAL: Duration = Duration::from_millis(500);
// Readiness pattern: requires the "热度" suffix so that sidebar "大家都在搜"
// entries (which show bare "414万" without "热度") do NOT trigger a premature
// readiness signal. The main hotlist always renders "538万热度".
const HOTLIST_TEXT_READY_PATTERN: &str =
    r"\d+(?:\.\d+)?\s*(?:万|亿|k|K|m|M)\s*热度";
/// Deterministic Zhihu workflows this module can execute directly on the
/// configured direct-skill path, bypassing the general agent loop.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum WorkflowRoute {
    /// Scrape the hotlist and export it as an .xlsx workbook.
    ZhihuHotlistExportXlsx,
    /// Scrape the hotlist and render it as a local HTML dashboard.
    ZhihuHotlistScreen,
    /// Only open the article editor; no content is filled in.
    ZhihuArticleEntry,
    /// Fill an article draft without publishing.
    ZhihuArticleDraft,
    /// Fill an article and run the publish flow (requires confirmation).
    ZhihuArticlePublish,
    /// Generate an article from a named topic, then auto-publish it.
    ZhihuArticleAutoPublishGenerated,
}
/// One parsed hotlist row.
#[derive(Debug, Clone, PartialEq, Eq)]
struct HotlistItem {
    // 1-based position on the hotlist (falls back to insertion order when
    // the page payload carries no parsable rank).
    rank: u64,
    // Question/entry title as rendered on the page.
    title: String,
    // Raw heat label text, e.g. "538万热度".
    heat: String,
}
/// Title/body pair for an article to be written into the Zhihu editor.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ArticleDraft {
    title: String,
    body: String,
}
/// Classify `instruction` (plus optional page context) into one of the
/// deterministic Zhihu workflow routes, or `None` when the task should go
/// through the general agent loop instead.
///
/// Ordering matters: hotlist checks run first, then the article routes from
/// most specific (generated auto-publish) to least specific (plain draft),
/// so a narrower intent is never shadowed by a broader one.
pub fn detect_route(
    instruction: &str,
    page_url: Option<&str>,
    page_title: Option<&str>,
) -> Option<WorkflowRoute> {
    if crate::runtime::is_zhihu_hotlist_task(instruction, page_url, page_title) {
        let normalized = instruction.to_ascii_lowercase();
        // Dashboard output wins over Excel export when both kinds of
        // keywords appear in the same instruction.
        if normalized.contains("dashboard")
            || instruction.contains("大屏")
            || instruction.contains("新标签页")
        {
            return Some(WorkflowRoute::ZhihuHotlistScreen);
        }
        if normalized.contains("excel")
            || normalized.contains("xlsx")
            || instruction.contains("导出")
        {
            return Some(WorkflowRoute::ZhihuHotlistExportXlsx);
        }
    }
    if task_requests_zhihu_generated_article_publish(instruction, page_url, page_title) {
        return Some(WorkflowRoute::ZhihuArticleAutoPublishGenerated);
    }
    if task_requests_zhihu_article_entry(instruction, page_url, page_title) {
        return Some(WorkflowRoute::ZhihuArticleEntry);
    }
    if crate::runtime::task_requests_zhihu_article_publish(instruction, page_url, page_title) {
        return Some(WorkflowRoute::ZhihuArticlePublish);
    }
    if crate::runtime::is_zhihu_write_task(instruction, page_url, page_title) {
        return Some(WorkflowRoute::ZhihuArticleDraft);
    }
    None
}
|
||
pub fn prefers_direct_execution(route: &WorkflowRoute) -> bool {
|
||
matches!(
|
||
route,
|
||
WorkflowRoute::ZhihuHotlistExportXlsx
|
||
| WorkflowRoute::ZhihuHotlistScreen
|
||
| WorkflowRoute::ZhihuArticleEntry
|
||
| WorkflowRoute::ZhihuArticleDraft
|
||
| WorkflowRoute::ZhihuArticlePublish
|
||
| WorkflowRoute::ZhihuArticleAutoPublishGenerated
|
||
)
|
||
}
|
||
|
||
/// Decide whether the agent should fall back to the general loop after a
/// direct-skill run produced `summary`.
///
/// A summary that already names a produced artifact (".xlsx" / ".html") is
/// treated as success and never triggers a fallback. Otherwise we look for
/// mixed Chinese/English denial markers.
///
/// NOTE(review): the trailing `matches!` lists every `WorkflowRoute`
/// variant, so for non-artifact summaries this function currently always
/// returns true regardless of `looks_like_denial` — confirm this "always
/// retry direct routes" behavior is intended.
pub fn should_fallback_after_summary(summary: &str, route: &WorkflowRoute) -> bool {
    let normalized = summary.to_ascii_lowercase();
    // An artifact path in the summary is the success signal for both export
    // routes; never fall back in that case.
    if normalized.contains(".xlsx") || normalized.contains(".html") {
        return false;
    }

    // Denial heuristics over both the raw (Chinese) and lowercased
    // (English) summary text.
    let looks_like_denial = summary.contains("拒绝")
        || normalized.contains("denied")
        || normalized.contains("failed")
        || normalized.contains("protocol error")
        || normalized.contains("maximum tool iterations")
        || summary.contains("失败")
        || summary.contains("无法");

    looks_like_denial
        || matches!(
            route,
            WorkflowRoute::ZhihuHotlistExportXlsx
                | WorkflowRoute::ZhihuHotlistScreen
                | WorkflowRoute::ZhihuArticleEntry
                | WorkflowRoute::ZhihuArticleDraft
                | WorkflowRoute::ZhihuArticlePublish
                | WorkflowRoute::ZhihuArticleAutoPublishGenerated
        )
}
|
||
/// Execute a detected workflow `route` against an already-attached browser
/// backend, returning a user-facing summary string on success.
///
/// Hotlist routes share one collection step and then branch on the export
/// target; article routes delegate to the dedicated article flows.
pub fn execute_route_with_browser_backend(
    transport: &dyn crate::agent::AgentEventSink,
    browser_backend: Arc<dyn BrowserBackend>,
    workspace_root: &Path,
    skills_dir: &Path,
    instruction: &str,
    task_context: &CompatTaskContext,
    route: WorkflowRoute,
    settings: &SgClawSettings,
) -> Result<String, PipeError> {
    match route {
        WorkflowRoute::ZhihuHotlistExportXlsx | WorkflowRoute::ZhihuHotlistScreen => {
            let top_n = extract_top_n(instruction);
            let items = collect_hotlist_items(
                transport,
                browser_backend.as_ref(),
                skills_dir,
                top_n,
                task_context,
            )?;
            if items.is_empty() {
                return Err(PipeError::Protocol(
                    "知乎热榜采集失败:未能从页面文本中解析到热榜条目".to_string(),
                ));
            }
            // Inner match only distinguishes the two hotlist variants; the
            // outer arm guarantees nothing else reaches here.
            match route {
                WorkflowRoute::ZhihuHotlistExportXlsx => export_xlsx(transport, workspace_root, &items),
                WorkflowRoute::ZhihuHotlistScreen => {
                    export_screen(transport, browser_backend.as_ref(), workspace_root, &items)
                }
                _ => unreachable!("handled by outer match"),
            }
        }
        WorkflowRoute::ZhihuArticleEntry => {
            execute_zhihu_article_entry_route(transport, browser_backend.as_ref(), skills_dir)
        }
        // Draft-only: publish_mode = false, publish_authorized = false.
        WorkflowRoute::ZhihuArticleDraft => execute_zhihu_article_route(
            transport,
            browser_backend.as_ref(),
            skills_dir,
            instruction,
            task_context,
            false,
            false,
            None,
        ),
        // Publish mode, but confirmation is still required inside the route
        // (publish_authorized = false).
        WorkflowRoute::ZhihuArticlePublish => execute_zhihu_article_route(
            transport,
            browser_backend.as_ref(),
            skills_dir,
            instruction,
            task_context,
            true,
            false,
            None,
        ),
        WorkflowRoute::ZhihuArticleAutoPublishGenerated => {
            execute_generated_zhihu_article_publish_route(
                transport,
                browser_backend.as_ref(),
                skills_dir,
                instruction,
                task_context,
                workspace_root,
                settings,
            )
        }
    }
}
|
||
pub fn execute_route<T: Transport + 'static>(
|
||
transport: &T,
|
||
browser_tool: &BrowserPipeTool<T>,
|
||
workspace_root: &Path,
|
||
skills_dir: &Path,
|
||
instruction: &str,
|
||
task_context: &CompatTaskContext,
|
||
route: WorkflowRoute,
|
||
settings: &SgClawSettings,
|
||
) -> Result<String, PipeError> {
|
||
let browser_backend: Arc<dyn BrowserBackend> =
|
||
Arc::new(PipeBrowserBackend::from_inner(browser_tool.clone()));
|
||
execute_route_with_browser_backend(
|
||
transport,
|
||
browser_backend,
|
||
workspace_root,
|
||
skills_dir,
|
||
instruction,
|
||
task_context,
|
||
route,
|
||
settings,
|
||
)
|
||
}
|
||
|
||
/// Collect up to `top_n` hotlist items, ensuring the hotlist page is ready
/// first. The readiness step may itself return parsed items; otherwise one
/// final extraction attempt runs here and its failure is surfaced as a
/// protocol error (unlike the tolerant probes).
fn collect_hotlist_items(
    transport: &dyn crate::agent::AgentEventSink,
    browser_tool: &dyn BrowserBackend,
    skills_dir: &Path,
    top_n: usize,
    task_context: &CompatTaskContext,
) -> Result<Vec<HotlistItem>, PipeError> {
    // Fast path: readiness probing already yielded parsed items.
    if let Some(items) =
        ensure_hotlist_page_ready(transport, browser_tool, skills_dir, top_n, task_context)?
    {
        return Ok(items);
    }
    transport.send(&AgentMessage::LogEntry {
        level: "info".to_string(),
        message: "call zhihu-hotlist.extract_hotlist".to_string(),
    })?;
    // Last-resort extraction: run the skill script once more and treat any
    // failure as fatal.
    let response = browser_tool.invoke(
        Action::Eval,
        json!({ "script": load_hotlist_extractor_script(skills_dir, top_n)? }),
        ZHIHU_DOMAIN,
    )?;
    if !response.success {
        return Err(PipeError::Protocol(format!(
            "知乎热榜采集失败:{}",
            response
                .data
                .get("error")
                .and_then(|value| value.get("message"))
                .and_then(Value::as_str)
                .unwrap_or("browser script execution failed")
        )));
    }

    // Eval responses usually wrap the payload in a "text" field.
    parse_hotlist_items_payload(response.data.get("text").unwrap_or(&response.data))
}
|
||
/// Ensure the hotlist page is loaded and readable. Returns `Ok(Some(items))`
/// when the extraction probe already produced rows, or `Ok(None)` when the
/// caller should make one final extraction attempt itself.
fn ensure_hotlist_page_ready(
    transport: &dyn crate::agent::AgentEventSink,
    browser_tool: &dyn BrowserBackend,
    skills_dir: &Path,
    top_n: usize,
    task_context: &CompatTaskContext,
) -> Result<Option<Vec<HotlistItem>>, PipeError> {
    // Heuristic: the task may already start on the hotlist page (URL prefix
    // or page title), in which case navigation can be skipped.
    let starts_on_hotlist = task_context
        .page_url
        .as_deref()
        .is_some_and(|url| url.starts_with(ZHIHU_HOT_URL))
        || task_context
            .page_title
            .as_deref()
            .is_some_and(|title| title.contains("热榜"));

    // Always validate via probe_hotlist_extractor rather than returning
    // Ok(None) on a bare readiness pass. The readiness poll uses getText(body)
    // which can be triggered by sidebar / nav-bar content before the main
    // hotlist DOM has rendered. probe_hotlist_extractor runs the full
    // extraction script and returns None when no valid rows are found,
    // allowing the retry loop to kick in.
    if starts_on_hotlist {
        // Best-effort wait for content to appear; ignore the boolean result –
        // we always follow up with the probe.
        let _ = poll_for_hotlist_readiness(browser_tool);
        if let Some(items) = probe_hotlist_extractor(transport, browser_tool, skills_dir, top_n)? {
            return Ok(Some(items));
        }
    }

    // Up to two navigate → poll → probe rounds for pages that were not (or
    // not correctly) on the hotlist.
    let mut last_error = None;
    for attempt in 0..2 {
        navigate_hotlist_page(transport, browser_tool)?;
        let _ = poll_for_hotlist_readiness(browser_tool);
        if let Some(items) = probe_hotlist_extractor(transport, browser_tool, skills_dir, top_n)? {
            return Ok(Some(items));
        }
        last_error = Some(format!(
            "知乎热榜页面已打开,但在短轮询窗口内仍未出现可读热榜内容(attempt={})",
            attempt + 1
        ));
    }

    // Log the last failure for diagnostics, then let caller try one final
    // extraction as a last resort.
    if let Some(msg) = last_error {
        transport.send(&AgentMessage::LogEntry {
            level: "warn".to_string(),
            message: msg,
        }).ok();
    }
    Ok(None)
}
|
||
/// Poll the Zhihu write page until `prepare_article_editor.js` reports
/// "editor_ready" or a terminal state (login_required). The editor page
/// is a React SPA whose title textarea and Draft.js body take noticeable
/// time to mount after navigation, so a single immediate check frequently
/// reports "editor_unavailable".
fn poll_for_editor_readiness(
    browser_tool: &dyn BrowserBackend,
    skills_dir: &Path,
    desired_mode: &str,
) -> Result<Value, PipeError> {
    let args = json!({ "desired_mode": desired_mode });
    // Keep the most recent non-terminal state so we can return something
    // meaningful if the poll window expires.
    let mut last_state: Option<Value> = None;

    for attempt in 0..EDITOR_READY_POLL_ATTEMPTS {
        match execute_browser_skill_script(
            browser_tool,
            skills_dir,
            "zhihu-write",
            "prepare_article_editor.js",
            args.clone(),
            ZHIHU_EDITOR_DOMAIN,
        ) {
            Ok(state) => {
                let status = payload_status(&state);
                // Readiness and a login wall are both terminal: further
                // polling cannot change either outcome.
                if status == Some("editor_ready") || status == Some("login_required") {
                    return Ok(state);
                }
                last_state = Some(state);
            }
            // A closed pipe is unrecoverable; other script errors are
            // expected while the SPA is still mounting.
            Err(PipeError::PipeClosed) => return Err(PipeError::PipeClosed),
            Err(_) => {}
        }

        // Don't sleep after the final attempt.
        if attempt + 1 < EDITOR_READY_POLL_ATTEMPTS {
            thread::sleep(EDITOR_READY_POLL_INTERVAL);
        }
    }

    // Window expired: report the last observed state, or run the script one
    // final time if every attempt errored out.
    match last_state {
        Some(state) => Ok(state),
        None => execute_browser_skill_script(
            browser_tool,
            skills_dir,
            "zhihu-write",
            "prepare_article_editor.js",
            args,
            ZHIHU_EDITOR_DOMAIN,
        ),
    }
}
|
||
/// Run the hotlist extraction script once, tolerantly: script failures,
/// unparsable payloads, and empty results all yield `Ok(None)` so the
/// caller's retry loop can continue; only transport errors propagate.
fn probe_hotlist_extractor(
    transport: &dyn crate::agent::AgentEventSink,
    browser_tool: &dyn BrowserBackend,
    skills_dir: &Path,
    top_n: usize,
) -> Result<Option<Vec<HotlistItem>>, PipeError> {
    transport.send(&AgentMessage::LogEntry {
        level: "info".to_string(),
        message: "call zhihu-hotlist.extract_hotlist".to_string(),
    })?;
    let response = browser_tool.invoke(
        Action::Eval,
        json!({ "script": load_hotlist_extractor_script(skills_dir, top_n)? }),
        ZHIHU_DOMAIN,
    )?;
    if !response.success {
        return Ok(None);
    }

    // Eval responses usually wrap the payload in a "text" field.
    let eval_text = response.data.get("text").unwrap_or(&response.data);

    match parse_hotlist_items_payload(eval_text) {
        Ok(items) if !items.is_empty() => Ok(Some(items)),
        Ok(_) => Ok(None),
        Err(_) => Ok(None),
    }
}
|
||
fn navigate_hotlist_page(
|
||
transport: &dyn crate::agent::AgentEventSink,
|
||
browser_tool: &dyn BrowserBackend,
|
||
) -> Result<(), PipeError> {
|
||
transport.send(&AgentMessage::LogEntry {
|
||
level: "info".to_string(),
|
||
message: format!("navigate {ZHIHU_HOT_URL}"),
|
||
})?;
|
||
let response = browser_tool.invoke(
|
||
Action::Navigate,
|
||
json!({ "url": ZHIHU_HOT_URL }),
|
||
ZHIHU_DOMAIN,
|
||
)?;
|
||
if response.success {
|
||
Ok(())
|
||
} else {
|
||
Err(PipeError::Protocol(format!(
|
||
"navigate failed: {}",
|
||
response.data
|
||
)))
|
||
}
|
||
}
|
||
|
||
/// Poll the page body text until it looks like a rendered hotlist (see
/// [`hotlist_text_looks_ready`]). Returns `Ok(false)` when the poll window
/// elapses without a readiness signal; only a closed pipe is an error.
fn poll_for_hotlist_readiness(browser_tool: &dyn BrowserBackend) -> Result<bool, PipeError> {
    let ready_pattern =
        Regex::new(HOTLIST_TEXT_READY_PATTERN).expect("hotlist readiness regex must compile");
    for attempt in 0..HOTLIST_READY_POLL_ATTEMPTS {
        // Tolerate individual GetText failures (e.g. callback timeout) – they
        // are expected while the page is still loading or the callback delivery
        // path is not yet established. Only a PipeClosed error is fatal.
        let response = match browser_tool.invoke(
            Action::GetText,
            json!({ "selector": "body" }),
            ZHIHU_DOMAIN,
        ) {
            Ok(resp) => resp,
            Err(PipeError::PipeClosed) => return Err(PipeError::PipeClosed),
            Err(_) => {
                if attempt + 1 < HOTLIST_READY_POLL_ATTEMPTS {
                    thread::sleep(HOTLIST_READY_POLL_INTERVAL);
                }
                continue;
            }
        };
        if response.success {
            // GetText responses usually wrap the text in a "text" field.
            let payload = response.data.get("text").unwrap_or(&response.data);
            if hotlist_text_looks_ready(payload, &ready_pattern) {
                return Ok(true);
            }
        }

        // Don't sleep after the final attempt.
        if attempt + 1 < HOTLIST_READY_POLL_ATTEMPTS {
            thread::sleep(HOTLIST_READY_POLL_INTERVAL);
        }
    }
    Ok(false)
}
|
||
fn hotlist_text_looks_ready(payload: &Value, ready_pattern: &Regex) -> bool {
|
||
let text = payload.as_str().unwrap_or_default();
|
||
text.contains("热榜") && ready_pattern.is_match(text)
|
||
}
|
||
|
||
/// Export hotlist `items` to an .xlsx workbook via the OpenXML office tool,
/// then best-effort open the file. Returns a Chinese user-facing summary;
/// an export failure is an error, an open failure is only reported.
fn export_xlsx(
    transport: &dyn crate::agent::AgentEventSink,
    workspace_root: &Path,
    items: &[HotlistItem],
) -> Result<String, PipeError> {
    transport.send(&AgentMessage::LogEntry {
        level: "info".to_string(),
        message: "call openxml_office".to_string(),
    })?;
    let tool = OpenXmlOfficeTool::new(workspace_root.to_path_buf());
    // Rows are [rank, title, heat] triples matching the column order below.
    let rows = items
        .iter()
        .map(|item| json!([item.rank, item.title, item.heat]))
        .collect::<Vec<_>>();
    // The office tool is async; this synchronous path spins up a dedicated
    // runtime for the single call.
    let runtime = tokio::runtime::Runtime::new()
        .map_err(|err| PipeError::Protocol(format!("failed to create tokio runtime: {err}")))?;
    let result = runtime
        .block_on(tool.execute(json!({
            "sheet_name": "知乎热榜",
            "columns": ["rank", "title", "heat"],
            "rows": rows,
        })))
        .map_err(|err| PipeError::Protocol(err.to_string()))?;
    if !result.success {
        return Err(PipeError::Protocol(
            result
                .error
                .unwrap_or_else(|| "openxml_office failed".to_string()),
        ));
    }

    // Tool output is a JSON document carrying the written file path.
    let payload: Value = serde_json::from_str(&result.output)
        .map_err(|err| PipeError::Protocol(format!("invalid openxml_office output: {err}")))?;
    let output_path = payload["output_path"].as_str().ok_or_else(|| {
        PipeError::Protocol("openxml_office did not return output_path".to_string())
    })?;
    Ok(match open_exported_xlsx(Path::new(output_path)) {
        PostExportOpen::Opened => format!("已导出并打开知乎热榜 Excel {output_path}"),
        PostExportOpen::Failed(reason) => {
            format!("已导出知乎热榜 Excel {output_path},但自动打开失败:{reason}")
        }
    })
}
|
||
/// Render hotlist `items` into a local HTML dashboard via the screen-export
/// tool, then hand off to [`finalize_screen_export`] to open it in the
/// browser and build the user-facing summary.
fn export_screen(
    transport: &dyn crate::agent::AgentEventSink,
    browser_backend: &dyn BrowserBackend,
    workspace_root: &Path,
    items: &[HotlistItem],
) -> Result<String, PipeError> {
    transport.send(&AgentMessage::LogEntry {
        level: "info".to_string(),
        message: "call screen_html_export".to_string(),
    })?;
    let tool = ScreenHtmlExportTool::new(workspace_root.to_path_buf());
    // Rows are [rank, title, heat] triples.
    let rows = items
        .iter()
        .map(|item| json!([item.rank, item.title, item.heat]))
        .collect::<Vec<_>>();
    // The export tool is async; this synchronous path spins up a dedicated
    // runtime for the single call.
    let runtime = tokio::runtime::Runtime::new()
        .map_err(|err| PipeError::Protocol(format!("failed to create tokio runtime: {err}")))?;
    let result = runtime
        .block_on(tool.execute(json!({ "rows": rows })))
        .map_err(|err| PipeError::Protocol(err.to_string()))?;
    if !result.success {
        return Err(PipeError::Protocol(
            result
                .error
                .unwrap_or_else(|| "screen_html_export failed".to_string()),
        ));
    }

    finalize_screen_export(browser_backend, &result.output)
}
|
||
/// Parse the screen-export tool `output` (a JSON document) and open the
/// generated dashboard in the browser, preferring the presentation URL
/// when the tool provided one. Returns a Chinese user-facing summary.
pub fn finalize_screen_export(
    browser_backend: &dyn BrowserBackend,
    output: &str,
) -> Result<String, PipeError> {
    let payload: Value = serde_json::from_str(output)
        .map_err(|err| PipeError::Protocol(format!("invalid screen_html_export output: {err}")))?;
    let output_path = payload["output_path"].as_str().ok_or_else(|| {
        PipeError::Protocol("screen_html_export did not return output_path".to_string())
    })?;
    // Optional ready-to-serve presentation URL; empty string when absent.
    let presentation_url = payload["presentation"]["url"].as_str().unwrap_or_default();
    Ok(match open_local_dashboard(browser_backend, Path::new(output_path), presentation_url) {
        PostExportOpen::Opened => format!("已在浏览器中打开知乎热榜大屏 {output_path}"),
        PostExportOpen::Failed(reason) => {
            format!("已生成知乎热榜大屏 {output_path},但浏览器自动打开失败:{reason}")
        }
    })
}
|
||
/// Deterministic Zhihu article flow: resolve a draft, optionally require an
/// explicit publish confirmation, navigate creator center → editor, fill
/// the title/body, and (in publish mode) submit.
///
/// `publish_mode` drives both the editor preparation mode and the fill
/// script; `publish_authorized` skips the confirmation prompt (used by the
/// auto-publish route); `article_override` bypasses draft extraction.
fn execute_zhihu_article_route(
    transport: &dyn crate::agent::AgentEventSink,
    browser_tool: &dyn BrowserBackend,
    skills_dir: &Path,
    instruction: &str,
    task_context: &CompatTaskContext,
    publish_mode: bool,
    publish_authorized: bool,
    article_override: Option<ArticleDraft>,
) -> Result<String, PipeError> {
    // Resolve the draft: explicit override first, then extraction from the
    // instruction / conversation history.
    let Some(article) = article_override.or_else(|| extract_article_draft(instruction, &task_context.messages)) else {
        return Ok(
            "这类知乎文章任务需要同时提供标题和正文后我才能继续确定性写作流程。请按“标题:…\\n正文:…”的格式补充内容。"
                .to_string(),
        );
    };

    // Publishing requires explicit user confirmation unless the caller
    // already carries authorization (auto-publish route).
    if publish_mode && !publish_authorized && !has_explicit_publish_confirmation(instruction) {
        return Ok(build_publish_confirmation_message(&article));
    }

    navigate_zhihu_page(transport, browser_tool, ZHIHU_CREATOR_URL)?;
    transport.send(&AgentMessage::LogEntry {
        level: "info".to_string(),
        message: "call zhihu-navigate.open_creator_entry".to_string(),
    })?;
    let creator_state = execute_browser_skill_script(
        browser_tool,
        skills_dir,
        "zhihu-navigate",
        "open_creator_entry.js",
        json!({ "desired_target": "article_editor" }),
        ZHIHU_DOMAIN,
    )?;
    // A login wall or a missing editor entry are user-facing stops, not
    // protocol errors.
    if is_login_required_payload(&creator_state) {
        return Ok(build_login_block_message(payload_current_url(
            &creator_state,
        )));
    }
    if payload_status(&creator_state) == Some("creator_home") {
        return Ok(build_creator_entry_missing_message(payload_current_url(
            &creator_state,
        )));
    }
    navigate_to_editor_after_creator_entry(transport, browser_tool, &creator_state)?;

    transport.send(&AgentMessage::LogEntry {
        level: "info".to_string(),
        message: "call zhihu-write.prepare_article_editor".to_string(),
    })?;
    let editor_state = poll_for_editor_readiness(
        browser_tool,
        skills_dir,
        if publish_mode { "publish" } else { "draft" },
    )?;
    if is_login_required_payload(&editor_state) {
        return Ok(build_login_block_message(payload_current_url(
            &editor_state,
        )));
    }
    if payload_status(&editor_state) != Some("editor_ready") {
        return Ok(build_editor_unavailable_message(payload_current_url(
            &editor_state,
        )));
    }

    transport.send(&AgentMessage::LogEntry {
        level: "info".to_string(),
        message: "call zhihu-write.fill_article_draft".to_string(),
    })?;
    // Prefer the live-input path (visible typing) when the backend supports
    // it; otherwise use the one-shot fill script.
    let fill_result = if browser_tool.supports_live_input() {
        execute_zhihu_fill_via_live_input(browser_tool, skills_dir, &article, publish_mode)?
    } else {
        execute_browser_skill_script(
            browser_tool,
            skills_dir,
            "zhihu-write",
            "fill_article_draft.js",
            json!({
                "title": article.title,
                "body": article.body,
                "publish_mode": publish_mode.to_string(),
            }),
            ZHIHU_EDITOR_DOMAIN,
        )?
    };
    if is_login_required_payload(&fill_result) {
        return Ok(build_login_block_message(payload_current_url(&fill_result)));
    }

    // Map the fill script's terminal status to a summary or an error.
    match payload_status(&fill_result) {
        Some("draft_ready") => Ok(format!(
            "已进入知乎文章编辑器并写入草稿《{}》",
            article.title
        )),
        Some("publish_clicked") | Some("publish_submitted") => {
            Ok(format!("已提交知乎文章发布流程《{}》", article.title))
        }
        Some("publish_button_missing") => Err(PipeError::Protocol(
            "知乎文章流程失败:未找到发布按钮".to_string(),
        )),
        Some("editor_not_ready") => Err(PipeError::Protocol(
            "知乎文章流程失败:编辑器尚未准备就绪".to_string(),
        )),
        _ => Err(PipeError::Protocol(format!(
            "知乎文章流程失败:浏览器脚本返回了未知状态 {fill_result}"
        ))),
    }
}
|
||
/// Auto-publish flow for generated articles: pull the topic from the
/// instruction, generate a draft for it, then run the standard article
/// route in publish mode with confirmation pre-authorized (the user
/// explicitly asked for automatic publishing).
fn execute_generated_zhihu_article_publish_route(
    transport: &dyn crate::agent::AgentEventSink,
    browser_tool: &dyn BrowserBackend,
    skills_dir: &Path,
    instruction: &str,
    task_context: &CompatTaskContext,
    workspace_root: &Path,
    settings: &SgClawSettings,
) -> Result<String, PipeError> {
    // Without a named topic we cannot generate anything; ask the user to
    // restate the request in the expected format.
    let Some(topic) = extract_generated_article_topic(instruction) else {
        return Ok("请按“在知乎自动发表一篇名称为…”或“在知乎自动发布一篇标题为…”的格式提供文章名称。".to_string());
    };

    let article = crate::compat::runtime::generate_zhihu_article_draft(
        instruction,
        &topic,
        task_context,
        workspace_root,
        settings,
    )?;

    // publish_mode = true, publish_authorized = true: auto-publish was
    // requested explicitly, so no extra confirmation round-trip.
    execute_zhihu_article_route(
        transport,
        browser_tool,
        skills_dir,
        instruction,
        task_context,
        true,
        true,
        Some(article),
    )
}
|
||
fn extract_generated_article_topic(instruction: &str) -> Option<String> {
|
||
let normalized = normalize_article_draft_input(instruction);
|
||
let name_re = Regex::new(r"(?:名称|标题)(?:是|为)\s*([^,,。\n]+)").expect("valid generated zhihu topic regex");
|
||
name_re
|
||
.captures(&normalized)
|
||
.and_then(|capture| capture.get(1))
|
||
.map(|value| value.as_str().trim().to_string())
|
||
.filter(|value| !value.is_empty())
|
||
}
|
||
|
||
fn task_requests_zhihu_generated_article_publish(
|
||
instruction: &str,
|
||
page_url: Option<&str>,
|
||
page_title: Option<&str>,
|
||
) -> bool {
|
||
if !crate::runtime::is_zhihu_write_task(instruction, page_url, page_title) {
|
||
return false;
|
||
}
|
||
|
||
let normalized = instruction.to_ascii_lowercase();
|
||
let asks_auto_publish = instruction.contains("自动发表")
|
||
|| instruction.contains("自动发布")
|
||
|| (normalized.contains("auto publish") && normalized.contains("zhihu"));
|
||
let has_topic_only = extract_generated_article_topic(instruction).is_some();
|
||
let already_has_full_draft = parse_article_draft(instruction).is_some();
|
||
|
||
asks_auto_publish && has_topic_only && !already_has_full_draft
|
||
}
|
||
|
||
/// Entry-only flow: navigate creator center → article editor and report
/// whether the editor became ready. No content is filled in.
fn execute_zhihu_article_entry_route(
    transport: &dyn crate::agent::AgentEventSink,
    browser_tool: &dyn BrowserBackend,
    skills_dir: &Path,
) -> Result<String, PipeError> {
    navigate_zhihu_page(transport, browser_tool, ZHIHU_CREATOR_URL)?;
    transport.send(&AgentMessage::LogEntry {
        level: "info".to_string(),
        message: "call zhihu-navigate.open_creator_entry".to_string(),
    })?;
    let creator_state = execute_browser_skill_script(
        browser_tool,
        skills_dir,
        "zhihu-navigate",
        "open_creator_entry.js",
        json!({ "desired_target": "article_editor" }),
        ZHIHU_DOMAIN,
    )?;
    // A login wall or a missing editor entry are user-facing stops, not
    // protocol errors.
    if is_login_required_payload(&creator_state) {
        return Ok(build_login_block_message(payload_current_url(
            &creator_state,
        )));
    }
    if payload_status(&creator_state) == Some("creator_home") {
        return Ok(build_creator_entry_missing_message(payload_current_url(
            &creator_state,
        )));
    }
    navigate_to_editor_after_creator_entry(transport, browser_tool, &creator_state)?;

    transport.send(&AgentMessage::LogEntry {
        level: "info".to_string(),
        message: "call zhihu-write.prepare_article_editor".to_string(),
    })?;
    let editor_state = poll_for_editor_readiness(browser_tool, skills_dir, "draft")?;
    if is_login_required_payload(&editor_state) {
        return Ok(build_login_block_message(payload_current_url(
            &editor_state,
        )));
    }
    if payload_status(&editor_state) == Some("editor_ready") {
        return Ok("已进入知乎文章编辑器。".to_string());
    }

    Ok(build_editor_unavailable_message(payload_current_url(
        &editor_state,
    )))
}
|
||
fn load_hotlist_extractor_script(skills_dir: &Path, top_n: usize) -> Result<String, PipeError> {
|
||
load_browser_skill_script(
|
||
skills_dir,
|
||
"zhihu-hotlist",
|
||
"extract_hotlist.js",
|
||
json!({ "top_n": top_n.to_string() }),
|
||
)
|
||
}
|
||
|
||
/// Parse the extraction script payload into hotlist items.
///
/// The payload may be a JSON object or a JSON string that itself encodes
/// one; both forms are normalized first. Rows are `[rank, title, heat]`
/// triples: malformed rows are skipped, a missing/unparsable rank falls
/// back to the next sequential position, and an all-empty result is a
/// protocol error.
fn parse_hotlist_items_payload(payload: &Value) -> Result<Vec<HotlistItem>, PipeError> {
    // Decode double-encoded (stringified JSON) payloads; non-JSON strings
    // are kept as-is and will fail the "rows" lookup below.
    let normalized_payload = if let Some(text) = payload.as_str() {
        serde_json::from_str::<Value>(text).unwrap_or_else(|_| Value::String(text.to_string()))
    } else {
        payload.clone()
    };

    let rows = normalized_payload
        .get("rows")
        .and_then(Value::as_array)
        .ok_or_else(|| {
            PipeError::Protocol("知乎热榜采集失败:浏览器脚本未返回 rows".to_string())
        })?;

    let mut items = Vec::new();
    for row in rows {
        let Some(cells) = row.as_array() else {
            continue;
        };
        // Only exact [rank, title, heat] triples are accepted.
        if cells.len() != 3 {
            continue;
        }

        // Rank may arrive as a number or a numeric string; fall back to the
        // next sequential position when neither parses.
        let rank = cells[0]
            .as_u64()
            .or_else(|| {
                cells[0]
                    .as_str()
                    .and_then(|value| value.parse::<u64>().ok())
            })
            .unwrap_or((items.len() + 1) as u64);
        let title = cells[1].as_str().unwrap_or_default().trim().to_string();
        let heat = cells[2].as_str().unwrap_or_default().trim().to_string();
        if title.is_empty() || heat.is_empty() {
            continue;
        }
        items.push(HotlistItem { rank, title, heat });
    }

    if items.is_empty() {
        return Err(PipeError::Protocol(
            "知乎热榜采集失败:浏览器脚本未返回有效热榜条目".to_string(),
        ));
    }

    Ok(items)
}
fn extract_top_n(instruction: &str) -> usize {
|
||
let re = Regex::new(r"(?:前|top\s*)(\d{1,2})").expect("valid top-n regex");
|
||
re.captures(&instruction.to_ascii_lowercase())
|
||
.and_then(|capture| capture.get(1))
|
||
.and_then(|value| value.as_str().parse::<usize>().ok())
|
||
.filter(|value| *value > 0)
|
||
.unwrap_or(10)
|
||
}
|
||
|
||
fn navigate_zhihu_page(
|
||
transport: &dyn crate::agent::AgentEventSink,
|
||
browser_tool: &dyn BrowserBackend,
|
||
url: &str,
|
||
) -> Result<(), PipeError> {
|
||
transport.send(&AgentMessage::LogEntry {
|
||
level: "info".to_string(),
|
||
message: format!("navigate {url}"),
|
||
})?;
|
||
let response = browser_tool.invoke(Action::Navigate, json!({ "url": url }), ZHIHU_DOMAIN)?;
|
||
if response.success {
|
||
Ok(())
|
||
} else {
|
||
Err(PipeError::Protocol(format!(
|
||
"navigate failed: {}",
|
||
response.data
|
||
)))
|
||
}
|
||
}
|
||
|
||
fn execute_browser_skill_script(
|
||
browser_tool: &dyn BrowserBackend,
|
||
skills_dir: &Path,
|
||
skill_name: &str,
|
||
script_name: &str,
|
||
args: Value,
|
||
expected_domain: &str,
|
||
) -> Result<Value, PipeError> {
|
||
let wrapped_script =
|
||
load_browser_skill_script(skills_dir, skill_name, script_name, args)?;
|
||
let response = browser_tool.invoke(
|
||
Action::Eval,
|
||
json!({ "script": wrapped_script }),
|
||
expected_domain,
|
||
)?;
|
||
if !response.success {
|
||
return Err(PipeError::Protocol(format!(
|
||
"browser script failed: {}",
|
||
response.data
|
||
)));
|
||
}
|
||
|
||
Ok(normalize_payload(
|
||
response.data.get("text").unwrap_or(&response.data),
|
||
))
|
||
}
|
||
|
||
fn live_input_probe_script(selector_candidates: &[&str]) -> String {
|
||
let selectors_json = serde_json::to_string(selector_candidates).expect("valid selector candidates");
|
||
format!(
|
||
"var selectors={selectors_json};for(var i=0;i<selectors.length;i++){{var nodes=Array.from(document.querySelectorAll(selectors[i]));for(var j=0;j<nodes.length;j++){{var node=nodes[j];if(!node){{continue;}}var rect=(typeof node.getBoundingClientRect==='function')?node.getBoundingClientRect():null;var visible=!rect||rect.width>0||rect.height>0;if(!visible){{continue;}}return node;}}}}return null;"
|
||
)
|
||
}
|
||
|
||
fn execute_zhihu_fill_via_live_input(
|
||
browser_tool: &dyn BrowserBackend,
|
||
skills_dir: &Path,
|
||
article: &ArticleDraft,
|
||
publish_mode: bool,
|
||
) -> Result<Value, PipeError> {
|
||
let title_probe = live_input_probe_script(&[
|
||
"textarea[placeholder*='标题']",
|
||
"input[placeholder*='标题']",
|
||
"textarea[data-placeholder*='标题']",
|
||
"input[data-placeholder*='标题']",
|
||
"[role='textbox'][aria-label*='标题']",
|
||
"[contenteditable='true'][aria-label*='标题']",
|
||
"[contenteditable='true'][data-placeholder*='标题']",
|
||
]);
|
||
let body_probe = live_input_probe_script(&[
|
||
"div[contenteditable='true'][role='textbox']",
|
||
"div.public-DraftEditor-content[contenteditable='true']",
|
||
"[role='textbox'][contenteditable='true']",
|
||
"[contenteditable='true'][data-placeholder]",
|
||
"div[contenteditable='true']",
|
||
]);
|
||
|
||
// ── Step 1: Click title field ──────────────────────────────
|
||
browser_tool.invoke(
|
||
Action::Click,
|
||
json!({
|
||
"target_url": ZHIHU_EDITOR_URL,
|
||
"probe_script": title_probe,
|
||
}),
|
||
ZHIHU_EDITOR_DOMAIN,
|
||
)?;
|
||
|
||
// ── Step 2: Animated title typing (eval-based) ──────────────
|
||
// Instead of fire-and-forget keyboard simulation, we use a single eval
|
||
// that types the title character-by-character using setTimeout. This
|
||
// produces a visible typing animation for demo purposes and also uses
|
||
// the native value setter so React detects the changes.
|
||
let title_json = serde_json::to_string(&article.title).unwrap_or_else(|_| "\"\"".into());
|
||
let title_chars = article.title.chars().count();
|
||
let title_chunk = 3usize;
|
||
let title_delay = 50u64;
|
||
let title_wait = ((title_chars + title_chunk - 1) / title_chunk) as u64 * title_delay + 300;
|
||
let title_script = format!(
|
||
r#"(function(){{
|
||
var sels=["textarea[placeholder*='标题']","input[placeholder*='标题']",
|
||
"textarea[data-placeholder*='标题']","input[data-placeholder*='标题']",
|
||
"[role='textbox'][aria-label*='标题']"];
|
||
var node=null;
|
||
for(var i=0;i<sels.length;i++){{var ns=document.querySelectorAll(sels[i]);
|
||
for(var j=0;j<ns.length;j++){{var r=ns[j].getBoundingClientRect();
|
||
if(r.width>0&&r.height>0){{node=ns[j];break;}}}}if(node)break;}}
|
||
if(!node)return JSON.stringify({{status:'error',msg:'title_not_found'}});
|
||
node.focus();
|
||
var text={title_json};
|
||
var proto=node instanceof HTMLTextAreaElement?HTMLTextAreaElement.prototype:HTMLInputElement.prototype;
|
||
var desc=Object.getOwnPropertyDescriptor(proto,'value');
|
||
var set=(desc&&desc.set)?function(v){{desc.set.call(node,v);}}:function(v){{node.value=v;}};
|
||
var typed='',cs={title_chunk},chunks=[];
|
||
for(var i=0;i<text.length;i+=cs)chunks.push(text.substring(i,i+cs));
|
||
var idx=0;
|
||
function tick(){{if(idx>=chunks.length)return;typed+=chunks[idx];set(typed);
|
||
node.dispatchEvent(new Event('input',{{bubbles:true}}));idx++;
|
||
if(idx<chunks.length)setTimeout(tick,{title_delay});}}
|
||
tick();
|
||
return JSON.stringify({{status:'ok',chunks:chunks.length}});
|
||
}})()"#,
|
||
title_json = title_json,
|
||
title_chunk = title_chunk,
|
||
title_delay = title_delay,
|
||
);
|
||
browser_tool.invoke(
|
||
Action::Eval,
|
||
json!({ "script": title_script }),
|
||
ZHIHU_EDITOR_DOMAIN,
|
||
)?;
|
||
std::thread::sleep(std::time::Duration::from_millis(title_wait));
|
||
|
||
// ── Step 3: Click body field ────────────────────────────────
|
||
browser_tool.invoke(
|
||
Action::Click,
|
||
json!({
|
||
"target_url": ZHIHU_EDITOR_URL,
|
||
"probe_script": body_probe,
|
||
}),
|
||
ZHIHU_EDITOR_DOMAIN,
|
||
)?;
|
||
|
||
// ── Step 4: Animated body typing (eval-based, paste chunks) ─
|
||
// Each chunk is pasted via ClipboardEvent so Draft.js updates its
|
||
// ContentState incrementally. The audience sees text appearing in
|
||
// bursts — visually proving "the AI is writing the article".
|
||
let body_json = serde_json::to_string(&article.body).unwrap_or_else(|_| "\"\"".into());
|
||
let body_chars = article.body.chars().count();
|
||
let body_chunk = 20usize;
|
||
let body_delay = 80u64;
|
||
let body_wait = ((body_chars + body_chunk - 1) / body_chunk) as u64 * body_delay + 500;
|
||
let body_script = format!(
|
||
r#"(function(){{
|
||
var sels=["div.public-DraftEditor-content[contenteditable='true']",
|
||
"div[contenteditable='true'][role='textbox']",
|
||
"[role='textbox'][contenteditable='true']",
|
||
"[contenteditable='true'][data-placeholder]",
|
||
"div[contenteditable='true']"];
|
||
var ed=null;
|
||
for(var i=0;i<sels.length;i++){{var ns=document.querySelectorAll(sels[i]);
|
||
for(var j=0;j<ns.length;j++){{var r=ns[j].getBoundingClientRect();
|
||
if(r.width>0&&r.height>0){{ed=ns[j];break;}}}}if(ed)break;}}
|
||
if(!ed)return JSON.stringify({{status:'error',msg:'body_not_found'}});
|
||
ed.focus();
|
||
var text={body_json};var cs={body_chunk};var chunks=[];
|
||
for(var i=0;i<text.length;i+=cs)chunks.push(text.substring(i,i+cs));
|
||
var idx=0;
|
||
function tick(){{if(idx>=chunks.length)return;
|
||
ed.focus();
|
||
var sel=window.getSelection();var range=document.createRange();
|
||
range.selectNodeContents(ed);range.collapse(false);
|
||
sel.removeAllRanges();sel.addRange(range);
|
||
var dt=new DataTransfer();dt.setData('text/plain',chunks[idx]);
|
||
ed.dispatchEvent(new ClipboardEvent('paste',{{bubbles:true,cancelable:true,clipboardData:dt}}));
|
||
idx++;if(idx<chunks.length)setTimeout(tick,{body_delay});}}
|
||
tick();
|
||
return JSON.stringify({{status:'ok',chunks:chunks.length}});
|
||
}})()"#,
|
||
body_json = body_json,
|
||
body_chunk = body_chunk,
|
||
body_delay = body_delay,
|
||
);
|
||
browser_tool.invoke(
|
||
Action::Eval,
|
||
json!({ "script": body_script }),
|
||
ZHIHU_EDITOR_DOMAIN,
|
||
)?;
|
||
std::thread::sleep(std::time::Duration::from_millis(body_wait));
|
||
|
||
// Step 5: Fill content only. The publish-button click is split into a
|
||
// separate eval (step 6) because React needs a full render cycle to
|
||
// enable the button after the content fill updates the editor state.
|
||
let fill_result = execute_browser_skill_script(
|
||
browser_tool,
|
||
skills_dir,
|
||
"zhihu-write",
|
||
"fill_article_draft.js",
|
||
json!({
|
||
"title": article.title,
|
||
"body": article.body,
|
||
"publish_mode": "false",
|
||
"input_mode": "live_input",
|
||
}),
|
||
ZHIHU_EDITOR_DOMAIN,
|
||
)?;
|
||
|
||
if !publish_mode || payload_status(&fill_result) != Some("draft_ready") {
|
||
return Ok(fill_result);
|
||
}
|
||
|
||
// Step 6: After React has rendered the enabled publish button, click it.
|
||
std::thread::sleep(std::time::Duration::from_millis(1500));
|
||
|
||
let publish_script = r#"(function(){
|
||
function ct(s){return String(s||'').replace(/[\s\u00a0\u200b\u200c\u200d\ufeff]+/g,' ').trim();}
|
||
function vis(n){if(!n)return false;var r=n.getBoundingClientRect();return r.width>0&&r.height>0;}
|
||
var cs=Array.from(document.querySelectorAll("button,[role='button'],a")).filter(vis);
|
||
var btn=cs.find(function(n){return ct(n.textContent)==='发布';})
|
||
||cs.find(function(n){var t=ct(n.textContent);return t.includes('发布')&&!t.includes('设置');});
|
||
if(!btn)return JSON.stringify({status:'publish_button_missing'});
|
||
if(btn.disabled)return JSON.stringify({status:'publish_button_missing',publish_button_disabled:true});
|
||
btn.click();
|
||
var cs2=Array.from(document.querySelectorAll("button,[role='button'],a")).filter(vis);
|
||
var cfm=cs2.find(function(n){return ct(n.textContent)==='确认发布';});
|
||
if(cfm&&!cfm.disabled){cfm.click();return JSON.stringify({status:'publish_submitted'});}
|
||
return JSON.stringify({status:'publish_clicked'});
|
||
})()"#;
|
||
|
||
let response = browser_tool.invoke(
|
||
Action::Eval,
|
||
json!({ "script": publish_script }),
|
||
ZHIHU_EDITOR_DOMAIN,
|
||
)?;
|
||
if !response.success {
|
||
return Err(PipeError::Protocol(format!(
|
||
"publish button click failed: {}", response.data
|
||
)));
|
||
}
|
||
Ok(normalize_payload(
|
||
response.data.get("text").unwrap_or(&response.data),
|
||
))
|
||
}
|
||
|
||
fn navigate_to_editor_after_creator_entry(
|
||
transport: &dyn crate::agent::AgentEventSink,
|
||
browser_tool: &dyn BrowserBackend,
|
||
creator_state: &Value,
|
||
) -> Result<(), PipeError> {
|
||
let status = payload_status(creator_state);
|
||
let current_url = payload_current_url(creator_state).unwrap_or("");
|
||
if status == Some("editor_ready") && current_url.starts_with(ZHIHU_EDITOR_URL) {
|
||
return Ok(());
|
||
}
|
||
|
||
let target_url = payload_next_url(creator_state).unwrap_or(ZHIHU_EDITOR_URL);
|
||
if status == Some("creator_entry_clicked")
|
||
|| status == Some("creator_entry_found")
|
||
|| status == Some("editor_ready")
|
||
{
|
||
transport.send(&AgentMessage::LogEntry {
|
||
level: "info".to_string(),
|
||
message: format!("navigate {target_url}"),
|
||
})?;
|
||
let response = browser_tool.invoke(
|
||
Action::Navigate,
|
||
json!({ "url": target_url }),
|
||
ZHIHU_EDITOR_DOMAIN,
|
||
)?;
|
||
if !response.success {
|
||
return Err(PipeError::Protocol(format!(
|
||
"navigate failed: {}",
|
||
response.data
|
||
)));
|
||
}
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
#[cfg(test)]
|
||
mod tests {
|
||
use super::*;
|
||
use std::collections::VecDeque;
|
||
use std::sync::{Arc, Mutex};
|
||
|
||
use crate::config::SgClawSettings;
|
||
use crate::pipe::{BrowserMessage, CommandOutput, ExecutionSurfaceMetadata, Timing};
|
||
use crate::security::MacPolicy;
|
||
|
||
fn test_settings() -> SgClawSettings {
|
||
SgClawSettings::from_legacy_deepseek_fields(
|
||
"test-key".to_string(),
|
||
"http://127.0.0.1:9".to_string(),
|
||
"deepseek-chat".to_string(),
|
||
None,
|
||
)
|
||
.unwrap()
|
||
}
|
||
|
||
/// Path to the checked-in browser-skill scripts loaded by these tests.
// NOTE(review): machine-specific absolute Windows path — tests that read
// skill files off disk only pass on hosts with this exact checkout location;
// confirm whether a workspace-relative path should be used instead.
fn test_skills_dir() -> &'static Path {
    Path::new("D:/data/ideaSpace/rust/sgClaw/claw/claw/skills")
}
|
||
|
||
/// Test double for `Transport`: records every outbound `AgentMessage` and
/// replays a pre-scripted queue of `BrowserMessage` responses.
struct MockWorkflowTransport {
    // Messages the code under test sent, in send order.
    sent: Mutex<Vec<AgentMessage>>,
    // Canned responses handed back by `recv_timeout`, FIFO.
    responses: Mutex<VecDeque<BrowserMessage>>,
}
|
||
|
||
impl MockWorkflowTransport {
|
||
fn new(responses: Vec<BrowserMessage>) -> Self {
|
||
Self {
|
||
sent: Mutex::new(Vec::new()),
|
||
responses: Mutex::new(VecDeque::from(responses)),
|
||
}
|
||
}
|
||
|
||
fn sent_messages(&self) -> Vec<AgentMessage> {
|
||
self.sent.lock().unwrap().clone()
|
||
}
|
||
}
|
||
|
||
impl Transport for MockWorkflowTransport {
|
||
fn send(&self, message: &AgentMessage) -> Result<(), PipeError> {
|
||
self.sent.lock().unwrap().push(message.clone());
|
||
Ok(())
|
||
}
|
||
|
||
fn recv_timeout(&self, _timeout: Duration) -> Result<BrowserMessage, PipeError> {
|
||
self.responses
|
||
.lock()
|
||
.unwrap()
|
||
.pop_front()
|
||
.ok_or(PipeError::Timeout)
|
||
}
|
||
}
|
||
|
||
fn zhihu_test_policy() -> MacPolicy {
|
||
MacPolicy::from_json_str(
|
||
&json!({
|
||
"version": "1.0",
|
||
"domains": { "allowed": ["www.zhihu.com"] },
|
||
"pipe_actions": {
|
||
"allowed": ["navigate", "getText", "eval"],
|
||
"blocked": []
|
||
}
|
||
})
|
||
.to_string(),
|
||
)
|
||
.unwrap()
|
||
}
|
||
|
||
fn success_browser_response(seq: u64, data: Value) -> BrowserMessage {
|
||
BrowserMessage::Response {
|
||
seq,
|
||
success: true,
|
||
data,
|
||
aom_snapshot: vec![],
|
||
timing: Timing {
|
||
queue_ms: 1,
|
||
exec_ms: 10,
|
||
},
|
||
}
|
||
}
|
||
|
||
/// Scripted `BrowserBackend` double: replays a queue of prepared results and
/// records every `invoke` call for later inspection by the tests.
#[derive(Default)]
struct FakeBrowserBackend {
    // FIFO of results returned by `invoke`; once exhausted, `invoke`
    // yields `PipeError::Timeout`.
    responses: Mutex<VecDeque<Result<CommandOutput, PipeError>>>,
    // One (action, params, expected_domain) triple per `invoke` call, in order.
    invocations: Mutex<Vec<(Action, Value, String)>>,
    // Whether this fake advertises live-input support to the route logic.
    supports_live_input: bool,
}
|
||
|
||
impl FakeBrowserBackend {
|
||
fn new(responses: Vec<Result<CommandOutput, PipeError>>) -> Self {
|
||
Self {
|
||
responses: Mutex::new(VecDeque::from(responses)),
|
||
invocations: Mutex::new(Vec::new()),
|
||
supports_live_input: false,
|
||
}
|
||
}
|
||
|
||
fn with_live_input(mut self) -> Self {
|
||
self.supports_live_input = true;
|
||
self
|
||
}
|
||
|
||
fn invocations(&self) -> Vec<(Action, Value, String)> {
|
||
self.invocations.lock().unwrap().clone()
|
||
}
|
||
}
|
||
|
||
impl BrowserBackend for FakeBrowserBackend {
|
||
fn invoke(
|
||
&self,
|
||
action: Action,
|
||
params: Value,
|
||
expected_domain: &str,
|
||
) -> Result<CommandOutput, PipeError> {
|
||
self.invocations
|
||
.lock()
|
||
.unwrap()
|
||
.push((action, params, expected_domain.to_string()));
|
||
self.responses
|
||
.lock()
|
||
.unwrap()
|
||
.pop_front()
|
||
.unwrap_or_else(|| Err(PipeError::Timeout))
|
||
}
|
||
|
||
fn surface_metadata(&self) -> ExecutionSurfaceMetadata {
|
||
ExecutionSurfaceMetadata::privileged_browser_pipe("fake_backend")
|
||
}
|
||
|
||
fn supports_live_input(&self) -> bool {
|
||
self.supports_live_input
|
||
}
|
||
}
|
||
|
||
#[test]
fn execute_route_with_browser_backend_runs_direct_route_with_ws_style_backend() {
    // Shorthand for a successful backend reply with trivial timing.
    fn ok(seq: u64, data: Value) -> Result<CommandOutput, PipeError> {
        Ok(CommandOutput {
            seq,
            success: true,
            data,
            aom_snapshot: vec![],
            timing: Timing {
                queue_ms: 1,
                exec_ms: 1,
            },
        })
    }

    let transport = Arc::new(MockWorkflowTransport::new(vec![]));
    let backend = Arc::new(FakeBrowserBackend::new(vec![
        ok(1, json!({})),
        ok(2, json!({ "text": "已进入编辑器" })),
        ok(
            3,
            json!({
                "text": {
                    "status": "editor_ready",
                    "current_url": "https://zhuanlan.zhihu.com/write"
                }
            }),
        ),
    ]));

    let summary = execute_route_with_browser_backend(
        transport.as_ref(),
        backend.clone(),
        Path::new("."),
        test_skills_dir(),
        "打开知乎写文章页面",
        &CompatTaskContext::default(),
        WorkflowRoute::ZhihuArticleEntry,
        &test_settings(),
    )
    .expect("ws-style backend should satisfy direct route execution");

    assert_eq!(summary, "已进入知乎文章编辑器。");

    let creator_script = load_browser_skill_script(
        test_skills_dir(),
        "zhihu-navigate",
        "open_creator_entry.js",
        json!({ "desired_target": "article_editor" }),
    )
    .expect("zhihu navigate script should load");
    let prepare_script = load_browser_skill_script(
        test_skills_dir(),
        "zhihu-write",
        "prepare_article_editor.js",
        json!({ "desired_mode": "draft" }),
    )
    .expect("zhihu write script should load");

    // The direct route issues exactly: navigate -> creator-entry eval ->
    // editor-preparation eval, with the domain switching for the last step.
    assert_eq!(
        backend.invocations(),
        vec![
            (
                Action::Navigate,
                json!({ "url": ZHIHU_CREATOR_URL }),
                ZHIHU_DOMAIN.to_string(),
            ),
            (
                Action::Eval,
                json!({ "script": creator_script }),
                ZHIHU_DOMAIN.to_string(),
            ),
            (
                Action::Eval,
                json!({ "script": prepare_script }),
                ZHIHU_EDITOR_DOMAIN.to_string(),
            ),
        ]
    );
}
|
||
|
||
#[test]
fn execute_route_with_browser_backend_navigates_to_editor_when_creator_script_misreports_ready_on_www() {
    // Shorthand for a successful backend reply with trivial timing.
    fn ok(seq: u64, data: Value) -> Result<CommandOutput, PipeError> {
        Ok(CommandOutput {
            seq,
            success: true,
            data,
            aom_snapshot: vec![],
            timing: Timing {
                queue_ms: 1,
                exec_ms: 1,
            },
        })
    }

    let transport = Arc::new(MockWorkflowTransport::new(vec![]));
    // The creator script claims "editor_ready" while the tab is still on
    // www.zhihu.com; the route must detect the URL mismatch and re-navigate.
    let backend = Arc::new(FakeBrowserBackend::new(vec![
        ok(1, json!({})),
        ok(
            2,
            json!({
                "text": {
                    "status": "editor_ready",
                    "current_url": "https://www.zhihu.com/",
                }
            }),
        ),
        ok(3, json!({})),
        ok(
            4,
            json!({
                "text": {
                    "status": "editor_ready",
                    "current_url": ZHIHU_EDITOR_URL,
                }
            }),
        ),
    ]));

    let summary = execute_route_with_browser_backend(
        transport.as_ref(),
        backend.clone(),
        Path::new("."),
        test_skills_dir(),
        "打开知乎写文章页面",
        &CompatTaskContext::default(),
        WorkflowRoute::ZhihuArticleEntry,
        &test_settings(),
    )
    .expect("route should recover by navigating to real editor url");

    assert_eq!(summary, "已进入知乎文章编辑器。");

    let creator_script = load_browser_skill_script(
        test_skills_dir(),
        "zhihu-navigate",
        "open_creator_entry.js",
        json!({ "desired_target": "article_editor" }),
    )
    .expect("zhihu navigate script should load");
    let prepare_script = load_browser_skill_script(
        test_skills_dir(),
        "zhihu-write",
        "prepare_article_editor.js",
        json!({ "desired_mode": "draft" }),
    )
    .expect("zhihu write script should load");

    // A corrective Navigate to the real editor URL is inserted before the
    // editor-preparation eval.
    assert_eq!(
        backend.invocations(),
        vec![
            (
                Action::Navigate,
                json!({ "url": ZHIHU_CREATOR_URL }),
                ZHIHU_DOMAIN.to_string(),
            ),
            (
                Action::Eval,
                json!({ "script": creator_script }),
                ZHIHU_DOMAIN.to_string(),
            ),
            (
                Action::Navigate,
                json!({ "url": ZHIHU_EDITOR_URL }),
                ZHIHU_EDITOR_DOMAIN.to_string(),
            ),
            (
                Action::Eval,
                json!({ "script": prepare_script }),
                ZHIHU_EDITOR_DOMAIN.to_string(),
            ),
        ]
    );
}
|
||
|
||
#[test]
fn execute_route_with_browser_backend_keeps_eval_for_fill_script_without_live_input_support() {
    // Shorthand for a successful backend reply with trivial timing.
    fn ok(seq: u64, data: Value) -> Result<CommandOutput, PipeError> {
        Ok(CommandOutput {
            seq,
            success: true,
            data,
            aom_snapshot: vec![],
            timing: Timing {
                queue_ms: 1,
                exec_ms: 1,
            },
        })
    }

    let transport = Arc::new(MockWorkflowTransport::new(vec![]));
    let backend = Arc::new(FakeBrowserBackend::new(vec![
        ok(1, json!({})),
        ok(
            2,
            json!({
                "text": {
                    "status": "creator_entry_clicked",
                    "current_url": "https://www.zhihu.com/creator",
                    "next_url": ZHIHU_EDITOR_URL,
                }
            }),
        ),
        ok(3, json!({})),
        ok(
            4,
            json!({
                "text": {
                    "status": "editor_ready",
                    "current_url": ZHIHU_EDITOR_URL,
                }
            }),
        ),
        ok(
            5,
            json!({
                "text": {
                    "status": "draft_ready",
                    "current_url": ZHIHU_EDITOR_URL,
                    "title": "测试标题"
                }
            }),
        ),
    ]));

    let summary = execute_zhihu_article_route(
        transport.as_ref(),
        backend.as_ref(),
        test_skills_dir(),
        "标题:测试标题\n正文:第一段内容",
        &CompatTaskContext::default(),
        false,
        false,
        None,
    )
    .expect("route should succeed");

    assert_eq!(summary, "已进入知乎文章编辑器并写入草稿《测试标题》");

    // Without live-input support the fill step stays one Eval (no extra
    // click/type steps), so exactly five backend calls go out.
    let invocations = backend.invocations();
    assert_eq!(invocations.len(), 5);
    assert_eq!(invocations[4].0, Action::Eval);
}
|
||
|
||
#[test]
fn execute_route_with_browser_backend_uses_live_input_probes_for_zhihu_fill_when_supported() {
    // Shorthand for a successful backend reply with trivial timing.
    fn ok(seq: u64, data: Value) -> Result<CommandOutput, PipeError> {
        Ok(CommandOutput {
            seq,
            success: true,
            data,
            aom_snapshot: vec![],
            timing: Timing {
                queue_ms: 1,
                exec_ms: 1,
            },
        })
    }

    let transport = Arc::new(MockWorkflowTransport::new(vec![]));
    let backend = Arc::new(
        FakeBrowserBackend::new(vec![
            ok(1, json!({})),
            ok(
                2,
                json!({
                    "text": {
                        "status": "creator_entry_clicked",
                        "current_url": "https://www.zhihu.com/creator",
                        "next_url": ZHIHU_EDITOR_URL,
                    }
                }),
            ),
            ok(3, json!({})),
            ok(
                4,
                json!({
                    "text": {
                        "status": "editor_ready",
                        "current_url": ZHIHU_EDITOR_URL,
                    }
                }),
            ),
            ok(5, json!({})),
            ok(6, json!({})),
            ok(7, json!({})),
            ok(8, json!({})),
            ok(
                9,
                json!({
                    "text": {
                        "status": "draft_ready",
                        "current_url": ZHIHU_EDITOR_URL,
                        "title": "测试标题"
                    }
                }),
            ),
        ])
        .with_live_input(),
    );

    let summary = execute_zhihu_article_route(
        transport.as_ref(),
        backend.as_ref(),
        test_skills_dir(),
        "标题:测试标题\n正文:第一段内容",
        &CompatTaskContext::default(),
        false,
        false,
        None,
    )
    .expect("route should succeed");

    assert_eq!(summary, "已进入知乎文章编辑器并写入草稿《测试标题》");
    let invocations = backend.invocations();
    assert_eq!(invocations.len(), 9);

    let script_of = |index: usize| invocations[index].1["script"].as_str();
    let probe_of = |index: usize| invocations[index].1["probe_script"].as_str();

    // Click the title field via a live-input probe.
    assert_eq!(invocations[4].0, Action::Click);
    assert!(probe_of(4).is_some_and(|script| script.contains("placeholder*='标题'")));
    // Animated title typing eval carrying the title text.
    assert_eq!(invocations[5].0, Action::Eval);
    assert!(script_of(5)
        .is_some_and(|s| s.contains("测试标题") && s.contains("placeholder*='标题'")));
    // Click the body editor via a live-input probe.
    assert_eq!(invocations[6].0, Action::Click);
    assert!(probe_of(6).is_some_and(|script| script.contains("contenteditable='true'")));
    // Animated body typing eval driven by synthetic paste events.
    assert_eq!(invocations[7].0, Action::Eval);
    assert!(script_of(7).is_some_and(|s| s.contains("第一段内容") && s.contains("ClipboardEvent")));
    // The final fill script is still executed as a plain eval.
    assert_eq!(invocations[8].0, Action::Eval);
    let fill_script = load_browser_skill_script(
        test_skills_dir(),
        "zhihu-write",
        "fill_article_draft.js",
        json!({
            "title": "测试标题",
            "body": "第一段内容",
            "publish_mode": "false",
            "input_mode": "live_input",
        }),
    )
    .expect("zhihu write fill script should load");
    assert_eq!(invocations[8].1["script"], json!(fill_script));
}
|
||
|
||
#[test]
fn live_input_zhihu_body_type_preserves_multiline_content() {
    // Shorthand for a successful backend reply with trivial timing. The seq
    // values reproduce the original fixture exactly (5 appears twice, 8 is
    // skipped).
    fn ok(seq: u64, data: Value) -> Result<CommandOutput, PipeError> {
        Ok(CommandOutput {
            seq,
            success: true,
            data,
            aom_snapshot: vec![],
            timing: Timing {
                queue_ms: 1,
                exec_ms: 1,
            },
        })
    }

    let transport = Arc::new(MockWorkflowTransport::new(vec![]));
    let backend = Arc::new(
        FakeBrowserBackend::new(vec![
            ok(1, json!({})),
            ok(
                2,
                json!({
                    "text": {
                        "status": "creator_entry_clicked",
                        "current_url": "https://www.zhihu.com/creator",
                        "next_url": ZHIHU_EDITOR_URL,
                    }
                }),
            ),
            ok(3, json!({})),
            ok(
                4,
                json!({ "text": { "status": "editor_ready", "current_url": ZHIHU_EDITOR_URL } }),
            ),
            ok(5, json!({})),
            ok(5, json!({})),
            ok(6, json!({})),
            ok(7, json!({})),
            ok(
                9,
                json!({
                    "text": {
                        "status": "draft_ready",
                        "current_url": ZHIHU_EDITOR_URL,
                        "title": "测试标题",
                        "body_text": "第一段内容 第二段内容"
                    }
                }),
            ),
        ])
        .with_live_input(),
    );

    let _ = execute_zhihu_article_route(
        transport.as_ref(),
        backend.as_ref(),
        test_skills_dir(),
        "标题:测试标题\n正文:第一段内容\n第二段内容",
        &CompatTaskContext::default(),
        false,
        false,
        None,
    )
    .expect("route should succeed");

    // The body-typing eval must carry the newline (escaped inside the JS
    // string literal) rather than flattening the paragraphs.
    let invocations = backend.invocations();
    assert_eq!(invocations[7].0, Action::Eval);
    assert!(invocations[7].1["script"]
        .as_str()
        .is_some_and(|s| s.contains("第一段内容\\n第二段内容") && s.contains("ClipboardEvent")));
}
|
||
|
||
#[test]
fn zhihu_fill_script_checks_live_input_before_dom_fill_fallback() {
    let script = load_browser_skill_script(
        test_skills_dir(),
        "zhihu-write",
        "fill_article_draft.js",
        json!({
            "title": "测试标题",
            "body": "第一段内容",
            "publish_mode": "false",
            "input_mode": "live_input",
        }),
    )
    .expect("zhihu write fill script should load");

    // Locate a marker inside the rendered script, failing with `what` if absent.
    let find = |needle: &str, what: &str| script.find(needle).expect(what);

    let live_input_index = find(
        "const liveInputMode = String(args.input_mode || '').toLowerCase() === 'live_input';",
        "live_input switch should exist",
    );
    let fill_input_index = find(
        "fillInput(titleInput, String(args.title || ''));",
        "title DOM fill should exist",
    );
    let fill_editable_index = find(
        "fillEditable(bodyEditor, String(args.body || ''));",
        "body DOM fill should exist",
    );

    assert!(
        live_input_index < fill_input_index,
        "live_input check must run before title DOM fill fallback"
    );
    assert!(
        live_input_index < fill_editable_index,
        "live_input check must run before body DOM fill fallback"
    );
}
|
||
|
||
#[test]
fn zhihu_fill_script_live_input_uses_editor_content_instead_of_whole_page_text() {
    let script = load_browser_skill_script(
        test_skills_dir(),
        "zhihu-write",
        "fill_article_draft.js",
        json!({
            "title": "测试标题",
            "body": "第一段内容",
            "publish_mode": "true",
            "input_mode": "live_input",
        }),
    )
    .expect("zhihu write fill script should load");

    const DRAFT_STATE_MARKER: &str = "var draftState = collectDraftState(titleInput, bodyEditor);";

    assert!(
        script.contains(DRAFT_STATE_MARKER),
        "live_input mode must validate the actual title/body editor state"
    );
    assert!(
        !script.contains("const liveBodyText = cleanText(pageText());"),
        "live_input mode must not treat whole-page text as editor body content"
    );

    // Validation of the draft state must happen before the publish button is
    // even looked up.
    let draft_state_index = script
        .find(DRAFT_STATE_MARKER)
        .expect("draft state validation should exist");
    let publish_button_index = script
        .find("const publishButton = findPreferredButtonByText('发布');")
        .expect("publish button lookup should exist");
    assert!(
        draft_state_index < publish_button_index,
        "live_input mode must validate editor content before attempting publish"
    );
}
|
||
|
||
#[test]
fn execute_route_with_browser_backend_keeps_bridge_style_article_entry_direct_route() {
    // Shorthand for a successful backend reply with trivial timing.
    fn ok(seq: u64, data: Value) -> Result<CommandOutput, PipeError> {
        Ok(CommandOutput {
            seq,
            success: true,
            data,
            aom_snapshot: vec![],
            timing: Timing {
                queue_ms: 1,
                exec_ms: 1,
            },
        })
    }

    let transport = Arc::new(MockWorkflowTransport::new(vec![]));
    let backend = Arc::new(FakeBrowserBackend::new(vec![
        ok(1, json!({})),
        ok(
            2,
            json!({
                "text": {
                    "status": "creator_entry_clicked",
                    "current_url": "https://www.zhihu.com/creator",
                    "next_url": ZHIHU_EDITOR_URL,
                }
            }),
        ),
        ok(3, json!({})),
        ok(
            4,
            json!({
                "text": {
                    "status": "editor_ready",
                    "current_url": ZHIHU_EDITOR_URL,
                }
            }),
        ),
    ]));

    let summary = execute_route_with_browser_backend(
        transport.as_ref(),
        backend.clone(),
        Path::new("."),
        test_skills_dir(),
        "打开知乎写文章页面",
        &CompatTaskContext::default(),
        WorkflowRoute::ZhihuArticleEntry,
        &test_settings(),
    )
    .expect("bridge-style backend should satisfy direct route execution");

    assert_eq!(summary, "已进入知乎文章编辑器。");

    let creator_script = load_browser_skill_script(
        test_skills_dir(),
        "zhihu-navigate",
        "open_creator_entry.js",
        json!({ "desired_target": "article_editor" }),
    )
    .expect("zhihu navigate script should load");
    let prepare_script = load_browser_skill_script(
        test_skills_dir(),
        "zhihu-write",
        "prepare_article_editor.js",
        json!({ "desired_mode": "draft" }),
    )
    .expect("zhihu write script should load");

    // After the creator entry click the route navigates to the editor URL
    // itself, then runs the editor preparation script on the editor domain.
    assert_eq!(
        backend.invocations(),
        vec![
            (
                Action::Navigate,
                json!({ "url": ZHIHU_CREATOR_URL }),
                ZHIHU_DOMAIN.to_string(),
            ),
            (
                Action::Eval,
                json!({ "script": creator_script }),
                ZHIHU_DOMAIN.to_string(),
            ),
            (
                Action::Navigate,
                json!({ "url": ZHIHU_EDITOR_URL }),
                ZHIHU_EDITOR_DOMAIN.to_string(),
            ),
            (
                Action::Eval,
                json!({ "script": prepare_script }),
                ZHIHU_EDITOR_DOMAIN.to_string(),
            ),
        ]
    );
}
|
||
|
||
#[test]
fn collect_hotlist_items_skips_navigation_when_hot_page_is_already_readable() {
    let transport = Arc::new(MockWorkflowTransport::new(vec![
        success_browser_response(
            1,
            json!({ "text": "知乎热榜\n1 问题一 344万热度\n2 问题二 266万热度" }),
        ),
        success_browser_response(
            2,
            json!({
                "text": {
                    "source": "https://www.zhihu.com/hot",
                    "sheet_name": "知乎热榜",
                    "columns": ["rank", "title", "heat"],
                    "rows": [[1, "问题一", "344万"], [2, "问题二", "266万"]]
                }
            }),
        ),
    ]));
    let browser_tool =
        BrowserPipeTool::new(transport.clone(), zhihu_test_policy(), vec![1, 2, 3, 4])
            .with_response_timeout(Duration::from_secs(1));
    // Task context says we are already on the hot page with its title loaded.
    let task_context = CompatTaskContext {
        page_url: Some("https://www.zhihu.com/hot".to_string()),
        page_title: Some("知乎热榜".to_string()),
        ..CompatTaskContext::default()
    };

    let browser_backend = PipeBrowserBackend::from_inner(browser_tool);
    let items = collect_hotlist_items(
        transport.as_ref(),
        &browser_backend,
        test_skills_dir(),
        10,
        &task_context,
    )
    .expect("hotlist collection should succeed");

    assert_eq!(items.len(), 2);

    let sent = transport.sent_messages();
    let saw_action = |wanted: Action| {
        sent.iter().any(|message| {
            matches!(message, AgentMessage::Command { action, .. } if *action == wanted)
        })
    };
    // Only read + parse commands go out; no navigation is issued.
    assert!(saw_action(Action::GetText));
    assert!(saw_action(Action::Eval));
    assert!(!saw_action(Action::Navigate));
}
|
||
|
||
#[test]
fn collect_hotlist_items_polls_after_navigation_before_retrying_navigation() {
    // Two empty readiness polls precede the first readable hotlist text.
    let transport = Arc::new(MockWorkflowTransport::new(vec![
        success_browser_response(1, json!({ "navigated": true })),
        success_browser_response(2, json!({ "text": "" })),
        success_browser_response(3, json!({ "text": "" })),
        success_browser_response(4, json!({ "text": "知乎热榜\n1 问题一 344万热度" })),
        success_browser_response(
            5,
            json!({
                "text": {
                    "source": "https://www.zhihu.com/hot",
                    "sheet_name": "知乎热榜",
                    "columns": ["rank", "title", "heat"],
                    "rows": [[1, "问题一", "344万"]]
                }
            }),
        ),
    ]));
    let browser_tool =
        BrowserPipeTool::new(transport.clone(), zhihu_test_policy(), vec![1, 2, 3, 4, 5])
            .with_response_timeout(Duration::from_secs(1));
    let task_context = CompatTaskContext {
        page_url: Some("https://www.zhihu.com/".to_string()),
        page_title: Some("知乎".to_string()),
        ..CompatTaskContext::default()
    };

    let browser_backend = PipeBrowserBackend::from_inner(browser_tool);
    let items = collect_hotlist_items(
        transport.as_ref(),
        &browser_backend,
        test_skills_dir(),
        10,
        &task_context,
    )
    .expect("hotlist collection should succeed after readiness polling");

    assert_eq!(items.len(), 1);

    // One navigation, readiness polls until the text is ready, then the
    // extraction eval — and crucially no second Navigate.
    let actions: Vec<Action> = transport
        .sent_messages()
        .iter()
        .filter_map(|message| match message {
            AgentMessage::Command { action, .. } => Some(action.clone()),
            _ => None,
        })
        .collect();
    assert_eq!(
        actions,
        vec![
            Action::Navigate,
            Action::GetText,
            Action::GetText,
            Action::GetText,
            Action::Eval
        ]
    );
}
|
||
|
||
#[test]
fn collect_hotlist_items_retries_navigation_after_short_readiness_budget_expires() {
    // The first navigation is followed by ten empty readiness polls
    // (responses 2-11) and an empty extractor probe (12), exhausting the
    // readiness budget; the workflow must then issue a second navigation (13)
    // and succeed on the now-ready page (14-15).
    let mut scripted_responses = vec![success_browser_response(1, json!({ "navigated": true }))];
    scripted_responses
        .extend((2..=11).map(|id| success_browser_response(id, json!({ "text": "" }))));
    scripted_responses.push(success_browser_response(12, json!({ "text": { "rows": [] } })));
    scripted_responses.push(success_browser_response(13, json!({ "navigated": true })));
    scripted_responses.push(success_browser_response(
        14,
        json!({ "text": "知乎热榜\n1 问题一 344万热度" }),
    ));
    scripted_responses.push(success_browser_response(
        15,
        json!({
            "text": {
                "source": "https://www.zhihu.com/hot",
                "sheet_name": "知乎热榜",
                "columns": ["rank", "title", "heat"],
                "rows": [[1, "问题一", "344万"]]
            }
        }),
    ));
    let transport = Arc::new(MockWorkflowTransport::new(scripted_responses));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        zhihu_test_policy(),
        (1..=15).collect(),
    )
    .with_response_timeout(Duration::from_secs(1));
    let task_context = CompatTaskContext {
        page_url: Some("https://www.zhihu.com/".to_string()),
        page_title: Some("知乎".to_string()),
        ..CompatTaskContext::default()
    };

    let browser_backend = PipeBrowserBackend::from_inner(browser_tool);
    let items = collect_hotlist_items(
        transport.as_ref(),
        &browser_backend,
        test_skills_dir(),
        10,
        &task_context,
    )
    .expect("hotlist collection should succeed after one navigation retry");

    assert_eq!(items.len(), 1);
    let navigation_count = transport
        .sent_messages()
        .iter()
        .filter(|message| {
            matches!(
                message,
                AgentMessage::Command { action, .. } if action == &Action::Navigate
            )
        })
        .count();
    assert_eq!(navigation_count, 2);
}
|
||
|
||
#[test]
fn collect_hotlist_items_uses_extractor_probe_before_second_navigation() {
    // The page text never matches the "热度" readiness pattern (responses
    // 2-11), but the extractor probe (12) already yields rows, so the workflow
    // must complete via the probe without issuing a second navigation; the
    // final extraction (13) re-reads the same payload.
    let extractor_payload = json!({
        "text": {
            "source": "https://www.zhihu.com/hot",
            "sheet_name": "知乎热榜",
            "columns": ["rank", "title", "heat"],
            "rows": [[1, "问题一", "344万"]]
        }
    });
    let mut scripted_responses = vec![success_browser_response(1, json!({ "navigated": true }))];
    scripted_responses
        .extend((2..=11).map(|id| success_browser_response(id, json!({ "text": "知乎热榜" }))));
    scripted_responses.push(success_browser_response(12, extractor_payload.clone()));
    scripted_responses.push(success_browser_response(13, extractor_payload));
    let transport = Arc::new(MockWorkflowTransport::new(scripted_responses));
    let browser_tool = BrowserPipeTool::new(
        transport.clone(),
        zhihu_test_policy(),
        (1..=13).collect(),
    )
    .with_response_timeout(Duration::from_secs(1));
    let task_context = CompatTaskContext {
        page_url: Some("https://www.zhihu.com/".to_string()),
        page_title: Some("知乎".to_string()),
        ..CompatTaskContext::default()
    };

    let browser_backend = PipeBrowserBackend::from_inner(browser_tool);
    let items = collect_hotlist_items(
        transport.as_ref(),
        &browser_backend,
        test_skills_dir(),
        10,
        &task_context,
    )
    .expect("hotlist collection should succeed via extractor probe");

    assert_eq!(items.len(), 1);
    let navigation_count = transport
        .sent_messages()
        .iter()
        .filter(|message| {
            matches!(
                message,
                AgentMessage::Command { action, .. } if action == &Action::Navigate
            )
        })
        .count();
    assert_eq!(navigation_count, 1);
}
|
||
}
|
||
|
||
fn load_browser_skill_script(
|
||
skills_dir: &Path,
|
||
skill_name: &str,
|
||
script_name: &str,
|
||
args: Value,
|
||
) -> Result<String, PipeError> {
|
||
let script_path = skills_dir
|
||
.join(skill_name)
|
||
.join("scripts")
|
||
.join(script_name);
|
||
let script = fs::read_to_string(&script_path).map_err(|err| {
|
||
PipeError::Protocol(format!(
|
||
"failed to read browser script {}: {err}",
|
||
script_path.display()
|
||
))
|
||
})?;
|
||
Ok(format!(
|
||
"(function() {{\nconst args = {};\n{}\n}})()",
|
||
args, script
|
||
))
|
||
}
|
||
|
||
fn normalize_payload(payload: &Value) -> Value {
|
||
if let Some(text) = payload.as_str() {
|
||
serde_json::from_str::<Value>(text).unwrap_or_else(|_| Value::String(text.to_string()))
|
||
} else {
|
||
payload.clone()
|
||
}
|
||
}
|
||
|
||
fn payload_status(payload: &Value) -> Option<&str> {
|
||
payload.get("status").and_then(Value::as_str)
|
||
}
|
||
|
||
fn payload_current_url(payload: &Value) -> Option<&str> {
|
||
payload.get("current_url").and_then(Value::as_str)
|
||
}
|
||
|
||
fn payload_next_url(payload: &Value) -> Option<&str> {
|
||
payload.get("next_url").and_then(Value::as_str)
|
||
}
|
||
|
||
fn is_login_required_payload(payload: &Value) -> bool {
|
||
payload_status(payload) == Some("login_required")
|
||
}
|
||
|
||
/// Builds the user-facing message shown when the Zhihu browser session is not
/// logged in. A non-empty `current_url` is appended as a "当前页面" suffix;
/// `None` or an empty string adds nothing.
fn build_login_block_message(current_url: Option<&str>) -> String {
    let suffix = match current_url {
        Some(url) if !url.is_empty() => format!(" 当前页面:{url}。"),
        _ => String::new(),
    };
    format!(
        "当前知乎浏览器会话未登录,无法进入创作者中心或发布文章。请先登录知乎后再继续。{suffix}"
    )
}
|
||
|
||
/// Builds the user-facing message shown when the article editor cannot be
/// detected after entering the creator flow. A non-empty `current_url` is
/// appended as a "当前页面" suffix; `None` or an empty string adds nothing.
fn build_editor_unavailable_message(current_url: Option<&str>) -> String {
    let suffix = match current_url {
        Some(url) if !url.is_empty() => format!(" 当前页面:{url}。"),
        _ => String::new(),
    };
    format!(
        "已进入知乎创作者流程,但当前未检测到文章编辑器。可能原因是页面仍在加载、当前账号暂未开放写作入口,或知乎页面结构发生变化。请确认当前知乎账号已登录且具备发文权限,然后在页面稳定后重试。{suffix}"
    )
}
|
||
|
||
/// Builds the user-facing message shown when the creator center loads but the
/// "写文章" entry cannot be found. A non-empty `current_url` is appended as a
/// "当前页面" suffix; `None` or an empty string adds nothing.
fn build_creator_entry_missing_message(current_url: Option<&str>) -> String {
    let suffix = match current_url {
        Some(url) if !url.is_empty() => format!(" 当前页面:{url}。"),
        _ => String::new(),
    };
    format!(
        "已进入知乎创作者中心,但当前未找到“写文章”入口。请确认页面已加载完成,且当前账号具备文章发布入口后再重试。{suffix}"
    )
}
|
||
|
||
fn build_publish_confirmation_message(article: &ArticleDraft) -> String {
|
||
format!(
|
||
"我已收到这篇知乎文章的内容,但在当前会话里还没有拿到明确发布确认。\n\n标题:{}\n正文:{}\n\n如果你确定现在要发布,请直接回复“确认发布”。在收到明确确认之前,我不会执行任何发布动作。",
|
||
article.title,
|
||
article.body
|
||
)
|
||
}
|
||
|
||
/// True when the instruction contains any phrase that counts as an explicit
/// publish confirmation (e.g. "确认发布", "立即发布").
fn has_explicit_publish_confirmation(instruction: &str) -> bool {
    const CONFIRMATION_PHRASES: [&str; 5] =
        ["确认发布", "确认发表", "现在发布", "立即发布", "可以发布"];
    let trimmed = instruction.trim();
    CONFIRMATION_PHRASES
        .iter()
        .any(|phrase| trimmed.contains(phrase))
}
|
||
|
||
fn task_requests_zhihu_article_entry(
|
||
instruction: &str,
|
||
page_url: Option<&str>,
|
||
page_title: Option<&str>,
|
||
) -> bool {
|
||
if !crate::runtime::is_zhihu_write_task(instruction, page_url, page_title) {
|
||
return false;
|
||
}
|
||
|
||
let normalized = instruction.to_ascii_lowercase();
|
||
let asks_to_open = normalized.contains("open")
|
||
|| normalized.contains("goto")
|
||
|| normalized.contains("go to")
|
||
|| instruction.contains("打开")
|
||
|| instruction.contains("进入")
|
||
|| instruction.contains("去");
|
||
let mentions_entry = instruction.contains("页面")
|
||
|| instruction.contains("入口")
|
||
|| instruction.contains("创作中心")
|
||
|| instruction.contains("写文章")
|
||
|| instruction.contains("发文章");
|
||
let has_article_inputs = parse_article_draft(instruction).is_some();
|
||
|
||
asks_to_open && mentions_entry && !has_article_inputs
|
||
}
|
||
|
||
fn extract_article_draft(
|
||
instruction: &str,
|
||
messages: &[ConversationMessage],
|
||
) -> Option<ArticleDraft> {
|
||
parse_article_draft(instruction).or_else(|| {
|
||
messages
|
||
.iter()
|
||
.rev()
|
||
.filter(|message| message.role == "user")
|
||
.find_map(|message| parse_article_draft(&message.content))
|
||
})
|
||
}
|
||
|
||
/// Crate-visible wrapper that applies the same title/body draft-parsing rules
/// to model-generated text as to user-supplied instructions.
pub(crate) fn parse_generated_article_draft(text: &str) -> Option<ArticleDraft> {
    parse_article_draft(text)
}
|
||
|
||
fn parse_article_draft(text: &str) -> Option<ArticleDraft> {
|
||
let normalized = normalize_article_draft_input(text);
|
||
let title_re = Regex::new(r"(?m)^标题[::]\s*(.+?)\s*$").expect("valid zhihu title regex");
|
||
let body_re = Regex::new(r"(?s)正文[::]\s*(.+)$").expect("valid zhihu body regex");
|
||
let inline_title_re =
|
||
Regex::new(r"标题(?:是|为)\s*([^,,\n]+)").expect("valid inline zhihu title regex");
|
||
let inline_body_re =
|
||
Regex::new(r"(?s)正文(?:是|为)\s*(.+)$").expect("valid inline zhihu body regex");
|
||
|
||
let title = title_re
|
||
.captures(&normalized)
|
||
.and_then(|capture| capture.get(1))
|
||
.map(|value| value.as_str().trim().to_string())
|
||
.or_else(|| {
|
||
inline_title_re
|
||
.captures(&normalized)
|
||
.and_then(|capture| capture.get(1))
|
||
.map(|value| value.as_str().trim().to_string())
|
||
})?;
|
||
let body = body_re
|
||
.captures(&normalized)
|
||
.and_then(|capture| capture.get(1))
|
||
.map(|value| value.as_str().trim().to_string())
|
||
.or_else(|| {
|
||
inline_body_re
|
||
.captures(&normalized)
|
||
.and_then(|capture| capture.get(1))
|
||
.map(|value| value.as_str().trim().trim_end_matches('。').to_string())
|
||
})?;
|
||
|
||
if title.is_empty() || body.is_empty() {
|
||
return None;
|
||
}
|
||
|
||
Some(ArticleDraft { title, body })
|
||
}
|
||
|
||
/// Normalizes raw draft text before parsing: trims surrounding whitespace,
/// removes one matching pair of wrapping quotes (double or single), and turns
/// literal `\n` escape sequences into real newlines.
fn normalize_article_draft_input(text: &str) -> String {
    let trimmed = text.trim();
    // A quote pair is only stripped when the same quote character opens and
    // closes the text; a lone or mismatched quote is left in place.
    let unquoted = trimmed
        .strip_prefix('"')
        .and_then(|rest| rest.strip_suffix('"'))
        .or_else(|| {
            trimmed
                .strip_prefix('\'')
                .and_then(|rest| rest.strip_suffix('\''))
        })
        .unwrap_or(trimmed);
    unquoted.replace("\\n", "\n")
}
|