wip: checkpoint 2026-03-29 runtime work
This commit is contained in:
53
tests/compat_openxml_office_tool_test.rs
Normal file
53
tests/compat_openxml_office_tool_test.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command as ProcessCommand;
|
||||
|
||||
use serde_json::json;
|
||||
use sgclaw::compat::openxml_office_tool::OpenXmlOfficeTool;
|
||||
use uuid::Uuid;
|
||||
use zeroclaw::tools::Tool;
|
||||
|
||||
fn temp_workspace_root() -> PathBuf {
|
||||
let root = std::env::temp_dir().join(format!("sgclaw-openxml-office-{}", Uuid::new_v4()));
|
||||
std::fs::create_dir_all(&root).unwrap();
|
||||
root
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn openxml_office_tool_renders_hotlist_xlsx_from_rows() {
|
||||
let workspace_root = temp_workspace_root();
|
||||
let output_path = workspace_root.join("out/zhihu-hotlist.xlsx");
|
||||
let tool = OpenXmlOfficeTool::new(workspace_root.clone());
|
||||
|
||||
let result = tool
|
||||
.execute(json!({
|
||||
"sheet_name": "知乎热榜",
|
||||
"columns": ["rank", "title", "heat"],
|
||||
"rows": [
|
||||
[1, "问题一", "344万"],
|
||||
[2, "问题二", "266万"]
|
||||
],
|
||||
"output_path": output_path
|
||||
}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.success, "{result:?}");
|
||||
assert!(output_path.exists());
|
||||
assert!(result.output.contains(output_path.to_str().unwrap()));
|
||||
|
||||
let unzip = ProcessCommand::new("unzip")
|
||||
.args([
|
||||
"-p",
|
||||
output_path.to_str().unwrap(),
|
||||
"xl/worksheets/sheet1.xml",
|
||||
])
|
||||
.output()
|
||||
.unwrap();
|
||||
assert!(unzip.status.success());
|
||||
|
||||
let xml = String::from_utf8(unzip.stdout).unwrap();
|
||||
assert!(xml.contains("问题一"));
|
||||
assert!(xml.contains("344万"));
|
||||
assert!(xml.contains("问题二"));
|
||||
assert!(!xml.contains("{{TITLE_1}}"));
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
50
tests/compat_screen_html_export_tool_test.rs
Normal file
50
tests/compat_screen_html_export_tool_test.rs
Normal file
@@ -0,0 +1,50 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde_json::{json, Value};
|
||||
use sgclaw::compat::screen_html_export_tool::ScreenHtmlExportTool;
|
||||
use uuid::Uuid;
|
||||
use zeroclaw::tools::Tool;
|
||||
|
||||
fn temp_workspace_root() -> PathBuf {
|
||||
let root = std::env::temp_dir().join(format!("sgclaw-screen-html-{}", Uuid::new_v4()));
|
||||
std::fs::create_dir_all(&root).unwrap();
|
||||
root
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn screen_html_export_tool_renders_dashboard_html_with_presentation_contract() {
|
||||
let workspace_root = temp_workspace_root();
|
||||
let output_path = workspace_root.join("out/zhihu-hotlist-screen.html");
|
||||
let tool = ScreenHtmlExportTool::new(workspace_root.clone());
|
||||
|
||||
let result = tool
|
||||
.execute(json!({
|
||||
"snapshot_id": "snapshot-20260329",
|
||||
"generated_at_ms": 1774713600000u64,
|
||||
"rows": [
|
||||
[1, "问题一", "344万"],
|
||||
[2, "问题二", "266万"]
|
||||
],
|
||||
"output_path": output_path
|
||||
}))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.success, "{result:?}");
|
||||
assert!(output_path.exists());
|
||||
|
||||
let payload: Value = serde_json::from_str(&result.output).unwrap();
|
||||
let html = std::fs::read_to_string(&output_path).unwrap();
|
||||
|
||||
assert_eq!(payload["output_path"], json!(output_path));
|
||||
assert_eq!(payload["presentation"]["mode"], json!("new_tab"));
|
||||
assert_eq!(payload["renderer"], json!("screen_html_export"));
|
||||
assert!(payload["presentation"]["url"]
|
||||
.as_str()
|
||||
.unwrap()
|
||||
.starts_with("file://"));
|
||||
assert!(html.contains("snapshot-20260329"));
|
||||
assert!(html.contains("问题一"));
|
||||
assert!(html.contains("344万"));
|
||||
assert!(html.contains("const defaultPayload ="));
|
||||
}
|
||||
31
tests/live_acceptance_score_test.py
Normal file
31
tests/live_acceptance_score_test.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import unittest
|
||||
|
||||
from tools.live_acceptance.run_zhihu_hotlist_excel_acceptance import HotItem, score_acceptance
|
||||
|
||||
|
||||
class LiveAcceptanceScoreTest(unittest.TestCase):
    """Unit tests for the live-acceptance scoring helper."""

    def test_score_acceptance_handles_preloaded_office_skill_without_read_skill_log(self):
        """A run whose logs never mention a read_skill call (the office skill
        was preloaded) must still earn full skill-selection marks."""
        run_result = {
            "logs": [
                {"message": "navigate https://www.zhihu.com/hot"},
                {"message": "navigate https://www.zhihu.com/hot"},
                {"message": "getText body"},
                {"message": "call openxml_office"},
            ],
            "final_task": {"success": True, "summary": "已导出 Excel"},
            "stderr": [],
            "exports": [],
        }
        hot_items = [HotItem(rank=1, title="标题", heat="123万")]

        score = score_acceptance(run_result, hot_items)

        self.assertEqual(score["skill_selection"], 30)
        self.assertEqual(score["final_response_quality"], 5)


if __name__ == "__main__":
    unittest.main()
|
||||
80
tests/read_skill_tool_test.rs
Normal file
80
tests/read_skill_tool_test.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
use serde_json::json;
|
||||
use std::path::PathBuf;
|
||||
use uuid::Uuid;
|
||||
use zeroclaw::tools::{ReadSkillTool, Tool};
|
||||
|
||||
#[tokio::test]
|
||||
async fn read_skill_inlines_referenced_markdown_files() {
|
||||
let workspace_dir = temp_workspace_dir();
|
||||
let skill_dir = workspace_dir.join("skills/zhihu-hotlist");
|
||||
let refs_dir = skill_dir.join("references");
|
||||
std::fs::create_dir_all(&refs_dir).unwrap();
|
||||
std::fs::write(
|
||||
skill_dir.join("SKILL.md"),
|
||||
concat!(
|
||||
"# Zhihu Hotlist\n\n",
|
||||
"Follow [collection-flow.md](references/collection-flow.md).\n",
|
||||
"Apply [data-quality.md](references/data-quality.md).\n",
|
||||
),
|
||||
)
|
||||
.unwrap();
|
||||
std::fs::write(
|
||||
refs_dir.join("collection-flow.md"),
|
||||
"# Collection Flow\n\nCollect rows from the hotlist first.\n",
|
||||
)
|
||||
.unwrap();
|
||||
std::fs::write(
|
||||
refs_dir.join("data-quality.md"),
|
||||
"# Data Quality\n\nMark partial metrics explicitly.\n",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let tool = ReadSkillTool::new(workspace_dir, false, None);
|
||||
let result = tool.execute(json!({ "name": "zhihu-hotlist" })).await.unwrap();
|
||||
|
||||
assert!(result.success);
|
||||
assert!(result.output.contains("# Zhihu Hotlist"));
|
||||
assert!(result.output.contains("## Referenced File: references/collection-flow.md"));
|
||||
assert!(result.output.contains("Collect rows from the hotlist first."));
|
||||
assert!(result.output.contains("## Referenced File: references/data-quality.md"));
|
||||
assert!(result.output.contains("Mark partial metrics explicitly."));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn read_skill_recursively_inlines_relative_asset_references() {
|
||||
let workspace_dir = temp_workspace_dir();
|
||||
let skill_dir = workspace_dir.join("skills/zhihu-hotlist");
|
||||
let refs_dir = skill_dir.join("references");
|
||||
let assets_dir = skill_dir.join("assets");
|
||||
std::fs::create_dir_all(&refs_dir).unwrap();
|
||||
std::fs::create_dir_all(&assets_dir).unwrap();
|
||||
std::fs::write(
|
||||
skill_dir.join("SKILL.md"),
|
||||
"# Zhihu Hotlist\n\nFollow [collection-flow.md](references/collection-flow.md).\n",
|
||||
)
|
||||
.unwrap();
|
||||
std::fs::write(
|
||||
refs_dir.join("collection-flow.md"),
|
||||
"Use `assets/zhihu_hotlist_flow.source.json` for exact selectors.\n",
|
||||
)
|
||||
.unwrap();
|
||||
std::fs::write(
|
||||
assets_dir.join("zhihu_hotlist_flow.source.json"),
|
||||
"{\n \"selectors\": [\".HotList-list\", \".HotItem\"]\n}\n",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let tool = ReadSkillTool::new(workspace_dir, false, None);
|
||||
let result = tool.execute(json!({ "name": "zhihu-hotlist" })).await.unwrap();
|
||||
|
||||
assert!(result.success);
|
||||
assert!(result.output.contains("## Referenced File: references/collection-flow.md"));
|
||||
assert!(result.output.contains("## Referenced File: assets/zhihu_hotlist_flow.source.json"));
|
||||
assert!(result.output.contains("\"selectors\": [\".HotList-list\", \".HotItem\"]"));
|
||||
}
|
||||
|
||||
fn temp_workspace_dir() -> PathBuf {
|
||||
let dir = std::env::temp_dir().join(format!("sgclaw-read-skill-{}", Uuid::new_v4()));
|
||||
std::fs::create_dir_all(&dir).unwrap();
|
||||
dir
|
||||
}
|
||||
115
tests/skill_lib_validation_test.py
Normal file
115
tests/skill_lib_validation_test.py
Normal file
@@ -0,0 +1,115 @@
|
||||
import importlib.util
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
# Repo-relative locations, resolved once at import time.
REPO_ROOT = Path(__file__).resolve().parents[1]
# The skill library is expected in a sibling checkout next to this repository.
# NOTE(review): assumes the skill_lib checkout exists beside the repo — confirm in CI layout.
SKILL_LIB_ROOT = REPO_ROOT.parent / "skill_lib"
SKILLS_DIR = SKILL_LIB_ROOT / "skills"
# Path to the validator script loaded dynamically by load_validator_module().
VALIDATOR_PATH = REPO_ROOT / "scripts" / "validate_skill_lib.py"
# Skill package directory names, in the sorted order the validator discovers them.
EXPECTED_SKILL_NAMES = [
    "office-export-xlsx",
    "zhihu-hotlist",
    "zhihu-hotlist-screen",
    "zhihu-navigate",
    "zhihu-write",
]
|
||||
|
||||
|
||||
def load_validator_module():
    """Load ``scripts/validate_skill_lib.py`` as a module from its file path.

    The validator script is not an installed package, so it is imported via
    the importlib machinery from VALIDATOR_PATH.

    Returns:
        The fully executed module object.
    """
    spec = importlib.util.spec_from_file_location("validate_skill_lib", VALIDATOR_PATH)
    # Validate the spec *before* using it. The previous version called
    # module_from_spec(spec) first, so a missing/unreadable script surfaced
    # as an opaque AttributeError instead of these explicit asserts.
    assert spec is not None
    assert spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
|
||||
|
||||
|
||||
class SkillLibValidationTest(unittest.TestCase):
    """Validates the on-disk skill_lib packages via the validator script."""

    @classmethod
    def setUpClass(cls):
        # Load the validator script once for the whole test class.
        cls.validator = load_validator_module()

    def test_discovers_expected_skill_packages(self):
        """Discovery returns exactly the expected packages, in sorted order."""
        skill_dirs = self.validator.discover_skill_dirs()
        self.assertEqual([path.name for path in skill_dirs], EXPECTED_SKILL_NAMES)

    def test_load_skill_matches_current_metadata(self):
        """Every loaded skill record carries the current shared metadata."""
        loaded = {}
        for skill_dir in self.validator.discover_skill_dirs():
            record = self.validator.load_skill(skill_dir)
            loaded[record.name] = record

        self.assertEqual(sorted(loaded), EXPECTED_SKILL_NAMES)

        for name, record in loaded.items():
            self.assertEqual(record.name, name)
            self.assertEqual(record.version, "0.1.0")
            self.assertEqual(record.author, "sgclaw")
            # Descriptions follow the "Use when ..." trigger convention.
            self.assertTrue(record.description.startswith("Use when"))
            if name.startswith("zhihu-"):
                self.assertIn("zhihu", record.tags)
                self.assertIn("browser", record.tags)
            if name == "office-export-xlsx":
                self.assertIn("office", record.tags)
                self.assertIn("xlsx", record.tags)
            self.assertEqual(record.location, SKILLS_DIR / name / "SKILL.md")
            # Prompt body starts at a markdown heading and must not retain
            # a front-matter delimiter.
            self.assertTrue(record.prompt_body.lstrip().startswith("# "))
            self.assertNotIn("\n---\n", record.prompt_body)

    def test_each_skill_passes_audit_without_scripts(self):
        """Auditing with scripts disallowed yields zero findings per skill."""
        for skill_dir in self.validator.discover_skill_dirs():
            report = self.validator.audit_skill_directory(skill_dir, allow_scripts=False)
            self.assertEqual(
                report.findings,
                [],
                f"{skill_dir.name} findings: {report.findings}",
            )

    def test_current_packages_keep_required_structure(self):
        """Each package keeps SKILL.md plus references/ and assets/ dirs."""
        for name in EXPECTED_SKILL_NAMES:
            skill_dir = SKILLS_DIR / name
            self.assertTrue((skill_dir / "SKILL.md").is_file())
            self.assertTrue((skill_dir / "references").is_dir())
            self.assertTrue((skill_dir / "assets").is_dir())

    def test_each_skill_declares_superrpa_browser_contract(self):
        """All zhihu-* skills document the superrpa_browser tool contract."""
        for name in [name for name in EXPECTED_SKILL_NAMES if name.startswith("zhihu-")]:
            content = (SKILLS_DIR / name / "SKILL.md").read_text(encoding="utf-8")
            self.assertIn("superrpa_browser", content)
            self.assertIn("expected_domain", content)
            self.assertIn("CSS", content)

    def test_zhihu_hotlist_declares_export_artifact_contract(self):
        """zhihu-hotlist documents the exact structured export payload."""
        content = (SKILLS_DIR / "zhihu-hotlist" / "SKILL.md").read_text(encoding="utf-8")
        self.assertIn("Export Artifact", content)
        self.assertIn('"sheet_name": "知乎热榜"', content)
        self.assertIn('"columns": ["rank", "title", "heat"]', content)
        self.assertIn('"rows": [[1, "标题", "344万"]]', content)
        self.assertIn("structured artifact is primary", content)

    def test_office_export_skill_declares_openxml_contract(self):
        """office-export-xlsx documents the openxml_office tool arguments."""
        content = (SKILLS_DIR / "office-export-xlsx" / "SKILL.md").read_text(encoding="utf-8")
        self.assertIn("openxml_office", content)
        self.assertIn(".xlsx", content)
        self.assertIn("sheet_name", content)
        self.assertIn("columns", content)
        self.assertIn("rows", content)

    def test_hotlist_screen_skill_declares_echarts_html_contract(self):
        """zhihu-hotlist-screen documents the ECharts HTML export contract."""
        content = (SKILLS_DIR / "zhihu-hotlist-screen" / "SKILL.md").read_text(encoding="utf-8")
        self.assertIn("screen_html_export", content)
        self.assertIn(".html", content)
        self.assertIn("ECharts", content)
        self.assertIn("大屏", content)
        self.assertIn("新标签页", content)
        self.assertIn("presentation", content)

    def test_validate_all_skills_reports_pass(self):
        """The top-level validate_all_skills entry point reports all-pass."""
        results = self.validator.validate_all_skills(allow_scripts=False)
        self.assertEqual([result.record.name for result in results], EXPECTED_SKILL_NAMES)
        self.assertTrue(all(result.ok for result in results))


if __name__ == "__main__":
    unittest.main()
|
||||
Reference in New Issue
Block a user