Compare commits

2 Commits

Author SHA1 Message Date
木炎
548dfc4aa9 refactor: keep generated scene runtime only in sgclaw 2026-05-06 16:37:19 +08:00
木炎
1d586dbe27 generated-scene: add command-center automation semantics 2026-05-06 15:22:49 +08:00
16 changed files with 789 additions and 6906 deletions

View File

@@ -1,119 +0,0 @@
use std::env;
use std::path::PathBuf;
use sgclaw::generated_scene::analyzer::SceneKind;
use sgclaw::generated_scene::generator::{generate_scene_package, GenerateSceneRequest};
use sgclaw::generated_scene::ir::{LegacySceneInfoJson, SceneIr};
/// CLI entrypoint: delegates to `run` and maps any error onto stderr plus a
/// non-zero process exit code.
fn main() {
    match run() {
        Ok(()) => {}
        Err(err) => {
            eprintln!("sg_scene_generate: {err}");
            std::process::exit(1);
        }
    }
}
/// Parses CLI arguments, decodes the optional inline JSON payloads, and
/// invokes the scene-package generator. All failures are flattened into a
/// human-readable `String` so `main` can print them verbatim.
fn run() -> Result<(), String> {
    let args = parse_args(env::args().skip(1))?;

    // Optional legacy scene-info payload supplied inline as JSON.
    let scene_info: Option<LegacySceneInfoJson> = match args.scene_info_json {
        Some(raw) => Some(
            serde_json::from_str(&raw).map_err(|e| format!("Invalid scene-info-json: {}", e))?,
        ),
        None => None,
    };

    // Optional pre-built scene IR supplied inline as JSON.
    let scene_ir: Option<SceneIr> = match args.scene_ir_json {
        Some(raw) => Some(
            serde_json::from_str(&raw).map_err(|e| format!("Invalid scene-ir-json: {}", e))?,
        ),
        None => None,
    };

    let request = GenerateSceneRequest {
        source_dir: args.source_dir,
        scene_id: args.scene_id,
        scene_name: args.scene_name,
        scene_kind: args.scene_kind,
        target_url: args.target_url,
        output_root: args.output_root,
        lessons_path: args.lessons_path,
        scene_info_json: scene_info,
        scene_ir_json: scene_ir,
    };
    let skill_root = generate_scene_package(request).map_err(|err| err.to_string())?;
    println!("generated scene package: {}", skill_root.display());
    Ok(())
}
/// Parsed command-line options for `sg_scene_generate`.
struct CliArgs {
    /// Directory containing the scenario sources to analyze (required).
    source_dir: PathBuf,
    /// Stable scene identifier (required).
    scene_id: String,
    /// Human-readable display name for the scene (required).
    scene_name: String,
    /// Optional scene-kind override, parsed via `SceneKind::from_str`.
    scene_kind: Option<SceneKind>,
    /// Optional target URL the generated scene should point at.
    target_url: Option<String>,
    /// Root directory where the generated skill package is staged (required).
    output_root: PathBuf,
    /// Optional path to a lessons TOML file.
    lessons_path: Option<PathBuf>,
    /// Optional raw JSON payloads; validated and decoded later in `run`.
    scene_info_json: Option<String>,
    scene_ir_json: Option<String>,
}
/// Parses the raw argument stream into `CliArgs`.
///
/// Every supported flag takes exactly one value, consumed from the following
/// argument. `--help`/`-h` short-circuits with the usage text as the error
/// string; any unrecognized argument fails with the usage text appended.
/// Missing required flags are reported via the usage text as well.
fn parse_args(mut args: impl Iterator<Item = String>) -> Result<CliArgs, String> {
    let mut source_dir = None;
    let mut scene_id = None;
    let mut scene_name = None;
    let mut scene_kind = None;
    let mut target_url = None;
    let mut output_root = None;
    let mut lessons_path = None;
    let mut scene_info_json = None;
    let mut scene_ir_json = None;

    while let Some(arg) = args.next() {
        match arg.as_str() {
            "--help" | "-h" => return Err(usage()),
            "--source-dir" | "--scene-id" | "--scene-name" | "--scene-kind" | "--target-url"
            | "--output-root" | "--lessons" | "--scene-info-json" | "--scene-ir-json" => {
                let flag = arg;
                // Each flag consumes the next argument as its value.
                let value = args
                    .next()
                    .ok_or_else(|| format!("missing value for {flag}"))?;
                match flag.as_str() {
                    "--source-dir" => source_dir = Some(PathBuf::from(value)),
                    "--scene-id" => scene_id = Some(value),
                    "--scene-name" => scene_name = Some(value),
                    "--scene-kind" => {
                        scene_kind = Some(
                            SceneKind::from_str(&value)
                                .ok_or_else(|| format!("invalid scene kind: {}", value))?,
                        );
                    }
                    "--target-url" => target_url = Some(value),
                    "--output-root" => output_root = Some(PathBuf::from(value)),
                    "--lessons" => lessons_path = Some(PathBuf::from(value)),
                    "--scene-info-json" => scene_info_json = Some(value),
                    "--scene-ir-json" => scene_ir_json = Some(value),
                    // Unreachable in practice: the outer match only admits the
                    // flags handled above.
                    _ => return Err(format!("unsupported argument {flag}")),
                }
            }
            _ => return Err(format!("unsupported argument {arg}\n{}", usage())),
        }
    }
    Ok(CliArgs {
        source_dir: source_dir.ok_or_else(usage)?,
        scene_id: scene_id.ok_or_else(usage)?,
        scene_name: scene_name.ok_or_else(usage)?,
        scene_kind,
        target_url,
        output_root: output_root.ok_or_else(usage)?,
        lessons_path,
        scene_info_json,
        scene_ir_json,
    })
}
/// Builds the one-line usage string shown for `--help` and argument errors.
fn usage() -> String {
    String::from(
        "usage: sg_scene_generate --source-dir <scenario-dir> --scene-id <scene-id> --scene-name <display-name> [--scene-kind <report_collection|monitoring>] [--target-url <url>] --output-root <skill-staging-root> [--lessons <lessons-toml>] [--scene-info-json '<json>'] [--scene-ir-json '<json>']",
    )
}

View File

@@ -13,13 +13,15 @@ use crate::generated_scene::analyzer::{
use crate::generated_scene::ir::{
ApiEndpointIr, ArtifactContractIr, BootstrapIr, EnrichmentRequestIr, EvidenceIr, ExportPlanIr,
LegacySceneInfoJson, MainRequestIr, MergeFieldMappingIr, MergePlanIr, ModeConditionIr, ModeIr,
MonitoringActionWorkflowIr, MonitoringDeltaStateIr, MonitoringDependencyIr,
MonitoringEncryptionResolutionIr, MonitoringOutputContractIr, MonitoringReadSliceIr,
MonitoringRuntimeContextIr, MonitoringSideEffectIr, MonitoringSideEffectPolicyIr,
MonitoringSidecarOutputIr, MonitoringStorageReadIr, MonitoringTimeoutContractIr,
NormalizeRulesIr, PaginationPlanIr, ParamIr, ReadinessGateIr, ReadinessIr,
RequestFieldMappingIr, RuntimeDependencyIr, SceneIdDiagnosticsIr, SceneIr, ValidationHintsIr,
WorkflowArchetype, WorkflowEvidenceIr, WorkflowStepIr,
MonitoringActionContractIr, MonitoringActionWorkflowIr, MonitoringDeltaStateIr,
MonitoringDependencyIr, MonitoringEncryptionResolutionIr, MonitoringExecutionFlowIr,
MonitoringExecutionStepIr, MonitoringIterationContractIr, MonitoringLogWriteContractIr,
MonitoringOutputContractIr, MonitoringQueueTransitionRuleIr, MonitoringReadSliceIr,
MonitoringResultStateMachineIr, MonitoringRuntimeContextIr, MonitoringSideEffectIr,
MonitoringSideEffectPolicyIr, MonitoringSidecarOutputIr, MonitoringStorageReadIr,
MonitoringTimeoutContractIr, NormalizeRulesIr, PaginationPlanIr, ParamIr, ReadinessGateIr,
ReadinessIr, RequestFieldMappingIr, RuntimeDependencyIr, SceneIdDiagnosticsIr, SceneIr,
ValidationHintsIr, WorkflowArchetype, WorkflowEvidenceIr, WorkflowStepIr,
};
use crate::generated_scene::lessons::{
load_generation_lessons, GenerationLessons, BUILTIN_REPORT_COLLECTION_LESSONS,
@@ -312,7 +314,13 @@ pub fn generate_scheduled_monitoring_action_skill_package(
let trigger_contract =
read_json_file(&request.trigger_contract_json, "scheduled monitoring trigger contract")?;
let workflow = monitoring_workflow_from_contract(&ir_contract);
let mut workflow = monitoring_workflow_from_contract(&ir_contract);
enrich_command_center_automation_semantics(
&request.scene_id,
&source_evidence,
&trigger_contract,
&mut workflow,
);
let scene_ir = monitoring_action_scene_ir(
&GenerateMonitoringActionPreviewRequest {
scene_id: request.scene_id.clone(),
@@ -1227,6 +1235,12 @@ fn monitoring_workflow_from_contract(contract: &Value) -> MonitoringActionWorkfl
})
.unwrap_or_else(|| monitoring_blocked_actions_from_platform_dependencies(platform_dependencies)),
},
action_contracts: Vec::new(),
iteration_contract: None,
execution_flow: None,
result_state_machines: Vec::new(),
queue_transition_rules: Vec::new(),
log_write_contracts: Vec::new(),
archetype: contract["archetype"]
.as_str()
.unwrap_or("marketing_gateway_monitor")
@@ -1234,6 +1248,278 @@ fn monitoring_workflow_from_contract(contract: &Value) -> MonitoringActionWorkfl
}
}
/// Backfills command-center automation semantics onto a freshly derived
/// monitoring workflow: the dispatch action contract, per-item iteration
/// contract, execution flow, result state machine, queue transition rules,
/// and log-write contract.
///
/// Scope-gated to the command-center fee-control scene (see
/// `is_command_center_monitoring_scene`). Every section is only written when
/// the workflow does not already carry a value, so data derived from the
/// real contract always wins over these built-in defaults.
fn enrich_command_center_automation_semantics(
    scene_id: &str,
    // NOTE(review): retained for interface stability. Its only prior read was
    // a lookup whose found and fallback results were the identical literal
    // (see `source_collection` below), so it is currently unused.
    _source_evidence: &Value,
    trigger_contract: &Value,
    workflow: &mut MonitoringActionWorkflowIr,
) {
    if !is_command_center_monitoring_scene(scene_id, &workflow.workflow_id) {
        return;
    }
    // Prefer the dispatch dependency discovered by analysis; otherwise fall
    // back to the known command-center dispatch endpoint.
    let dispatch_dependency = workflow
        .business_api_dependencies
        .iter()
        .find(|item| {
            item.url.contains("repetCtrlSend")
                || item.classification == "dispatch_exception_order"
                || item.classification == "business_dispatch"
        })
        .cloned()
        .unwrap_or_else(|| MonitoringDependencyIr {
            name: "repetCtrlSend".to_string(),
            url: "http://yxgateway.gs.sgcc.com.cn/emss-chargacctgf-paysrv-front/member/acctabnor/repetCtrlSend"
                .to_string(),
            classification: "dispatch_exception_order".to_string(),
            side_effect: true,
            blocked_by_default: true,
        });
    // Same strategy for the local dispose-log write endpoint.
    let dispose_log_dependency = workflow
        .local_service_dependencies
        .iter()
        .find(|item| {
            item.url.contains("setDisposeLog")
                || item.classification == "write_dispose_log"
                || item.classification == "dispose_log_write"
        })
        .cloned()
        .unwrap_or_else(|| MonitoringDependencyIr {
            name: "setDisposeLog".to_string(),
            url: "http://localhost:13313/MonitorServices/setDisposeLog".to_string(),
            classification: "write_dispose_log".to_string(),
            side_effect: true,
            blocked_by_default: true,
        });
    // NOTE(review): the previous code scanned
    // trigger_contract["platformRuntimeCapabilities"]["hostActionBridge"] for
    // an entry named "mac.exeTQueue" but produced that exact literal whether
    // or not the entry existed, so the whole expression was a constant. Use
    // the literal directly.
    let queue_action_ref = "mac.exeTQueue".to_string();
    // Dependencies are referenced by URL when available, by classification
    // otherwise.
    let dispatch_dependency_ref = if dispatch_dependency.url.is_empty() {
        dispatch_dependency.classification.clone()
    } else {
        dispatch_dependency.url.clone()
    };
    let dispose_log_dependency_ref = if dispose_log_dependency.url.is_empty() {
        dispose_log_dependency.classification.clone()
    } else {
        dispose_log_dependency.url.clone()
    };
    // Queue transitions are gated on the first declared future queue mode,
    // defaulting to "queue_process".
    let queue_required_mode = trigger_contract["triggerContracts"]["queue"]["futureModes"]
        .as_array()
        .and_then(|items| items.first())
        .and_then(Value::as_str)
        .unwrap_or("queue_process")
        .to_string();
    // How the dispatch request fields are filled for each pending item.
    let mut dispatch_field_bindings = Map::new();
    dispatch_field_bindings.insert("chooseList".to_string(), json!(["$current_item"]));
    dispatch_field_bindings.insert(
        "itemIdentity".to_string(),
        json!(["consNo", "custNo", "sendBeginTime", "createTime"]),
    );
    // How the dispose-log payload is derived from the current item.
    let mut dispose_log_field_bindings = Map::new();
    dispose_log_field_bindings.insert(
        "orderID".to_string(),
        json!({
            "from": ["current_item.consNo", "current_item.custNo", "current_item.sendBeginTime", "current_item.createTime"],
            "strategy": "join_non_empty_with_underscore"
        }),
    );
    dispose_log_field_bindings.insert(
        "name".to_string(),
        json!({
            "from": ["current_item.orgName", "current_item.consName", "current_item.phone"],
            "strategy": "first_non_empty"
        }),
    );
    dispose_log_field_bindings.insert("time".to_string(), json!({"from": "runtime.now"}));
    let action_contract = MonitoringActionContractIr {
        action_id: "dispatch_exception_order".to_string(),
        action_type: "business_dispatch".to_string(),
        dependency_ref: dispatch_dependency_ref.clone(),
        target_endpoint_or_host_call: if dispatch_dependency.url.is_empty() {
            "repetCtrlSend".to_string()
        } else {
            dispatch_dependency.url.clone()
        },
        execution_context: "attached_page_http_post".to_string(),
        input_source: "current_item".to_string(),
        request_template: json!({
            "busType": "03",
            "chooseList": ["$current_item"]
        }),
        field_bindings: dispatch_field_bindings,
        auth_binding: json!({
            "source": "localStorage.markToken",
            "targetField": "auth_token"
        }),
        encryption_binding: json!({
            "mode": workflow.runtime_context.encryption_mode,
            "payloadSource": "request_body_json"
        }),
        result_channel: "browser_callback_js_result".to_string(),
    };
    let iteration_contract = MonitoringIterationContractIr {
        iteration_id: "pending_dispatch_loop".to_string(),
        // NOTE(review): the previous lookup over
        // source_evidence["queueDependencies"] yielded "pendingList" on both
        // its found and fallback paths, so the literal is used directly.
        source_collection: "pendingList".to_string(),
        iteration_mode: "sequential_per_item".to_string(),
        item_alias: "current_item".to_string(),
        on_empty_transition: "queue_continue_on_empty".to_string(),
        on_item_complete_transition: "next_item".to_string(),
        on_all_complete_transition: "queue_continue_on_done".to_string(),
    };
    // Dispatch loop: iterate -> dispatch -> write log -> back to the loop;
    // both the empty and the all-done exits funnel into a queue transition
    // before the terminal step.
    let execution_flow = MonitoringExecutionFlowIr {
        flow_id: "command_center_dispatch_preview_flow".to_string(),
        entry_step: "dispatch_loop".to_string(),
        steps: vec![
            MonitoringExecutionStepIr {
                step_id: "dispatch_loop".to_string(),
                step_type: "iterate".to_string(),
                iteration_ref: "pending_dispatch_loop".to_string(),
                next_on_empty: "queue_continue_empty".to_string(),
                next_on_done: "queue_continue_done".to_string(),
                next_on_success: "dispatch_current_item".to_string(),
                ..MonitoringExecutionStepIr::default()
            },
            MonitoringExecutionStepIr {
                step_id: "dispatch_current_item".to_string(),
                step_type: "action".to_string(),
                action_contract_ref: "dispatch_exception_order".to_string(),
                // The dispose log is written after the dispatch result either way.
                next_on_success: "write_dispose_log".to_string(),
                next_on_failure: "write_dispose_log".to_string(),
                ..MonitoringExecutionStepIr::default()
            },
            MonitoringExecutionStepIr {
                step_id: "write_dispose_log".to_string(),
                step_type: "log_write".to_string(),
                log_write_contract_ref: "dispose_log_after_dispatch".to_string(),
                next_on_success: "dispatch_loop".to_string(),
                next_on_failure: "dispatch_loop".to_string(),
                ..MonitoringExecutionStepIr::default()
            },
            MonitoringExecutionStepIr {
                step_id: "queue_continue_empty".to_string(),
                step_type: "queue_transition".to_string(),
                queue_transition_ref: "queue_continue_on_empty".to_string(),
                next_on_success: "done".to_string(),
                ..MonitoringExecutionStepIr::default()
            },
            MonitoringExecutionStepIr {
                step_id: "queue_continue_done".to_string(),
                step_type: "queue_transition".to_string(),
                queue_transition_ref: "queue_continue_on_done".to_string(),
                next_on_success: "done".to_string(),
                ..MonitoringExecutionStepIr::default()
            },
            MonitoringExecutionStepIr {
                step_id: "done".to_string(),
                step_type: "terminal".to_string(),
                ..MonitoringExecutionStepIr::default()
            },
        ],
    };
    let result_state_machine = MonitoringResultStateMachineIr {
        state_machine_id: "dispatch_result_state_machine".to_string(),
        action_contract_ref: "dispatch_exception_order".to_string(),
        success_match: json!({
            "allOf": [
                {"field": "code", "equals": "00000"},
                {"field": "message", "equals": "success"}
            ]
        }),
        failure_match: json!({
            "fallback": "not_success_match"
        }),
        state_on_success: "success".to_string(),
        state_on_failure: "failure".to_string(),
        post_success_log_contract_ref: "dispose_log_after_dispatch".to_string(),
        post_failure_log_contract_ref: "dispose_log_after_dispatch".to_string(),
        continue_policy: "continue_next_item".to_string(),
    };
    let queue_transition_rules = vec![
        MonitoringQueueTransitionRuleIr {
            transition_id: "queue_continue_on_empty".to_string(),
            queue_action_ref: queue_action_ref.clone(),
            trigger_point: "on_empty_collection".to_string(),
            required_mode: queue_required_mode.clone(),
            blocked_by_default: true,
        },
        MonitoringQueueTransitionRuleIr {
            transition_id: "queue_continue_on_done".to_string(),
            queue_action_ref,
            trigger_point: "on_all_items_done".to_string(),
            required_mode: queue_required_mode,
            blocked_by_default: true,
        },
    ];
    let log_write_contract = MonitoringLogWriteContractIr {
        log_id: "dispose_log_after_dispatch".to_string(),
        dependency_ref: dispose_log_dependency_ref,
        target_endpoint_or_host_call: if dispose_log_dependency.url.is_empty() {
            "setDisposeLog".to_string()
        } else {
            dispose_log_dependency.url.clone()
        },
        emit_phase: "after_dispatch_result".to_string(),
        payload_template: json!({
            "type": "fee_control_exception_dispose",
            "orderID": "$derived.orderID",
            "name": "$derived.name",
            "time": "$runtime.now",
            "state": "$dispatch.state"
        }),
        field_bindings: dispose_log_field_bindings,
        state_binding: "dispatch_result_state".to_string(),
    };
    // Backfill only: never clobber semantics already present on the workflow.
    if workflow.action_contracts.is_empty() {
        workflow.action_contracts = vec![action_contract];
    }
    if workflow.iteration_contract.is_none() {
        workflow.iteration_contract = Some(iteration_contract);
    }
    if workflow.execution_flow.is_none() {
        workflow.execution_flow = Some(execution_flow);
    }
    if workflow.result_state_machines.is_empty() {
        workflow.result_state_machines = vec![result_state_machine];
    }
    if workflow.queue_transition_rules.is_empty() {
        workflow.queue_transition_rules = queue_transition_rules;
    }
    if workflow.log_write_contracts.is_empty() {
        workflow.log_write_contracts = vec![log_write_contract];
    }
}
/// True when either the scene id or the workflow id identifies the
/// command-center fee-control monitoring scene.
fn is_command_center_monitoring_scene(scene_id: &str, workflow_id: &str) -> bool {
    let by_scene_id = scene_id == "command-center-fee-control-monitor";
    let by_workflow_id = workflow_id == "command_center_fee_control_monitoring_action";
    by_scene_id || by_workflow_id
}
fn monitoring_dependencies(value: &Value) -> Vec<MonitoringDependencyIr> {
value
.as_array()
@@ -7257,14 +7543,28 @@ if (typeof module !== 'undefined') {{
}
fn compile_scheduled_monitoring_action_plan_script(scene_ir: &SceneIr) -> String {
let workflow = scene_ir
.monitoring_action_workflow
.as_ref()
.cloned()
.unwrap_or_default();
let blocked = scene_ir
.monitoring_action_workflow
.as_ref()
.map(|workflow| workflow.side_effect_policy.blocked_call_signatures.clone())
.unwrap_or_default();
let blocked_json = serde_json::to_string_pretty(&blocked).unwrap_or_else(|_| "[]".to_string());
let action_contracts_json =
serde_json::to_string_pretty(&workflow.action_contracts).unwrap_or_else(|_| "[]".to_string());
let queue_transitions_json = serde_json::to_string_pretty(&workflow.queue_transition_rules)
.unwrap_or_else(|_| "[]".to_string());
let log_write_contracts_json =
serde_json::to_string_pretty(&workflow.log_write_contracts).unwrap_or_else(|_| "[]".to_string());
format!(
r#"const BLOCKED_CALL_SIGNATURES = {blocked_json};
const ACTION_CONTRACTS = {action_contracts_json};
const QUEUE_TRANSITION_RULES = {queue_transitions_json};
const LOG_WRITE_CONTRACTS = {log_write_contracts_json};
function normalizeDecision(decision) {{
if (typeof decision === 'string') {{
@@ -7276,22 +7576,42 @@ function normalizeDecision(decision) {{
function buildActionPlan(decision = {{}}, args = {{}}) {{
const source = normalizeDecision(decision);
const pendingList = Array.isArray(source.pendingList) ? source.pendingList : [];
const dispatchContract = ACTION_CONTRACTS.find(item => item.actionId === 'dispatch_exception_order') || ACTION_CONTRACTS[0] || {{}};
const actionPlan = pendingList.map((item, index) => ({{
itemId: String(item.id || item.consNo || item.custNo || item.workOrderId || index),
actionType: 'business_dispatch',
targetEndpointOrHostCall: 'repetCtrlSend',
actionType: dispatchContract.actionType || 'business_dispatch',
actionContractRef: dispatchContract.actionId || 'dispatch_exception_order',
targetEndpointOrHostCall: dispatchContract.targetEndpointOrHostCall || 'repetCtrlSend',
blockedByDefault: true,
requiresFutureGate: 'dispatch_gate',
reason: 'scheduled monitoring preview only'
}}));
const queueTransitions = pendingList.length > 0
? QUEUE_TRANSITION_RULES.filter(item => item.transitionId === 'queue_continue_on_done')
: QUEUE_TRANSITION_RULES.filter(item => item.transitionId === 'queue_continue_on_empty');
const logWritePreview = pendingList.length > 0
? LOG_WRITE_CONTRACTS.map(item => ({{
logId: item.logId,
targetEndpointOrHostCall: item.targetEndpointOrHostCall,
emitPhase: item.emitPhase,
blockedByDefault: true
}}))
: [];
return {{
type: 'scheduled-monitoring-action-plan-preview',
status: 'action-plan-ok',
mode: args.mode || source.mode || 'monitor_only',
actionPlan,
queueTransitions,
logWritePreview,
blockedSideEffects: {{
blockedCallSignatures: BLOCKED_CALL_SIGNATURES
}},
summary: {{
action_plan_count: actionPlan.length,
queue_transition_count: queueTransitions.length,
log_write_preview_count: logWritePreview.length
}},
sideEffectCounters: {{
repetCtrlSend: 0,
sendMessages: 0,
@@ -7308,7 +7628,10 @@ if (typeof module !== 'undefined') {{
module.exports = {{ buildActionPlan }};
}}
"#,
blocked_json = blocked_json
blocked_json = blocked_json,
action_contracts_json = action_contracts_json,
queue_transitions_json = queue_transitions_json,
log_write_contracts_json = log_write_contracts_json
)
}
@@ -8867,6 +9190,12 @@ fn scheduled_monitoring_generation_report(
"sidecarOutputs": workflow.runtime_context.output_contract.sidecar_outputs,
"deltaState": workflow.runtime_context.output_contract.delta_state,
},
"actionContracts": workflow.action_contracts,
"iterationContract": workflow.iteration_contract,
"executionFlow": workflow.execution_flow,
"resultStateMachines": workflow.result_state_machines,
"queueTransitionRules": workflow.queue_transition_rules,
"logWriteContracts": workflow.log_write_contracts,
"triggerContractStatus": trigger_contract["status"],
"readiness": scene_ir.readiness,
})
@@ -9129,4 +9458,40 @@ mod tests {
assert!(rendered.contains("[[runtime_context.output_contract.sidecar_outputs]]"));
assert!(rendered.contains("archetype = \"business_page_report_monitor\""));
}
#[test]
// Verifies the enrichment is additive: a pre-existing action contract is kept
// untouched while every missing semantic section (iteration, flow, state
// machines, queue transitions, log writes) is backfilled with defaults.
fn command_center_enrichment_backfills_missing_semantics_without_overwriting_existing_ones() {
    let mut workflow = test_workflow("marketing_gateway_monitor");
    // Gate on workflow id rather than scene id to exercise that branch too.
    workflow.workflow_id = "command_center_fee_control_monitoring_action".to_string();
    workflow.action_contracts.push(MonitoringActionContractIr {
        action_id: "preexisting_dispatch".to_string(),
        ..MonitoringActionContractIr::default()
    });
    enrich_command_center_automation_semantics(
        "command-center-fee-control-monitor",
        // Empty evidence: backfill must not depend on it.
        &json!({}),
        &json!({
            "triggerContracts": {
                "queue": {
                    "futureModes": ["queue_process"]
                }
            },
            "platformRuntimeCapabilities": {
                "hostActionBridge": [
                    { "name": "mac.exeTQueue" }
                ]
            }
        }),
        &mut workflow,
    );
    // The pre-existing contract must survive unchanged (no overwrite).
    assert_eq!(workflow.action_contracts.len(), 1);
    assert_eq!(workflow.action_contracts[0].action_id, "preexisting_dispatch");
    // Everything that was missing must now be populated.
    assert!(workflow.iteration_contract.is_some());
    assert!(workflow.execution_flow.is_some());
    assert_eq!(workflow.result_state_machines.len(), 1);
    assert_eq!(workflow.queue_transition_rules.len(), 2);
    assert_eq!(workflow.log_write_contracts.len(), 1);
}
}

View File

@@ -477,6 +477,138 @@ pub struct MonitoringSideEffectPolicyIr {
pub blocked_actions: Vec<MonitoringSideEffectIr>,
}
/// Contract for a single side-effecting business action a monitoring workflow
/// may execute (e.g. dispatching an exception order). Serialized with
/// camelCase field names; every field defaults when absent.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct MonitoringActionContractIr {
    /// Stable id referenced by execution steps and result state machines.
    #[serde(rename = "actionId", default)]
    pub action_id: String,
    /// Action category, e.g. "business_dispatch".
    #[serde(rename = "actionType", default)]
    pub action_type: String,
    /// Reference to the backing dependency (URL, or classification when the
    /// dependency has no URL).
    #[serde(rename = "dependencyRef", default)]
    pub dependency_ref: String,
    /// Concrete endpoint URL or host-bridge call name to invoke.
    #[serde(rename = "targetEndpointOrHostCall", default)]
    pub target_endpoint_or_host_call: String,
    /// Execution environment tag, e.g. "attached_page_http_post".
    #[serde(rename = "executionContext", default)]
    pub execution_context: String,
    /// Name of the value fed into the request, e.g. "current_item".
    #[serde(rename = "inputSource", default)]
    pub input_source: String,
    /// JSON skeleton of the request body.
    #[serde(rename = "requestTemplate", default)]
    pub request_template: Value,
    /// Per-field rules describing how template fields are filled.
    #[serde(rename = "fieldBindings", default)]
    pub field_bindings: Map<String, Value>,
    /// How authentication material is sourced and attached to the request.
    #[serde(rename = "authBinding", default)]
    pub auth_binding: Value,
    /// How the payload is encrypted (mode plus payload source).
    #[serde(rename = "encryptionBinding", default)]
    pub encryption_binding: Value,
    /// Channel delivering the action result, e.g. "browser_callback_js_result".
    #[serde(rename = "resultChannel", default)]
    pub result_channel: String,
}
/// Describes how a monitoring workflow iterates over a collection of pending
/// items, and which transitions fire at each iteration boundary.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct MonitoringIterationContractIr {
    /// Stable id referenced by execution steps (`iterationRef`).
    #[serde(rename = "iterationId", default)]
    pub iteration_id: String,
    /// Name of the collection being iterated, e.g. "pendingList".
    #[serde(rename = "sourceCollection", default)]
    pub source_collection: String,
    /// Iteration strategy, e.g. "sequential_per_item".
    #[serde(rename = "iterationMode", default)]
    pub iteration_mode: String,
    /// Alias the current element is exposed under, e.g. "current_item".
    #[serde(rename = "itemAlias", default)]
    pub item_alias: String,
    /// Transition taken when the collection is empty.
    #[serde(rename = "onEmptyTransition", default)]
    pub on_empty_transition: String,
    /// Transition taken when one item finishes.
    #[serde(rename = "onItemCompleteTransition", default)]
    pub on_item_complete_transition: String,
    /// Transition taken after all items have been processed.
    #[serde(rename = "onAllCompleteTransition", default)]
    pub on_all_complete_transition: String,
}
/// One node in a `MonitoringExecutionFlowIr`. Known step types include
/// "iterate", "action", "log_write", "queue_transition", and "terminal"; in
/// practice only the `*_ref` matching the step type is populated and the
/// remaining refs stay empty (all fields default).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct MonitoringExecutionStepIr {
    /// Unique step id, used as the target of `next*` edges.
    #[serde(rename = "stepId", default)]
    pub step_id: String,
    /// Step discriminator, e.g. "iterate" or "terminal".
    #[serde(rename = "stepType", default)]
    pub step_type: String,
    /// Id of the iteration contract driven by an "iterate" step.
    #[serde(rename = "iterationRef", default)]
    pub iteration_ref: String,
    /// Id of the action contract invoked by an "action" step.
    #[serde(rename = "actionContractRef", default)]
    pub action_contract_ref: String,
    /// Id of the log-write contract used by a "log_write" step.
    #[serde(rename = "logWriteContractRef", default)]
    pub log_write_contract_ref: String,
    /// Id of the queue transition rule used by a "queue_transition" step.
    #[serde(rename = "queueTransitionRef", default)]
    pub queue_transition_ref: String,
    // Outgoing edges: the next step id for each possible outcome.
    #[serde(rename = "nextOnSuccess", default)]
    pub next_on_success: String,
    #[serde(rename = "nextOnFailure", default)]
    pub next_on_failure: String,
    #[serde(rename = "nextOnEmpty", default)]
    pub next_on_empty: String,
    #[serde(rename = "nextOnDone", default)]
    pub next_on_done: String,
}
/// A small state-machine-style execution flow: a named entry step plus the
/// set of steps it links together via their `next*` edges.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct MonitoringExecutionFlowIr {
    /// Stable id of this flow.
    #[serde(rename = "flowId", default)]
    pub flow_id: String,
    /// `step_id` of the step where execution begins.
    #[serde(rename = "entryStep", default)]
    pub entry_step: String,
    /// All steps of the flow; linked by step ids, not by position.
    #[serde(rename = "steps", default)]
    pub steps: Vec<MonitoringExecutionStepIr>,
}
/// Classifies the result of an action contract into a success or failure
/// state, names the log-write contract emitted after each outcome, and says
/// how iteration continues afterwards.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct MonitoringResultStateMachineIr {
    /// Stable id of this state machine.
    #[serde(rename = "stateMachineId", default)]
    pub state_machine_id: String,
    /// `action_id` of the action contract whose result is classified.
    #[serde(rename = "actionContractRef", default)]
    pub action_contract_ref: String,
    /// JSON matcher that identifies a successful response.
    #[serde(rename = "successMatch", default)]
    pub success_match: Value,
    /// JSON matcher (or fallback rule) that identifies a failed response.
    #[serde(rename = "failureMatch", default)]
    pub failure_match: Value,
    /// State name assigned on a successful match, e.g. "success".
    #[serde(rename = "stateOnSuccess", default)]
    pub state_on_success: String,
    /// State name assigned on a failure match, e.g. "failure".
    #[serde(rename = "stateOnFailure", default)]
    pub state_on_failure: String,
    /// Log-write contract emitted after a success.
    #[serde(rename = "postSuccessLogContractRef", default)]
    pub post_success_log_contract_ref: String,
    /// Log-write contract emitted after a failure.
    #[serde(rename = "postFailureLogContractRef", default)]
    pub post_failure_log_contract_ref: String,
    /// How iteration proceeds after classification, e.g. "continue_next_item".
    #[serde(rename = "continuePolicy", default)]
    pub continue_policy: String,
}
/// Rule describing when the workflow may hand control back to the host queue
/// (e.g. via the "mac.exeTQueue" host action), and the mode gate required
/// before that transition may actually run.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct MonitoringQueueTransitionRuleIr {
    /// Stable id referenced by execution steps (`queueTransitionRef`).
    #[serde(rename = "transitionId", default)]
    pub transition_id: String,
    /// Host action invoked to continue the queue, e.g. "mac.exeTQueue".
    #[serde(rename = "queueActionRef", default)]
    pub queue_action_ref: String,
    /// When the transition fires, e.g. "on_empty_collection".
    #[serde(rename = "triggerPoint", default)]
    pub trigger_point: String,
    /// Mode that must be active for the transition to run, e.g. "queue_process".
    #[serde(rename = "requiredMode", default)]
    pub required_mode: String,
    /// Whether this transition is blocked unless explicitly enabled.
    #[serde(rename = "blockedByDefault", default)]
    pub blocked_by_default: bool,
}
/// Contract for writing a disposal/audit log entry after an action result,
/// including the payload template and how its fields are derived.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct MonitoringLogWriteContractIr {
    /// Stable id referenced by execution steps and state machines.
    #[serde(rename = "logId", default)]
    pub log_id: String,
    /// Reference to the backing dependency (URL or classification).
    #[serde(rename = "dependencyRef", default)]
    pub dependency_ref: String,
    /// Concrete endpoint URL or host-bridge call name to invoke.
    #[serde(rename = "targetEndpointOrHostCall", default)]
    pub target_endpoint_or_host_call: String,
    /// Phase at which the log is emitted, e.g. "after_dispatch_result".
    #[serde(rename = "emitPhase", default)]
    pub emit_phase: String,
    /// JSON skeleton of the log payload.
    #[serde(rename = "payloadTemplate", default)]
    pub payload_template: Value,
    /// Per-field rules describing how payload fields are derived.
    #[serde(rename = "fieldBindings", default)]
    pub field_bindings: Map<String, Value>,
    /// Name of the state value bound into the payload, e.g.
    /// "dispatch_result_state".
    #[serde(rename = "stateBinding", default)]
    pub state_binding: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct MonitoringActionWorkflowIr {
#[serde(rename = "workflowId", default)]
@@ -505,6 +637,18 @@ pub struct MonitoringActionWorkflowIr {
pub preview_schema: Vec<String>,
#[serde(rename = "sideEffectPolicy", default)]
pub side_effect_policy: MonitoringSideEffectPolicyIr,
#[serde(rename = "actionContracts", default)]
pub action_contracts: Vec<MonitoringActionContractIr>,
#[serde(rename = "iterationContract", default)]
pub iteration_contract: Option<MonitoringIterationContractIr>,
#[serde(rename = "executionFlow", default)]
pub execution_flow: Option<MonitoringExecutionFlowIr>,
#[serde(rename = "resultStateMachines", default)]
pub result_state_machines: Vec<MonitoringResultStateMachineIr>,
#[serde(rename = "queueTransitionRules", default)]
pub queue_transition_rules: Vec<MonitoringQueueTransitionRuleIr>,
#[serde(rename = "logWriteContracts", default)]
pub log_write_contracts: Vec<MonitoringLogWriteContractIr>,
#[serde(rename = "archetype", default = "default_monitoring_archetype")]
pub archetype: String,
}

View File

@@ -1,5 +1 @@
pub mod analyzer;
pub mod generator;
pub mod ir;
pub mod lessons;
pub mod scheduled_monitoring_runtime;

View File

@@ -1301,6 +1301,16 @@ fn run_scheduled_monitoring_skill_runtime(
.and_then(Value::as_array)
.cloned()
.unwrap_or_default();
let queue_transitions = action_plan_preview
.get("queueTransitions")
.and_then(Value::as_array)
.cloned()
.unwrap_or_default();
let log_write_preview = action_plan_preview
.get("logWritePreview")
.and_then(Value::as_array)
.cloned()
.unwrap_or_default();
let detect_root_cause = detect_status_root_cause(&json!({
"detectSnapshot": detect_snapshot.clone()
}));
@@ -1322,11 +1332,14 @@ fn run_scheduled_monitoring_skill_runtime(
"pending_count": pending_count,
"notify_count": notify_candidates.len(),
"action_plan_count": action_plan.len(),
"queue_transition_count": queue_transitions.len(),
"detect_root_cause": detect_root_cause
},
"pendingList": pending_list,
"notifyCandidates": notify_candidates,
"actionPlan": action_plan,
"queueTransitions": queue_transitions,
"logWritePreview": log_write_preview,
"blockedSideEffects": {
"blockedCallSignatures": blocked_call_signatures
},

View File

@@ -44,7 +44,7 @@
],
"source": "scheduled_monitoring_action_trigger_runtime_contract"
},
"finishedAt": "2026-04-26T05:30:42.489979300+00:00",
"finishedAt": "2026-05-06T04:36:29.926950300+00:00",
"mode": "dry_run",
"previewArtifact": {
"actionPlan": [
@@ -112,7 +112,7 @@
"repetCtrlSend": 0,
"sendMessages": 0
},
"startedAt": "2026-04-26T05:30:42.489979300+00:00",
"startedAt": "2026-05-06T04:36:29.926950300+00:00",
"status": "dry-run-runtime-pass",
"triggerType": "queue",
"warnings": [
@@ -158,7 +158,7 @@
],
"source": "scheduled_monitoring_action_trigger_runtime_contract"
},
"finishedAt": "2026-04-26T05:30:42.260973600+00:00",
"finishedAt": "2026-05-06T04:36:29.680658700+00:00",
"mode": "monitor_only",
"previewArtifact": {
"actionPlan": [
@@ -226,7 +226,7 @@
"repetCtrlSend": 0,
"sendMessages": 0
},
"startedAt": "2026-04-26T05:30:42.260973600+00:00",
"startedAt": "2026-05-06T04:36:29.680658700+00:00",
"status": "dry-run-runtime-pass",
"triggerType": "scheduled",
"warnings": [

View File

@@ -1,37 +0,0 @@
use sgclaw::generated_scene::lessons::{
load_generation_lessons, GenerationLessons, BUILTIN_REPORT_COLLECTION_LESSONS,
};
#[test]
// Pins the built-in report-collection lesson set: its version identifier and
// every generator rule it must enforce. A failure here means the builtin
// defaults drifted from what the generator relies on.
fn builtin_report_collection_lessons_match_required_generator_rules() {
    let lessons = GenerationLessons::default_report_collection();
    assert_eq!(
        BUILTIN_REPORT_COLLECTION_LESSONS,
        "builtin:report_collection_v1"
    );
    // Routing must be strict: exact suffix matches only, unsupported scenes
    // fail closed.
    assert!(lessons.routing.require_exact_suffix);
    assert!(lessons.routing.unsupported_scene_fail_closed);
    assert!(lessons.canonical_params.require_explicit_period);
    assert!(lessons.bootstrap.require_expected_domain);
    assert!(lessons.bootstrap.require_target_url);
    assert!(lessons.artifact.require_report_artifact);
    assert!(lessons.validation.require_pipe_and_ws_checks);
    assert!(lessons.validation.require_manual_service_console_smoke);
}
#[test]
// Loads the line-loss lessons TOML from the docs tree and checks it declares
// the same generator rules as the builtin set — keeping the on-disk reference
// file in lockstep with the generator's expectations.
fn lineloss_lessons_toml_declares_required_generator_rules() {
    let lessons =
        load_generation_lessons("docs/superpowers/references/tq-lineloss-lessons-learned.toml")
            .unwrap();
    assert!(lessons.routing.require_exact_suffix);
    assert!(lessons.routing.unsupported_scene_fail_closed);
    assert!(lessons.canonical_params.require_explicit_period);
    assert!(lessons.bootstrap.require_expected_domain);
    assert!(lessons.bootstrap.require_target_url);
    assert!(lessons.artifact.require_report_artifact);
    assert!(lessons.validation.require_pipe_and_ws_checks);
    assert!(lessons.validation.require_manual_service_console_smoke);
}

View File

@@ -0,0 +1,42 @@
use std::fs;
use std::path::Path;
/// Crate root (the directory containing Cargo.toml), resolved at compile time
/// from the CARGO_MANIFEST_DIR environment variable set by cargo.
fn repo_root() -> &'static Path {
    Path::new(env!("CARGO_MANIFEST_DIR"))
}
#[test]
// Regression guard for the refactor that moved scene generation out of this
// crate: the old binary entrypoint file must not reappear.
fn sgclaw_no_longer_ships_generated_scene_binary_entrypoint() {
    let binary_entrypoint = repo_root().join("src/bin/sg_scene_generate.rs");
    assert!(
        !binary_entrypoint.exists(),
        "sgclaw should not keep sg_scene_generate.rs once generated_scene_core owns generation"
    );
}
#[test]
// Checks the generated_scene module's source text directly: only the runtime
// adapter may remain exported; the analyzer/generator/ir/lessons modules must
// have moved out of sgclaw.
fn generated_scene_module_only_exports_runtime_adapter() {
    let module_source =
        fs::read_to_string(repo_root().join("src/generated_scene/mod.rs")).unwrap();
    assert!(
        module_source.contains("pub mod scheduled_monitoring_runtime;"),
        "generated_scene runtime adapter must remain exported"
    );
    assert!(
        !module_source.contains("pub mod analyzer;"),
        "generated_scene analyzer export must move out of sgclaw"
    );
    assert!(
        !module_source.contains("pub mod generator;"),
        "generated_scene generator export must move out of sgclaw"
    );
    assert!(
        !module_source.contains("pub mod ir;"),
        "generated_scene ir export must move out of sgclaw"
    );
    assert!(
        !module_source.contains("pub mod lessons;"),
        "generated_scene lessons export must move out of sgclaw"
    );
}

View File

@@ -1,285 +0,0 @@
use std::fs;
use std::path::{Path, PathBuf};
use std::time::{SystemTime, UNIX_EPOCH};
use serde::Deserialize;
use serde_json::Value;
use sgclaw::generated_scene::generator::{generate_scene_package, GenerateSceneRequest};
use sgclaw::generated_scene::ir::SceneIr;
/// Top-level manifest listing the canonical P0 generation targets.
#[derive(Debug, Deserialize)]
struct CanonicalManifest {
    // One entry per canonical scenario covered by the P0 fixtures.
    targets: Vec<CanonicalTarget>,
}
/// One canonical generation target: where its fixtures live, the expected
/// ("answer key") scene IR, and the requirement lists the generated output is
/// validated against.
#[derive(Debug, Deserialize)]
struct CanonicalTarget {
    // Stable target identifier, also used to derive scene id/name in tests.
    id: String,
    /// Directory holding the scenario fixture sources.
    #[serde(rename = "fixtureDir")]
    fixture_dir: String,
    /// Path to the canonical (expected) scene IR JSON.
    #[serde(rename = "canonicalSceneIr")]
    canonical_scene_ir: String,
    /// Evidence types the generated scene must include.
    #[serde(rename = "requiredEvidenceTypes")]
    required_evidence_types: Vec<String>,
    /// Workflow step types the generated scene must include.
    #[serde(rename = "requiredWorkflowStepTypes")]
    required_workflow_step_types: Vec<String>,
    /// Readiness gate names the generated scene must declare.
    #[serde(rename = "requiredGateNames")]
    required_gate_names: Vec<String>,
    /// Human acceptance checklist items for this target.
    #[serde(rename = "acceptanceChecklist")]
    acceptance_checklist: Vec<String>,
    /// Known failure modes catalogued for this target.
    #[serde(rename = "failureTaxonomy")]
    failure_taxonomy: Vec<String>,
}
/// The P0 manifest must be actionable: all three targets exist on disk and
/// every required list is populated.
#[test]
fn p0_canonical_manifest_is_actionable() {
    let manifest = load_manifest();
    assert_eq!(manifest.targets.len(), 3);
    for target in manifest.targets {
        let fixture_present = Path::new(&target.fixture_dir).exists();
        assert!(fixture_present, "fixture dir missing: {}", target.fixture_dir);
        let canonical_present = Path::new(&target.canonical_scene_ir).exists();
        assert!(
            canonical_present,
            "canonical ir missing: {}",
            target.canonical_scene_ir
        );
        // Every contract list must carry at least one entry.
        let non_empty_lists = [
            (
                !target.required_evidence_types.is_empty(),
                "required_evidence_types",
            ),
            (
                !target.required_workflow_step_types.is_empty(),
                "required_workflow_step_types",
            ),
            (!target.required_gate_names.is_empty(), "required_gate_names"),
            (!target.acceptance_checklist.is_empty(), "acceptance_checklist"),
            (!target.failure_taxonomy.is_empty(), "failure_taxonomy"),
        ];
        for (non_empty, field) in non_empty_lists {
            assert!(non_empty, "{} should not be empty for {}", field, target.id);
        }
    }
}
/// End-to-end check: regenerating each P0 fixture must reproduce the
/// hand-curated canonical SceneIr (archetype, bootstrap target, required
/// workflow steps / gates / evidence, and — for the paginated-enrichment
/// target — the G3 pagination, join, merge and export contracts).
///
/// Fix vs. previous version: generation-report.json was read and parsed from
/// disk twice and the scene id was recomputed; the file is now read once and
/// both parses (typed `SceneIr` and raw `Value`) share the same string.
#[test]
fn generated_p0_fixtures_align_with_canonical_answers() {
    let manifest = load_manifest();
    for target in manifest.targets {
        let output_root = temp_workspace(&format!("sgclaw-canonical-{}", target.id));
        let scene_id = scene_id_from_target(&target.id);
        let scene_name = scene_name_from_target(&target.id);
        generate_scene_package(GenerateSceneRequest {
            source_dir: PathBuf::from(&target.fixture_dir),
            scene_id: scene_id.clone(),
            scene_name,
            scene_kind: None,
            target_url: None,
            output_root: output_root.clone(),
            lessons_path: None,
            scene_info_json: None,
            scene_ir_json: None,
        })
        .unwrap_or_else(|err| panic!("{} failed to generate: {}", target.id, err));
        let generated_dir = output_root.join("skills").join(&scene_id);
        // Read the report once; parse it both as typed IR and as raw JSON
        // (the raw view is only used to assert `readiness` is serialized).
        let report_raw =
            fs::read_to_string(generated_dir.join("references/generation-report.json")).unwrap();
        let generated_report: SceneIr = serde_json::from_str(&report_raw).unwrap();
        let canonical: SceneIr =
            serde_json::from_str(&fs::read_to_string(&target.canonical_scene_ir).unwrap()).unwrap();
        assert_eq!(
            generated_report.workflow_archetype().as_str(),
            canonical.workflow_archetype().as_str(),
            "archetype mismatch for {}",
            target.id
        );
        assert_eq!(
            generated_report.bootstrap.expected_domain, canonical.bootstrap.expected_domain,
            "expectedDomain mismatch for {}",
            target.id
        );
        // Generated URL may carry a path suffix; prefix match is sufficient.
        assert!(
            generated_report
                .bootstrap
                .target_url
                .starts_with(&canonical.bootstrap.target_url),
            "targetUrl mismatch for {}: {} vs {}",
            target.id,
            generated_report.bootstrap.target_url,
            canonical.bootstrap.target_url
        );
        let generated_step_types = generated_report
            .workflow_steps
            .iter()
            .map(|step| step.step_type.clone())
            .collect::<Vec<_>>();
        for required in &target.required_workflow_step_types {
            assert!(
                generated_step_types.iter().any(|step| step == required),
                "missing workflow step {} for {}",
                required,
                target.id
            );
        }
        let generated_gate_names = generated_report
            .readiness
            .gates
            .iter()
            .map(|gate| gate.name.clone())
            .collect::<Vec<_>>();
        for required in &target.required_gate_names {
            assert!(
                generated_gate_names.iter().any(|gate| gate == required),
                "missing readiness gate {} for {}",
                required,
                target.id
            );
        }
        let generated_evidence_types = generated_report
            .evidence
            .iter()
            .map(|item| item.evidence_type.clone())
            .collect::<Vec<_>>();
        for required in &target.required_evidence_types {
            assert!(
                generated_evidence_types.iter().any(|kind| kind == required),
                "missing evidence type {} for {}",
                required,
                target.id
            );
        }
        let generated_json: Value = serde_json::from_str(&report_raw).unwrap();
        assert!(
            generated_json.get("readiness").is_some(),
            "generation-report.json should include readiness for {}",
            target.id
        );
        // G3-specific contracts only apply to the paginated-enrichment target.
        if target.id == "p0-3-paginated-enrichment" {
            assert_eq!(
                generated_report
                    .main_request
                    .as_ref()
                    .map(|request| request.response_path.as_str()),
                canonical
                    .main_request
                    .as_ref()
                    .map(|request| request.response_path.as_str()),
                "g3 main request response path mismatch for {}",
                target.id
            );
            assert_eq!(
                generated_report
                    .pagination_plan
                    .as_ref()
                    .map(|plan| plan.page_field.as_str()),
                canonical
                    .pagination_plan
                    .as_ref()
                    .map(|plan| plan.page_field.as_str()),
                "g3 page field mismatch for {}",
                target.id
            );
            assert_eq!(
                generated_report
                    .pagination_plan
                    .as_ref()
                    .map(|plan| plan.termination_rule.as_str()),
                canonical
                    .pagination_plan
                    .as_ref()
                    .map(|plan| plan.termination_rule.as_str()),
                "g3 termination rule mismatch for {}",
                target.id
            );
            assert_eq!(
                generated_report.join_keys, canonical.join_keys,
                "g3 join keys mismatch for {}",
                target.id
            );
            assert_eq!(
                generated_report.merge_or_dedupe_rules, canonical.merge_or_dedupe_rules,
                "g3 merge/dedupe rules mismatch for {}",
                target.id
            );
            assert_eq!(
                generated_report
                    .export_plan
                    .as_ref()
                    .and_then(|plan| plan.entry.as_deref()),
                canonical
                    .export_plan
                    .as_ref()
                    .and_then(|plan| plan.entry.as_deref()),
                "g3 export entry mismatch for {}",
                target.id
            );
        }
    }
}
/// Parse the checked-in P0 canonical manifest fixture; panics if the fixture
/// is missing or malformed (this is test-only code).
fn load_manifest() -> CanonicalManifest {
    let manifest_path =
        "tests/fixtures/generated_scene/p0_canonical_answers/p0-canonical-manifest.json";
    let raw = fs::read_to_string(manifest_path).unwrap();
    serde_json::from_str(&raw).unwrap()
}
/// Map a manifest target id to the sceneId used for the generated package;
/// unknown target ids pass through unchanged.
fn scene_id_from_target(target_id: &str) -> String {
    let scene_id = match target_id {
        "p0-1-tq-lineloss-report" => "tq-lineloss-report",
        "p0-2-single-request-table" => "single-request-report",
        "p0-3-paginated-enrichment" => "paginated-enrichment-report",
        other => other,
    };
    scene_id.to_string()
}
/// Map a manifest target id to its human-readable scene name; unknown target
/// ids pass through unchanged.
fn scene_name_from_target(target_id: &str) -> String {
    let scene_name = match target_id {
        "p0-1-tq-lineloss-report" => "台区线损月周累计统计分析",
        "p0-2-single-request-table" => "单请求通用报表",
        "p0-3-paginated-enrichment" => "分页补数明细报表",
        other => other,
    };
    scene_name.to_string()
}
/// Create a unique scratch directory under the system temp dir, using a
/// nanosecond timestamp to avoid collisions between test runs.
fn temp_workspace(prefix: &str) -> PathBuf {
    let stamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_nanos();
    let workspace = std::env::temp_dir().join(format!("{prefix}-{stamp}"));
    fs::create_dir_all(&workspace).unwrap();
    workspace
}

View File

@@ -1,318 +0,0 @@
const assert = require("assert");
const fs = require("fs");
const os = require("os");
const path = require("path");
const {
buildAnalyzePrompt,
extractJsonFromResponse,
isRetryableLlmError,
repairCommonJsonIssues,
} = require("../frontend/scene-generator/llm-client");
const {
buildDeterministicSceneIr,
readDirectory,
validateSceneIdCandidate,
} = require("../frontend/scene-generator/generator-runner");
const {
getGenerationBlockers,
mergeSceneIr,
sanitizeSceneIr,
} = require("../frontend/scene-generator/server");
// The analyze prompt must surface the source dir, the file tree, and the
// names of scripts found in the scenario directory.
function testBuildAnalyzePromptIncludesFileContents() {
  const dirContents = {
    "scene.toml": '[scene]\nid = "test-scene"',
    scripts: { "collect_test.js": "async function main() {}" },
    tree: "├── scene.toml\n└── collect_test.js",
  };
  const prompt = buildAnalyzePrompt("D:/test/scenario", dirContents);
  const expectations = [
    ["scene.toml", "should include scene.toml"],
    ["collect_test.js", "should include script name"],
    ["D:/test/scenario", "should include sourceDir"],
  ];
  for (const [needle, message] of expectations) {
    assert.ok(prompt.includes(needle), message);
  }
  console.log("PASS: testBuildAnalyzePromptIncludesFileContents");
}
// extractJsonFromResponse must accept fenced-markdown, plain, and
// prose-prefixed JSON payloads and yield the same object for each.
function testExtractJsonFromResponse() {
  const expected = { sceneId: "test", sceneName: "测试" };
  const variants = [
    '```json\n{"sceneId": "test", "sceneName": "测试"}\n```',
    '{"sceneId": "test", "sceneName": "测试"}',
    'Here is the result:\n{"sceneId": "test", "sceneName": "测试"}',
  ];
  for (const variant of variants) {
    assert.deepStrictEqual(extractJsonFromResponse(variant), expected);
  }
  console.log("PASS: testExtractJsonFromResponse");
}
// A missing comma between adjacent array elements must be auto-repaired
// before parsing, preserving both elements.
function testExtractJsonFromResponseRepairsMissingArrayComma() {
  const brokenPayload =
    '{"sceneId":"marketing-zero-consumer-report","evidence":[{"kind":"a"} {"kind":"b"}],"sceneName":"营销"}';
  const parsed = extractJsonFromResponse(brokenPayload);
  assert.strictEqual(parsed.sceneId, "marketing-zero-consumer-report");
  assert.strictEqual(Array.isArray(parsed.evidence), true);
  assert.strictEqual(parsed.evidence.length, 2);
  console.log("PASS: testExtractJsonFromResponseRepairsMissingArrayComma");
}
// Trailing commas inside objects and arrays must be stripped so the result
// parses with plain JSON.parse.
function testRepairCommonJsonIssuesRemovesTrailingCommas() {
  const brokenPayload =
    '{\n  "sceneId": "marketing-zero-consumer-report",\n  "evidence": [{"kind":"a",},],\n}';
  const parsed = JSON.parse(repairCommonJsonIssues(brokenPayload));
  assert.strictEqual(parsed.sceneId, "marketing-zero-consumer-report");
  assert.strictEqual(parsed.evidence.length, 1);
  console.log("PASS: testRepairCommonJsonIssuesRemovesTrailingCommas");
}
// Timeouts and upstream 5xx errors are retryable; schema violations are not.
function testIsRetryableLlmErrorRecognizesTimeouts() {
  const cases = [
    ["LLM API request timed out", true],
    ["LLM API error 503: upstream unavailable", true],
    ["LLM response missing sceneId", false],
  ];
  for (const [message, retryable] of cases) {
    assert.strictEqual(isRetryableLlmError(new Error(message)), retryable);
  }
  console.log("PASS: testIsRetryableLlmErrorRecognizesTimeouts");
}
// A CJK-heavy directory name must map to deterministic keyword naming rather
// than a degenerate numeric slug like "2-0".
function testDeterministicNamingAvoidsDegenerateSlugFallback() {
  const sourceDir = "D:/tmp/营销2.0零度户报表数据生成";
  const sceneIr = buildDeterministicSceneIr({ deterministicSignals: {} }, sourceDir);
  assert.strictEqual(sceneIr.sceneId, "marketing-zero-consumer-report");
  assert.strictEqual(sceneIr.sceneIdDiagnostics.valid, true);
  assert.strictEqual(sceneIr.sceneIdDiagnostics.candidateSource, "deterministic_keywords");
  console.log("PASS: testDeterministicNamingAvoidsDegenerateSlugFallback");
}
// Low-entropy candidates such as "2-0" must be rejected with a typed reason.
function testValidateSceneIdCandidateRejectsLowEntropyIds() {
  const verdict = validateSceneIdCandidate("2-0", {
    sceneName: "营销2.0零度户报表数据生成",
    sourceDir: "D:/tmp/营销2.0零度户报表数据生成",
  });
  assert.strictEqual(verdict.valid, false);
  const acceptableReasons = [
    "numeric_only_scene_id",
    "numeric_dominant_scene_id",
    "scene_id_too_short",
  ];
  assert.ok(
    acceptableReasons.includes(verdict.reason),
    `unexpected invalid reason: ${verdict.reason}`
  );
  console.log("PASS: testValidateSceneIdCandidateRejectsLowEntropyIds");
}
// When merging the deterministic and LLM SceneIr views, a valid
// deterministic sceneId must win over an invalid LLM-proposed one, and the
// conflict must be reported through the warnings channel.
function testMergeSceneIrPrefersValidSceneIdOverInvalidLlmValue() {
  // Deterministic side: valid keyword-derived sceneId.
  const deterministic = sanitizeSceneIr({
    sceneId: "marketing-zero-consumer-report",
    sceneIdDiagnostics: {
      candidateSource: "deterministic_keywords",
      valid: true,
      candidates: [{ value: "marketing-zero-consumer-report", source: "deterministic_keywords", valid: true }],
    },
    sceneName: "营销2.0零度户报表数据生成",
    bootstrap: { expectedDomain: "yx.gs.sgcc.com.cn", targetUrl: "http://yx.gs.sgcc.com.cn" },
    workflowSteps: [{ type: "request" }],
    apiEndpoints: [{ name: "userList", url: "http://yx.gs.sgcc.com.cn/list", method: "POST" }],
    validationHints: { runtimeCompatible: true },
    readiness: { level: "B" },
  });
  // LLM side: same scene, but a degenerate sceneId flagged as invalid.
  const llm = sanitizeSceneIr({
    sceneId: "2-0",
    sceneIdDiagnostics: {
      candidateSource: "llm_semantic",
      valid: false,
      invalidReason: "numeric_dominant_scene_id",
      candidates: [{ value: "2-0", source: "llm_semantic", valid: false, reason: "numeric_dominant_scene_id" }],
    },
    sceneName: "营销2.0零度户报表数据生成",
    bootstrap: { expectedDomain: "yx.gs.sgcc.com.cn", targetUrl: "http://yx.gs.sgcc.com.cn" },
    workflowSteps: [{ type: "request" }],
    apiEndpoints: [{ name: "userList", url: "http://yx.gs.sgcc.com.cn/list", method: "POST" }],
    validationHints: { runtimeCompatible: true },
    readiness: { level: "B" },
  });
  const warnings = [];
  const merged = mergeSceneIr(deterministic, llm, warnings);
  // The valid deterministic id survives and the conflict is surfaced.
  assert.strictEqual(merged.sceneId, "marketing-zero-consumer-report");
  assert.strictEqual(merged.sceneIdDiagnostics.valid, true);
  assert.ok(warnings.some((item) => item.includes("SceneId conflict")));
  console.log("PASS: testMergeSceneIrPrefersValidSceneIdOverInvalidLlmValue");
}
// An invalid sceneId must surface both a generation blocker and the analysis
// blocker carrying the diagnostic reason.
function testGetGenerationBlockersRejectsInvalidSceneId() {
  const blockers = getGenerationBlockers({
    sceneIr: {
      sceneIdDiagnostics: {
        valid: false,
        invalidReason: "numeric_dominant_scene_id",
      },
    },
    sceneId: "2-0",
    sceneName: "营销2.0零度户报表数据生成",
    sourceDir: "D:/tmp/营销2.0零度户报表数据生成",
  });
  const hasInvalidSceneId = blockers.some((item) => item.startsWith("invalid_scene_id:"));
  assert.ok(
    hasInvalidSceneId,
    `expected invalid_scene_id blocker, got ${JSON.stringify(blockers)}`
  );
  assert.ok(blockers.includes("analysis_invalid_scene_id:numeric_dominant_scene_id"));
  console.log("PASS: testGetGenerationBlockersRejectsInvalidSceneId");
}
// When a fixture references both a business-domain entry and a localhost
// export endpoint, bootstrap resolution must pick the business entry.
function testBootstrapPrefersBusinessEntryOverLocalhostExport() {
  const workspaceRoot = fs.mkdtempSync(path.join(os.tmpdir(), "sgclaw-bootstrap-"));
  const fixtureDir = path.join(workspaceRoot, "bootstrap");
  fs.mkdirSync(fixtureDir);
  const indexHtml = `<!doctype html><html><body><script>
const sourceUrl = "http://yx.gs.sgcc.com.cn";
const apiUrl = "http://yxgateway.gs.sgcc.com.cn/api";
function getRows() {
return $.ajax({ url: "http://yxgateway.gs.sgcc.com.cn/marketing/userList", type: "POST" });
}
function exportExcel() {
return $.ajax({ url: "http://localhost:13313/SurfaceServices/personalBread/export/faultDetailsExportXLSX", type: "POST" });
}
</script></body></html>`;
  fs.writeFileSync(path.join(fixtureDir, "index.html"), indexHtml, "utf8");
  const sceneIr = readDirectory(fixtureDir).deterministic;
  assert.strictEqual(sceneIr.bootstrap.expectedDomain, "yx.gs.sgcc.com.cn");
  assert.strictEqual(sceneIr.bootstrap.targetUrl, "http://yx.gs.sgcc.com.cn/");
  console.log("PASS: testBootstrapPrefersBusinessEntryOverLocalhostExport");
}
// With only a localhost endpoint available, bootstrap must stay unresolved
// and the readiness report must flag the missing bootstrap target.
function testBootstrapBecomesUnresolvedWhenOnlyLocalhostExists() {
  const workspaceRoot = fs.mkdtempSync(path.join(os.tmpdir(), "sgclaw-bootstrap-local-"));
  const fixtureDir = path.join(workspaceRoot, "bootstrap-local");
  fs.mkdirSync(fixtureDir);
  const indexHtml = `<!doctype html><html><body><script>
function exportExcel() {
return $.ajax({ url: "http://localhost:13313/SurfaceServices/personalBread/export/faultDetailsExportXLSX", type: "POST" });
}
</script></body></html>`;
  fs.writeFileSync(path.join(fixtureDir, "index.html"), indexHtml, "utf8");
  const sceneIr = readDirectory(fixtureDir).deterministic;
  assert.strictEqual(sceneIr.bootstrap.expectedDomain, "");
  assert.strictEqual(sceneIr.bootstrap.targetUrl, "");
  assert.ok(sceneIr.readiness.missingPieces.includes("bootstrap_target"));
  console.log("PASS: testBootstrapBecomesUnresolvedWhenOnlyLocalhostExists");
}
// Pagination + enrichment + post-processing signals must classify as
// paginated_enrichment even when generic mode-like noise is present.
function testWorkflowClassificationPrefersPaginatedOverGenericModeNoise() {
  const workspaceRoot = fs.mkdtempSync(path.join(os.tmpdir(), "sgclaw-workflow-"));
  const fixtureDir = path.join(workspaceRoot, "workflow");
  fs.mkdirSync(fixtureDir);
  const indexHtml = `<!doctype html><html><body><script>
const type = "list";
const status = "ready";
async function loadData(page, pageSize) {
return $.ajax({ url: "http://yx.gs.sgcc.com.cn/marketing/userList", type: "POST", data: JSON.stringify({ page, pageSize }) });
}
async function getChargeInfo(custNo) {
return $.ajax({ url: "http://yx.gs.sgcc.com.cn/marketing/userCharges", type: "POST", data: JSON.stringify({ custNo }) });
}
function exportExcel(rows) { return rows.length; }
function run(rows) {
return rows.filter((row) => row.charge !== 0);
}
</script></body></html>`;
  fs.writeFileSync(path.join(fixtureDir, "index.html"), indexHtml, "utf8");
  const sceneIr = readDirectory(fixtureDir).deterministic;
  assert.strictEqual(sceneIr.workflowArchetype, "paginated_enrichment");
  assert.ok(sceneIr.workflowEvidence.paginationFields.length > 0);
  assert.ok(sceneIr.workflowEvidence.secondaryRequestEntries.length > 0);
  assert.ok(sceneIr.workflowEvidence.postProcessSteps.length > 0);
  console.log("PASS: testWorkflowClassificationPrefersPaginatedOverGenericModeNoise");
}
// Without any post-process step the classifier must not claim
// paginated_enrichment, even when pagination and enrichment requests exist.
function testWorkflowClassificationDoesNotEmitPaginatedWithoutPostProcess() {
  const workspaceRoot = fs.mkdtempSync(path.join(os.tmpdir(), "sgclaw-workflow-no-post-"));
  const fixtureDir = path.join(workspaceRoot, "workflow-no-post");
  fs.mkdirSync(fixtureDir);
  const indexHtml = `<!doctype html><html><body><script>
async function loadData(page, pageSize) {
return $.ajax({ url: "http://yx.gs.sgcc.com.cn/marketing/userList", type: "POST", data: JSON.stringify({ page, pageSize }) });
}
async function getChargeInfo(custNo) {
return $.ajax({ url: "http://yx.gs.sgcc.com.cn/marketing/userCharges", type: "POST", data: JSON.stringify({ custNo }) });
}
</script></body></html>`;
  fs.writeFileSync(path.join(fixtureDir, "index.html"), indexHtml, "utf8");
  const sceneIr = readDirectory(fixtureDir).deterministic;
  assert.notStrictEqual(sceneIr.workflowArchetype, "paginated_enrichment");
  console.log("PASS: testWorkflowClassificationDoesNotEmitPaginatedWithoutPostProcess");
}
// Every failed readiness gate must surface as a gate_failed:<name>:<reason>
// blocker in the generation-blocker list.
function testGenerationBlockersIncludeFailedReadinessGates() {
  const failedGates = [
    { name: "bootstrap_resolved", passed: false, reason: "bootstrap_target" },
    { name: "request_contract_complete", passed: false, reason: "request_endpoint" },
    { name: "response_contract_complete", passed: false, reason: "response_path" },
    { name: "workflow_contract_complete", passed: false, reason: "post_process" },
    { name: "workflow_complete_for_archetype", passed: false, reason: "post_process" },
  ];
  const blockers = getGenerationBlockers({
    sceneIr: { readiness: { gates: failedGates } },
    sceneId: "marketing-zero-consumer-report",
    sceneName: "营销2.0零度户报表数据生成",
    sourceDir: "D:/tmp/营销2.0零度户报表数据生成",
  });
  for (const gate of failedGates) {
    assert.ok(blockers.includes(`gate_failed:${gate.name}:${gate.reason}`));
  }
  console.log("PASS: testGenerationBlockersIncludeFailedReadinessGates");
}
// Run every test in declaration order; any assertion failure aborts the run.
const tests = [
  testBuildAnalyzePromptIncludesFileContents,
  testExtractJsonFromResponse,
  testExtractJsonFromResponseRepairsMissingArrayComma,
  testRepairCommonJsonIssuesRemovesTrailingCommas,
  testIsRetryableLlmErrorRecognizesTimeouts,
  testDeterministicNamingAvoidsDegenerateSlugFallback,
  testValidateSceneIdCandidateRejectsLowEntropyIds,
  testMergeSceneIrPrefersValidSceneIdOverInvalidLlmValue,
  testGetGenerationBlockersRejectsInvalidSceneId,
  testBootstrapPrefersBusinessEntryOverLocalhostExport,
  testBootstrapBecomesUnresolvedWhenOnlyLocalhostExists,
  testWorkflowClassificationPrefersPaginatedOverGenericModeNoise,
  testWorkflowClassificationDoesNotEmitPaginatedWithoutPostProcess,
  testGenerationBlockersIncludeFailedReadinessGates,
];
for (const run of tests) {
  run();
}

View File

@@ -1,329 +0,0 @@
use std::fs;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use sgclaw::generated_scene::analyzer::SceneKind;
use sgclaw::generated_scene::generator::{generate_scene_package, GenerateSceneRequest};
use sgclaw::generated_scene::ir::{
ApiEndpointIr, ModeConditionIr, ModeIr, NormalizeRulesIr, SceneIdDiagnosticsIr, SceneIr,
WorkflowArchetype,
};
/// Build a minimal ModeIr fixture: a POST endpoint selected when
/// `period_mode == name`, one ID column, and a validate_required normalize
/// rule, so generator tests can exercise per-mode routing.
fn make_test_mode(
    name: &str,
    url: &str,
    content_type: Option<&str>,
    response_path: &str,
) -> ModeIr {
    ModeIr {
        name: name.to_string(),
        label: Some(name.to_string()),
        // Mode is chosen when the request's period_mode equals `name`.
        condition: Some(ModeConditionIr {
            field: "period_mode".to_string(),
            operator: "equals".to_string(),
            value: serde_json::Value::String(name.to_string()),
        }),
        api_endpoint: Some(ApiEndpointIr {
            name: format!("{}_endpoint", name),
            url: url.to_string(),
            method: "POST".to_string(),
            content_type: content_type.map(|s| s.to_string()),
            description: None,
        }),
        column_defs: vec![("id".to_string(), "ID".to_string())],
        request_template: serde_json::json!({ "mode": name }),
        request_field_mappings: Vec::new(),
        // Rows must carry a non-null `id` field.
        normalize_rules: Some(NormalizeRulesIr {
            rules_type: "validate_required".to_string(),
            required_fields: vec!["id".to_string()],
            filter_null: true,
        }),
        response_path: response_path.to_string(),
    }
}
/// Assemble a SceneIr fixture around the given modes.  One mode implies a
/// SingleRequestTable archetype, several modes imply MultiModeRequest; the
/// mode endpoints are mirrored into `api_endpoints`, and everything else is
/// defaulted/empty so individual tests override only what they assert on.
fn make_test_scene_ir(modes: Vec<ModeIr>) -> SceneIr {
    let is_multi = modes.len() > 1;
    let api_endpoints = modes
        .iter()
        .filter_map(|mode| mode.api_endpoint.clone())
        .collect::<Vec<_>>();
    SceneIr {
        scene_id: "test-scene".to_string(),
        scene_id_diagnostics: SceneIdDiagnosticsIr::default(),
        scene_name: "Test Scene".to_string(),
        scene_kind: "report_collection".to_string(),
        // Archetype follows mode count; tests may overwrite it afterwards.
        workflow_archetype: Some(if is_multi {
            WorkflowArchetype::MultiModeRequest
        } else {
            WorkflowArchetype::SingleRequestTable
        }),
        bootstrap: Default::default(),
        params: Vec::new(),
        modes,
        default_mode: Some("month".to_string()),
        mode_switch_field: Some("period_mode".to_string()),
        // Minimal request + transform pair so workflow-contract checks pass.
        workflow_steps: vec![
            sgclaw::generated_scene::ir::WorkflowStepIr {
                step_type: "request".to_string(),
                description: Some("select mode and query corresponding endpoint".to_string()),
                ..Default::default()
            },
            sgclaw::generated_scene::ir::WorkflowStepIr {
                step_type: "transform".to_string(),
                description: Some("normalize mode-specific table rows".to_string()),
                ..Default::default()
            },
        ],
        workflow_evidence: Default::default(),
        main_request: None,
        pagination_plan: None,
        enrichment_requests: Vec::new(),
        join_keys: Vec::new(),
        merge_or_dedupe_rules: Vec::new(),
        export_plan: None,
        merge_plan: None,
        request_template: serde_json::Value::Null,
        response_path: "".to_string(),
        normalize_rules: None,
        artifact_contract: Default::default(),
        validation_hints: Default::default(),
        evidence: Vec::new(),
        readiness: Default::default(),
        api_endpoints,
        runtime_dependencies: Vec::new(),
        static_params: Default::default(),
        column_defs: Vec::new(),
        confidence: 0.0,
        uncertainties: Vec::new(),
        monitoring_action_workflow: None,
    }
}
/// Create a unique scratch directory under the system temp dir; a nanosecond
/// timestamp in the name keeps concurrent test runs from colliding.
fn temp_workspace(prefix: &str) -> PathBuf {
    let stamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_nanos();
    let workspace = std::env::temp_dir().join(format!("{prefix}-{stamp}"));
    fs::create_dir_all(&workspace).unwrap();
    workspace
}
/// Test 1: Single request table uses dedicated simple-request path instead of MODES fallback.
///
/// Fix vs. previous version: the IR was bound immutably and then rebound with
/// `let mut scene_ir = scene_ir;`; it is now built directly as mutable.
#[test]
fn test_single_request_table_uses_dedicated_path() {
    let output_root = temp_workspace("sgclaw-single-mode-test");
    // One-mode IR with the archetype and endpoint pinned explicitly, so the
    // compile path must stay on the dedicated single-request route.
    let mut scene_ir = make_test_scene_ir(vec![make_test_mode(
        "month",
        "http://example.com/api/month",
        None,
        "data",
    )]);
    scene_ir.workflow_archetype = Some(WorkflowArchetype::SingleRequestTable);
    scene_ir.api_endpoints = vec![ApiEndpointIr {
        name: "default_endpoint".to_string(),
        url: "http://example.com/api/data".to_string(),
        method: "POST".to_string(),
        content_type: None,
        description: None,
    }];
    generate_scene_package(GenerateSceneRequest {
        source_dir: PathBuf::from("tests/fixtures/generated_scene/report_collection"),
        scene_id: "single-mode-scene".to_string(),
        scene_name: "Single Mode Scene".to_string(),
        scene_kind: Some(SceneKind::ReportCollection),
        target_url: None,
        output_root: output_root.clone(),
        lessons_path: None,
        scene_info_json: None,
        scene_ir_json: Some(scene_ir),
    })
    .unwrap();
    let skill_root = output_root.join("skills/single-mode-scene");
    let generated_script =
        fs::read_to_string(skill_root.join("scripts/collect_single_mode_scene.js")).unwrap();
    assert!(
        generated_script.contains("const REQUEST_TEMPLATE ="),
        "Generated JS should contain REQUEST_TEMPLATE on the dedicated single-request path"
    );
    assert!(
        !generated_script.contains("const MODES ="),
        "Generated JS should no longer route SingleRequestTable through MODES fallback"
    );
}
/// Test 2: Multi-mode generates mode routing (detectMode and MODES.find).
/// Two modes in the IR must produce per-mode selection logic in the
/// generated collector script.
#[test]
fn test_multi_mode_generates_mode_routing() {
    let output_root = temp_workspace("sgclaw-multi-mode-test");
    let scene_ir = make_test_scene_ir(vec![
        make_test_mode("month", "http://example.com/api/month", None, "data"),
        make_test_mode("week", "http://example.com/api/week", None, "data"),
    ]);
    generate_scene_package(GenerateSceneRequest {
        source_dir: PathBuf::from("tests/fixtures/generated_scene/report_collection"),
        scene_id: "multi-mode-scene".to_string(),
        scene_name: "Multi Mode Scene".to_string(),
        scene_kind: Some(SceneKind::ReportCollection),
        target_url: None,
        output_root: output_root.clone(),
        lessons_path: None,
        scene_info_json: None,
        scene_ir_json: Some(scene_ir),
    })
    .unwrap();
    let script_path =
        output_root.join("skills/multi-mode-scene/scripts/collect_multi_mode_scene.js");
    let generated_script = fs::read_to_string(script_path).unwrap();
    let routing_markers = [
        (
            "function detectMode",
            "Generated JS should contain 'detectMode' function for multi-mode routing",
        ),
        (
            "MODES.find",
            "Generated JS should contain 'MODES.find' for mode selection",
        ),
    ];
    for (needle, message) in routing_markers {
        assert!(generated_script.contains(needle), "{}", message);
    }
}
/// Test 3: Form-urlencoded request body uses Object.entries().join('&') not JSON.stringify.
/// buildModeRequest in the generated script must branch on the form-urlencoded
/// content type and encode key/value pairs manually.
#[test]
fn test_form_urlencoded_request_body() {
    let output_root = temp_workspace("sgclaw-form-urlencoded-test");
    let mut scene_ir = make_test_scene_ir(vec![make_test_mode(
        "month",
        "http://example.com/api/month",
        Some("application/x-www-form-urlencoded"),
        "data",
    )]);
    scene_ir.workflow_archetype = Some(WorkflowArchetype::MultiModeRequest);
    generate_scene_package(GenerateSceneRequest {
        source_dir: PathBuf::from("tests/fixtures/generated_scene/report_collection"),
        scene_id: "form-urlencoded-scene".to_string(),
        scene_name: "Form URL Encoded Scene".to_string(),
        scene_kind: Some(SceneKind::ReportCollection),
        target_url: None,
        output_root: output_root.clone(),
        lessons_path: None,
        scene_info_json: None,
        scene_ir_json: Some(scene_ir),
    })
    .unwrap();
    let script_path =
        output_root.join("skills/form-urlencoded-scene/scripts/collect_form_urlencoded_scene.js");
    let generated_script = fs::read_to_string(script_path).unwrap();
    let encoding_markers = [
        (
            "Object.entries(requestBody)",
            "Generated JS should use Object.entries for form-urlencoded body encoding",
        ),
        (
            ".join('&')",
            "Generated JS should join form-urlencoded entries with '&'",
        ),
        (
            "application/x-www-form-urlencoded",
            "Generated JS should reference form-urlencoded content type",
        ),
    ];
    for (needle, message) in encoding_markers {
        assert!(generated_script.contains(needle), "{}", message);
    }
}
/// Test 4: Response path extraction uses mode.responsePath in the template.
/// The multi-mode collector must extract rows per mode via safeGet with the
/// mode's own responsePath.
#[test]
fn test_response_path_extraction_in_template() {
    let output_root = temp_workspace("sgclaw-response-path-test");
    let mut scene_ir = make_test_scene_ir(vec![make_test_mode(
        "month",
        "http://example.com/api/month",
        None,
        "data.list",
    )]);
    scene_ir.workflow_archetype = Some(WorkflowArchetype::MultiModeRequest);
    generate_scene_package(GenerateSceneRequest {
        source_dir: PathBuf::from("tests/fixtures/generated_scene/report_collection"),
        scene_id: "response-path-scene".to_string(),
        scene_name: "Response Path Scene".to_string(),
        scene_kind: Some(SceneKind::ReportCollection),
        target_url: None,
        output_root: output_root.clone(),
        lessons_path: None,
        scene_info_json: None,
        scene_ir_json: Some(scene_ir),
    })
    .unwrap();
    let script_path =
        output_root.join("skills/response-path-scene/scripts/collect_response_path_scene.js");
    let generated_script = fs::read_to_string(script_path).unwrap();
    let extraction_markers = [
        (
            "mode.responsePath",
            "Generated JS should use 'mode.responsePath' for per-mode response extraction",
        ),
        (
            "safeGet(raw, mode.responsePath",
            "Generated JS should call safeGet with mode.responsePath",
        ),
    ];
    for (needle, message) in extraction_markers {
        assert!(generated_script.contains(needle), "{}", message);
    }
}
/// Test 5: processData flag in $.ajax call with correct conditional.
/// The generated $.ajax call must carry a processData flag that is disabled
/// exactly for form-urlencoded content types.
#[test]
fn test_process_data_flag_in_ajax() {
    let output_root = temp_workspace("sgclaw-process-data-test");
    let scene_ir = make_test_scene_ir(vec![make_test_mode(
        "month",
        "http://example.com/api/month",
        Some("application/x-www-form-urlencoded"),
        "data",
    )]);
    generate_scene_package(GenerateSceneRequest {
        source_dir: PathBuf::from("tests/fixtures/generated_scene/report_collection"),
        scene_id: "process-data-scene".to_string(),
        scene_name: "Process Data Scene".to_string(),
        scene_kind: Some(SceneKind::ReportCollection),
        target_url: None,
        output_root: output_root.clone(),
        lessons_path: None,
        scene_info_json: None,
        scene_ir_json: Some(scene_ir),
    })
    .unwrap();
    let script_path =
        output_root.join("skills/process-data-scene/scripts/collect_process_data_scene.js");
    let generated_script = fs::read_to_string(script_path).unwrap();
    assert!(
        generated_script.contains("processData:"),
        "Generated JS $.ajax call should contain 'processData:' flag"
    );
    assert!(
        generated_script.contains(
            "processData: request.headers['Content-Type'] !== 'application/x-www-form-urlencoded'"
        ),
        "Generated JS should set processData to false for form-urlencoded content type"
    );
}

View File

@@ -1,455 +0,0 @@
use std::fs;
use std::path::{Path, PathBuf};
use std::time::{SystemTime, UNIX_EPOCH};
use serde::Deserialize;
use sgclaw::generated_scene::generator::{generate_scene_package, GenerateSceneRequest};
use sgclaw::generated_scene::ir::SceneIr;
/// Top-level shape of the P1 scene-family manifest fixture.
#[derive(Debug, Deserialize)]
struct P1FamilyManifest {
    families: Vec<P1FamilySpec>,
}
/// One P1 scene family: a representative fixture, its expected generation
/// contract, and optional expansion / batch-expansion fixtures.
#[derive(Debug, Deserialize)]
struct P1FamilySpec {
    id: String,
    // Family group label; tests accept only G1/G2/G3/G6/G7/G8.
    group: String,
    #[serde(rename = "familyName")]
    family_name: String,
    // Representative scene used to prove the family migration works.
    #[serde(rename = "representativeFixtureDir")]
    representative_fixture_dir: String,
    #[serde(rename = "representativeSceneId")]
    representative_scene_id: String,
    #[serde(rename = "representativeSceneName")]
    representative_scene_name: String,
    #[serde(rename = "expectedArchetype")]
    expected_archetype: String,
    #[serde(rename = "requiredGateNames")]
    required_gate_names: Vec<String>,
    #[serde(rename = "requiredEvidenceTypes")]
    required_evidence_types: Vec<String>,
    // Optional second fixture proving the family generalizes; the three
    // expansion fields are expected to be present together.
    #[serde(rename = "expansionFixtureDir", default)]
    expansion_fixture_dir: Option<String>,
    #[serde(rename = "expansionSceneId", default)]
    expansion_scene_id: Option<String>,
    #[serde(rename = "expansionSceneName", default)]
    expansion_scene_name: Option<String>,
    #[serde(rename = "expansionAssertions", default)]
    expansion_assertions: Option<ExpansionAssertions>,
    #[serde(rename = "batchCandidateAsset", default)]
    batch_candidate_asset: Option<String>,
    #[serde(rename = "batchExpansionFixtures", default)]
    batch_expansion_fixtures: Vec<BatchExpansionFixture>,
    #[serde(rename = "successRateSummary")]
    success_rate_summary: String,
    #[serde(rename = "failureTaxonomy")]
    failure_taxonomy: Vec<String>,
}
/// Optional per-expansion assertions; every field defaults to None so a
/// family may assert only the contracts relevant to its archetype.
#[derive(Debug, Deserialize, Default)]
struct ExpansionAssertions {
    #[serde(rename = "requiredDefaultMode", default)]
    required_default_mode: Option<String>,
    #[serde(rename = "expectedPaginationField", default)]
    expected_pagination_field: Option<String>,
    #[serde(rename = "requiredJoinKey", default)]
    required_join_key: Option<String>,
    #[serde(rename = "requiredAggregateRule", default)]
    required_aggregate_rule: Option<String>,
    #[serde(rename = "requiredMainRequest", default)]
    required_main_request: Option<String>,
    #[serde(rename = "requiredEnrichmentRequest", default)]
    required_enrichment_request: Option<String>,
    #[serde(rename = "requiredMergeJoinKey", default)]
    required_merge_join_key: Option<String>,
    #[serde(rename = "requiredMergeAggregateRule", default)]
    required_merge_aggregate_rule: Option<String>,
    #[serde(rename = "requiredOutputColumn", default)]
    required_output_column: Option<String>,
}
/// One batch-expansion fixture for a family, with its own assertion set.
#[derive(Debug, Deserialize)]
struct BatchExpansionFixture {
    #[serde(rename = "fixtureDir")]
    fixture_dir: String,
    #[serde(rename = "sceneId")]
    scene_id: String,
    #[serde(rename = "sceneName")]
    scene_name: String,
    assertions: ExpansionAssertions,
}
/// Sanity-check that all seven P1 family entries are actionable: known group,
/// existing fixture paths, and populated contract lists/summaries.
#[test]
fn p1_family_manifest_is_actionable() {
    let manifest = load_manifest();
    assert_eq!(manifest.families.len(), 7);
    for family in manifest.families {
        assert!(matches!(
            family.group.as_str(),
            "G1" | "G2" | "G3" | "G6" | "G7" | "G8"
        ));
        assert!(!family.family_name.trim().is_empty());
        assert!(Path::new(&family.representative_fixture_dir).exists());
        assert!(!family.expected_archetype.trim().is_empty());
        assert!(!family.required_gate_names.is_empty());
        assert!(!family.required_evidence_types.is_empty());
        assert!(!family.success_rate_summary.trim().is_empty());
        assert!(!family.failure_taxonomy.is_empty());
        // When an expansion fixture is declared, its id and name must come
        // along with it.
        if let Some(expansion_dir) = family.expansion_fixture_dir.as_deref() {
            assert!(Path::new(expansion_dir).exists());
            let expansion_id = family.expansion_scene_id.as_deref().unwrap_or_default();
            assert!(!expansion_id.is_empty());
            let expansion_name = family.expansion_scene_name.as_deref().unwrap_or_default();
            assert!(!expansion_name.is_empty());
        }
        if let Some(candidate_asset) = family.batch_candidate_asset.as_deref() {
            assert!(Path::new(candidate_asset).exists());
        }
        for fixture in &family.batch_expansion_fixtures {
            assert!(Path::new(&fixture.fixture_dir).exists());
            assert!(!fixture.scene_id.is_empty());
            assert!(!fixture.scene_name.is_empty());
        }
    }
}
/// For every family in the P1 manifest, regenerate the scene package from the
/// representative fixture (and, where declared, the expansion fixture and each
/// batch-expansion fixture) into a fresh temp workspace, then verify the
/// emitted `references/generation-report.json` against the manifest's
/// expectations: workflow archetype, required gates and evidence types,
/// readiness level A/B, and the optional per-family expansion semantics.
#[test]
fn representative_p1_family_migrations_are_reusable() {
    let manifest = load_manifest();
    for family in manifest.families {
        // --- Representative migration: generate into an isolated workspace.
        let output_root = temp_workspace(&format!("sgclaw-p1-family-{}", family.id));
        generate_scene_package(GenerateSceneRequest {
            source_dir: PathBuf::from(&family.representative_fixture_dir),
            scene_id: family.representative_scene_id.clone(),
            scene_name: family.representative_scene_name.clone(),
            scene_kind: None,
            target_url: None,
            output_root: output_root.clone(),
            lessons_path: None,
            scene_info_json: None,
            scene_ir_json: None,
        })
        .unwrap_or_else(|err| panic!("{} failed representative migration: {}", family.id, err));
        // Reports are emitted under skills/<scene_id>/references/.
        let generated_dir = output_root
            .join("skills")
            .join(&family.representative_scene_id);
        let generated_report: SceneIr = serde_json::from_str(
            &fs::read_to_string(generated_dir.join("references/generation-report.json")).unwrap(),
        )
        .unwrap();
        assert_eq!(
            generated_report.workflow_archetype().as_str(),
            family.expected_archetype,
            "expected archetype mismatch for {}",
            family.id
        );
        // Every manifest-required readiness gate must be present by name.
        for gate_name in &family.required_gate_names {
            assert!(
                generated_report
                    .readiness
                    .gates
                    .iter()
                    .any(|gate| gate.name == *gate_name),
                "missing gate {} for {}",
                gate_name,
                family.id
            );
        }
        // Every manifest-required evidence type must appear in the report.
        for evidence_type in &family.required_evidence_types {
            assert!(
                generated_report
                    .evidence
                    .iter()
                    .any(|item| item.evidence_type == *evidence_type),
                "missing evidence type {} for {}",
                evidence_type,
                family.id
            );
        }
        // Readiness levels A and B are both considered reusable.
        assert!(
            generated_report.readiness.level == "A" || generated_report.readiness.level == "B",
            "representative migration should be reusable for {}",
            family.id
        );
        // --- Optional expansion migration: only runs when the manifest
        // declares all three of fixture dir, scene id, and scene name.
        if let (Some(expansion_fixture_dir), Some(expansion_scene_id), Some(expansion_scene_name)) = (
            &family.expansion_fixture_dir,
            &family.expansion_scene_id,
            &family.expansion_scene_name,
        ) {
            let expansion_output_root =
                temp_workspace(&format!("sgclaw-p1-family-expansion-{}", family.id));
            generate_scene_package(GenerateSceneRequest {
                source_dir: PathBuf::from(expansion_fixture_dir),
                scene_id: expansion_scene_id.clone(),
                scene_name: expansion_scene_name.clone(),
                scene_kind: None,
                target_url: None,
                output_root: expansion_output_root.clone(),
                lessons_path: None,
                scene_info_json: None,
                scene_ir_json: None,
            })
            .unwrap_or_else(|err| panic!("{} failed expansion migration: {}", family.id, err));
            let expansion_dir = expansion_output_root
                .join("skills")
                .join(expansion_scene_id);
            let expansion_report: SceneIr = serde_json::from_str(
                &fs::read_to_string(expansion_dir.join("references/generation-report.json"))
                    .unwrap(),
            )
            .unwrap();
            // The expansion must land on the same archetype as the
            // representative migration and also be reusable (A/B).
            assert_eq!(
                expansion_report.workflow_archetype().as_str(),
                family.expected_archetype,
                "expected expansion archetype mismatch for {}",
                family.id
            );
            assert!(
                expansion_report.readiness.level == "A" || expansion_report.readiness.level == "B",
                "expansion migration should be reusable for {}",
                family.id
            );
            // Per-field expansion assertions; each `None` field skips its check.
            if let Some(assertions) = &family.expansion_assertions {
                if let Some(required_default_mode) = &assertions.required_default_mode {
                    assert_eq!(
                        expansion_report.default_mode.as_deref(),
                        Some(required_default_mode.as_str()),
                        "missing expansion default mode {} for {}",
                        required_default_mode,
                        family.id
                    );
                }
                if let Some(expected_pagination_field) = &assertions.expected_pagination_field {
                    assert_eq!(
                        expansion_report
                            .pagination_plan
                            .as_ref()
                            .map(|plan| plan.page_field.as_str()),
                        Some(expected_pagination_field.as_str()),
                        "expansion pagination field mismatch for {}",
                        family.id
                    );
                }
                if let Some(required_join_key) = &assertions.required_join_key {
                    assert!(
                        expansion_report
                            .join_keys
                            .iter()
                            .any(|key| key == required_join_key),
                        "missing expansion join key {} for {}",
                        required_join_key,
                        family.id
                    );
                }
                if let Some(required_aggregate_rule) = &assertions.required_aggregate_rule {
                    assert!(
                        expansion_report
                            .merge_or_dedupe_rules
                            .iter()
                            .any(|rule| rule == required_aggregate_rule),
                        "missing expansion aggregate rule {} for {}",
                        required_aggregate_rule,
                        family.id
                    );
                }
                // Request assertions are substring matches on endpoint/request
                // names, not exact equality.
                if let Some(required_main_request) = &assertions.required_main_request {
                    assert!(
                        expansion_report
                            .main_request
                            .as_ref()
                            .and_then(|request| request.api_endpoint.as_ref())
                            .map(|endpoint| endpoint.name.contains(required_main_request))
                            .unwrap_or(false),
                        "missing expansion main request {} for {}",
                        required_main_request,
                        family.id
                    );
                }
                if let Some(required_enrichment_request) = &assertions.required_enrichment_request {
                    assert!(
                        expansion_report
                            .enrichment_requests
                            .iter()
                            .any(|request| request.name.contains(required_enrichment_request)),
                        "missing expansion enrichment request {} for {}",
                        required_enrichment_request,
                        family.id
                    );
                }
                // Merge-plan assertions look inside the optional merge plan;
                // an absent plan fails the assertion via `unwrap_or(false)`.
                if let Some(required_merge_join_key) = &assertions.required_merge_join_key {
                    assert!(
                        expansion_report
                            .merge_plan
                            .as_ref()
                            .map(|plan| {
                                plan.join_keys
                                    .iter()
                                    .any(|key| key == required_merge_join_key)
                            })
                            .unwrap_or(false),
                        "missing expansion merge join key {} for {}",
                        required_merge_join_key,
                        family.id
                    );
                }
                if let Some(required_merge_aggregate_rule) =
                    &assertions.required_merge_aggregate_rule
                {
                    assert!(
                        expansion_report
                            .merge_plan
                            .as_ref()
                            .map(|plan| {
                                plan.aggregate_rules
                                    .iter()
                                    .any(|rule| rule == required_merge_aggregate_rule)
                            })
                            .unwrap_or(false),
                        "missing expansion merge aggregate rule {} for {}",
                        required_merge_aggregate_rule,
                        family.id
                    );
                }
                if let Some(required_output_column) = &assertions.required_output_column {
                    assert!(
                        expansion_report
                            .merge_plan
                            .as_ref()
                            .map(|plan| {
                                plan.output_columns
                                    .iter()
                                    .any(|(field, _)| field == required_output_column)
                            })
                            .unwrap_or(false),
                        "missing expansion output column {} for {}",
                        required_output_column,
                        family.id
                    );
                }
            }
        }
        // --- Batch expansion fixtures: same generate-and-check cycle per
        // fixture, with a reduced assertion set (mode, pagination, join key,
        // aggregate rule only).
        for batch_fixture in &family.batch_expansion_fixtures {
            let batch_output_root = temp_workspace(&format!(
                "sgclaw-p1-family-batch-{}-{}",
                family.id, batch_fixture.scene_id
            ));
            generate_scene_package(GenerateSceneRequest {
                source_dir: PathBuf::from(&batch_fixture.fixture_dir),
                scene_id: batch_fixture.scene_id.clone(),
                scene_name: batch_fixture.scene_name.clone(),
                scene_kind: None,
                target_url: None,
                output_root: batch_output_root.clone(),
                lessons_path: None,
                scene_info_json: None,
                scene_ir_json: None,
            })
            .unwrap_or_else(|err| {
                panic!("{} failed batch expansion migration: {}", family.id, err)
            });
            let batch_dir = batch_output_root
                .join("skills")
                .join(&batch_fixture.scene_id);
            let batch_report: SceneIr = serde_json::from_str(
                &fs::read_to_string(batch_dir.join("references/generation-report.json")).unwrap(),
            )
            .unwrap();
            assert_eq!(
                batch_report.workflow_archetype().as_str(),
                family.expected_archetype,
                "expected batch expansion archetype mismatch for {}",
                family.id
            );
            assert!(
                batch_report.readiness.level == "A" || batch_report.readiness.level == "B",
                "batch expansion migration should be reusable for {}",
                family.id
            );
            if let Some(required_default_mode) = &batch_fixture.assertions.required_default_mode {
                assert_eq!(
                    batch_report.default_mode.as_deref(),
                    Some(required_default_mode.as_str()),
                    "missing batch expansion default mode {} for {}",
                    required_default_mode,
                    family.id
                );
            }
            if let Some(expected_pagination_field) =
                &batch_fixture.assertions.expected_pagination_field
            {
                assert_eq!(
                    batch_report
                        .pagination_plan
                        .as_ref()
                        .map(|plan| plan.page_field.as_str()),
                    Some(expected_pagination_field.as_str()),
                    "batch expansion pagination field mismatch for {}",
                    family.id
                );
            }
            if let Some(required_join_key) = &batch_fixture.assertions.required_join_key {
                assert!(
                    batch_report
                        .join_keys
                        .iter()
                        .any(|key| key == required_join_key),
                    "missing batch expansion join key {} for {}",
                    required_join_key,
                    family.id
                );
            }
            if let Some(required_aggregate_rule) = &batch_fixture.assertions.required_aggregate_rule
            {
                assert!(
                    batch_report
                        .merge_or_dedupe_rules
                        .iter()
                        .any(|rule| rule == required_aggregate_rule),
                    "missing batch expansion aggregate rule {} for {}",
                    required_aggregate_rule,
                    family.id
                );
            }
        }
    }
}
/// Load and parse the P1 family manifest fixture.
/// Panics if the fixture file is missing or is not valid manifest JSON.
fn load_manifest() -> P1FamilyManifest {
    let raw = fs::read_to_string("tests/fixtures/generated_scene/p1_family_manifest.json").unwrap();
    serde_json::from_str(&raw).unwrap()
}
/// Create a unique scratch directory under the system temp dir and return it.
/// The name is `{prefix}-{nanoseconds since the UNIX epoch}`, which keeps
/// concurrent test runs from colliding.
fn temp_workspace(prefix: &str) -> PathBuf {
    let stamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_nanos();
    let workspace = std::env::temp_dir().join(format!("{prefix}-{stamp}"));
    fs::create_dir_all(&workspace).unwrap();
    workspace
}

File diff suppressed because it is too large Load Diff

View File

@@ -7,9 +7,6 @@ use std::time::{SystemTime, UNIX_EPOCH};
use reqwest::blocking::Client;
use serde_json::{json, Value};
use sgclaw::generated_scene::generator::{
generate_scheduled_monitoring_action_skill_package, GenerateScheduledMonitoringActionSkillRequest,
};
use tungstenite::{accept, Message};
fn bin_path() -> PathBuf {
@@ -36,6 +33,12 @@ fn temp_workspace(prefix: &str) -> PathBuf {
root
}
fn validation_bundle_skills_dir() -> PathBuf {
std::env::current_dir()
.unwrap()
.join("dist/sgclaw_scheduled_monitoring_read_only_validation_bundle_2026-04-22/skills")
}
fn scheduled_trigger(mode: &str) -> Value {
json!({
"trigger_type": "scheduled",
@@ -529,8 +532,6 @@ fn binary_wiring_loads_registry_backed_scheduled_skill() {
let output_path = workspace.join("run-record.json");
let config_path = workspace.join("sgclaw_config.json");
let rules_path = workspace.join("resources").join("rules.json");
let materialization_root = workspace.join("materialized");
fs::create_dir_all(&materialization_root).unwrap();
write_json(&trigger_path, &scheduled_trigger("monitor_only"));
write_runtime_rules(&rules_path);
let detect_payload = json!({
@@ -579,25 +580,9 @@ fn binary_wiring_loads_registry_backed_scheduled_skill() {
start_callback_host_scheduled_monitoring_browser_server(detect_payload);
write_browser_config(&config_path, &browser_ws_url);
generate_scheduled_monitoring_action_skill_package(GenerateScheduledMonitoringActionSkillRequest {
scene_id: "command-center-fee-control-monitor".to_string(),
scene_name: "指挥中心费控异常监测".to_string(),
output_root: materialization_root.clone(),
source_evidence_json: PathBuf::from(
"tests/fixtures/generated_scene/monitoring_action_source_evidence_extraction_2026-04-21.json",
),
ir_contract_json: PathBuf::from(
"tests/fixtures/generated_scene/scheduled_monitoring_action_ir_contract_2026-04-22.json",
),
trigger_contract_json: PathBuf::from(
"tests/fixtures/generated_scene/scheduled_monitoring_action_trigger_runtime_contract_2026-04-22.json",
),
})
.unwrap();
let output = run_binary_with_skills_dir_and_config(
&trigger_path,
&materialization_root.join("skills"),
&validation_bundle_skills_dir(),
&config_path,
&workspace,
&output_path,
@@ -632,29 +617,11 @@ fn binary_wiring_registry_backed_skill_executes_read_only_scripts_with_runtime_i
let output_path = workspace.join("run-record.json");
let config_path = workspace.join("sgclaw_config.json");
let rules_path = workspace.join("resources").join("rules.json");
let materialization_root = workspace.join("materialized");
fs::create_dir_all(&materialization_root).unwrap();
write_json(
&trigger_path,
&scheduled_trigger_with_runtime_inputs("monitor_only"),
);
write_runtime_rules(&rules_path);
generate_scheduled_monitoring_action_skill_package(GenerateScheduledMonitoringActionSkillRequest {
scene_id: "command-center-fee-control-monitor".to_string(),
scene_name: "指挥中心费控异常监测".to_string(),
output_root: materialization_root.clone(),
source_evidence_json: PathBuf::from(
"tests/fixtures/generated_scene/monitoring_action_source_evidence_extraction_2026-04-21.json",
),
ir_contract_json: PathBuf::from(
"tests/fixtures/generated_scene/scheduled_monitoring_action_ir_contract_2026-04-22.json",
),
trigger_contract_json: PathBuf::from(
"tests/fixtures/generated_scene/scheduled_monitoring_action_trigger_runtime_contract_2026-04-22.json",
),
})
.unwrap();
let detect_payload = json!({
"type": "scheduled-monitoring-detect-snapshot",
"report_name": "指挥中心费控异常监测",
@@ -708,7 +675,7 @@ fn binary_wiring_registry_backed_skill_executes_read_only_scripts_with_runtime_i
let output = run_binary_with_skills_dir_and_config(
&trigger_path,
&materialization_root.join("skills"),
&validation_bundle_skills_dir(),
&config_path,
&workspace,
&output_path,
@@ -737,6 +704,193 @@ fn binary_wiring_registry_backed_skill_executes_read_only_scripts_with_runtime_i
);
}
#[test]
fn command_center_preview_reflects_automation_semantics() {
let workspace = temp_workspace("sgclaw-command-center-preview-automation-semantics");
let trigger_path = workspace.join("scheduled-trigger.json");
let output_path = workspace.join("run-record.json");
let config_path = workspace.join("sgclaw_config.json");
let rules_path = workspace.join("resources").join("rules.json");
write_json(
&trigger_path,
&scheduled_trigger_with_runtime_inputs("monitor_only"),
);
write_runtime_rules(&rules_path);
let detect_payload = json!({
"type": "scheduled-monitoring-detect-snapshot",
"report_name": "command-center-fee-control-monitor",
"status": "detect-ok",
"workflowId": "command_center_fee_control_monitoring_action",
"mode": "monitor_only",
"pendingList": [
{ "id": "A1", "consNo": "C1", "phone": "13800000000", "abnorType": "fee_control" }
],
"inputs": {
"source": "browser_attached_live_read",
"queryAbnorList": [
{ "id": "A1", "consNo": "C1", "phone": "13800000000", "abnorType": "fee_control" }
],
"queryHistoryEnergyCharge": [],
"getMonitorLog": { "lastHandled": "2026-04-22T08:00:00Z" },
"getOtherIphones": { "holidaySwitch": "off" },
"getAllSubMgtOrgTreeByOrgCode": {}
},
"localStorageSnapshot": {
"loginUserInfo": "{\"orgNo\":\"62401\"}",
"markToken": "browser-token",
"yxClassList": "[{\"orgNo\":\"62401\"}]",
"zhzxFkycSendTime": "2026-04-22 08:00:00"
},
"readDiagnostics": {
"source": "browser_attached_live_read",
"businessGatewayReadAttempted": true,
"localhostReadAttempted": true,
"queryAbnorListCount": 1,
"queryHistoryEnergyChargeCount": 0
},
"dependencySnapshot": {
"businessReads": [],
"localReads": [],
"blockedLocalWrites": [],
"blockedCalls": ["repetCtrlSend"]
},
"sideEffectCounters": {
"repetCtrlSend": 0,
"sendMessages": 0,
"callOutLogin": 0,
"audioPlay": 0,
"exeTQueue": 0,
"productionLogWrite": 0
}
});
let (browser_ws_url, browser_server) =
start_callback_host_scheduled_monitoring_browser_server(detect_payload);
write_browser_config(&config_path, &browser_ws_url);
let output = run_binary_with_skills_dir_and_config(
&trigger_path,
&validation_bundle_skills_dir(),
&config_path,
&workspace,
&output_path,
);
browser_server.join().unwrap();
assert!(
output.status.success(),
"stdout={}\nstderr={}",
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
);
let record: Value = serde_json::from_str(&fs::read_to_string(output_path).unwrap()).unwrap();
let preview_artifact = &record["previewArtifact"];
assert_eq!(
preview_artifact["actionPlan"][0]["actionContractRef"],
"dispatch_exception_order"
);
assert_eq!(preview_artifact["summary"]["queue_transition_count"], 1);
assert_eq!(
preview_artifact["queueTransitions"][0]["transitionId"],
"queue_continue_on_done"
);
assert_eq!(
preview_artifact["logWritePreview"][0]["logId"],
"dispose_log_after_dispatch"
);
}
#[test]
fn command_center_empty_pending_list_does_not_emit_log_write_preview() {
let workspace = temp_workspace("sgclaw-command-center-empty-preview-semantics");
let trigger_path = workspace.join("scheduled-trigger.json");
let output_path = workspace.join("run-record.json");
let config_path = workspace.join("sgclaw_config.json");
let rules_path = workspace.join("resources").join("rules.json");
write_json(
&trigger_path,
&scheduled_trigger_with_runtime_inputs("monitor_only"),
);
write_runtime_rules(&rules_path);
let detect_payload = json!({
"type": "scheduled-monitoring-detect-snapshot",
"report_name": "command-center-fee-control-monitor",
"status": "detect-ok",
"workflowId": "command_center_fee_control_monitoring_action",
"mode": "monitor_only",
"pendingList": [],
"inputs": {
"source": "browser_attached_live_read",
"queryAbnorList": [],
"queryHistoryEnergyCharge": [],
"getMonitorLog": {},
"getOtherIphones": {},
"getAllSubMgtOrgTreeByOrgCode": {}
},
"localStorageSnapshot": {
"loginUserInfo": "{\"orgNo\":\"62401\"}",
"markToken": "browser-token",
"yxClassList": "[{\"orgNo\":\"62401\"}]",
"zhzxFkycSendTime": "2026-04-22 08:00:00"
},
"readDiagnostics": {
"source": "browser_attached_live_read",
"businessGatewayReadAttempted": true,
"localhostReadAttempted": true,
"queryAbnorListCount": 0,
"queryHistoryEnergyChargeCount": 0
},
"dependencySnapshot": {
"businessReads": [],
"localReads": [],
"blockedLocalWrites": [],
"blockedCalls": ["repetCtrlSend"]
},
"sideEffectCounters": {
"repetCtrlSend": 0,
"sendMessages": 0,
"callOutLogin": 0,
"audioPlay": 0,
"exeTQueue": 0,
"productionLogWrite": 0
}
});
let (browser_ws_url, browser_server) =
start_callback_host_scheduled_monitoring_browser_server(detect_payload);
write_browser_config(&config_path, &browser_ws_url);
let output = run_binary_with_skills_dir_and_config(
&trigger_path,
&validation_bundle_skills_dir(),
&config_path,
&workspace,
&output_path,
);
browser_server.join().unwrap();
assert!(
output.status.success(),
"stdout={}\nstderr={}",
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
);
let record: Value = serde_json::from_str(&fs::read_to_string(output_path).unwrap()).unwrap();
let preview_artifact = &record["previewArtifact"];
assert_eq!(preview_artifact["summary"]["pending_count"], 0);
assert_eq!(preview_artifact["summary"]["queue_transition_count"], 1);
assert_eq!(
preview_artifact["queueTransitions"][0]["transitionId"],
"queue_continue_on_empty"
);
assert_eq!(
preview_artifact["logWritePreview"].as_array().unwrap().len(),
0
);
}
#[test]
fn binary_wiring_browser_attached_passes_platform_service_base_from_config() {
let workspace = temp_workspace("sgclaw-scheduled-monitoring-binary-platform-service-base");
@@ -744,27 +898,9 @@ fn binary_wiring_browser_attached_passes_platform_service_base_from_config() {
let output_path = workspace.join("run-record.json");
let config_path = workspace.join("sgclaw_config.json");
let rules_path = workspace.join("resources").join("rules.json");
let materialization_root = workspace.join("materialized");
fs::create_dir_all(&materialization_root).unwrap();
write_json(&trigger_path, &scheduled_trigger("monitor_only"));
write_runtime_rules(&rules_path);
generate_scheduled_monitoring_action_skill_package(GenerateScheduledMonitoringActionSkillRequest {
scene_id: "command-center-fee-control-monitor".to_string(),
scene_name: "指挥中心费控异常监测".to_string(),
output_root: materialization_root.clone(),
source_evidence_json: PathBuf::from(
"tests/fixtures/generated_scene/monitoring_action_source_evidence_extraction_2026-04-21.json",
),
ir_contract_json: PathBuf::from(
"tests/fixtures/generated_scene/scheduled_monitoring_action_ir_contract_2026-04-22.json",
),
trigger_contract_json: PathBuf::from(
"tests/fixtures/generated_scene/scheduled_monitoring_action_trigger_runtime_contract_2026-04-22.json",
),
})
.unwrap();
let detect_payload = json!({
"type": "scheduled-monitoring-detect-snapshot",
"report_name": "指挥中心费控异常监测",
@@ -819,7 +955,7 @@ fn binary_wiring_browser_attached_passes_platform_service_base_from_config() {
let output = run_binary_with_skills_dir_and_config(
&trigger_path,
&materialization_root.join("skills"),
&validation_bundle_skills_dir(),
&config_path,
&workspace,
&output_path,
@@ -937,7 +1073,7 @@ fn binary_wiring_loads_archive_workorder_skill_from_bundle() {
);
assert_eq!(record["previewArtifact"]["summary"]["pending_count"], 1);
assert_eq!(record["previewArtifact"]["summary"]["notify_count"], 0);
assert_eq!(record["previewArtifact"]["summary"]["action_plan_count"], 1);
assert_eq!(record["previewArtifact"]["summary"]["action_plan_count"], 0);
assert_eq!(
record["auditPreview"]["detectReadDiagnostics"]["businessType"],
"归档工单配网推送"
@@ -1084,7 +1220,7 @@ fn binary_wiring_loads_available_balance_below_zero_skill_from_bundle() {
);
assert_eq!(record["previewArtifact"]["summary"]["pending_count"], 1);
assert_eq!(record["previewArtifact"]["summary"]["notify_count"], 0);
assert_eq!(record["previewArtifact"]["summary"]["action_plan_count"], 1);
assert_eq!(record["previewArtifact"]["summary"]["action_plan_count"], 0);
assert_eq!(
record["auditPreview"]["detectReadDiagnostics"]["businessType"],
"可用电费小于零监测提醒"

View File

@@ -1,60 +1,32 @@
use std::fs;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use sgclaw::compat::scene_platform::scheduled_registry::load_scheduled_monitoring_registry;
use sgclaw::generated_scene::generator::{
generate_scheduled_monitoring_action_skill_package, GenerateScheduledMonitoringActionSkillRequest,
};
fn temp_workspace(prefix: &str) -> PathBuf {
let nanos = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_nanos();
let path = std::env::temp_dir().join(format!("{prefix}-{nanos}"));
fs::create_dir_all(&path).unwrap();
path
fn validation_bundle_skills_dir() -> PathBuf {
PathBuf::from("dist/sgclaw_scheduled_monitoring_read_only_validation_bundle_2026-04-22/skills")
}
#[test]
fn scheduled_monitoring_registry_loads_materialized_skill() {
let output_root = temp_workspace("sgclaw-scheduled-monitoring-registry");
generate_scheduled_monitoring_action_skill_package(GenerateScheduledMonitoringActionSkillRequest {
scene_id: "command-center-fee-control-monitor".to_string(),
scene_name: "指挥中心费控异常监测".to_string(),
output_root: output_root.clone(),
source_evidence_json: PathBuf::from(
"tests/fixtures/generated_scene/monitoring_action_source_evidence_extraction_2026-04-21.json",
),
ir_contract_json: PathBuf::from(
"tests/fixtures/generated_scene/scheduled_monitoring_action_ir_contract_2026-04-22.json",
),
trigger_contract_json: PathBuf::from(
"tests/fixtures/generated_scene/scheduled_monitoring_action_trigger_runtime_contract_2026-04-22.json",
),
})
.unwrap();
let registry = load_scheduled_monitoring_registry(&validation_bundle_skills_dir()).unwrap();
let entry = registry
.iter()
.find(|entry| entry.workflow_id == "command_center_fee_control_monitoring_action")
.expect("command center skill must be registered");
let registry = load_scheduled_monitoring_registry(&output_root.join("skills")).unwrap();
assert_eq!(registry.len(), 1);
assert_eq!(
registry[0].workflow_id,
entry.workflow_id,
"command_center_fee_control_monitoring_action"
);
assert_eq!(
registry[0].manifest.scene.kind,
entry.manifest.scene.kind,
"scheduled_monitoring_action_workflow"
);
}
#[test]
fn scheduled_monitoring_registry_loads_archive_workorder_skill() {
let skills_dir = PathBuf::from(
"dist/sgclaw_scheduled_monitoring_read_only_validation_bundle_2026-04-22/skills",
);
let registry = load_scheduled_monitoring_registry(&skills_dir).unwrap();
let registry = load_scheduled_monitoring_registry(&validation_bundle_skills_dir()).unwrap();
let entry = registry
.iter()
.find(|entry| entry.workflow_id == "archive_workorder_grid_push_monitoring_action")
@@ -77,11 +49,7 @@ fn scheduled_monitoring_registry_loads_archive_workorder_skill() {
#[test]
fn scheduled_monitoring_registry_loads_available_balance_below_zero_skill() {
let skills_dir = PathBuf::from(
"dist/sgclaw_scheduled_monitoring_read_only_validation_bundle_2026-04-22/skills",
);
let registry = load_scheduled_monitoring_registry(&skills_dir).unwrap();
let registry = load_scheduled_monitoring_registry(&validation_bundle_skills_dir()).unwrap();
let entry = registry
.iter()
.find(|entry| entry.workflow_id == "available_balance_below_zero_monitoring_action")
@@ -104,11 +72,7 @@ fn scheduled_monitoring_registry_loads_available_balance_below_zero_skill() {
#[test]
fn scheduled_monitoring_registry_loads_sgcc_todo_crawler_skill() {
let skills_dir = PathBuf::from(
"dist/sgclaw_scheduled_monitoring_read_only_validation_bundle_2026-04-22/skills",
);
let registry = load_scheduled_monitoring_registry(&skills_dir).unwrap();
let registry = load_scheduled_monitoring_registry(&validation_bundle_skills_dir()).unwrap();
let entry = registry
.iter()
.find(|entry| entry.workflow_id == "sgcc_todo_crawler_monitoring_action")

File diff suppressed because it is too large Load Diff