Skip to content
This repository was archived by the owner on Apr 8, 2026. It is now read-only.

Commit a2351fe

Browse files
author
Jobdori
committed
feat(harness+usage): add auto_compact and token_cost parity scenarios
Two new mock parity harness scenarios:

1. auto_compact_triggered (session-compaction category)
   - Mock returns 50k input tokens; validates the auto_compaction key is present in JSON output.
   - Validates format parity; trigger behavior is covered by conversation::tests::auto_compacts_when_cumulative_input_threshold_is_crossed.

2. token_cost_reporting (token-usage category)
   - Mock returns known token counts (1k input, 500 output).
   - Validates input/output token fields are present in JSON output.

Additional changes:
- Add estimated_cost to JSON prompt output (format_usd + pricing_for_model).
- Add final_text_sse_with_usage and text_message_response_with_usage helpers to mock-anthropic-service for parameterized token counts.
- Add ScenarioCase.extra_env and ScenarioCase.resume_session fields.
- Update mock_parity_scenarios.json: 10 -> 12 scenarios.
- Update harness request count assertion: 19 -> 21.

cargo test --workspace: 558 passed, 0 failed
1 parent 6325add commit a2351fe

File tree

4 files changed

+286
-18
lines changed

4 files changed

+286
-18
lines changed

rust/crates/mock-anthropic-service/src/lib.rs

Lines changed: 120 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,8 @@ enum Scenario {
9898
BashPermissionPromptApproved,
9999
BashPermissionPromptDenied,
100100
PluginToolRoundtrip,
101+
AutoCompactTriggered,
102+
TokenCostReporting,
101103
}
102104

103105
impl Scenario {
@@ -113,6 +115,8 @@ impl Scenario {
113115
"bash_permission_prompt_approved" => Some(Self::BashPermissionPromptApproved),
114116
"bash_permission_prompt_denied" => Some(Self::BashPermissionPromptDenied),
115117
"plugin_tool_roundtrip" => Some(Self::PluginToolRoundtrip),
118+
"auto_compact_triggered" => Some(Self::AutoCompactTriggered),
119+
"token_cost_reporting" => Some(Self::TokenCostReporting),
116120
_ => None,
117121
}
118122
}
@@ -129,6 +133,8 @@ impl Scenario {
129133
Self::BashPermissionPromptApproved => "bash_permission_prompt_approved",
130134
Self::BashPermissionPromptDenied => "bash_permission_prompt_denied",
131135
Self::PluginToolRoundtrip => "plugin_tool_roundtrip",
136+
Self::AutoCompactTriggered => "auto_compact_triggered",
137+
Self::TokenCostReporting => "token_cost_reporting",
132138
}
133139
}
134140
}
@@ -452,6 +458,12 @@ fn build_stream_body(request: &MessageRequest, scenario: Scenario) -> String {
452458
&[r#"{"message":"hello from plugin parity"}"#],
453459
),
454460
},
461+
Scenario::AutoCompactTriggered => {
462+
final_text_sse_with_usage("auto compact parity complete.", 50_000, 200)
463+
}
464+
Scenario::TokenCostReporting => {
465+
final_text_sse_with_usage("token cost reporting parity complete.", 1_000, 500)
466+
}
455467
}
456468
}
457469

@@ -610,6 +622,18 @@ fn build_message_response(request: &MessageRequest, scenario: Scenario) -> Messa
610622
json!({"message": "hello from plugin parity"}),
611623
),
612624
},
625+
Scenario::AutoCompactTriggered => text_message_response_with_usage(
626+
"msg_auto_compact_triggered",
627+
"auto compact parity complete.",
628+
50_000,
629+
200,
630+
),
631+
Scenario::TokenCostReporting => text_message_response_with_usage(
632+
"msg_token_cost_reporting",
633+
"token cost reporting parity complete.",
634+
1_000,
635+
500,
636+
),
613637
}
614638
}
615639

@@ -625,6 +649,8 @@ fn request_id_for(scenario: Scenario) -> &'static str {
625649
Scenario::BashPermissionPromptApproved => "req_bash_permission_prompt_approved",
626650
Scenario::BashPermissionPromptDenied => "req_bash_permission_prompt_denied",
627651
Scenario::PluginToolRoundtrip => "req_plugin_tool_roundtrip",
652+
Scenario::AutoCompactTriggered => "req_auto_compact_triggered",
653+
Scenario::TokenCostReporting => "req_token_cost_reporting",
628654
}
629655
}
630656

@@ -661,6 +687,32 @@ fn text_message_response(id: &str, text: &str) -> MessageResponse {
661687
}
662688
}
663689

690+
fn text_message_response_with_usage(
691+
id: &str,
692+
text: &str,
693+
input_tokens: u32,
694+
output_tokens: u32,
695+
) -> MessageResponse {
696+
MessageResponse {
697+
id: id.to_string(),
698+
kind: "message".to_string(),
699+
role: "assistant".to_string(),
700+
content: vec![OutputContentBlock::Text {
701+
text: text.to_string(),
702+
}],
703+
model: DEFAULT_MODEL.to_string(),
704+
stop_reason: Some("end_turn".to_string()),
705+
stop_sequence: None,
706+
usage: Usage {
707+
input_tokens,
708+
cache_creation_input_tokens: 0,
709+
cache_read_input_tokens: 0,
710+
output_tokens,
711+
},
712+
request_id: None,
713+
}
714+
}
715+
664716
fn tool_message_response(
665717
id: &str,
666718
tool_id: &str,
@@ -919,6 +971,74 @@ fn final_text_sse(text: &str) -> String {
919971
body
920972
}
921973

974+
/// Renders a full SSE stream body for a single text reply whose usage numbers
/// are caller-controlled (used by the token-accounting parity scenarios).
///
/// Event order follows the streaming protocol:
/// `message_start` -> `content_block_start` -> `content_block_delta` ->
/// `content_block_stop` -> `message_delta` (final usage) -> `message_stop`.
fn final_text_sse_with_usage(text: &str, input_tokens: u32, output_tokens: u32) -> String {
    let mut stream = String::new();

    // message_start advertises the input token count up front; output tokens
    // are reported as 0 here and finalized in message_delta below.
    append_sse(
        &mut stream,
        "message_start",
        json!({
            "type": "message_start",
            "message": {
                "id": unique_message_id(),
                "type": "message",
                "role": "assistant",
                "content": [],
                "model": DEFAULT_MODEL,
                "stop_reason": null,
                "stop_sequence": null,
                "usage": {
                    "input_tokens": input_tokens,
                    "cache_creation_input_tokens": 0,
                    "cache_read_input_tokens": 0,
                    "output_tokens": 0
                }
            }
        }),
    );

    // Exactly one text content block, delivered as a single delta.
    append_sse(
        &mut stream,
        "content_block_start",
        json!({
            "type": "content_block_start",
            "index": 0,
            "content_block": {"type": "text", "text": ""}
        }),
    );
    append_sse(
        &mut stream,
        "content_block_delta",
        json!({
            "type": "content_block_delta",
            "index": 0,
            "delta": {"type": "text_delta", "text": text}
        }),
    );
    append_sse(
        &mut stream,
        "content_block_stop",
        json!({
            "type": "content_block_stop",
            "index": 0
        }),
    );

    // message_delta carries the authoritative final usage numbers.
    append_sse(
        &mut stream,
        "message_delta",
        json!({
            "type": "message_delta",
            "delta": {"stop_reason": "end_turn", "stop_sequence": null},
            "usage": {
                "input_tokens": input_tokens,
                "cache_creation_input_tokens": 0,
                "cache_read_input_tokens": 0,
                "output_tokens": output_tokens
            }
        }),
    );

    append_sse(&mut stream, "message_stop", json!({"type": "message_stop"}));
    stream
}
1041+
9221042
#[allow(clippy::needless_pass_by_value)]
9231043
fn append_sse(buffer: &mut String, event: &str, payload: Value) {
9241044
use std::fmt::Write as _;

rust/crates/rusty-claude-cli/src/main.rs

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ use runtime::{
4646
ConversationRuntime, MessageRole, OAuthAuthorizationRequest, OAuthConfig,
4747
OAuthTokenExchangeRequest, PermissionMode, PermissionPolicy, ProjectContext, PromptCacheEvent,
4848
ResolvedPermissionMode, RuntimeError, Session, TokenUsage, ToolError, ToolExecutor,
49-
UsageTracker,
49+
UsageTracker, ModelPricing, format_usd, pricing_for_model,
5050
};
5151
use serde_json::json;
5252
use tools::GlobalToolRegistry;
@@ -1899,7 +1899,13 @@ impl LiveCli {
18991899
"output_tokens": summary.usage.output_tokens,
19001900
"cache_creation_input_tokens": summary.usage.cache_creation_input_tokens,
19011901
"cache_read_input_tokens": summary.usage.cache_read_input_tokens,
1902-
}
1902+
},
1903+
"estimated_cost": format_usd(
1904+
summary.usage.estimate_cost_usd_with_pricing(
1905+
pricing_for_model(&self.model)
1906+
.unwrap_or_else(runtime::ModelPricing::default_sonnet_tier)
1907+
).total_cost_usd()
1908+
)
19031909
})
19041910
);
19051911
Ok(())

0 commit comments

Comments
 (0)