mirror of
https://github.com/instructkr/claw-code.git
synced 2026-04-14 20:14:48 +08:00
feat: b5-session-export — batch 5 wave 2
This commit is contained in:
@@ -69,6 +69,7 @@ const VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||
/// Target triple captured at compile time from the `TARGET` environment
/// variable; `None` when the variable was not set during the build.
const BUILD_TARGET: Option<&str> = option_env!("TARGET");
|
||||
/// Git commit SHA captured at compile time from the `GIT_SHA` environment
/// variable; `None` when the variable was not set during the build.
const GIT_SHA: Option<&str> = option_env!("GIT_SHA");
|
||||
// Interval between internal progress heartbeats (3s). NOTE(review): purpose
// inferred from the name — the consuming code is not visible here; confirm.
const INTERNAL_PROGRESS_HEARTBEAT_INTERVAL: Duration = Duration::from_secs(3);
|
||||
/// How long to wait for the *first* stream event when resuming after tool
/// execution before declaring a post-tool stall and retrying the request
/// (applied via `tokio::time::timeout` in `consume_stream`).
const POST_TOOL_STALL_TIMEOUT: Duration = Duration::from_secs(10);
|
||||
// File extension for the current session format ("jsonl"). NOTE(review):
// session-file usage is outside this view — confirm against the persistence code.
const PRIMARY_SESSION_EXTENSION: &str = "jsonl";
|
||||
// File extension for the older session format ("json"), presumably kept so
// previously written sessions can still be found — confirm against the loader.
const LEGACY_SESSION_EXTENSION: &str = "json";
|
||||
// Sentinel string a caller can pass instead of a concrete session id to mean
// "the most recent session". NOTE(review): inferred from the name — confirm.
const LATEST_SESSION_REFERENCE: &str = "latest";
|
||||
@@ -6288,6 +6289,7 @@ impl ApiClient for AnthropicRuntimeClient {
|
||||
if let Some(progress_reporter) = &self.progress_reporter {
|
||||
progress_reporter.mark_model_phase();
|
||||
}
|
||||
let is_post_tool = request_ends_with_tool_result(&request);
|
||||
let message_request = MessageRequest {
|
||||
model: self.model.clone(),
|
||||
max_tokens: max_tokens_for_model(&self.model),
|
||||
@@ -6301,9 +6303,46 @@ impl ApiClient for AnthropicRuntimeClient {
|
||||
};
|
||||
|
||||
self.runtime.block_on(async {
|
||||
// When resuming after tool execution, apply a stall timeout on the
|
||||
// first stream event. If the model does not respond within the
|
||||
// deadline we drop the stalled connection and re-send the request as
|
||||
// a continuation nudge (one retry only).
|
||||
let max_attempts: usize = if is_post_tool { 2 } else { 1 };
|
||||
|
||||
for attempt in 1..=max_attempts {
|
||||
let result = self
|
||||
.consume_stream(&message_request, is_post_tool && attempt == 1)
|
||||
.await;
|
||||
match result {
|
||||
Ok(events) => return Ok(events),
|
||||
Err(error) if error.to_string().contains("post-tool stall") && attempt < max_attempts => {
|
||||
// Stalled after tool completion — nudge the model by
|
||||
// re-sending the same request.
|
||||
continue;
|
||||
}
|
||||
Err(error) => return Err(error),
|
||||
}
|
||||
}
|
||||
|
||||
Err(RuntimeError::new(
|
||||
"post-tool continuation nudge exhausted",
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl AnthropicRuntimeClient {
|
||||
/// Consume a single streaming response, optionally applying a stall
|
||||
/// timeout on the first event for post-tool continuations.
|
||||
#[allow(clippy::too_many_lines)]
|
||||
async fn consume_stream(
|
||||
&self,
|
||||
message_request: &MessageRequest,
|
||||
apply_stall_timeout: bool,
|
||||
) -> Result<Vec<AssistantEvent>, RuntimeError> {
|
||||
let mut stream =
|
||||
self.client
|
||||
.stream_message(&message_request)
|
||||
.stream_message(message_request)
|
||||
.await
|
||||
.map_err(|error| {
|
||||
RuntimeError::new(format_user_visible_api_error(&self.session_id, &error))
|
||||
@@ -6321,10 +6360,34 @@ impl ApiClient for AnthropicRuntimeClient {
|
||||
let mut pending_tool: Option<(String, String, String)> = None;
|
||||
let mut block_has_thinking_summary = false;
|
||||
let mut saw_stop = false;
|
||||
let mut received_any_event = false;
|
||||
|
||||
while let Some(event) = stream.next_event().await.map_err(|error| {
|
||||
loop {
|
||||
let next = if apply_stall_timeout && !received_any_event {
|
||||
match tokio::time::timeout(POST_TOOL_STALL_TIMEOUT, stream.next_event()).await {
|
||||
Ok(inner) => inner.map_err(|error| {
|
||||
RuntimeError::new(format_user_visible_api_error(
|
||||
&self.session_id,
|
||||
&error,
|
||||
))
|
||||
})?,
|
||||
Err(_elapsed) => {
|
||||
return Err(RuntimeError::new(
|
||||
"post-tool stall: model did not respond within timeout",
|
||||
));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
stream.next_event().await.map_err(|error| {
|
||||
RuntimeError::new(format_user_visible_api_error(&self.session_id, &error))
|
||||
})? {
|
||||
})?
|
||||
};
|
||||
|
||||
let Some(event) = next else {
|
||||
break;
|
||||
};
|
||||
received_any_event = true;
|
||||
|
||||
match event {
|
||||
ApiStreamEvent::MessageStart(start) => {
|
||||
for block in start.message.content {
|
||||
@@ -6439,10 +6502,18 @@ impl ApiClient for AnthropicRuntimeClient {
|
||||
let mut events = response_to_events(response, out)?;
|
||||
push_prompt_cache_record(&self.client, &mut events);
|
||||
Ok(events)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` when the conversation ends with a tool-result message,
|
||||
/// meaning the model is expected to continue after tool execution.
|
||||
fn request_ends_with_tool_result(request: &ApiRequest) -> bool {
|
||||
request
|
||||
.messages
|
||||
.last()
|
||||
.is_some_and(|message| message.role == MessageRole::Tool)
|
||||
}
|
||||
|
||||
fn format_user_visible_api_error(session_id: &str, error: &api::ApiError) -> String {
|
||||
if error.is_context_window_failure() {
|
||||
format_context_window_blocked_error(session_id, error)
|
||||
|
||||
Reference in New Issue
Block a user