Fix panic on multi-byte chars in filter_json buffer truncation

The buffer truncation code was slicing at a raw byte offset which could
land in the middle of a multi-byte character (like emojis), causing a
panic. Fixed by using char_indices() to find valid character boundaries.

Also added stop_reason field to CompletionChunk initializers in tests
to complete the stop_reason feature addition.

- Fix byte boundary panic in filter_json.rs line 327
- Add test for multi-byte character handling
- Update test files with missing stop_reason field
This commit is contained in:
Dhanji R. Prasanna
2026-01-09 15:20:57 +11:00
parent c470964628
commit e301075666
11 changed files with 94 additions and 4 deletions

View File

@@ -112,7 +112,7 @@ use tracing::{debug, error};
use crate::{
CompletionChunk, CompletionRequest, CompletionResponse, CompletionStream, LLMProvider, Message,
MessageRole, Tool, ToolCall, Usage,
streaming::{decode_utf8_streaming, make_final_chunk, make_text_chunk, make_tool_chunk},
streaming::{decode_utf8_streaming, make_final_chunk, make_final_chunk_with_reason, make_text_chunk, make_tool_chunk},
};
const ANTHROPIC_API_URL: &str = "https://api.anthropic.com/v1/messages";
@@ -395,6 +395,7 @@ impl AnthropicProvider {
let mut accumulated_usage: Option<Usage> = None;
let mut byte_buffer = Vec::new(); // Buffer for incomplete UTF-8 sequences
let mut message_stopped = false; // Track if we've received message_stop
let mut stop_reason: Option<String> = None; // Track why the message stopped
while let Some(chunk_result) = stream.next().await {
match chunk_result {
@@ -583,10 +584,20 @@ impl AnthropicProvider {
current_tool_calls.clear();
}
}
"message_delta" => {
// message_delta contains the stop_reason and final usage
if let Some(delta) = &event.delta {
if let Some(reason) = &delta.stop_reason {
debug!("Received stop_reason: {}", reason);
stop_reason = Some(reason.clone());
}
}
// Usage is also in message_delta but we get it from message_start
}
"message_stop" => {
debug!("Received message stop event");
message_stopped = true;
let final_chunk = make_final_chunk(current_tool_calls.clone(), accumulated_usage.clone());
let final_chunk = make_final_chunk_with_reason(current_tool_calls.clone(), accumulated_usage.clone(), stop_reason.clone());
if tx.send(Ok(final_chunk)).await.is_err() {
debug!("Receiver dropped, stopping stream");
}
@@ -931,6 +942,8 @@ struct AnthropicStreamMessage {
// Deserialized `delta` payload of an Anthropic SSE stream event.
// NOTE(review): the `#[derive(Debug, Deserialize)]` attribute sits above this
// hunk's visible range — confirm in the full file.
struct AnthropicDelta {
// Incremental text fragment — presumably from `content_block_delta` text
// events; usage not visible in this hunk, verify against the event handler.
text: Option<String>,
// Incremental JSON fragment — presumably accumulates tool-call arguments;
// verify against the tool-use event handler.
partial_json: Option<String>,
// Why the message stopped (e.g. "end_turn"); read by the `message_delta`
// handler. `#[serde(default)]` keeps deserialization lenient when absent.
#[serde(default)]
stop_reason: Option<String>,
}
#[derive(Debug, Deserialize)]

View File

@@ -493,6 +493,7 @@ impl DatabricksProvider {
finished: false,
usage: None,
tool_calls: None,
stop_reason: None,
};
if tx.send(Ok(text_chunk)).await.is_err() {
debug!("Receiver dropped");

View File

@@ -187,6 +187,8 @@ pub struct CompletionChunk {
pub finished: bool,
pub tool_calls: Option<Vec<ToolCall>>,
pub usage: Option<Usage>, // Add usage tracking for streaming
/// Stop reason from the API (e.g., "end_turn", "max_tokens", "stop_sequence")
pub stop_reason: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]

View File

@@ -157,6 +157,7 @@ impl OpenAIProvider {
finished: true,
tool_calls,
usage: accumulated_usage.clone(),
stop_reason: None, // TODO: Extract from OpenAI response
};
let _ = tx.send(Ok(final_chunk)).await;
}

View File

@@ -61,6 +61,22 @@ pub fn make_final_chunk(tool_calls: Vec<ToolCall>, usage: Option<Usage>) -> Comp
} else {
Some(tool_calls)
},
stop_reason: None,
}
}
/// Create a final completion chunk with stop reason.
///
/// Variant of `make_final_chunk` that also carries the provider-reported
/// `stop_reason` (e.g. "end_turn", "max_tokens", "stop_sequence").
///
/// The chunk is marked `finished: true` with empty content; an empty
/// `tool_calls` vec is normalized to `None` so downstream consumers can
/// test `Option` presence instead of emptiness.
pub fn make_final_chunk_with_reason(tool_calls: Vec<ToolCall>, usage: Option<Usage>, stop_reason: Option<String>) -> CompletionChunk {
CompletionChunk {
content: String::new(),
finished: true,
usage,
// Normalize: no tool calls -> None, matching `make_final_chunk`.
tool_calls: if tool_calls.is_empty() {
None
} else {
Some(tool_calls)
},
stop_reason,
}
}
@@ -71,6 +87,7 @@ pub fn make_text_chunk(content: String) -> CompletionChunk {
finished: false,
usage: None,
tool_calls: None,
stop_reason: None,
}
}
@@ -81,5 +98,6 @@ pub fn make_tool_chunk(tool_calls: Vec<ToolCall>) -> CompletionChunk {
finished: false,
usage: None,
tool_calls: Some(tool_calls),
stop_reason: None,
}
}