refactor: extract shared streaming utilities module

Agent: carmack

Create crates/g3-providers/src/streaming.rs with shared helpers:
- decode_utf8_streaming(): Handle incomplete UTF-8 sequences in SSE streams
- is_incomplete_json_error(): Detect incomplete vs malformed JSON
- make_final_chunk(): Create finished completion chunks
- make_text_chunk(): Create text content chunks
- make_tool_chunk(): Create tool call chunks

Refactor anthropic.rs:
- Use shared decode_utf8_streaming (removes 15 lines of inline UTF-8 handling)
- Use make_final_chunk, make_text_chunk, make_tool_chunk helpers
- Reduces verbose CompletionChunk constructions throughout

Refactor databricks.rs:
- Remove local copies of streaming helpers (now uses shared module)
- Reduces duplication between providers

Net reduction: 118 lines removed, 101 lines added (including new module)
All tests pass. Behavior unchanged.
This commit is contained in:
Dhanji R. Prasanna
2026-01-07 12:48:07 +11:00
parent bb63050779
commit 2bf475960c
5 changed files with 101 additions and 118 deletions

View File

@@ -112,6 +112,7 @@ use tracing::{debug, error};
use crate::{
CompletionChunk, CompletionRequest, CompletionResponse, CompletionStream, LLMProvider, Message,
MessageRole, Tool, ToolCall, Usage,
streaming::{decode_utf8_streaming, make_final_chunk, make_text_chunk, make_tool_chunk},
};
const ANTHROPIC_API_URL: &str = "https://api.anthropic.com/v1/messages";
@@ -398,30 +399,10 @@ impl AnthropicProvider {
while let Some(chunk_result) = stream.next().await {
match chunk_result {
Ok(chunk) => {
// Append new bytes to our buffer
byte_buffer.extend_from_slice(&chunk);
// Try to convert the entire buffer to UTF-8
let chunk_str = match std::str::from_utf8(&byte_buffer) {
Ok(s) => {
// Successfully converted entire buffer, clear it and use the string
let result = s.to_string();
byte_buffer.clear();
result
}
Err(e) => {
// Check if this is an incomplete sequence at the end
let valid_up_to = e.valid_up_to();
if valid_up_to > 0 {
// We have some valid UTF-8, extract it and keep the rest for next iteration
let valid_bytes =
byte_buffer.drain(..valid_up_to).collect::<Vec<_>>();
std::str::from_utf8(&valid_bytes).unwrap().to_string()
} else {
// No valid UTF-8 at all, skip this chunk and continue
continue;
}
}
let Some(chunk_str) = decode_utf8_streaming(&mut byte_buffer) else {
continue;
};
buffer.push_str(&chunk_str);
@@ -445,16 +426,7 @@ impl AnthropicProvider {
if let Some(data) = line.strip_prefix("data: ") {
if data == "[DONE]" {
debug!("Received stream completion marker");
let final_chunk = CompletionChunk {
content: String::new(),
finished: true,
usage: accumulated_usage.clone(),
tool_calls: if current_tool_calls.is_empty() {
None
} else {
Some(current_tool_calls.clone())
},
};
let final_chunk = make_final_chunk(current_tool_calls.clone(), accumulated_usage.clone());
if tx.send(Ok(final_chunk)).await.is_err() {
debug!("Receiver dropped, stopping stream");
}
@@ -518,12 +490,7 @@ impl AnthropicProvider {
{
// We have complete arguments, send the tool call immediately
debug!("Tool call has complete args, sending immediately: {:?}", tool_call);
let chunk = CompletionChunk {
content: String::new(),
finished: false,
usage: None,
tool_calls: Some(vec![tool_call]),
};
let chunk = make_tool_chunk(vec![tool_call]);
if tx.send(Ok(chunk)).await.is_err() {
debug!("Receiver dropped, stopping stream");
return accumulated_usage;
@@ -552,12 +519,7 @@ impl AnthropicProvider {
text.len(),
text
);
let chunk = CompletionChunk {
content: text,
finished: false,
usage: None,
tool_calls: None,
};
let chunk = make_text_chunk(text);
if tx.send(Ok(chunk)).await.is_err() {
debug!("Receiver dropped, stopping stream");
return accumulated_usage;
@@ -612,12 +574,7 @@ impl AnthropicProvider {
// Send the complete tool call
if !current_tool_calls.is_empty() {
let chunk = CompletionChunk {
content: String::new(),
finished: false,
usage: None,
tool_calls: Some(current_tool_calls.clone()),
};
let chunk = make_tool_chunk(current_tool_calls.clone());
if tx.send(Ok(chunk)).await.is_err() {
debug!("Receiver dropped, stopping stream");
return accumulated_usage;
@@ -629,16 +586,7 @@ impl AnthropicProvider {
"message_stop" => {
debug!("Received message stop event");
message_stopped = true;
let final_chunk = CompletionChunk {
content: String::new(),
finished: true,
usage: accumulated_usage.clone(),
tool_calls: if current_tool_calls.is_empty() {
None
} else {
Some(current_tool_calls.clone())
},
};
let final_chunk = make_final_chunk(current_tool_calls.clone(), accumulated_usage.clone());
if tx.send(Ok(final_chunk)).await.is_err() {
debug!("Receiver dropped, stopping stream");
}
@@ -682,16 +630,7 @@ impl AnthropicProvider {
}
// Send final chunk if we haven't already
let final_chunk = CompletionChunk {
content: String::new(),
finished: true,
usage: accumulated_usage.clone(),
tool_calls: if current_tool_calls.is_empty() {
None
} else {
Some(current_tool_calls)
},
};
let final_chunk = make_final_chunk(current_tool_calls, accumulated_usage.clone());
let _ = tx.send(Ok(final_chunk)).await;
accumulated_usage
}

View File

@@ -58,7 +58,7 @@
use anyhow::{anyhow, Result};
use bytes::Bytes;
use std::collections::HashMap;
use crate::streaming::{decode_utf8_streaming, is_incomplete_json_error, make_final_chunk};
use futures_util::stream::StreamExt;
use reqwest::{Client, RequestBuilder};
use serde::{Deserialize, Serialize};
@@ -66,6 +66,7 @@ use std::time::Duration;
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use tracing::{debug, error, warn};
use std::collections::HashMap;
use crate::{
CompletionChunk, CompletionRequest, CompletionResponse, CompletionStream, LLMProvider, Message,
@@ -120,53 +121,6 @@ fn finalize_tool_calls(accumulators: HashMap<usize, ToolCallAccumulator>) -> Vec
.collect()
}
/// Try to decode bytes as UTF-8, handling incomplete sequences at the end.
/// Returns the decoded string and leaves any incomplete bytes in the buffer.
// NOTE(review): local copy of the helper that also exists in the shared
// `streaming` module — presumably superseded by
// `crate::streaming::decode_utf8_streaming`; keep in sync or delete to
// avoid divergence.
fn decode_utf8_streaming(byte_buffer: &mut Vec<u8>) -> Option<String> {
    match std::str::from_utf8(byte_buffer) {
        Ok(s) => {
            // Entire buffer is valid UTF-8: return it all and empty the buffer.
            let result = s.to_string();
            byte_buffer.clear();
            Some(result)
        }
        Err(e) => {
            // `valid_up_to` marks the end of the longest valid UTF-8 prefix;
            // everything past it stays buffered for the next network chunk.
            let valid_up_to = e.valid_up_to();
            if valid_up_to > 0 {
                let valid_bytes: Vec<u8> = byte_buffer.drain(..valid_up_to).collect();
                // Safe: we just validated these bytes
                Some(String::from_utf8(valid_bytes).unwrap())
            } else {
                None // No valid UTF-8 yet, wait for more bytes
            }
        }
    }
}
/// Check if a JSON parse error indicates incomplete data (vs. malformed JSON).
// Heuristic only: scans serde's lowercased error text for end-of-input
// markers, and independently checks whether the payload lacks a closing
// `}`/`]` terminator. Either signal classifies the data as "incomplete".
// NOTE(review): local copy — the shared `streaming` module has the same
// helper; keep in sync or delete.
fn is_incomplete_json_error(error: &serde_json::Error, data: &str) -> bool {
    let msg = error.to_string().to_lowercase();
    let looks_incomplete = msg.contains("eof")
        || msg.contains("unterminated")
        || msg.contains("unexpected end")
        || msg.contains("trailing");
    // A complete JSON object/array ends with '}' or ']'; anything else is
    // treated as still-arriving data.
    let missing_terminator = !data.trim_end().ends_with('}') && !data.trim_end().ends_with(']');
    looks_incomplete || missing_terminator
}
/// Create a final completion chunk with tool calls and usage.
// An empty tool-call vector is normalized to `None` so downstream consumers
// never observe `Some(vec![])` on the terminal chunk.
// NOTE(review): local copy — the shared `streaming` module has the same
// helper; keep in sync or delete.
fn make_final_chunk(tool_calls: Vec<ToolCall>, usage: Option<Usage>) -> CompletionChunk {
    CompletionChunk {
        content: String::new(),
        finished: true,
        usage,
        tool_calls: if tool_calls.is_empty() {
            None
        } else {
            Some(tool_calls)
        },
    }
}
const DEFAULT_CLIENT_ID: &str = "databricks-cli";
const DEFAULT_REDIRECT_URL: &str = "http://localhost:8020";
const DEFAULT_SCOPES: &[&str] = &["all-apis", "offline_access"];

View File

@@ -1,3 +1,6 @@
mod streaming;
pub use streaming::{decode_utf8_streaming, is_incomplete_json_error, make_final_chunk, make_text_chunk, make_tool_chunk};
use anyhow::Result;
use rand::Rng;
use serde::{Deserialize, Serialize};

View File

@@ -0,0 +1,85 @@
//! Shared utilities for streaming SSE response parsing.
//!
//! This module provides common helpers used by multiple LLM providers
//! for handling Server-Sent Events (SSE) streaming responses.
use crate::{CompletionChunk, ToolCall, Usage};
// ─────────────────────────────────────────────────────────────────────────────
// UTF-8 Streaming
// ─────────────────────────────────────────────────────────────────────────────
/// Try to decode bytes as UTF-8, handling incomplete sequences at the end.
///
/// Returns the longest valid UTF-8 prefix of `byte_buffer` as an owned
/// `String` and removes those bytes from the buffer; any trailing bytes that
/// do not yet form a complete sequence stay buffered so the caller can retry
/// once more bytes arrive.
///
/// Returns `None` when no valid UTF-8 prefix exists yet.
/// NOTE(review): a malformed (not merely truncated) byte at the buffer start
/// also yields `None` on every call; callers currently `continue` past such
/// chunks — confirm garbage bytes cannot occur on these SSE streams.
pub fn decode_utf8_streaming(byte_buffer: &mut Vec<u8>) -> Option<String> {
    match std::str::from_utf8(byte_buffer) {
        Ok(s) => {
            // Whole buffer is valid: hand it back and reset the buffer.
            let result = s.to_string();
            byte_buffer.clear();
            Some(result)
        }
        Err(e) => {
            let valid_up_to = e.valid_up_to();
            if valid_up_to > 0 {
                // Decode the validated prefix directly from the buffer —
                // avoids the intermediate Vec the old drain().collect()
                // form allocated on every partial chunk.
                let result = std::str::from_utf8(&byte_buffer[..valid_up_to])
                    .expect("prefix up to valid_up_to is valid UTF-8")
                    .to_string();
                byte_buffer.drain(..valid_up_to);
                Some(result)
            } else {
                None // No valid UTF-8 yet, wait for more bytes
            }
        }
    }
}
// ─────────────────────────────────────────────────────────────────────────────
// JSON Error Detection
// ─────────────────────────────────────────────────────────────────────────────
/// Check if a JSON parse error indicates incomplete data (vs. malformed JSON).
///
/// Heuristic: the lowercased serde error message is scanned for the markers
/// serde_json emits when input ends prematurely, and the payload itself is
/// checked for a closing `}`/`]` terminator. Either signal classifies the
/// data as "incomplete — wait for more bytes".
pub fn is_incomplete_json_error(error: &serde_json::Error, data: &str) -> bool {
    const INCOMPLETE_MARKERS: [&str; 4] = ["eof", "unterminated", "unexpected end", "trailing"];
    let message = error.to_string().to_lowercase();
    let looks_incomplete = INCOMPLETE_MARKERS.iter().any(|m| message.contains(m));
    let trimmed = data.trim_end();
    let has_terminator = trimmed.ends_with('}') || trimmed.ends_with(']');
    looks_incomplete || !has_terminator
}
// ─────────────────────────────────────────────────────────────────────────────
// Completion Chunk Helpers
// ─────────────────────────────────────────────────────────────────────────────
/// Create a final completion chunk with tool calls and usage.
///
/// An empty `tool_calls` vector is normalized to `None` so downstream
/// consumers never observe `Some(vec![])` on the terminal chunk.
pub fn make_final_chunk(tool_calls: Vec<ToolCall>, usage: Option<Usage>) -> CompletionChunk {
    let tool_calls = (!tool_calls.is_empty()).then_some(tool_calls);
    CompletionChunk {
        content: String::new(),
        finished: true,
        usage,
        tool_calls,
    }
}
/// Create a text content chunk (not finished).
///
/// Carries only streamed text: no usage, no tool calls, stream still open.
pub fn make_text_chunk(content: String) -> CompletionChunk {
    CompletionChunk {
        finished: false,
        usage: None,
        tool_calls: None,
        content,
    }
}
/// Create a tool calls chunk (not finished).
///
/// NOTE(review): unlike `make_final_chunk`, an empty vector is passed through
/// as `Some(vec![])` — call sites guard with `!is_empty()` before calling.
pub fn make_tool_chunk(tool_calls: Vec<ToolCall>) -> CompletionChunk {
    CompletionChunk {
        finished: false,
        usage: None,
        content: String::new(),
        tool_calls: Some(tool_calls),
    }
}