style: cargo fmt — fix CI formatting failures

Pre-existing formatting issues in anthropic.rs and other affected files were surfaced by the CI `cargo fmt` check; this commit applies the formatter's output.
No functional changes.
This commit is contained in:
YeonGyu-Kim
2026-04-08 11:21:13 +09:00
parent 000aed4188
commit c7b3296ef6
11 changed files with 251 additions and 202 deletions

View File

@@ -515,7 +515,10 @@ impl AnthropicClient {
input_tokens: u32,
}
let request_url = format!("{}/v1/messages/count_tokens", self.base_url.trim_end_matches('/'));
let request_url = format!(
"{}/v1/messages/count_tokens",
self.base_url.trim_end_matches('/')
);
let mut request_body = self.request_profile.render_json_body(request)?;
strip_unsupported_beta_body_fields(&mut request_body);
let response = self
@@ -528,12 +531,7 @@ impl AnthropicClient {
let response = expect_success(response).await?;
let body = response.text().await.map_err(ApiError::from)?;
let parsed = serde_json::from_str::<CountTokensResponse>(&body).map_err(|error| {
ApiError::json_deserialize(
"Anthropic count_tokens",
&request.model,
&body,
error,
)
ApiError::json_deserialize("Anthropic count_tokens", &request.model, &body, error)
})?;
Ok(parsed.input_tokens)
}
@@ -597,7 +595,9 @@ fn jitter_for_base(base: Duration) -> Duration {
let tick = JITTER_COUNTER.fetch_add(1, Ordering::Relaxed);
// splitmix64 finalizer — mixes the low bits so large bases still see
// jitter across their full range instead of being clamped to subsec nanos.
let mut mixed = raw_nanos.wrapping_add(tick).wrapping_add(0x9E37_79B9_7F4A_7C15);
let mut mixed = raw_nanos
.wrapping_add(tick)
.wrapping_add(0x9E37_79B9_7F4A_7C15);
mixed = (mixed ^ (mixed >> 30)).wrapping_mul(0xBF58_476D_1CE4_E5B9);
mixed = (mixed ^ (mixed >> 27)).wrapping_mul(0x94D0_49BB_1331_11EB);
mixed ^= mixed >> 31;
@@ -1268,7 +1268,7 @@ mod tests {
tools: None,
tool_choice: None,
stream: false,
..Default::default()
..Default::default()
};
assert!(request.with_streaming().stream);
@@ -1464,10 +1464,14 @@ mod tests {
// temperature is kept (Anthropic supports it)
assert_eq!(body["temperature"], serde_json::json!(0.7));
// frequency_penalty and presence_penalty are removed
assert!(body.get("frequency_penalty").is_none(),
"frequency_penalty must be stripped for Anthropic");
assert!(body.get("presence_penalty").is_none(),
"presence_penalty must be stripped for Anthropic");
assert!(
body.get("frequency_penalty").is_none(),
"frequency_penalty must be stripped for Anthropic"
);
assert!(
body.get("presence_penalty").is_none(),
"presence_penalty must be stripped for Anthropic"
);
// stop is renamed to stop_sequences
assert!(body.get("stop").is_none(), "stop must be renamed");
assert_eq!(body["stop_sequences"], serde_json::json!(["\n"]));
@@ -1484,8 +1488,10 @@ mod tests {
super::strip_unsupported_beta_body_fields(&mut body);
assert!(body.get("stop").is_none());
assert!(body.get("stop_sequences").is_none(),
"empty stop should not produce stop_sequences");
assert!(
body.get("stop_sequences").is_none(),
"empty stop should not produce stop_sequences"
);
}
#[test]
@@ -1499,7 +1505,7 @@ mod tests {
tools: None,
tool_choice: None,
stream: false,
..Default::default()
..Default::default()
};
let mut rendered = client

View File

@@ -135,12 +135,7 @@ impl OpenAiCompatClient {
let request_id = request_id_from_headers(response.headers());
let body = response.text().await.map_err(ApiError::from)?;
let payload = serde_json::from_str::<ChatCompletionResponse>(&body).map_err(|error| {
ApiError::json_deserialize(
self.config.provider_name,
&request.model,
&body,
error,
)
ApiError::json_deserialize(self.config.provider_name, &request.model, &body, error)
})?;
let mut normalized = normalize_response(&request.model, payload)?;
if normalized.request_id.is_none() {
@@ -160,10 +155,7 @@ impl OpenAiCompatClient {
Ok(MessageStream {
request_id: request_id_from_headers(response.headers()),
response,
parser: OpenAiSseParser::with_context(
self.config.provider_name,
request.model.clone(),
),
parser: OpenAiSseParser::with_context(self.config.provider_name, request.model.clone()),
pending: VecDeque::new(),
done: false,
state: StreamState::new(request.model.clone()),
@@ -253,7 +245,9 @@ fn jitter_for_base(base: Duration) -> Duration {
.map(|elapsed| u64::try_from(elapsed.as_nanos()).unwrap_or(u64::MAX))
.unwrap_or(0);
let tick = JITTER_COUNTER.fetch_add(1, Ordering::Relaxed);
let mut mixed = raw_nanos.wrapping_add(tick).wrapping_add(0x9E37_79B9_7F4A_7C15);
let mut mixed = raw_nanos
.wrapping_add(tick)
.wrapping_add(0x9E37_79B9_7F4A_7C15);
mixed = (mixed ^ (mixed >> 30)).wrapping_mul(0xBF58_476D_1CE4_E5B9);
mixed = (mixed ^ (mixed >> 27)).wrapping_mul(0x94D0_49BB_1331_11EB);
mixed ^= mixed >> 31;
@@ -1110,7 +1104,7 @@ mod tests {
tools: None,
tool_choice: None,
stream: true,
..Default::default()
..Default::default()
},
OpenAiCompatConfig::openai(),
);
@@ -1129,7 +1123,7 @@ mod tests {
tools: None,
tool_choice: None,
stream: true,
..Default::default()
..Default::default()
},
OpenAiCompatConfig::xai(),
);
@@ -1240,8 +1234,14 @@ mod tests {
..Default::default()
};
let payload = build_chat_completion_request(&request, OpenAiCompatConfig::openai());
assert!(payload.get("temperature").is_none(), "reasoning model should strip temperature");
assert!(payload.get("top_p").is_none(), "reasoning model should strip top_p");
assert!(
payload.get("temperature").is_none(),
"reasoning model should strip temperature"
);
assert!(
payload.get("top_p").is_none(),
"reasoning model should strip top_p"
);
assert!(payload.get("frequency_penalty").is_none());
assert!(payload.get("presence_penalty").is_none());
// stop is safe for all providers
@@ -1269,7 +1269,10 @@ mod tests {
..Default::default()
};
let payload = build_chat_completion_request(&request, OpenAiCompatConfig::openai());
assert!(payload.get("temperature").is_none(), "temperature should be absent");
assert!(
payload.get("temperature").is_none(),
"temperature should be absent"
);
assert!(payload.get("top_p").is_none(), "top_p should be absent");
assert!(payload.get("frequency_penalty").is_none());
assert!(payload.get("presence_penalty").is_none());