mirror of
https://github.com/instructkr/claw-code.git
synced 2026-04-03 18:04:49 +08:00
fix: update prompt tests for post-plugins-merge format
This commit is contained in:
File diff suppressed because it is too large
Load Diff
@@ -4,7 +4,10 @@ use std::time::Duration;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ApiError {
|
||||
MissingApiKey,
|
||||
MissingCredentials {
|
||||
provider: &'static str,
|
||||
env_vars: &'static [&'static str],
|
||||
},
|
||||
ExpiredOAuthToken,
|
||||
Auth(String),
|
||||
InvalidApiKeyEnv(VarError),
|
||||
@@ -30,13 +33,21 @@ pub enum ApiError {
|
||||
}
|
||||
|
||||
impl ApiError {
|
||||
#[must_use]
|
||||
pub const fn missing_credentials(
|
||||
provider: &'static str,
|
||||
env_vars: &'static [&'static str],
|
||||
) -> Self {
|
||||
Self::MissingCredentials { provider, env_vars }
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn is_retryable(&self) -> bool {
|
||||
match self {
|
||||
Self::Http(error) => error.is_connect() || error.is_timeout() || error.is_request(),
|
||||
Self::Api { retryable, .. } => *retryable,
|
||||
Self::RetriesExhausted { last_error, .. } => last_error.is_retryable(),
|
||||
Self::MissingApiKey
|
||||
Self::MissingCredentials { .. }
|
||||
| Self::ExpiredOAuthToken
|
||||
| Self::Auth(_)
|
||||
| Self::InvalidApiKeyEnv(_)
|
||||
@@ -51,12 +62,11 @@ impl ApiError {
|
||||
impl Display for ApiError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::MissingApiKey => {
|
||||
write!(
|
||||
f,
|
||||
"ANTHROPIC_AUTH_TOKEN or ANTHROPIC_API_KEY is not set; export one before calling the Anthropic API"
|
||||
)
|
||||
}
|
||||
Self::MissingCredentials { provider, env_vars } => write!(
|
||||
f,
|
||||
"missing {provider} credentials; export {} before calling the {provider} API",
|
||||
env_vars.join(" or ")
|
||||
),
|
||||
Self::ExpiredOAuthToken => {
|
||||
write!(
|
||||
f,
|
||||
@@ -65,10 +75,7 @@ impl Display for ApiError {
|
||||
}
|
||||
Self::Auth(message) => write!(f, "auth error: {message}"),
|
||||
Self::InvalidApiKeyEnv(error) => {
|
||||
write!(
|
||||
f,
|
||||
"failed to read ANTHROPIC_AUTH_TOKEN / ANTHROPIC_API_KEY: {error}"
|
||||
)
|
||||
write!(f, "failed to read credential environment variable: {error}")
|
||||
}
|
||||
Self::Http(error) => write!(f, "http error: {error}"),
|
||||
Self::Io(error) => write!(f, "io error: {error}"),
|
||||
@@ -81,20 +88,14 @@ impl Display for ApiError {
|
||||
..
|
||||
} => match (error_type, message) {
|
||||
(Some(error_type), Some(message)) => {
|
||||
write!(
|
||||
f,
|
||||
"anthropic api returned {status} ({error_type}): {message}"
|
||||
)
|
||||
write!(f, "api returned {status} ({error_type}): {message}")
|
||||
}
|
||||
_ => write!(f, "anthropic api returned {status}: {body}"),
|
||||
_ => write!(f, "api returned {status}: {body}"),
|
||||
},
|
||||
Self::RetriesExhausted {
|
||||
attempts,
|
||||
last_error,
|
||||
} => write!(
|
||||
f,
|
||||
"anthropic api failed after {attempts} attempts: {last_error}"
|
||||
),
|
||||
} => write!(f, "api failed after {attempts} attempts: {last_error}"),
|
||||
Self::InvalidSseFrame(message) => write!(f, "invalid sse frame: {message}"),
|
||||
Self::BackoffOverflow {
|
||||
attempt,
|
||||
|
||||
@@ -1,13 +1,19 @@
|
||||
mod client;
|
||||
mod error;
|
||||
mod providers;
|
||||
mod sse;
|
||||
mod types;
|
||||
|
||||
pub use client::{
|
||||
oauth_token_is_expired, read_base_url, resolve_saved_oauth_token, resolve_startup_auth_source,
|
||||
ApiHttpClient, AuthSource, MessageStream, OAuthTokenSet,
|
||||
oauth_token_is_expired, read_base_url, read_xai_base_url, resolve_saved_oauth_token,
|
||||
resolve_startup_auth_source, MessageStream, OAuthTokenSet, ProviderClient,
|
||||
};
|
||||
pub use error::ApiError;
|
||||
pub use providers::anthropic::{AnthropicClient, AuthSource};
|
||||
pub use providers::openai_compat::{OpenAiCompatClient, OpenAiCompatConfig};
|
||||
pub use providers::{
|
||||
detect_provider_kind, max_tokens_for_model, resolve_model_alias, ProviderKind,
|
||||
};
|
||||
pub use sse::{parse_frame, SseParser};
|
||||
pub use types::{
|
||||
ContentBlockDelta, ContentBlockDeltaEvent, ContentBlockStartEvent, ContentBlockStopEvent,
|
||||
|
||||
1038
rust/crates/api/src/providers/anthropic.rs
Normal file
1038
rust/crates/api/src/providers/anthropic.rs
Normal file
File diff suppressed because it is too large
Load Diff
216
rust/crates/api/src/providers/mod.rs
Normal file
216
rust/crates/api/src/providers/mod.rs
Normal file
@@ -0,0 +1,216 @@
|
||||
use std::future::Future;
|
||||
use std::pin::Pin;
|
||||
|
||||
use crate::error::ApiError;
|
||||
use crate::types::{MessageRequest, MessageResponse};
|
||||
|
||||
pub mod anthropic;
|
||||
pub mod openai_compat;
|
||||
|
||||
pub type ProviderFuture<'a, T> = Pin<Box<dyn Future<Output = Result<T, ApiError>> + Send + 'a>>;
|
||||
|
||||
pub trait Provider {
|
||||
type Stream;
|
||||
|
||||
fn send_message<'a>(
|
||||
&'a self,
|
||||
request: &'a MessageRequest,
|
||||
) -> ProviderFuture<'a, MessageResponse>;
|
||||
|
||||
fn stream_message<'a>(
|
||||
&'a self,
|
||||
request: &'a MessageRequest,
|
||||
) -> ProviderFuture<'a, Self::Stream>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ProviderKind {
|
||||
Anthropic,
|
||||
Xai,
|
||||
OpenAi,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct ProviderMetadata {
|
||||
pub provider: ProviderKind,
|
||||
pub auth_env: &'static str,
|
||||
pub base_url_env: &'static str,
|
||||
pub default_base_url: &'static str,
|
||||
}
|
||||
|
||||
const MODEL_REGISTRY: &[(&str, ProviderMetadata)] = &[
|
||||
(
|
||||
"opus",
|
||||
ProviderMetadata {
|
||||
provider: ProviderKind::Anthropic,
|
||||
auth_env: "ANTHROPIC_API_KEY",
|
||||
base_url_env: "ANTHROPIC_BASE_URL",
|
||||
default_base_url: anthropic::DEFAULT_BASE_URL,
|
||||
},
|
||||
),
|
||||
(
|
||||
"sonnet",
|
||||
ProviderMetadata {
|
||||
provider: ProviderKind::Anthropic,
|
||||
auth_env: "ANTHROPIC_API_KEY",
|
||||
base_url_env: "ANTHROPIC_BASE_URL",
|
||||
default_base_url: anthropic::DEFAULT_BASE_URL,
|
||||
},
|
||||
),
|
||||
(
|
||||
"haiku",
|
||||
ProviderMetadata {
|
||||
provider: ProviderKind::Anthropic,
|
||||
auth_env: "ANTHROPIC_API_KEY",
|
||||
base_url_env: "ANTHROPIC_BASE_URL",
|
||||
default_base_url: anthropic::DEFAULT_BASE_URL,
|
||||
},
|
||||
),
|
||||
(
|
||||
"grok",
|
||||
ProviderMetadata {
|
||||
provider: ProviderKind::Xai,
|
||||
auth_env: "XAI_API_KEY",
|
||||
base_url_env: "XAI_BASE_URL",
|
||||
default_base_url: openai_compat::DEFAULT_XAI_BASE_URL,
|
||||
},
|
||||
),
|
||||
(
|
||||
"grok-3",
|
||||
ProviderMetadata {
|
||||
provider: ProviderKind::Xai,
|
||||
auth_env: "XAI_API_KEY",
|
||||
base_url_env: "XAI_BASE_URL",
|
||||
default_base_url: openai_compat::DEFAULT_XAI_BASE_URL,
|
||||
},
|
||||
),
|
||||
(
|
||||
"grok-mini",
|
||||
ProviderMetadata {
|
||||
provider: ProviderKind::Xai,
|
||||
auth_env: "XAI_API_KEY",
|
||||
base_url_env: "XAI_BASE_URL",
|
||||
default_base_url: openai_compat::DEFAULT_XAI_BASE_URL,
|
||||
},
|
||||
),
|
||||
(
|
||||
"grok-3-mini",
|
||||
ProviderMetadata {
|
||||
provider: ProviderKind::Xai,
|
||||
auth_env: "XAI_API_KEY",
|
||||
base_url_env: "XAI_BASE_URL",
|
||||
default_base_url: openai_compat::DEFAULT_XAI_BASE_URL,
|
||||
},
|
||||
),
|
||||
(
|
||||
"grok-2",
|
||||
ProviderMetadata {
|
||||
provider: ProviderKind::Xai,
|
||||
auth_env: "XAI_API_KEY",
|
||||
base_url_env: "XAI_BASE_URL",
|
||||
default_base_url: openai_compat::DEFAULT_XAI_BASE_URL,
|
||||
},
|
||||
),
|
||||
];
|
||||
|
||||
#[must_use]
|
||||
pub fn resolve_model_alias(model: &str) -> String {
|
||||
let trimmed = model.trim();
|
||||
let lower = trimmed.to_ascii_lowercase();
|
||||
MODEL_REGISTRY
|
||||
.iter()
|
||||
.find_map(|(alias, metadata)| {
|
||||
(*alias == lower).then_some(match metadata.provider {
|
||||
ProviderKind::Anthropic => match *alias {
|
||||
"opus" => "claude-opus-4-6",
|
||||
"sonnet" => "claude-sonnet-4-6",
|
||||
"haiku" => "claude-haiku-4-5-20251213",
|
||||
_ => trimmed,
|
||||
},
|
||||
ProviderKind::Xai => match *alias {
|
||||
"grok" | "grok-3" => "grok-3",
|
||||
"grok-mini" | "grok-3-mini" => "grok-3-mini",
|
||||
"grok-2" => "grok-2",
|
||||
_ => trimmed,
|
||||
},
|
||||
ProviderKind::OpenAi => trimmed,
|
||||
})
|
||||
})
|
||||
.map_or_else(|| trimmed.to_string(), ToOwned::to_owned)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn metadata_for_model(model: &str) -> Option<ProviderMetadata> {
|
||||
let canonical = resolve_model_alias(model);
|
||||
if canonical.starts_with("claude") {
|
||||
return Some(ProviderMetadata {
|
||||
provider: ProviderKind::Anthropic,
|
||||
auth_env: "ANTHROPIC_API_KEY",
|
||||
base_url_env: "ANTHROPIC_BASE_URL",
|
||||
default_base_url: anthropic::DEFAULT_BASE_URL,
|
||||
});
|
||||
}
|
||||
if canonical.starts_with("grok") {
|
||||
return Some(ProviderMetadata {
|
||||
provider: ProviderKind::Xai,
|
||||
auth_env: "XAI_API_KEY",
|
||||
base_url_env: "XAI_BASE_URL",
|
||||
default_base_url: openai_compat::DEFAULT_XAI_BASE_URL,
|
||||
});
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn detect_provider_kind(model: &str) -> ProviderKind {
|
||||
if let Some(metadata) = metadata_for_model(model) {
|
||||
return metadata.provider;
|
||||
}
|
||||
if anthropic::has_auth_from_env_or_saved().unwrap_or(false) {
|
||||
return ProviderKind::Anthropic;
|
||||
}
|
||||
if openai_compat::has_api_key("OPENAI_API_KEY") {
|
||||
return ProviderKind::OpenAi;
|
||||
}
|
||||
if openai_compat::has_api_key("XAI_API_KEY") {
|
||||
return ProviderKind::Xai;
|
||||
}
|
||||
ProviderKind::Anthropic
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn max_tokens_for_model(model: &str) -> u32 {
|
||||
let canonical = resolve_model_alias(model);
|
||||
if canonical.contains("opus") {
|
||||
32_000
|
||||
} else {
|
||||
64_000
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{detect_provider_kind, max_tokens_for_model, resolve_model_alias, ProviderKind};
|
||||
|
||||
#[test]
|
||||
fn resolves_grok_aliases() {
|
||||
assert_eq!(resolve_model_alias("grok"), "grok-3");
|
||||
assert_eq!(resolve_model_alias("grok-mini"), "grok-3-mini");
|
||||
assert_eq!(resolve_model_alias("grok-2"), "grok-2");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn detects_provider_from_model_name_first() {
|
||||
assert_eq!(detect_provider_kind("grok"), ProviderKind::Xai);
|
||||
assert_eq!(
|
||||
detect_provider_kind("claude-sonnet-4-6"),
|
||||
ProviderKind::Anthropic
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn keeps_existing_max_token_heuristic() {
|
||||
assert_eq!(max_tokens_for_model("opus"), 32_000);
|
||||
assert_eq!(max_tokens_for_model("grok-3"), 64_000);
|
||||
}
|
||||
}
|
||||
1050
rust/crates/api/src/providers/openai_compat.rs
Normal file
1050
rust/crates/api/src/providers/openai_compat.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -3,9 +3,9 @@ use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use api::{
|
||||
ApiHttpClient, ApiError, ContentBlockDelta, ContentBlockDeltaEvent, ContentBlockStartEvent,
|
||||
InputContentBlock, InputMessage, MessageDeltaEvent, MessageRequest, OutputContentBlock,
|
||||
StreamEvent, ToolChoice, ToolDefinition,
|
||||
AnthropicClient, ApiError, AuthSource, ContentBlockDelta, ContentBlockDeltaEvent,
|
||||
ContentBlockStartEvent, InputContentBlock, InputMessage, MessageDeltaEvent, MessageRequest,
|
||||
OutputContentBlock, ProviderClient, StreamEvent, ToolChoice, ToolDefinition,
|
||||
};
|
||||
use serde_json::json;
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt};
|
||||
@@ -34,7 +34,7 @@ async fn send_message_posts_json_and_parses_response() {
|
||||
)
|
||||
.await;
|
||||
|
||||
let client = ApiHttpClient::new("test-key")
|
||||
let client = AnthropicClient::new("test-key")
|
||||
.with_auth_token(Some("proxy-token".to_string()))
|
||||
.with_base_url(server.base_url());
|
||||
let response = client
|
||||
@@ -75,48 +75,6 @@ async fn send_message_posts_json_and_parses_response() {
|
||||
assert_eq!(body["tool_choice"]["type"], json!("auto"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn send_message_parses_response_with_thinking_blocks() {
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
let body = concat!(
|
||||
"{",
|
||||
"\"id\":\"msg_thinking\",",
|
||||
"\"type\":\"message\",",
|
||||
"\"role\":\"assistant\",",
|
||||
"\"content\":[",
|
||||
"{\"type\":\"thinking\",\"thinking\":\"step 1\",\"signature\":\"sig_123\"},",
|
||||
"{\"type\":\"text\",\"text\":\"Final answer\"}",
|
||||
"],",
|
||||
"\"model\":\"claude-3-7-sonnet-latest\",",
|
||||
"\"stop_reason\":\"end_turn\",",
|
||||
"\"stop_sequence\":null,",
|
||||
"\"usage\":{\"input_tokens\":12,\"output_tokens\":4}",
|
||||
"}"
|
||||
);
|
||||
let server = spawn_server(
|
||||
state,
|
||||
vec![http_response("200 OK", "application/json", body)],
|
||||
)
|
||||
.await;
|
||||
|
||||
let client = ApiHttpClient::new("test-key").with_base_url(server.base_url());
|
||||
let response = client
|
||||
.send_message(&sample_request(false))
|
||||
.await
|
||||
.expect("request should succeed");
|
||||
|
||||
assert_eq!(response.content.len(), 2);
|
||||
assert!(matches!(
|
||||
&response.content[0],
|
||||
OutputContentBlock::Thinking { thinking, signature }
|
||||
if thinking == "step 1" && signature.as_deref() == Some("sig_123")
|
||||
));
|
||||
assert!(matches!(
|
||||
&response.content[1],
|
||||
OutputContentBlock::Text { text } if text == "Final answer"
|
||||
));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn stream_message_parses_sse_events_with_tool_use() {
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
@@ -146,7 +104,7 @@ async fn stream_message_parses_sse_events_with_tool_use() {
|
||||
)
|
||||
.await;
|
||||
|
||||
let client = ApiHttpClient::new("test-key")
|
||||
let client = AnthropicClient::new("test-key")
|
||||
.with_auth_token(Some("proxy-token".to_string()))
|
||||
.with_base_url(server.base_url());
|
||||
let mut stream = client
|
||||
@@ -204,85 +162,6 @@ async fn stream_message_parses_sse_events_with_tool_use() {
|
||||
assert!(request.body.contains("\"stream\":true"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn stream_message_parses_sse_events_with_thinking_blocks() {
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
let sse = concat!(
|
||||
"event: message_start\n",
|
||||
"data: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_stream_thinking\",\"type\":\"message\",\"role\":\"assistant\",\"content\":[],\"model\":\"claude-3-7-sonnet-latest\",\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":8,\"output_tokens\":0}}}\n\n",
|
||||
"event: content_block_start\n",
|
||||
"data: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"thinking\",\"thinking\":\"\"}}\n\n",
|
||||
"event: content_block_delta\n",
|
||||
"data: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"thinking_delta\",\"thinking\":\"step 1\"}}\n\n",
|
||||
"event: content_block_delta\n",
|
||||
"data: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"signature_delta\",\"signature\":\"sig_123\"}}\n\n",
|
||||
"event: content_block_stop\n",
|
||||
"data: {\"type\":\"content_block_stop\",\"index\":0}\n\n",
|
||||
"event: content_block_start\n",
|
||||
"data: {\"type\":\"content_block_start\",\"index\":1,\"content_block\":{\"type\":\"text\",\"text\":\"Final answer\"}}\n\n",
|
||||
"event: content_block_stop\n",
|
||||
"data: {\"type\":\"content_block_stop\",\"index\":1}\n\n",
|
||||
"event: message_delta\n",
|
||||
"data: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":8,\"output_tokens\":1}}\n\n",
|
||||
"event: message_stop\n",
|
||||
"data: {\"type\":\"message_stop\"}\n\n",
|
||||
"data: [DONE]\n\n"
|
||||
);
|
||||
let server = spawn_server(
|
||||
state,
|
||||
vec![http_response("200 OK", "text/event-stream", sse)],
|
||||
)
|
||||
.await;
|
||||
|
||||
let client = ApiHttpClient::new("test-key").with_base_url(server.base_url());
|
||||
let mut stream = client
|
||||
.stream_message(&sample_request(false))
|
||||
.await
|
||||
.expect("stream should start");
|
||||
|
||||
let mut events = Vec::new();
|
||||
while let Some(event) = stream
|
||||
.next_event()
|
||||
.await
|
||||
.expect("stream event should parse")
|
||||
{
|
||||
events.push(event);
|
||||
}
|
||||
|
||||
assert_eq!(events.len(), 9);
|
||||
assert!(matches!(
|
||||
&events[1],
|
||||
StreamEvent::ContentBlockStart(ContentBlockStartEvent {
|
||||
content_block: OutputContentBlock::Thinking { thinking, signature },
|
||||
..
|
||||
}) if thinking.is_empty() && signature.is_none()
|
||||
));
|
||||
assert!(matches!(
|
||||
&events[2],
|
||||
StreamEvent::ContentBlockDelta(ContentBlockDeltaEvent {
|
||||
delta: ContentBlockDelta::ThinkingDelta { thinking },
|
||||
..
|
||||
}) if thinking == "step 1"
|
||||
));
|
||||
assert!(matches!(
|
||||
&events[3],
|
||||
StreamEvent::ContentBlockDelta(ContentBlockDeltaEvent {
|
||||
delta: ContentBlockDelta::SignatureDelta { signature },
|
||||
..
|
||||
}) if signature == "sig_123"
|
||||
));
|
||||
assert!(matches!(
|
||||
&events[5],
|
||||
StreamEvent::ContentBlockStart(ContentBlockStartEvent {
|
||||
content_block: OutputContentBlock::Text { text },
|
||||
..
|
||||
}) if text == "Final answer"
|
||||
));
|
||||
assert!(matches!(events[6], StreamEvent::ContentBlockStop(_)));
|
||||
assert!(matches!(events[7], StreamEvent::MessageDelta(_)));
|
||||
assert!(matches!(events[8], StreamEvent::MessageStop(_)));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn retries_retryable_failures_before_succeeding() {
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
@@ -303,7 +182,7 @@ async fn retries_retryable_failures_before_succeeding() {
|
||||
)
|
||||
.await;
|
||||
|
||||
let client = ApiHttpClient::new("test-key")
|
||||
let client = AnthropicClient::new("test-key")
|
||||
.with_base_url(server.base_url())
|
||||
.with_retry_policy(2, Duration::from_millis(1), Duration::from_millis(2));
|
||||
|
||||
@@ -316,6 +195,47 @@ async fn retries_retryable_failures_before_succeeding() {
|
||||
assert_eq!(state.lock().await.len(), 2);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn provider_client_dispatches_anthropic_requests() {
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
let server = spawn_server(
|
||||
state.clone(),
|
||||
vec![http_response(
|
||||
"200 OK",
|
||||
"application/json",
|
||||
"{\"id\":\"msg_provider\",\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"text\",\"text\":\"Dispatched\"}],\"model\":\"claude-3-7-sonnet-latest\",\"stop_reason\":\"end_turn\",\"stop_sequence\":null,\"usage\":{\"input_tokens\":3,\"output_tokens\":2}}",
|
||||
)],
|
||||
)
|
||||
.await;
|
||||
|
||||
let client = ProviderClient::from_model_with_anthropic_auth(
|
||||
"claude-sonnet-4-6",
|
||||
Some(AuthSource::ApiKey("test-key".to_string())),
|
||||
)
|
||||
.expect("anthropic provider client should be constructed");
|
||||
let client = match client {
|
||||
ProviderClient::Anthropic(client) => {
|
||||
ProviderClient::Anthropic(client.with_base_url(server.base_url()))
|
||||
}
|
||||
other => panic!("expected anthropic provider, got {other:?}"),
|
||||
};
|
||||
|
||||
let response = client
|
||||
.send_message(&sample_request(false))
|
||||
.await
|
||||
.expect("provider-dispatched request should succeed");
|
||||
|
||||
assert_eq!(response.total_tokens(), 5);
|
||||
|
||||
let captured = state.lock().await;
|
||||
let request = captured.first().expect("server should capture request");
|
||||
assert_eq!(request.path, "/v1/messages");
|
||||
assert_eq!(
|
||||
request.headers.get("x-api-key").map(String::as_str),
|
||||
Some("test-key")
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn surfaces_retry_exhaustion_for_persistent_retryable_errors() {
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
@@ -336,7 +256,7 @@ async fn surfaces_retry_exhaustion_for_persistent_retryable_errors() {
|
||||
)
|
||||
.await;
|
||||
|
||||
let client = ApiHttpClient::new("test-key")
|
||||
let client = AnthropicClient::new("test-key")
|
||||
.with_base_url(server.base_url())
|
||||
.with_retry_policy(1, Duration::from_millis(1), Duration::from_millis(2));
|
||||
|
||||
@@ -367,7 +287,7 @@ async fn surfaces_retry_exhaustion_for_persistent_retryable_errors() {
|
||||
#[tokio::test]
|
||||
#[ignore = "requires ANTHROPIC_API_KEY and network access"]
|
||||
async fn live_stream_smoke_test() {
|
||||
let client = ApiHttpClient::from_env().expect("ANTHROPIC_API_KEY must be set");
|
||||
let client = AnthropicClient::from_env().expect("ANTHROPIC_API_KEY must be set");
|
||||
let mut stream = client
|
||||
.stream_message(&MessageRequest {
|
||||
model: std::env::var("ANTHROPIC_MODEL")
|
||||
|
||||
415
rust/crates/api/tests/openai_compat_integration.rs
Normal file
415
rust/crates/api/tests/openai_compat_integration.rs
Normal file
@@ -0,0 +1,415 @@
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::OsString;
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Mutex as StdMutex, OnceLock};
|
||||
|
||||
use api::{
|
||||
ContentBlockDelta, ContentBlockDeltaEvent, ContentBlockStartEvent, ContentBlockStopEvent,
|
||||
InputContentBlock, InputMessage, MessageRequest, OpenAiCompatClient, OpenAiCompatConfig,
|
||||
OutputContentBlock, ProviderClient, StreamEvent, ToolChoice, ToolDefinition,
|
||||
};
|
||||
use serde_json::json;
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt};
|
||||
use tokio::net::TcpListener;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
#[tokio::test]
|
||||
async fn send_message_uses_openai_compatible_endpoint_and_auth() {
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
let body = concat!(
|
||||
"{",
|
||||
"\"id\":\"chatcmpl_test\",",
|
||||
"\"model\":\"grok-3\",",
|
||||
"\"choices\":[{",
|
||||
"\"message\":{\"role\":\"assistant\",\"content\":\"Hello from Grok\",\"tool_calls\":[]},",
|
||||
"\"finish_reason\":\"stop\"",
|
||||
"}],",
|
||||
"\"usage\":{\"prompt_tokens\":11,\"completion_tokens\":5}",
|
||||
"}"
|
||||
);
|
||||
let server = spawn_server(
|
||||
state.clone(),
|
||||
vec![http_response("200 OK", "application/json", body)],
|
||||
)
|
||||
.await;
|
||||
|
||||
let client = OpenAiCompatClient::new("xai-test-key", OpenAiCompatConfig::xai())
|
||||
.with_base_url(server.base_url());
|
||||
let response = client
|
||||
.send_message(&sample_request(false))
|
||||
.await
|
||||
.expect("request should succeed");
|
||||
|
||||
assert_eq!(response.model, "grok-3");
|
||||
assert_eq!(response.total_tokens(), 16);
|
||||
assert_eq!(
|
||||
response.content,
|
||||
vec![OutputContentBlock::Text {
|
||||
text: "Hello from Grok".to_string(),
|
||||
}]
|
||||
);
|
||||
|
||||
let captured = state.lock().await;
|
||||
let request = captured.first().expect("server should capture request");
|
||||
assert_eq!(request.path, "/chat/completions");
|
||||
assert_eq!(
|
||||
request.headers.get("authorization").map(String::as_str),
|
||||
Some("Bearer xai-test-key")
|
||||
);
|
||||
let body: serde_json::Value = serde_json::from_str(&request.body).expect("json body");
|
||||
assert_eq!(body["model"], json!("grok-3"));
|
||||
assert_eq!(body["messages"][0]["role"], json!("system"));
|
||||
assert_eq!(body["tools"][0]["type"], json!("function"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn send_message_accepts_full_chat_completions_endpoint_override() {
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
let body = concat!(
|
||||
"{",
|
||||
"\"id\":\"chatcmpl_full_endpoint\",",
|
||||
"\"model\":\"grok-3\",",
|
||||
"\"choices\":[{",
|
||||
"\"message\":{\"role\":\"assistant\",\"content\":\"Endpoint override works\",\"tool_calls\":[]},",
|
||||
"\"finish_reason\":\"stop\"",
|
||||
"}],",
|
||||
"\"usage\":{\"prompt_tokens\":7,\"completion_tokens\":3}",
|
||||
"}"
|
||||
);
|
||||
let server = spawn_server(
|
||||
state.clone(),
|
||||
vec![http_response("200 OK", "application/json", body)],
|
||||
)
|
||||
.await;
|
||||
|
||||
let endpoint_url = format!("{}/chat/completions", server.base_url());
|
||||
let client = OpenAiCompatClient::new("xai-test-key", OpenAiCompatConfig::xai())
|
||||
.with_base_url(endpoint_url);
|
||||
let response = client
|
||||
.send_message(&sample_request(false))
|
||||
.await
|
||||
.expect("request should succeed");
|
||||
|
||||
assert_eq!(response.total_tokens(), 10);
|
||||
|
||||
let captured = state.lock().await;
|
||||
let request = captured.first().expect("server should capture request");
|
||||
assert_eq!(request.path, "/chat/completions");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn stream_message_normalizes_text_and_multiple_tool_calls() {
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
let sse = concat!(
|
||||
"data: {\"id\":\"chatcmpl_stream\",\"model\":\"grok-3\",\"choices\":[{\"delta\":{\"content\":\"Hello\"}}]}\n\n",
|
||||
"data: {\"id\":\"chatcmpl_stream\",\"choices\":[{\"delta\":{\"tool_calls\":[{\"index\":0,\"id\":\"call_1\",\"function\":{\"name\":\"weather\",\"arguments\":\"{\\\"city\\\":\\\"Paris\\\"}\"}},{\"index\":1,\"id\":\"call_2\",\"function\":{\"name\":\"clock\",\"arguments\":\"{\\\"zone\\\":\\\"UTC\\\"}\"}}]}}]}\n\n",
|
||||
"data: {\"id\":\"chatcmpl_stream\",\"choices\":[{\"delta\":{},\"finish_reason\":\"tool_calls\"}]}\n\n",
|
||||
"data: [DONE]\n\n"
|
||||
);
|
||||
let server = spawn_server(
|
||||
state.clone(),
|
||||
vec![http_response_with_headers(
|
||||
"200 OK",
|
||||
"text/event-stream",
|
||||
sse,
|
||||
&[("x-request-id", "req_grok_stream")],
|
||||
)],
|
||||
)
|
||||
.await;
|
||||
|
||||
let client = OpenAiCompatClient::new("xai-test-key", OpenAiCompatConfig::xai())
|
||||
.with_base_url(server.base_url());
|
||||
let mut stream = client
|
||||
.stream_message(&sample_request(false))
|
||||
.await
|
||||
.expect("stream should start");
|
||||
|
||||
assert_eq!(stream.request_id(), Some("req_grok_stream"));
|
||||
|
||||
let mut events = Vec::new();
|
||||
while let Some(event) = stream.next_event().await.expect("event should parse") {
|
||||
events.push(event);
|
||||
}
|
||||
|
||||
assert!(matches!(events[0], StreamEvent::MessageStart(_)));
|
||||
assert!(matches!(
|
||||
events[1],
|
||||
StreamEvent::ContentBlockStart(ContentBlockStartEvent {
|
||||
content_block: OutputContentBlock::Text { .. },
|
||||
..
|
||||
})
|
||||
));
|
||||
assert!(matches!(
|
||||
events[2],
|
||||
StreamEvent::ContentBlockDelta(ContentBlockDeltaEvent {
|
||||
delta: ContentBlockDelta::TextDelta { .. },
|
||||
..
|
||||
})
|
||||
));
|
||||
assert!(matches!(
|
||||
events[3],
|
||||
StreamEvent::ContentBlockStart(ContentBlockStartEvent {
|
||||
index: 1,
|
||||
content_block: OutputContentBlock::ToolUse { .. },
|
||||
})
|
||||
));
|
||||
assert!(matches!(
|
||||
events[4],
|
||||
StreamEvent::ContentBlockDelta(ContentBlockDeltaEvent {
|
||||
index: 1,
|
||||
delta: ContentBlockDelta::InputJsonDelta { .. },
|
||||
})
|
||||
));
|
||||
assert!(matches!(
|
||||
events[5],
|
||||
StreamEvent::ContentBlockStart(ContentBlockStartEvent {
|
||||
index: 2,
|
||||
content_block: OutputContentBlock::ToolUse { .. },
|
||||
})
|
||||
));
|
||||
assert!(matches!(
|
||||
events[6],
|
||||
StreamEvent::ContentBlockDelta(ContentBlockDeltaEvent {
|
||||
index: 2,
|
||||
delta: ContentBlockDelta::InputJsonDelta { .. },
|
||||
})
|
||||
));
|
||||
assert!(matches!(
|
||||
events[7],
|
||||
StreamEvent::ContentBlockStop(ContentBlockStopEvent { index: 1 })
|
||||
));
|
||||
assert!(matches!(
|
||||
events[8],
|
||||
StreamEvent::ContentBlockStop(ContentBlockStopEvent { index: 2 })
|
||||
));
|
||||
assert!(matches!(
|
||||
events[9],
|
||||
StreamEvent::ContentBlockStop(ContentBlockStopEvent { index: 0 })
|
||||
));
|
||||
assert!(matches!(events[10], StreamEvent::MessageDelta(_)));
|
||||
assert!(matches!(events[11], StreamEvent::MessageStop(_)));
|
||||
|
||||
let captured = state.lock().await;
|
||||
let request = captured.first().expect("captured request");
|
||||
assert_eq!(request.path, "/chat/completions");
|
||||
assert!(request.body.contains("\"stream\":true"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn provider_client_dispatches_xai_requests_from_env() {
|
||||
let _lock = env_lock();
|
||||
let _api_key = ScopedEnvVar::set("XAI_API_KEY", "xai-test-key");
|
||||
|
||||
let state = Arc::new(Mutex::new(Vec::<CapturedRequest>::new()));
|
||||
let server = spawn_server(
|
||||
state.clone(),
|
||||
vec![http_response(
|
||||
"200 OK",
|
||||
"application/json",
|
||||
"{\"id\":\"chatcmpl_provider\",\"model\":\"grok-3\",\"choices\":[{\"message\":{\"role\":\"assistant\",\"content\":\"Through provider client\",\"tool_calls\":[]},\"finish_reason\":\"stop\"}],\"usage\":{\"prompt_tokens\":9,\"completion_tokens\":4}}",
|
||||
)],
|
||||
)
|
||||
.await;
|
||||
let _base_url = ScopedEnvVar::set("XAI_BASE_URL", server.base_url());
|
||||
|
||||
let client =
|
||||
ProviderClient::from_model("grok").expect("xAI provider client should be constructed");
|
||||
assert!(matches!(client, ProviderClient::Xai(_)));
|
||||
|
||||
let response = client
|
||||
.send_message(&sample_request(false))
|
||||
.await
|
||||
.expect("provider-dispatched request should succeed");
|
||||
|
||||
assert_eq!(response.total_tokens(), 13);
|
||||
|
||||
let captured = state.lock().await;
|
||||
let request = captured.first().expect("captured request");
|
||||
assert_eq!(request.path, "/chat/completions");
|
||||
assert_eq!(
|
||||
request.headers.get("authorization").map(String::as_str),
|
||||
Some("Bearer xai-test-key")
|
||||
);
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct CapturedRequest {
|
||||
path: String,
|
||||
headers: HashMap<String, String>,
|
||||
body: String,
|
||||
}
|
||||
|
||||
struct TestServer {
|
||||
base_url: String,
|
||||
join_handle: tokio::task::JoinHandle<()>,
|
||||
}
|
||||
|
||||
impl TestServer {
|
||||
fn base_url(&self) -> String {
|
||||
self.base_url.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestServer {
|
||||
fn drop(&mut self) {
|
||||
self.join_handle.abort();
|
||||
}
|
||||
}
|
||||
|
||||
/// Spawns a minimal fake HTTP/1.1 server on an ephemeral local port.
///
/// The server accepts exactly one connection per entry in `responses`; for
/// each connection it parses the request line, headers, and a
/// `content-length`-sized body, records the request into `state`, writes the
/// canned response verbatim, and lets the connection close.
///
/// Panics on any I/O or parse failure — acceptable for test-only code.
async fn spawn_server(
    state: Arc<Mutex<Vec<CapturedRequest>>>,
    responses: Vec<String>,
) -> TestServer {
    // Port 0 asks the OS for a free ephemeral port.
    let listener = TcpListener::bind("127.0.0.1:0")
        .await
        .expect("listener should bind");
    let address = listener.local_addr().expect("listener addr");
    let join_handle = tokio::spawn(async move {
        for response in responses {
            let (mut socket, _) = listener.accept().await.expect("accept");
            let mut buffer = Vec::new();
            let mut header_end = None;
            // Read in chunks until the \r\n\r\n header terminator appears
            // (or the peer closes the connection early).
            loop {
                let mut chunk = [0_u8; 1024];
                let read = socket.read(&mut chunk).await.expect("read request");
                if read == 0 {
                    break;
                }
                buffer.extend_from_slice(&chunk[..read]);
                if let Some(position) = find_header_end(&buffer) {
                    header_end = Some(position);
                    break;
                }
            }

            let header_end = header_end.expect("headers should exist");
            // `header_bytes` excludes the \r\n\r\n terminator; `remaining`
            // starts at it and may already contain body bytes.
            let (header_bytes, remaining) = buffer.split_at(header_end);
            let header_text = String::from_utf8(header_bytes.to_vec()).expect("utf8 headers");
            let mut lines = header_text.split("\r\n");
            // Request line looks like "POST /v1/messages HTTP/1.1" — keep the path.
            let request_line = lines.next().expect("request line");
            let path = request_line
                .split_whitespace()
                .nth(1)
                .expect("path")
                .to_string();
            let mut headers = HashMap::new();
            let mut content_length = 0_usize;
            for line in lines {
                if line.is_empty() {
                    continue;
                }
                let (name, value) = line.split_once(':').expect("header");
                let value = value.trim().to_string();
                if name.eq_ignore_ascii_case("content-length") {
                    content_length = value.parse().expect("content length");
                }
                // Lowercase the name so tests can look headers up
                // case-insensitively.
                headers.insert(name.to_ascii_lowercase(), value);
            }

            // Skip the 4 terminator bytes; the rest is the body prefix that
            // arrived together with the headers.
            let mut body = remaining[4..].to_vec();
            while body.len() < content_length {
                let mut chunk = vec![0_u8; content_length - body.len()];
                let read = socket.read(&mut chunk).await.expect("read body");
                if read == 0 {
                    break;
                }
                body.extend_from_slice(&chunk[..read]);
            }

            state.lock().await.push(CapturedRequest {
                path,
                headers,
                body: String::from_utf8(body).expect("utf8 body"),
            });

            socket
                .write_all(response.as_bytes())
                .await
                .expect("write response");
        }
    });

    TestServer {
        base_url: format!("http://{address}"),
        join_handle,
    }
}
|
||||
|
||||
/// Returns the byte offset of the first `\r\n\r\n` sequence — the blank line
/// separating HTTP headers from the body — or `None` if it is not present.
fn find_header_end(bytes: &[u8]) -> Option<usize> {
    (0..bytes.len().saturating_sub(3)).find(|&start| bytes[start..start + 4] == *b"\r\n\r\n")
}
|
||||
|
||||
fn http_response(status: &str, content_type: &str, body: &str) -> String {
|
||||
http_response_with_headers(status, content_type, body, &[])
|
||||
}
|
||||
|
||||
/// Serializes a complete HTTP/1.1 response string.
///
/// `status` is the status-line tail (e.g. "200 OK"). Entries in `headers`
/// are emitted verbatim between the content-type and content-length lines;
/// content-length is derived from `body` and the connection is always marked
/// as closing.
fn http_response_with_headers(
    status: &str,
    content_type: &str,
    body: &str,
    headers: &[(&str, &str)],
) -> String {
    // Render each extra header as its own "name: value\r\n" line.
    let extra_headers: String = headers
        .iter()
        .map(|(name, value)| format!("{name}: {value}\r\n"))
        .collect();
    let content_length = body.len();
    format!(
        "HTTP/1.1 {status}\r\ncontent-type: {content_type}\r\n{extra_headers}content-length: {content_length}\r\nconnection: close\r\n\r\n{body}"
    )
}
|
||||
|
||||
fn sample_request(stream: bool) -> MessageRequest {
|
||||
MessageRequest {
|
||||
model: "grok-3".to_string(),
|
||||
max_tokens: 64,
|
||||
messages: vec![InputMessage {
|
||||
role: "user".to_string(),
|
||||
content: vec![InputContentBlock::Text {
|
||||
text: "Say hello".to_string(),
|
||||
}],
|
||||
}],
|
||||
system: Some("Use tools when needed".to_string()),
|
||||
tools: Some(vec![ToolDefinition {
|
||||
name: "weather".to_string(),
|
||||
description: Some("Fetches weather".to_string()),
|
||||
input_schema: json!({
|
||||
"type": "object",
|
||||
"properties": {"city": {"type": "string"}},
|
||||
"required": ["city"]
|
||||
}),
|
||||
}]),
|
||||
tool_choice: Some(ToolChoice::Auto),
|
||||
stream,
|
||||
}
|
||||
}
|
||||
|
||||
/// Serializes tests that mutate process-global environment variables.
///
/// A poisoned mutex is recovered rather than propagated: env state is
/// restored by the scoped guards regardless of a panicking test.
fn env_lock() -> std::sync::MutexGuard<'static, ()> {
    static LOCK: OnceLock<StdMutex<()>> = OnceLock::new();
    let lock = LOCK.get_or_init(StdMutex::default);
    match lock.lock() {
        Ok(guard) => guard,
        Err(poisoned) => poisoned.into_inner(),
    }
}
|
||||
|
||||
/// RAII guard that sets an environment variable for its lifetime and restores
/// the previous value (or unsets the variable) on drop.
struct ScopedEnvVar {
    // Variable name; 'static because tests pass literal keys.
    key: &'static str,
    // Value observed before the guard was created; `None` means it was unset.
    previous: Option<OsString>,
}
|
||||
|
||||
impl ScopedEnvVar {
|
||||
fn set(key: &'static str, value: impl AsRef<std::ffi::OsStr>) -> Self {
|
||||
let previous = std::env::var_os(key);
|
||||
std::env::set_var(key, value);
|
||||
Self { key, previous }
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for ScopedEnvVar {
|
||||
fn drop(&mut self) {
|
||||
match &self.previous {
|
||||
Some(value) => std::env::set_var(self.key, value),
|
||||
None => std::env::remove_var(self.key),
|
||||
}
|
||||
}
|
||||
}
|
||||
86
rust/crates/api/tests/provider_client_integration.rs
Normal file
86
rust/crates/api/tests/provider_client_integration.rs
Normal file
@@ -0,0 +1,86 @@
|
||||
use std::ffi::OsString;
|
||||
use std::sync::{Mutex, OnceLock};
|
||||
|
||||
use api::{read_xai_base_url, ApiError, AuthSource, ProviderClient, ProviderKind};
|
||||
|
||||
#[test]
fn provider_client_routes_grok_aliases_through_xai() {
    // Env vars are process-global; serialize with the other env-mutating tests.
    let _lock = env_lock();
    let _api_key_guard = EnvVarGuard::set("XAI_API_KEY", Some("xai-test-key"));

    let client = ProviderClient::from_model("grok-mini").expect("grok alias should resolve");

    // Any "grok-*" alias must be served by the xAI provider.
    assert_eq!(client.provider_kind(), ProviderKind::Xai);
}
|
||||
|
||||
#[test]
fn provider_client_reports_missing_xai_credentials_for_grok_models() {
    let _lock = env_lock();
    // Unset the key so provider resolution has no credentials to find.
    let _cleared_key = EnvVarGuard::set("XAI_API_KEY", None);

    let error = ProviderClient::from_model("grok-3")
        .expect_err("grok requests without XAI_API_KEY should fail fast");

    // The error must name the provider and the exact env vars to export.
    match error {
        ApiError::MissingCredentials { provider, env_vars } => {
            assert_eq!(provider, "xAI");
            assert_eq!(env_vars, &["XAI_API_KEY"]);
        }
        other => panic!("expected missing xAI credentials, got {other:?}"),
    }
}
|
||||
|
||||
#[test]
fn provider_client_uses_explicit_anthropic_auth_without_env_lookup() {
    let _lock = env_lock();
    // Clear both Anthropic env vars so only the explicit auth can succeed.
    let _cleared_api_key = EnvVarGuard::set("ANTHROPIC_API_KEY", None);
    let _cleared_auth_token = EnvVarGuard::set("ANTHROPIC_AUTH_TOKEN", None);

    let explicit_auth = Some(AuthSource::ApiKey("anthropic-test-key".to_string()));
    let client =
        ProviderClient::from_model_with_anthropic_auth("claude-sonnet-4-6", explicit_auth)
            .expect("explicit anthropic auth should avoid env lookup");

    assert_eq!(client.provider_kind(), ProviderKind::Anthropic);
}
|
||||
|
||||
#[test]
fn read_xai_base_url_prefers_env_override() {
    let _lock = env_lock();
    let override_url = "https://example.xai.test/v1";
    let _base_url_guard = EnvVarGuard::set("XAI_BASE_URL", Some(override_url));

    // The env override must win over the built-in default base URL.
    assert_eq!(read_xai_base_url(), override_url);
}
|
||||
|
||||
/// Serializes env-mutating tests in this file.
///
/// A poisoned lock is recovered deliberately: `EnvVarGuard` restores env
/// state even when a test panics, so the protected state is never corrupt.
fn env_lock() -> std::sync::MutexGuard<'static, ()> {
    static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
    LOCK.get_or_init(Mutex::default)
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner)
}
|
||||
|
||||
/// RAII guard that sets or unsets an environment variable and restores the
/// original value on drop.
struct EnvVarGuard {
    // Variable name; 'static because tests pass literal keys.
    key: &'static str,
    // Value observed before the guard was created; `None` means it was unset.
    original: Option<OsString>,
}
|
||||
|
||||
impl EnvVarGuard {
|
||||
fn set(key: &'static str, value: Option<&str>) -> Self {
|
||||
let original = std::env::var_os(key);
|
||||
match value {
|
||||
Some(value) => std::env::set_var(key, value),
|
||||
None => std::env::remove_var(key),
|
||||
}
|
||||
Self { key, original }
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for EnvVarGuard {
|
||||
fn drop(&mut self) {
|
||||
match &self.original {
|
||||
Some(value) => std::env::set_var(self.key, value),
|
||||
None => std::env::remove_var(self.key),
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user