diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs
index 56872169d59654..b45bd6270c067b 100644
--- a/crates/copilot/src/copilot_chat.rs
+++ b/crates/copilot/src/copilot_chat.rs
@@ -36,8 +36,8 @@ pub enum Model {
     Gpt3_5Turbo,
     #[serde(alias = "o1", rename = "o1")]
     O1,
-    #[serde(alias = "o1-mini", rename = "o1-mini")]
-    O1Mini,
+    #[serde(alias = "o1-mini", rename = "o3-mini")]
+    O3Mini,
     #[serde(alias = "claude-3-5-sonnet", rename = "claude-3.5-sonnet")]
     Claude3_5Sonnet,
 }
@@ -46,7 +46,7 @@ impl Model {
     pub fn uses_streaming(&self) -> bool {
         match self {
             Self::Gpt4o | Self::Gpt4 | Self::Gpt3_5Turbo | Self::Claude3_5Sonnet => true,
-            Self::O1Mini | Self::O1 => false,
+            Self::O3Mini | Self::O1 => false,
         }
     }

@@ -56,7 +56,7 @@ impl Model {
             "gpt-4" => Ok(Self::Gpt4),
             "gpt-3.5-turbo" => Ok(Self::Gpt3_5Turbo),
             "o1" => Ok(Self::O1),
-            "o1-mini" => Ok(Self::O1Mini),
+            "o3-mini" => Ok(Self::O3Mini),
             "claude-3-5-sonnet" => Ok(Self::Claude3_5Sonnet),
             _ => Err(anyhow!("Invalid model id: {}", id)),
         }
@@ -67,7 +67,7 @@ impl Model {
             Self::Gpt3_5Turbo => "gpt-3.5-turbo",
             Self::Gpt4 => "gpt-4",
             Self::Gpt4o => "gpt-4o",
-            Self::O1Mini => "o1-mini",
+            Self::O3Mini => "o3-mini",
             Self::O1 => "o1",
             Self::Claude3_5Sonnet => "claude-3-5-sonnet",
         }
@@ -78,7 +78,7 @@ impl Model {
             Self::Gpt3_5Turbo => "GPT-3.5",
             Self::Gpt4 => "GPT-4",
             Self::Gpt4o => "GPT-4o",
-            Self::O1Mini => "o1-mini",
+            Self::O3Mini => "o3-mini",
             Self::O1 => "o1",
             Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
         }
@@ -89,7 +89,7 @@ impl Model {
             Self::Gpt4o => 64000,
             Self::Gpt4 => 32768,
             Self::Gpt3_5Turbo => 12288,
-            Self::O1Mini => 20000,
+            Self::O3Mini => 20000,
             Self::O1 => 20000,
             Self::Claude3_5Sonnet => 200_000,
         }
diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs
index 9eb2d678555c56..067b4863e5e86b 100644
--- a/crates/language_models/src/provider/copilot_chat.rs
+++ b/crates/language_models/src/provider/copilot_chat.rs
@@ -179,7 +179,7 @@ impl LanguageModel for CopilotChatLanguageModel {
             CopilotChatModel::Gpt4o => open_ai::Model::FourOmni,
             CopilotChatModel::Gpt4 => open_ai::Model::Four,
             CopilotChatModel::Gpt3_5Turbo => open_ai::Model::ThreePointFiveTurbo,
-            CopilotChatModel::O1 | CopilotChatModel::O1Mini => open_ai::Model::Four,
+            CopilotChatModel::O1 | CopilotChatModel::O3Mini => open_ai::Model::Four,
             CopilotChatModel::Claude3_5Sonnet => unreachable!(),
         };
         count_open_ai_tokens(request, model, cx)
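
For reviewers unsure about the `alias`/`rename` pair on the renamed variant: below is a minimal sketch of the serde behavior this diff relies on. It uses a stripped-down stand-in enum rather than the real `Model` type and assumes `serde` (with the `derive` feature) and `serde_json` are available; it is an illustration, not code from this PR. `rename` sets the string used for both serialization and deserialization, while `alias` only adds an extra accepted spelling when deserializing, so existing configs that still say "o1-mini" keep parsing.

```rust
use serde::{Deserialize, Serialize};

// Stand-in enum mirroring the attribute pattern in the diff (hypothetical, for illustration).
#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum Model {
    #[serde(alias = "o1-mini", rename = "o3-mini")]
    O3Mini,
}

fn main() {
    // Serialization uses the renamed id.
    assert_eq!(serde_json::to_string(&Model::O3Mini).unwrap(), "\"o3-mini\"");

    // Deserialization accepts both the new id and the legacy alias.
    assert_eq!(serde_json::from_str::<Model>("\"o3-mini\"").unwrap(), Model::O3Mini);
    assert_eq!(serde_json::from_str::<Model>("\"o1-mini\"").unwrap(), Model::O3Mini);
}
```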