diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 44569c51ad5..8e2ec109e62 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -28,7 +28,7 @@ body: - **OS**: macOS - **Continue version**: v0.9.4 - **IDE version**: VSCode 1.85.1 - - Model: Claude Sonnet 3.5 + - Model: Claude Sonnet 4.5 - Agent configuration value: | - OS: diff --git a/core/config/workspace/workspaceBlocks.ts b/core/config/workspace/workspaceBlocks.ts index d3dfa8690c9..4f20eda8912 100644 --- a/core/config/workspace/workspaceBlocks.ts +++ b/core/config/workspace/workspaceBlocks.ts @@ -42,9 +42,9 @@ function getContentsForNewBlock(blockType: BlockType): ConfigYaml { configYaml.models = [ { provider: "anthropic", - model: "claude-3-7-sonnet-latest", + model: "claude-sonnet-4-5", apiKey: "${{ secrets.ANTHROPIC_API_KEY }}", - name: "Claude 3.7 Sonnet", + name: "Claude Sonnet 4.5", roles: ["chat", "edit"], }, ]; diff --git a/core/llm/llms/Anthropic.vitest.ts b/core/llm/llms/Anthropic.vitest.ts index 5c623b05e5d..07915a93a49 100644 --- a/core/llm/llms/Anthropic.vitest.ts +++ b/core/llm/llms/Anthropic.vitest.ts @@ -134,7 +134,7 @@ describe("Anthropic", () => { test("streamChat should send a valid request", async () => { const anthropic = new Anthropic({ apiKey: "test-api-key", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiBase: "https://api.anthropic.com/v1/", }); @@ -155,7 +155,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", max_tokens: 8192, stream: true, messages: [ @@ -177,7 +177,7 @@ describe("Anthropic", () => { test("chat should send a valid request", async () => { const anthropic = new Anthropic({ apiKey: "test-api-key", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiBase: "https://api.anthropic.com/v1/", }); @@ -198,7 +198,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", max_tokens: 8192, stream: true, messages: [ @@ -220,7 +220,7 @@ describe("Anthropic", () => { test("streamComplete should send a valid request", async () => { const anthropic = new Anthropic({ apiKey: "test-api-key", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiBase: "https://api.anthropic.com/v1/", }); @@ -238,7 +238,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", max_tokens: 8192, stream: true, messages: [ @@ -260,7 +260,7 @@ describe("Anthropic", () => { test("complete should send a valid request", async () => { const anthropic = new Anthropic({ apiKey: "test-api-key", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiBase: "https://api.anthropic.com/v1/", }); @@ -278,7 +278,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", max_tokens: 8192, stream: true, messages: [ @@ -301,7 +301,7 @@ describe("Anthropic", () => { test("should handle system message", async () => { const anthropic = new Anthropic({ apiKey: "test-api-key", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiBase: "https://api.anthropic.com/v1/", }); @@ -325,7 +325,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", max_tokens: 8192, 
stream: true, messages: [ @@ -347,7 +347,7 @@ describe("Anthropic", () => { test("should handle tool calls", async () => { const anthropic = new Anthropic({ apiKey: "test-api-key", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiBase: "https://api.anthropic.com/v1/", }); @@ -386,7 +386,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", max_tokens: 8192, stream: true, messages: [ @@ -424,7 +424,7 @@ describe("Anthropic", () => { test("should handle custom max tokens", async () => { const anthropic = new Anthropic({ apiKey: "test-api-key", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiBase: "https://api.anthropic.com/v1/", }); @@ -446,7 +446,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", max_tokens: 1000, stream: true, messages: [ @@ -470,7 +470,7 @@ describe("Anthropic", () => { test("should throw error when API key is missing", async () => { const anthropic = new Anthropic({ apiKey: "", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiBase: "https://api.anthropic.com/v1/", }); diff --git a/core/llm/llms/OpenRouter.vitest.ts b/core/llm/llms/OpenRouter.vitest.ts index e6e8afdd216..85ca5dd1c91 100644 --- a/core/llm/llms/OpenRouter.vitest.ts +++ b/core/llm/llms/OpenRouter.vitest.ts @@ -6,13 +6,13 @@ import OpenRouter from "./OpenRouter"; describe("OpenRouter Anthropic Caching", () => { it("should detect Anthropic models correctly", () => { const openRouter = new OpenRouter({ - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiKey: "test-key", }); // Test private method through modifyChatBody const body: ChatCompletionCreateParams = { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", messages: [], }; @@ -22,7 +22,7 @@ describe("OpenRouter Anthropic Caching", () => { it("should add cache_control to user messages when caching is enabled", () => { const openRouter = new OpenRouter({ - model: "anthropic/claude-3.5-sonnet", + model: "anthropic/claude-sonnet-4.5", apiKey: "test-key", cacheBehavior: { cacheConversation: true, @@ -31,7 +31,7 @@ describe("OpenRouter Anthropic Caching", () => { }); const body: ChatCompletionCreateParams = { - model: "anthropic/claude-3.5-sonnet", + model: "anthropic/claude-sonnet-4.5", messages: [ { role: "user", content: "First message" }, { role: "assistant", content: "Response" }, @@ -71,7 +71,7 @@ describe("OpenRouter Anthropic Caching", () => { it("should correctly handle cache_control with system messages present", () => { const openRouter = new OpenRouter({ - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiKey: "test-key", cacheBehavior: { cacheConversation: true, @@ -80,7 +80,7 @@ describe("OpenRouter Anthropic Caching", () => { }); const body: ChatCompletionCreateParams = { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", messages: [ { role: "system", content: "You are a helpful assistant" }, { role: "user", content: "First user message" }, @@ -137,7 +137,7 @@ describe("OpenRouter Anthropic Caching", () => { it("should add cache_control to system message when caching is enabled", () => { const openRouter = new OpenRouter({ - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiKey: "test-key", cacheBehavior: { cacheConversation: false, @@ -146,7 +146,7 @@ describe("OpenRouter Anthropic Caching", () => { }); const body: 
ChatCompletionCreateParams = { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", messages: [ { role: "system", content: "You are a helpful assistant" }, { role: "user", content: "Hello" }, @@ -176,7 +176,7 @@ describe("OpenRouter Anthropic Caching", () => { it("should handle array content correctly", () => { const openRouter = new OpenRouter({ - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", apiKey: "test-key", cacheBehavior: { cacheConversation: true, @@ -185,7 +185,7 @@ describe("OpenRouter Anthropic Caching", () => { }); const body: ChatCompletionCreateParams = { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-5", messages: [ { role: "user", diff --git a/core/llm/toolSupport.test.ts b/core/llm/toolSupport.test.ts index 2f0f566e7e3..7981dac5f71 100644 --- a/core/llm/toolSupport.test.ts +++ b/core/llm/toolSupport.test.ts @@ -179,17 +179,11 @@ describe("PROVIDER_TOOL_SUPPORT", () => { }); it("should return undefined for Claude Haiku and Opus models", () => { - expect(supportsFn("anthropic.claude-3-5-haiku-20240307-v1:0")).toBe( - false, - ); - expect(supportsFn("anthropic.claude-3.5-haiku-20240620-v1:0")).toBe( - false, - ); - expect(supportsFn("anthropic.claude-3-7-haiku-20240620-v1:0")).toBe( - false, - ); - expect(supportsFn("anthropic.claude-3-5-opus-20240620-v1:0")).toBe(false); - expect(supportsFn("anthropic.claude-3.7-opus-20240620-v1:0")).toBe(false); + expect(supportsFn("anthropic.claude-3-5-haiku-20240307-v1:0")).toBe(true); + expect(supportsFn("anthropic.claude-3.5-haiku-20240620-v1:0")).toBe(true); + expect(supportsFn("anthropic.claude-3-7-haiku-20240620-v1:0")).toBe(true); + expect(supportsFn("anthropic.claude-3-5-opus-20240620-v1:0")).toBe(true); + expect(supportsFn("anthropic.claude-3.7-opus-20240620-v1:0")).toBe(true); }); it("should return undefined for other unsupported models", () => { diff --git a/core/llm/toolSupport.ts b/core/llm/toolSupport.ts index 01ad207983b..3466a50eeb0 100644 --- a/core/llm/toolSupport.ts +++ b/core/llm/toolSupport.ts @@ -14,39 +14,17 @@ export const PROVIDER_TOOL_SUPPORT: Record boolean> = } } catch (e) {} - return [ - "claude-3-5", - "claude-3.5", - "claude-3-7", - "claude-3.7", - "claude-sonnet-4", - "claude-4-sonnet", - "gpt-4", - "o3", - "gemini", - "claude-opus-4", - "gemma", - ].some((part) => model.toLowerCase().startsWith(part)); + return ["claude", "gpt-4", "o3", "gemini", "gemma"].some((part) => + model.toLowerCase().startsWith(part), + ); }, anthropic: (model) => { - const lower = model.toLowerCase(); - if ( - [ - "claude-3-5", - "claude-3.5", - "claude-3-7", - "claude-3.7", - "claude-sonnet-4", - "claude-4-sonnet", - "claude-opus-4", - ].some((part) => lower.startsWith(part)) - ) { - return true; + if (model.includes("claude-2") || model.includes("claude-instant")) { + return false; } - if (lower.includes("claude") && lower.includes("4-5")) { + if (["claude"].some((part) => model.toLowerCase().startsWith(part))) { return true; } - return false; }, azure: (model) => { @@ -125,15 +103,12 @@ export const PROVIDER_TOOL_SUPPORT: Record boolean> = ); }, bedrock: (model) => { + if (model.includes("claude-2") || model.includes("claude-instant")) { + return false; + } if ( [ - "claude-3-5-sonnet", - "claude-3.5-sonnet", - "claude-3-7-sonnet", - "claude-3.7-sonnet", - "claude-sonnet-4", - "claude-4-sonnet", - "claude-opus-4", + "claude", "nova-lite", "nova-pro", "nova-micro", @@ -280,8 +255,7 @@ export const PROVIDER_TOOL_SUPPORT: Record boolean> = "openai/o3", "openai/o4", "openai/gpt-oss", - 
"anthropic/claude-3", - "anthropic/claude-4", + "anthropic/claude", "microsoft/phi-3", "google/gemini-flash-1.5", "google/gemini-2", @@ -391,7 +365,8 @@ export function isRecommendedAgentModel(modelName: string): boolean { const recs: RegExp[][] = [ [/o[134]/], [/deepseek/, /r1|reasoner/], - [/gemini/, /2\.5|3/, /pro/], + [/gemini/, /2\.5/, /pro/], + [/gemini/, /3-pro/], [/gpt/, /-5|5\.1/], [/claude/, /sonnet/, /3\.7|3-7|-4/], [/claude/, /opus/, /-4/], diff --git a/docs/customize/model-providers/top-level/anthropic.mdx b/docs/customize/model-providers/top-level/anthropic.mdx index 7231e93e935..fc8ec83f802 100644 --- a/docs/customize/model-providers/top-level/anthropic.mdx +++ b/docs/customize/model-providers/top-level/anthropic.mdx @@ -41,7 +41,7 @@ sidebarTitle: "Anthropic" - **Check out a more advanced configuration [here](https://hub.continue.dev/anthropic/claude-4-sonnet?view=config)** + **Check out a more advanced configuration [here](https://hub.continue.dev/anthropic/claude-sonnet-4-5?view=config)** ## How to Enable Prompt Caching with Claude diff --git a/docs/customize/models.mdx b/docs/customize/models.mdx index a21827a300b..85bcf25c42b 100644 --- a/docs/customize/models.mdx +++ b/docs/customize/models.mdx @@ -30,10 +30,10 @@ Read more about [model roles](/customize/model-roles), [model capabilities](/cus # Frontier Models -[Claude 4 Sonnet](https://hub.continue.dev/anthropic/claude-4-sonnet) from Anthropic +[Claude Sonnet 4.5](https://hub.continue.dev/anthropic/claude-sonnet-4-5) from Anthropic 1. Get your API key from [Anthropic](https://console.anthropic.com/) -2. Add [Claude 4 Sonnet](https://hub.continue.dev/anthropic/claude-4-sonnet) to a config on Continue Mission Control +2. Add[Claude Sonnet 4.5](https://hub.continue.dev/anthropic/claude-sonnet-4-5) to a config on Continue Mission Control 3. Add `ANTHROPIC_API_KEY` as a [User Secret](https://docs.continue.dev/mission-control/secrets/secret-types#user-secrets) on Continue Mission Control [here](https://hub.continue.dev/settings/secrets) 4. 
Click `Reload config` in the config selector in the Continue IDE extension diff --git a/extensions/intellij/src/main/kotlin/com/github/continuedev/continueintellijextension/constants/ServerConstants.kt b/extensions/intellij/src/main/kotlin/com/github/continuedev/continueintellijextension/constants/ServerConstants.kt index 5135dc87ae9..725cc948ae2 100644 --- a/extensions/intellij/src/main/kotlin/com/github/continuedev/continueintellijextension/constants/ServerConstants.kt +++ b/extensions/intellij/src/main/kotlin/com/github/continuedev/continueintellijextension/constants/ServerConstants.kt @@ -14,10 +14,10 @@ const val DEFAULT_CONFIG = { "models": [ { - "model": "claude-3-5-sonnet-latest", + "model": "claude-sonnet-4-5", "provider": "anthropic", "apiKey": "", - "title": "Claude 3.5 Sonnet" + "title": "Claude Sonnet 4.5" } ], "tabAutocompleteModel": { diff --git a/extensions/vscode/config_schema.json b/extensions/vscode/config_schema.json index 111db4046da..c7ea072f43a 100644 --- a/extensions/vscode/config_schema.json +++ b/extensions/vscode/config_schema.json @@ -850,14 +850,10 @@ "anyOf": [ { "enum": [ - "claude-2", - "claude-instant-1", - "claude-3-5-sonnet-latest", - "claude-3-7-sonnet-20250219", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1" + "claude-sonnet-4", + "claude-sonnet-4-5", + "claude-opus-4-1", + "claude-haiku-4-5" ] }, { @@ -1610,14 +1606,10 @@ "codeup-13b", "deepseek-7b", "deepseek-33b", - "claude-2", - "claude-instant-1", - "claude-3-5-sonnet-latest", - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", + "claude-sonnet-4", + "claude-sonnet-4-5", + "claude-opus-4-1", + "claude-haiku-4-5", "command-r", "command-r-plus", "chat-bison-001", diff --git a/extensions/vscode/e2e/test-continue/config.json b/extensions/vscode/e2e/test-continue/config.json index fbe78fa0aac..814552d9115 100644 --- a/extensions/vscode/e2e/test-continue/config.json +++ b/extensions/vscode/e2e/test-continue/config.json @@ -16,7 +16,7 @@ { "provider": "mock", "title": "TOOL MOCK LLM", - "model": "claude-3-5-sonnet-latest", + "model": "claude-sonnet-4-5", "capabilities": { "tools": true }, @@ -50,7 +50,7 @@ { "provider": "mock", "title": "SYSTEM MESSAGE MOCK LLM", - "model": "claude-3-5-sonnet-latest", + "model": "claude-sonnet-4-5", "requestOptions": { "extraBodyProperties": { "chatStream": [["REPEAT_SYSTEM_MSG"]] @@ -60,7 +60,7 @@ { "provider": "mock", "title": "LAST MESSAGE MOCK LLM", - "model": "claude-3-5-sonnet-latest", + "model": "claude-sonnet-4-5", "requestOptions": { "extraBodyProperties": { "chatStream": [["REPEAT_LAST_MSG"]] diff --git a/gui/src/pages/AddNewModel/configs/models.ts b/gui/src/pages/AddNewModel/configs/models.ts index 7de58309d53..59edce01dfa 100644 --- a/gui/src/pages/AddNewModel/configs/models.ts +++ b/gui/src/pages/AddNewModel/configs/models.ts @@ -1213,17 +1213,45 @@ export const models: { [key: string]: ModelPackage } = { icon: "openai.png", isOpenSource: false, }, + claude45Sonnet: { + title: "Claude Sonnet 4.5", + description: + "Anthropic's smartest model for complex agents and coding with exceptional performance in reasoning and multilingual tasks.", + params: { + model: "claude-sonnet-4-5-20250929", + contextLength: 200_000, + title: "Claude Sonnet 4.5", + apiKey: "", + }, + providerOptions: ["anthropic", "replicate"], + icon: "anthropic.png", + isOpenSource: false, + }, + claude45Haiku: { + title: "Claude Haiku 4.5", + description: + 
"Anthropic's fastest model with near-frontier intelligence, ideal for quick and accurate responses.", + params: { + model: "claude-haiku-4-5-20251001", + contextLength: 200_000, + title: "Claude Haiku 4.5", + apiKey: "", + }, + providerOptions: ["anthropic"], + icon: "anthropic.png", + isOpenSource: false, + }, claude4Sonnet: { - title: "Claude 4 Sonnet", + title: "Claude Sonnet 4", description: - "The most intelligent model in the Claude 4 series. Costing lesser than Claude 4 Opus.", + "The most intelligent model in the Claude 4 series. Costing lesser than Claude Opus 4.", params: { model: "claude-4-sonnet-latest", contextLength: 200_000, title: "Claude 4 Sonnet", apiKey: "", }, - providerOptions: ["anthropic", "replicate"], + providerOptions: ["anthropic"], icon: "anthropic.png", isOpenSource: false, }, @@ -1240,14 +1268,13 @@ export const models: { [key: string]: ModelPackage } = { icon: "anthropic.png", isOpenSource: false, }, - claude35Haiku: { - title: "Claude 3.5 Haiku", - description: - "The fastest model in the Claude 3.5 series: a compact model for near-instant responsiveness", + claude41Opus: { + title: "Claude Opus 4.1", + description: "The most capable model in the Claude 4 series", params: { - model: "claude-3-5-haiku-latest", + model: "claude-opus-4-1-20250805", contextLength: 200_000, - title: "Claude 3.5 Haiku", + title: "Claude Opus 4.1", apiKey: "", }, providerOptions: ["anthropic"], @@ -1643,20 +1670,7 @@ export const models: { [key: string]: ModelPackage } = { icon: "openai.png", isOpenSource: false, }, - asksageclaude35Sonnet: { - title: "Claude 3.5 Sonnet", - description: - "Anthropic's most intelligent model, but much less expensive than Claude 3 Opus", - params: { - model: "claude-35-sonnet", - contextLength: 200_000, - title: "Claude 3.5 Sonnet", - apiKey: "", - }, - providerOptions: ["askSage"], - icon: "anthropic.png", - isOpenSource: false, - }, + asksageclaude37sonnet: { title: "Claude 3.7 Sonnet", description: "Anthropic's 3.7 model.", @@ -1683,19 +1697,7 @@ export const models: { [key: string]: ModelPackage } = { icon: "anthropic.png", isOpenSource: false, }, - asksageclaude35gov: { - title: "Claude 3.5 Sonnet gov*", - description: "Anthropic's 3.5 Sonnet model.", - params: { - model: "aws-bedrock-claude-35-sonnet-gov", - contextLength: 200_000, - title: "Claude 3.5 Sonnet gov*", - apiKey: "", - }, - providerOptions: ["askSage"], - icon: "anthropic.png", - isOpenSource: false, - }, + asksageclaude4s: { title: "Claude 4 Sonnet", description: "Anthropic's Claude 4 Sonnet", @@ -2416,20 +2418,61 @@ export const models: { [key: string]: ModelPackage } = { icon: "cometapi.png", isOpenSource: false, }, - cometapiClaude35HaikuLatest: { - title: "Claude 3.5 Haiku Latest", + cometapiClaude45Sonnet: { + title: "Claude 4.5 Sonnet Latest", description: - "Claude 3.5 Haiku Latest via CometAPI - fast and efficient model from Anthropic.", + "Claude 4.5 Sonnet Latest via CometAPI - Anthropic's smartest model for complex agents and coding.", params: { - model: "claude-3-5-haiku-latest", + model: "claude-sonnet-4-5-20250929", contextLength: 200_000, - title: "Claude 3.5 Haiku Latest", + title: "Claude 4.5 Sonnet Latest", apiKey: "", }, providerOptions: ["cometapi"], icon: "cometapi.png", isOpenSource: false, }, + cometapiClaude45Haiku: { + title: "Claude 4.5 Haiku Latest", + description: + "Claude 4.5 Haiku Latest via CometAPI - Anthropic's fastest model with near-frontier intelligence.", + params: { + model: "claude-haiku-4-5-20251001", + contextLength: 200_000, + title: 
"Claude 4.5 Haiku Latest", + apiKey: "", + }, + providerOptions: ["cometapi"], + icon: "cometapi.png", + isOpenSource: false, + }, + asksageclaude35Sonnet: { + title: "Claude 3.5 Sonnet", + description: + "Anthropic's most intelligent model, but much less expensive than Claude 3 Opus", + params: { + model: "claude-35-sonnet", + contextLength: 200_000, + title: "Claude 3.5 Sonnet", + apiKey: "", + }, + providerOptions: ["askSage"], + icon: "anthropic.png", + isOpenSource: false, + }, + asksageclaude35gov: { + title: "Claude 3.5 Sonnet gov*", + description: "Anthropic's 3.5 Sonnet model.", + params: { + model: "aws-bedrock-claude-35-sonnet-gov", + contextLength: 200_000, + title: "Claude 3.5 Sonnet gov*", + apiKey: "", + }, + providerOptions: ["askSage"], + icon: "anthropic.png", + isOpenSource: false, + }, // Gemini series models via CometAPI cometapiGemini25Pro: { diff --git a/gui/src/pages/AddNewModel/configs/providers.ts b/gui/src/pages/AddNewModel/configs/providers.ts index 8af1043473d..085fb95228b 100644 --- a/gui/src/pages/AddNewModel/configs/providers.ts +++ b/gui/src/pages/AddNewModel/configs/providers.ts @@ -77,12 +77,13 @@ export const providers: Partial> = { models.cometapiO4Mini, models.cometapiO3Pro, // Anthropic Claude family + models.cometapiClaude45Sonnet, + models.cometapiClaude45Haiku, models.cometapiClaudeOpus41, models.cometapiClaudeOpus41Thinking, models.cometapiClaudeSonnet4, models.cometapiClaudeSonnet4Thinking, models.cometapiClaude37SonnetLatest, - models.cometapiClaude35HaikuLatest, // Google Gemini family models.cometapiGemini25Pro, models.cometapiGemini25Flash, @@ -169,9 +170,11 @@ export const providers: Partial> = { }, ], packages: [ - models.claude4Sonnet, models.claude4_5Opus, - models.claude35Haiku, + models.claude45Sonnet, + models.claude45Haiku, + models.claude41Opus, + models.claude4Sonnet, ], apiKeyUrl: "https://console.anthropic.com/account/keys", }, diff --git a/packages/config-yaml/src/markdown/agentFiles.test.ts b/packages/config-yaml/src/markdown/agentFiles.test.ts index 20763ff0e49..2937db476d5 100644 --- a/packages/config-yaml/src/markdown/agentFiles.test.ts +++ b/packages/config-yaml/src/markdown/agentFiles.test.ts @@ -185,7 +185,7 @@ describe("serializeAgentFile", () => { const agentFile: AgentFile = { name: "Test Agent File", description: "A test agent file", - model: "anthropic/claude-3-sonnet", + model: "anthropic/claude-sonnet-4-5", tools: "tool1, tool2", rules: "rule1, rule2", prompt: "This is the test prompt", diff --git a/packages/config-yaml/src/schemas/commonSlugs.ts b/packages/config-yaml/src/schemas/commonSlugs.ts index 3ba10bbd5f2..c0790c44805 100644 --- a/packages/config-yaml/src/schemas/commonSlugs.ts +++ b/packages/config-yaml/src/schemas/commonSlugs.ts @@ -1,19 +1,20 @@ export const commonModelSlugs = [ - "anthropic/claude-3-7-sonnet", + "anthropic/claude-sonnet-4", "togetherai/llama-4-maverick-instruct-17bx128e", "google/gemini-2.5-pro", "mistral/codestral", "voyageai/voyage-code-3", + "anthropic/claude-sonnet-4-5", "relace/instant-apply", "xai/grok-2", "openai/gpt-4o", "togetherai/llama-4-scout-instruct-17bx16e", - "anthropic/claude-3-5-sonnet", + "anthropic/claude-haiku-4-5", "google/gemini-2.0-flash", "voyageai/rerank-2", + "anthropic/claude-opus-4-1", "ollama/deepseek-r1", "morphllm/morph-v0", - "anthropic/claude-3-5-haiku", "lmstudio/deepseek-r1", "openai/o3-mini", "voyageai/voyage-code-2", diff --git a/packages/llm-info/src/providers/anthropic.ts b/packages/llm-info/src/providers/anthropic.ts index 176cd372748..a84709f7b7e 
100644 --- a/packages/llm-info/src/providers/anthropic.ts +++ b/packages/llm-info/src/providers/anthropic.ts @@ -4,13 +4,43 @@ export const Anthropic: ModelProvider = { id: "anthropic", displayName: "Anthropic", models: [ + { + model: "claude-sonnet-4-5-20250929", + displayName: "Claude 4.5 Sonnet", + contextLength: 200000, + maxCompletionTokens: 64000, + description: + "Anthropic's smartest model for complex agents and coding with exceptional performance in reasoning and multilingual tasks.", + regex: /claude-(?:4[.-]5-sonnet|sonnet-4[.-]5).*/i, + recommendedFor: ["chat"], + }, + { + model: "claude-haiku-4-5-20251001", + displayName: "Claude 4.5 Haiku", + contextLength: 200000, + maxCompletionTokens: 64000, + description: + "Anthropic's fastest model with near-frontier intelligence, ideal for quick and accurate responses.", + regex: /claude-(?:4[.-]5-haiku|haiku-4[.-]5).*/i, + recommendedFor: ["chat"], + }, + { + model: "claude-opus-4-1-20250805", + displayName: "Claude 4.1 Opus", + contextLength: 200000, + maxCompletionTokens: 32000, + description: + "Exceptional model for specialized reasoning tasks with advanced agentic capabilities and superior coding performance.", + regex: /claude-opus-4[.-]1.*/i, + recommendedFor: ["chat"], + }, { model: "claude-sonnet-4-20250514", displayName: "Claude 4 Sonnet", contextLength: 200000, maxCompletionTokens: 8192, description: - "Most intelligent model with the highest level of intelligence and capability.", + "Previous generation model with strong coding and reasoning capabilities, now superseded by Claude 4.5 Sonnet.", // Sometimes written as claude-4-sonnet, other times as claude-sonnet-4 regex: /claude-(?:4-sonnet|sonnet-4).*/i, recommendedFor: ["chat"], @@ -40,29 +70,30 @@ export const Anthropic: ModelProvider = { displayName: "Claude 4 Opus", contextLength: 200000, maxCompletionTokens: 8192, - description: "Previous iteration on Opus", + description: + "Previous generation model with high intelligence, now superseded by Claude 4.1 Opus.", regex: /claude-(?:4-opus|opus-4).*/i, recommendedFor: ["chat"], }, { - model: "claude-3-5-sonnet-latest", - displayName: "Claude 3.5 Sonnet", + model: "claude-3-7-sonnet-latest", + displayName: "Claude 3.7 Sonnet", contextLength: 200000, - maxCompletionTokens: 8192, + maxCompletionTokens: 128000, description: - "Most intelligent model with the highest level of intelligence and capability.", - regex: /claude-3[.-]5-sonnet.*/i, + "First hybrid reasoning model with extended thinking capabilities, excellent for coding and front-end development.", + regex: /claude-3[.-]7-sonnet.*/i, recommendedFor: ["chat"], }, { - model: "claude-3-7-sonnet-latest", - displayName: "Claude 3.7 Sonnet", + model: "claude-3-5-sonnet-latest", + displayName: "Claude 3.5 Sonnet", contextLength: 200000, maxCompletionTokens: 8192, description: - "Most intelligent model with the highest level of intelligence and capability.", - regex: /claude-3[.-]7-sonnet.*/i, + "Previous flagship model with strong performance across diverse tasks, now superseded by Claude 4.5.", + regex: /claude-3[.-]5-sonnet.*/i, recommendedFor: ["chat"], }, { @@ -116,7 +147,7 @@ export const Anthropic: ModelProvider = { contextLength: 100000, maxCompletionTokens: 4096, description: - "Our cheapest small and fast model, a predecessor of Claude Haiku.", + "Anthropic's cheapest small and fast model, a predecessor of Claude Haiku.", regex: /claude-instant-1\.2/i, }, ], diff --git a/packages/llm-info/src/providers/cometapi.ts b/packages/llm-info/src/providers/cometapi.ts 
index bb11aea616d..3efe24dc671 100644 --- a/packages/llm-info/src/providers/cometapi.ts +++ b/packages/llm-info/src/providers/cometapi.ts @@ -79,6 +79,24 @@ export const CometAPI: ModelProvider = { }, // Claude Series + { + model: "claude-sonnet-4-5", + displayName: "Claude 4.5 Sonnet", + contextLength: 200000, + maxCompletionTokens: 64000, + description: + "Anthropic's smartest model for complex agents and coding with exceptional performance in reasoning and multilingual tasks.", + recommendedFor: ["chat"], + }, + { + model: "claude-haiku-4-5-20251001", + displayName: "Claude 4.5 Haiku (2025-10-01)", + contextLength: 200000, + maxCompletionTokens: 64000, + description: + "Anthropic's fastest model with near-frontier intelligence, ideal for quick and accurate responses.", + recommendedFor: ["chat"], + }, { model: "claude-opus-4-1-20250805", displayName: "Claude Opus 4.1 (2025-08-05)", diff --git a/packages/openai-adapters/src/apis/OpenRouter.test.ts b/packages/openai-adapters/src/apis/OpenRouter.test.ts index 6b7175446b8..e62e2123f17 100644 --- a/packages/openai-adapters/src/apis/OpenRouter.test.ts +++ b/packages/openai-adapters/src/apis/OpenRouter.test.ts @@ -14,7 +14,7 @@ describe("OpenRouterApi Anthropic caching", () => { const api = new OpenRouterApi(baseConfig); const body: ChatCompletionCreateParams = { - model: "anthropic/claude-3.5-sonnet", + model: "anthropic/claude-sonnet-4-5", messages: [ { role: "user", content: "First" }, { role: "assistant", content: "Resp" }, @@ -161,7 +161,7 @@ describe("OpenRouterApi Anthropic caching", () => { describe("applyAnthropicCachingToOpenRouterBody", () => { it("mutates OpenAI chat body with system and tool caching", () => { const body: ChatCompletionCreateParams = { - model: "anthropic/claude-3.5-sonnet", + model: "anthropic/claude-sonnet-4-5", messages: [ { role: "system", content: "You are helpful" }, { role: "user", content: "Alpha" }, @@ -241,7 +241,7 @@ describe("OpenRouterApi Anthropic caching", () => { it("leaves system untouched when strategy is none while caching users", () => { const body: ChatCompletionCreateParams = { - model: "anthropic/claude-3.5-sonnet", + model: "anthropic/claude-sonnet-4-5", messages: [ { role: "system", content: "Stay focused" }, { role: "user", content: "Question" }, @@ -276,7 +276,7 @@ describe("OpenRouterApi Anthropic caching", () => { it("adds cache_control only to final text segment of user arrays", () => { const body: ChatCompletionCreateParams = { - model: "anthropic/claude-3.5-sonnet", + model: "anthropic/claude-sonnet-4-5", messages: [ { role: "user", diff --git a/packages/openai-adapters/src/test/anthropic-adapter.vitest.ts b/packages/openai-adapters/src/test/anthropic-adapter.vitest.ts index 859bcab02ad..dba75137cd5 100644 --- a/packages/openai-adapters/src/test/anthropic-adapter.vitest.ts +++ b/packages/openai-adapters/src/test/anthropic-adapter.vitest.ts @@ -35,7 +35,7 @@ describe("Anthropic Adapter Tests", () => { methodToTest: "chatCompletionNonStream", params: [ { - model: "claude-3-5-sonnet-20241022", + model: "claude-sonnet-4-5", messages: [{ role: "user", content: "hello" }], }, new AbortController().signal, @@ -62,7 +62,7 @@ describe("Anthropic Adapter Tests", () => { }, ], system: undefined, - model: "claude-3-5-sonnet-20241022", + model: "claude-sonnet-4-5", max_tokens: 32000, stream: undefined, }, @@ -77,7 +77,7 @@ describe("Anthropic Adapter Tests", () => { text: "Hello! 
How can I help you today?", }, ], - model: "claude-3-5-sonnet-20241022", + model: "claude-sonnet-4-5", stop_reason: "end_turn", stop_sequence: null, usage: { @@ -98,7 +98,7 @@ describe("Anthropic Adapter Tests", () => { methodToTest: "chatCompletionStream", params: [ { - model: "claude-3-5-sonnet-20241022", + model: "claude-sonnet-4-5", messages: [{ role: "user", content: "hello" }], stream: true, }, @@ -126,7 +126,7 @@ describe("Anthropic Adapter Tests", () => { }, ], system: undefined, - model: "claude-3-5-sonnet-20241022", + model: "claude-sonnet-4-5", max_tokens: 32000, stream: true, }, @@ -160,7 +160,7 @@ describe("Anthropic Adapter Tests", () => { methodToTest: "chatCompletionStream", params: [ { - model: "claude-3-5-sonnet-20241022", + model: "claude-sonnet-4-5", messages: [ { role: "system", content: "You are a helpful assistant." }, { role: "user", content: "hello" }, @@ -197,7 +197,7 @@ describe("Anthropic Adapter Tests", () => { cache_control: { type: "ephemeral" }, }, ], - model: "claude-3-5-sonnet-20241022", + model: "claude-sonnet-4-5", max_tokens: 32000, stream: true, }, diff --git a/packages/openai-adapters/src/test/main.test.ts b/packages/openai-adapters/src/test/main.test.ts index 3b7b96193e6..8a10c8b727f 100644 --- a/packages/openai-adapters/src/test/main.test.ts +++ b/packages/openai-adapters/src/test/main.test.ts @@ -70,7 +70,7 @@ const TESTS: Omit[] = [ }, { provider: "anthropic", - model: "claude-3-5-haiku-latest", + model: "claude-haiku-4-5", apiKey: process.env.ANTHROPIC_API_KEY!, roles: ["chat"], options: {