From 5e3651d540f03b448bd8bba2e6f51f1c4bbc50d9 Mon Sep 17 00:00:00 2001 From: Ryan Kruse Date: Wed, 25 Feb 2026 13:28:36 -0600 Subject: [PATCH] anthropic: handle top-level system param; bump version Add support for a top-level `system` parameter for the Anthropic provider: include `system?: string` in AnthropicCreateArgs, convert a top-level `system` into a system message when building LLMRequest, and extract a system message back to the top-level `system` when converting a request to args. Filter out system messages from messagesToAnthropicInput and ensure extractParams ignores the `system` field. Also bump package version to 0.1.5-beta in package.json and package-lock.json. --- ts-sdk/package-lock.json | 4 ++-- ts-sdk/package.json | 2 +- ts-sdk/src/providers/anthropic/common.ts | 11 ++++++---- ts-sdk/src/providers/anthropic/proxy.ts | 26 ++++++++++++++++++++---- ts-sdk/src/providers/anthropic/types.ts | 1 + 5 files changed, 33 insertions(+), 11 deletions(-) diff --git a/ts-sdk/package-lock.json b/ts-sdk/package-lock.json index e8079c9..f4b6bb4 100644 --- a/ts-sdk/package-lock.json +++ b/ts-sdk/package-lock.json @@ -1,12 +1,12 @@ { "name": "@memorilabs/axon", - "version": "0.1.4-beta", + "version": "0.1.5-beta", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@memorilabs/axon", - "version": "0.1.4-beta", + "version": "0.1.5-beta", "license": "Apache-2.0", "devDependencies": { "@anthropic-ai/sdk": "*", diff --git a/ts-sdk/package.json b/ts-sdk/package.json index 30f7b87..42ef9af 100644 --- a/ts-sdk/package.json +++ b/ts-sdk/package.json @@ -1,6 +1,6 @@ { "name": "@memorilabs/axon", - "version": "0.1.4-beta", + "version": "0.1.5-beta", "description": "TypeScript SDK for Memori's cloud memory service", "type": "module", "main": "./dist/index.js", diff --git a/ts-sdk/src/providers/anthropic/common.ts b/ts-sdk/src/providers/anthropic/common.ts index 394a6f7..560039a 100644 --- a/ts-sdk/src/providers/anthropic/common.ts +++ 
b/ts-sdk/src/providers/anthropic/common.ts @@ -21,10 +21,13 @@ export function anthropicInputToMessages(input: unknown): Message[] { } export function messagesToAnthropicInput(request: LLMRequest): AnthropicMessage[] { - return request.messages.map((m) => ({ - role: m.role, - content: m.content, - })); + // Filter out system messages, they'll be handled separately + return request.messages + .filter((m) => m.role !== 'system') + .map((m) => ({ + role: m.role, + content: m.content, + })); } export function contentFromAnthropic(response: unknown): string { diff --git a/ts-sdk/src/providers/anthropic/proxy.ts b/ts-sdk/src/providers/anthropic/proxy.ts index e0165dd..92e2ec4 100644 --- a/ts-sdk/src/providers/anthropic/proxy.ts +++ b/ts-sdk/src/providers/anthropic/proxy.ts @@ -15,13 +15,20 @@ import { PROVIDERS } from '../../utils/constants.js'; function extractParams(args: AnthropicCreateArgs): Record<string, unknown> { // Separate model and messages from extra provider-specific parameters - const { model: _model, messages: _messages, ...params } = args; + const { model: _model, messages: _messages, system: _system, ...params } = args; return params; } function argsToRequest(args: AnthropicCreateArgs): LLMRequest { + const messages = anthropicInputToMessages(args.messages); + + // If there's a system parameter at the top level, convert it to a system message + if (args.system) { + messages.unshift({ role: 'system', content: args.system }); + } + return { - messages: anthropicInputToMessages(args.messages), + messages, model: args.model, params: extractParams(args), }; @@ -29,11 +36,22 @@ function argsToRequest(args: AnthropicCreateArgs): LLMRequest { function requestToArgs(request: LLMRequest): AnthropicCreateArgs { if (!request.model) throw new Error('No model provided.'); - return { + + // Extract system message if present + const systemMessage = request.messages.find((m) => m.role === 'system'); + + const args: AnthropicCreateArgs = { model: request.model, messages:
messagesToAnthropicInput(request), ...(request.params ?? {}), - } as AnthropicCreateArgs; + }; + + // Add system parameter at top level if system message exists + if (systemMessage) { + args.system = systemMessage.content; + } + + return args; } function rawToCanonical(raw: unknown): LLMResponse { diff --git a/ts-sdk/src/providers/anthropic/types.ts b/ts-sdk/src/providers/anthropic/types.ts index db6481a..f426d70 100644 --- a/ts-sdk/src/providers/anthropic/types.ts +++ b/ts-sdk/src/providers/anthropic/types.ts @@ -1,6 +1,7 @@ export interface AnthropicCreateArgs { model: string; messages: AnthropicMessage[]; + system?: string; stream?: boolean; max_tokens?: number; [key: string]: unknown;