diff --git a/shared/llm/llm.model.ts b/shared/llm/llm.model.ts
index 6efe8c9d..0695f8ba 100644
--- a/shared/llm/llm.model.ts
+++ b/shared/llm/llm.model.ts
@@ -85,6 +85,7 @@ export interface GenerateTextOptions extends CallSettings {
 	id?: string;
 	thinking?: ThinkingLevel;
 	providerOptions?: Record<string, any>;
+	abortSignal?: AbortSignal;
 }
 
 /**
diff --git a/shared/llm/llm.schema.ts b/shared/llm/llm.schema.ts
index bfe59161..ce9ed275 100644
--- a/shared/llm/llm.schema.ts
+++ b/shared/llm/llm.schema.ts
@@ -175,6 +175,7 @@ const GenerateTextOptionsSpecificSchema = Type.Object({
 	id: Type.Optional(Type.String()),
 	thinking: Type.Optional(Type.Union([Type.Literal('none'), Type.Literal('low'), Type.Literal('medium'), Type.Literal('high')])),
 	providerOptions: Type.Optional(Type.Record(Type.String(), Type.Any())),
+	abortSignal: Type.Optional(Type.Any()),
 });
 
 export const GenerateTextOptionsSchema = Type.Intersect([CallSettingsSchema, GenerateTextOptionsSpecificSchema], { $id: 'GenerateTextOptions' });
diff --git a/src/llm/services/ai-llm.ts b/src/llm/services/ai-llm.ts
index 15e45cd5..b256a2f5 100644
--- a/src/llm/services/ai-llm.ts
+++ b/src/llm/services/ai-llm.ts
@@ -246,6 +246,9 @@ export abstract class AiLLM extends BaseLLM {
 			console.log(new Error(`No generateMessage id provided. (${promptPreview})`));
 		}
 
+		const settingsToSave = { ...combinedOpts };
+		settingsToSave.abortSignal = undefined;
+
 		const createLlmCallRequest: CreateLlmRequest = {
 			messages: cloneAndTruncateBuffers(llmMessages),
 			llmId: this.getId(),
@@ -253,7 +256,7 @@
 			// userId: currentUser().id,
 			callStack: callStack(),
 			description,
-			settings: combinedOpts,
+			settings: settingsToSave,
 		};
 
 		const llmCall: LlmCall = await this.saveLlmCallRequest(createLlmCallRequest);
@@ -272,7 +275,7 @@
 			maxRetries: combinedOpts.maxRetries,
 			maxOutputTokens: combinedOpts.maxOutputTokens,
 			providerOptions: combinedOpts.providerOptions,
-			// abortSignal: combinedOpts.abortSignal,
+			abortSignal: (combinedOpts as any)?.abortSignal,
 		};
 		// Messages can be large, and model property with schemas, so just log the reference to the LlmCall its saved in
 		logger.info({ args: { ...args, messages: `LlmCall:${llmCall.id}`, model: this.getId() } }, `Generating text - ${opts?.id}`);
@@ -414,12 +417,15 @@
 			service: this.service,
 		});
 
+		const settingsToSave = { ...combinedOpts };
+		settingsToSave.abortSignal = undefined;
+
 		const createLlmCallRequest: CreateLlmRequest = {
 			messages: llmMessages,
 			llmId: this.getId(),
 			agentId: agentContext()?.agentId,
 			callStack: callStack(),
-			settings: combinedOpts,
+			settings: settingsToSave,
 		};
 
 		const llmCall: LlmCall = await this.saveLlmCallRequest(createLlmCallRequest);
@@ -430,7 +436,7 @@
 		const args: StreamTextArgs = {
 			model: this.aiModel(),
 			messages,
-			// abortSignal: combinedOpts?.abortSignal,
+			abortSignal: combinedOpts?.abortSignal,
 			temperature: combinedOpts?.temperature,
 			// topP: combinedOpts?.topP, // anthropic '`temperature` and `top_p` cannot both be specified for this model. Please use only one.'
 			stopSequences: combinedOpts?.stopSequences,
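
Reviewer note: a minimal caller-side sketch of how the new option would be used. Only the `abortSignal` field of `GenerateTextOptions` comes from this diff; the `llm` instance, `messages` value, and the `generateText(messages, opts)` call shape are illustrative assumptions about the surrounding BaseLLM API, not part of the change.

// Usage sketch (assumed caller API; `llm` and `messages` are placeholders).
// An AbortController's signal travels through GenerateTextOptions to the
// underlying AI SDK generateText/streamText call, so aborting the controller
// cancels the in-flight request.
const controller = new AbortController();
const timer = setTimeout(() => controller.abort(), 30_000); // give up after 30s

try {
	const text = await llm.generateText(messages, {
		id: 'example-call',
		abortSignal: controller.signal,
	});
	console.log(text);
} catch (err) {
	if (controller.signal.aborted) console.warn('generation aborted');
	else throw err;
} finally {
	clearTimeout(timer);
}

Note the design choice repeated in both code paths: the signal is blanked on a copy of the options (`settingsToSave.abortSignal = undefined`) before the LlmCall is persisted, since an AbortSignal is not serialisable; for the same reason the TypeBox schema can only describe the field as `Type.Optional(Type.Any())`.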