shared/llm/llm.model.ts (1 addition, 0 deletions)

@@ -85,6 +85,7 @@ export interface GenerateTextOptions extends CallSettings {
   id?: string;
   thinking?: ThinkingLevel;
   providerOptions?: Record<string, any>;
+  abortSignal?: AbortSignal;
 }
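For context: this adds an optional, standard AbortSignal to GenerateTextOptions so callers can cancel an in-flight generation. A minimal sketch of a call site, assuming a generateText(prompt, opts) method that accepts these options (the method name, signature, and `llm` instance are assumptions for illustration, not part of this diff):

```ts
const controller = new AbortController();

// Hypothetical policy: give up if the model hasn't answered within 30 seconds.
const timeout = setTimeout(() => controller.abort(), 30_000);

try {
  // `llm` and generateText(prompt, opts) are assumed shapes for illustration.
  const text = await llm.generateText('Summarise the report', {
    id: 'report-summary',
    abortSignal: controller.signal,
  });
  console.log(text);
} finally {
  clearTimeout(timeout);
}
```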
shared/llm/llm.schema.ts (1 addition, 0 deletions)

@@ -175,6 +175,7 @@ const GenerateTextOptionsSpecificSchema = Type.Object({
   id: Type.Optional(Type.String()),
   thinking: Type.Optional(Type.Union([Type.Literal('none'), Type.Literal('low'), Type.Literal('medium'), Type.Literal('high')])),
   providerOptions: Type.Optional(Type.Record(Type.String(), Type.Any())),
+  abortSignal: Type.Optional(Type.Any()),
 });

 export const GenerateTextOptionsSchema = Type.Intersect([CallSettingsSchema, GenerateTextOptionsSpecificSchema], { $id: 'GenerateTextOptions' });
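Worth noting: the schema models the new field as Type.Optional(Type.Any()) because an AbortSignal is a live runtime handle rather than serialisable JSON, so there is no structure for TypeBox to validate. A self-contained sketch of what that buys (AbortSignalField is an illustrative name, not from the codebase):

```ts
import { Type } from '@sinclair/typebox';
import { Value } from '@sinclair/typebox/value';

// Mirrors the new field in isolation.
const AbortSignalField = Type.Object({ abortSignal: Type.Optional(Type.Any()) });

const controller = new AbortController();

// Type.Any() accepts the live AbortSignal handle...
console.log(Value.Check(AbortSignalField, { abortSignal: controller.signal })); // true
// ...and the field's absence, since it is optional.
console.log(Value.Check(AbortSignalField, {})); // true
```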
src/llm/services/ai-llm.ts (10 additions, 4 deletions)

@@ -246,14 +246,17 @@ export abstract class AiLLM<Provider extends ProviderV2> extends BaseLLM {
       console.log(new Error(`No generateMessage id provided. (${promptPreview})`));
     }

+    const settingsToSave = { ...combinedOpts };
+    settingsToSave.abortSignal = undefined;
+
     const createLlmCallRequest: CreateLlmRequest = {
       messages: cloneAndTruncateBuffers(llmMessages),
       llmId: this.getId(),
       agentId: agentContext()?.agentId,
       // userId: currentUser().id,
       callStack: callStack(),
       description,
-      settings: combinedOpts,
+      settings: settingsToSave,
     };
     const llmCall: LlmCall = await this.saveLlmCallRequest(createLlmCallRequest);

@@ -272,7 +275,7 @@ export abstract class AiLLM<Provider extends ProviderV2> extends BaseLLM {
       maxRetries: combinedOpts.maxRetries,
       maxOutputTokens: combinedOpts.maxOutputTokens,
       providerOptions: combinedOpts.providerOptions,
-      // abortSignal: combinedOpts.abortSignal,
+      abortSignal: (combinedOpts as any)?.abortSignal,
     };
     // Messages can be large, and model property with schemas, so just log the reference to the LlmCall its saved in
     logger.info({ args: { ...args, messages: `LlmCall:${llmCall.id}`, model: this.getId() } }, `Generating text - ${opts?.id}`);
@@ -414,12 +417,15 @@ export abstract class AiLLM<Provider extends ProviderV2> extends BaseLLM {
       service: this.service,
     });

+    const settingsToSave = { ...combinedOpts };
+    settingsToSave.abortSignal = undefined;
+
     const createLlmCallRequest: CreateLlmRequest = {
       messages: llmMessages,
       llmId: this.getId(),
       agentId: agentContext()?.agentId,
       callStack: callStack(),
-      settings: combinedOpts,
+      settings: settingsToSave,
     };
     const llmCall: LlmCall = await this.saveLlmCallRequest(createLlmCallRequest);

@@ -430,7 +436,7 @@ export abstract class AiLLM<Provider extends ProviderV2> extends BaseLLM {
     const args: StreamTextArgs = {
       model: this.aiModel(),
       messages,
-      // abortSignal: combinedOpts?.abortSignal,
+      abortSignal: combinedOpts?.abortSignal,
       temperature: combinedOpts?.temperature,
       // topP: combinedOpts?.topP, // anthropic '`temperature` and `top_p` cannot both be specified for this model. Please use only one.'
       stopSequences: combinedOpts?.stopSequences,
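Two details worth calling out in this file: the options are copied and the signal blanked (settingsToSave.abortSignal = undefined) before the LlmCall record is persisted, since a live AbortSignal cannot be usefully serialised, and the signal itself is now forwarded to the underlying SDK call in both the generate and stream paths. A sketch of how a caller might cancel a stream, assuming a streamText(messages, opts) wrapper (the wrapper name and the stopButton handler are assumptions for illustration):

```ts
const controller = new AbortController();

// Hypothetical UI wiring: a "Stop" button aborts the in-flight generation.
stopButton.addEventListener('click', () => controller.abort());

try {
  await llm.streamText(messages, { abortSignal: controller.signal });
} catch (err) {
  // Most SDKs surface an abort as a thrown error; treat it as a user cancel.
  if (controller.signal.aborted) {
    console.log('Generation cancelled');
  } else {
    throw err;
  }
}
```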