diff --git a/packages/opencode/src/cli/cmd/run.ts b/packages/opencode/src/cli/cmd/run.ts index 54248f96f3d..94932b744de 100644 --- a/packages/opencode/src/cli/cmd/run.ts +++ b/packages/opencode/src/cli/cmd/run.ts @@ -87,12 +87,19 @@ export const RunCommand = cmd({ type: "number", describe: "port for the local server (defaults to random port if no value provided)", }) + .option("project-dir", { + type: "string", + describe: "project directory to run in (defaults to current working directory)", + }) .option("variant", { type: "string", describe: "model variant (provider-specific reasoning effort, e.g., high, max, minimal)", }) }, handler: async (args) => { + // Use --project-dir if provided, otherwise fall back to cwd + const cwd = args.projectDir ? path.resolve(args.projectDir) : process.cwd() + let message = [...args.message, ...(args["--"] || [])] .map((arg) => (arg.includes(" ") ? `"${arg.replace(/"/g, '\\"')}"` : arg)) .join(" ") @@ -102,7 +109,7 @@ export const RunCommand = cmd({ const files = Array.isArray(args.file) ? args.file : [args.file] for (const filePath of files) { - const resolvedPath = path.resolve(process.cwd(), filePath) + const resolvedPath = path.resolve(cwd, filePath) const file = Bun.file(resolvedPath) const stats = await file.stat().catch(() => {}) if (!stats) { @@ -339,11 +346,8 @@ export const RunCommand = cmd({ } await bootstrap(process.cwd(), async () => { - const fetchFn = (async (input: RequestInfo | URL, init?: RequestInit) => { - const request = new Request(input, init) - return Server.App().fetch(request) - }) as typeof globalThis.fetch - const sdk = createOpencodeClient({ baseUrl: "http://opencode.internal", fetch: fetchFn }) + const server = Server.listen({ port: args.port ?? 
0, hostname: "127.0.0.1" }) + const sdk = createOpencodeClient({ baseUrl: `http://${server.hostname}:${server.port}` }) if (args.command) { const exists = await Command.get(args.command) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 1574c644d32..c5c0eba8891 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -125,8 +125,9 @@ export namespace Config { } const exists = existsSync(path.join(dir, "node_modules")) - const installing = installDependencies(dir) - if (!exists) await installing + if (!exists && !Flag.OPENCODE_DISABLE_PLUGIN_INSTALL) { + await installDependencies(dir) + } result.command = mergeDeep(result.command ?? {}, await loadCommand(dir)) result.agent = mergeDeep(result.agent, await loadAgent(dir)) diff --git a/packages/opencode/src/flag/flag.ts b/packages/opencode/src/flag/flag.ts index 4cdb549096a..d599dbdd457 100644 --- a/packages/opencode/src/flag/flag.ts +++ b/packages/opencode/src/flag/flag.ts @@ -9,10 +9,12 @@ export namespace Flag { export const OPENCODE_DISABLE_TERMINAL_TITLE = truthy("OPENCODE_DISABLE_TERMINAL_TITLE") export const OPENCODE_PERMISSION = process.env["OPENCODE_PERMISSION"] export const OPENCODE_DISABLE_DEFAULT_PLUGINS = truthy("OPENCODE_DISABLE_DEFAULT_PLUGINS") + export const OPENCODE_DISABLE_PLUGIN_INSTALL = truthy("OPENCODE_DISABLE_PLUGIN_INSTALL") export const OPENCODE_DISABLE_LSP_DOWNLOAD = truthy("OPENCODE_DISABLE_LSP_DOWNLOAD") export const OPENCODE_ENABLE_EXPERIMENTAL_MODELS = truthy("OPENCODE_ENABLE_EXPERIMENTAL_MODELS") export const OPENCODE_DISABLE_AUTOCOMPACT = truthy("OPENCODE_DISABLE_AUTOCOMPACT") export const OPENCODE_DISABLE_MODELS_FETCH = truthy("OPENCODE_DISABLE_MODELS_FETCH") + export const OPENCODE_DISABLE_MODELS_DEV = truthy("OPENCODE_DISABLE_MODELS_DEV") export const OPENCODE_DISABLE_CLAUDE_CODE = truthy("OPENCODE_DISABLE_CLAUDE_CODE") export const OPENCODE_DISABLE_CLAUDE_CODE_PROMPT = 
OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_PROMPT") @@ -20,6 +22,7 @@ export namespace Flag { OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_SKILLS") export const OPENCODE_FAKE_VCS = process.env["OPENCODE_FAKE_VCS"] export const OPENCODE_CLIENT = process.env["OPENCODE_CLIENT"] ?? "cli" + export const OPENCODE_TRACE_DIR = process.env["OPENCODE_TRACE_DIR"] export const OPENCODE_SERVER_PASSWORD = process.env["OPENCODE_SERVER_PASSWORD"] export const OPENCODE_SERVER_USERNAME = process.env["OPENCODE_SERVER_USERNAME"] diff --git a/packages/opencode/src/index.ts b/packages/opencode/src/index.ts index 6dc5e99e91e..b1bd9c39209 100644 --- a/packages/opencode/src/index.ts +++ b/packages/opencode/src/index.ts @@ -26,6 +26,7 @@ import { EOL } from "os" import { WebCommand } from "./cli/cmd/web" import { PrCommand } from "./cli/cmd/pr" import { SessionCommand } from "./cli/cmd/session" +import { TraceLogger } from "./util/trace-logger" process.on("unhandledRejection", (e) => { Log.Default.error("rejection", { @@ -56,6 +57,10 @@ const cli = yargs(hideBin(process.argv)) type: "string", choices: ["DEBUG", "INFO", "WARN", "ERROR"], }) + .option("trace-dir", { + describe: "directory to save request-response trace logs (also configurable via OPENCODE_TRACE_DIR env variable)", + type: "string", + }) .middleware(async (opts) => { await Log.init({ print: process.argv.includes("--print-logs"), @@ -70,6 +75,9 @@ const cli = yargs(hideBin(process.argv)) process.env.AGENT = "1" process.env.OPENCODE = "1" + // Initialize trace logger from CLI option or environment variable + TraceLogger.init(opts.traceDir as string | undefined) + Log.Default.info("opencode", { version: Installation.VERSION, args: process.argv.slice(2), diff --git a/packages/opencode/src/provider/models.ts b/packages/opencode/src/provider/models.ts index c5465f9880e..9c50e7013cc 100644 --- a/packages/opencode/src/provider/models.ts +++ b/packages/opencode/src/provider/models.ts @@ 
-2,10 +2,28 @@ import { Global } from "../global" import { Log } from "../util/log" import path from "path" import z from "zod" -import { data } from "./models-macro" with { type: "macro" } import { Installation } from "../installation" import { Flag } from "../flag/flag" +// Inline fallback for fetching models data at runtime +// Previously used macro import: import { data } from "./models-macro" with { type: "macro" } +// Macros don't work correctly with `bun run --conditions=browser` +async function fetchModelsData(): Promise<string> { + const envPath = Bun.env.MODELS_DEV_API_JSON + if (envPath) { + const file = Bun.file(envPath) + if (await file.exists()) { + return await file.text() + } + } + const json = await fetch("https://models.dev/api.json", { + headers: { + "User-Agent": Installation.USER_AGENT, + }, + }).then((x) => x.text()) + return json +} + export namespace ModelsDev { const log = Log.create({ service: "models.dev" }) const filepath = path.join(Global.Path.cache, "models.json") @@ -77,15 +95,13 @@ export namespace ModelsDev { export type Provider = z.infer<typeof Provider> export async function get() { + // If models.dev is completely disabled, return empty object + if (Flag.OPENCODE_DISABLE_MODELS_DEV) return {} refresh() const file = Bun.file(filepath) const result = await file.json().catch(() => {}) if (result) return result as Record<string, Provider> - if (typeof data === "function") { - const json = await data() - return JSON.parse(json) as Record<string, Provider> - } - const json = await fetch("https://models.dev/api.json").then((x) => x.text()) + const json = await fetchModelsData() return JSON.parse(json) as Record<string, Provider> } diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index bcb115edf41..6841ce1ecfd 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -800,6 +800,16 @@ export namespace Provider { database[providerID] = parsed } + // Initialize config providers directly into providers object + //
This ensures custom providers (like vllm) work even without auth/env setup + for (const [providerID, provider] of configProviders) { + const dbProvider = database[providerID] + if (dbProvider && Object.keys(dbProvider.models).length > 0) { + providers[providerID] = dbProvider + log.info("loaded config provider", { providerID }) + } + } + // load env const env = Env.all() for (const [providerID, provider] of Object.entries(database)) { diff --git a/packages/opencode/src/session/llm.ts b/packages/opencode/src/session/llm.ts index 1029b45ea0d..94317147baa 100644 --- a/packages/opencode/src/session/llm.ts +++ b/packages/opencode/src/session/llm.ts @@ -22,6 +22,7 @@ import type { MessageV2 } from "./message-v2" import { Plugin } from "@/plugin" import { SystemPrompt } from "./system" import { Flag } from "@/flag/flag" +import { TraceLogger } from "@/util/trace-logger" import { PermissionNext } from "@/permission/next" import { Auth } from "@/auth" @@ -162,11 +163,41 @@ export namespace LLM { }) } - return streamText({ + // Create trace entry if tracing is enabled + const traceEntry = TraceLogger.isEnabled() + ? TraceLogger.createTraceEntry({ + sessionID: input.sessionID, + providerID: input.model.providerID, + modelID: input.model.id, + agent: input.agent.name, + system: system, + messages: input.messages, + tools: tools, + parameters: { + temperature: params.temperature, + topP: params.topP, + topK: params.topK, + maxOutputTokens: maxOutputTokens, + options: params.options, + }, + }) + : undefined + + const startTime = Date.now() + + const streamResult = streamText({ onError(error) { l.error("stream error", { error, }) + // Log trace with error if tracing is enabled + if (traceEntry) { + TraceLogger.updateTraceWithResponse(traceEntry, { + error: error instanceof Error ? 
error : new Error(String(error)), + duration: Date.now() - startTime, + }) + TraceLogger.logTrace(traceEntry) + } }, async experimental_repairToolCall(failed) { const lower = failed.toolCall.toolName.toLowerCase() @@ -253,6 +284,110 @@ export namespace LLM { }), experimental_telemetry: { isEnabled: cfg.experimental?.openTelemetry }, }) + + // Wrap the result to capture trace data if tracing is enabled + if (traceEntry) { + const originalFullStream = streamResult.fullStream + + // Collect response data as stream progresses + const responseData: { + text: string + toolCalls: Array<{ id: string; name: string; input: any }> + reasoning: string[] + finishReason?: string + usage?: any + } = { + text: "", + toolCalls: [], + reasoning: [], + } + + let currentReasoning = "" + + // Wrap fullStream to collect data + const wrappedStream = (async function* () { + try { + for await (const chunk of originalFullStream) { + // Collect response data based on chunk type + if ("type" in chunk) { + switch (chunk.type) { + case "text-delta": + if ("text" in chunk && chunk.text && typeof chunk.text === "string") { + responseData.text += chunk.text + } + break + case "reasoning-start": + currentReasoning = "" + break + case "reasoning-delta": + if ("text" in chunk && chunk.text && typeof chunk.text === "string") { + currentReasoning += chunk.text + } + break + case "reasoning-end": + if (currentReasoning) { + responseData.reasoning.push(currentReasoning) + currentReasoning = "" + } + break + case "tool-call": + if ("toolCallId" in chunk && "toolName" in chunk && "input" in chunk) { + responseData.toolCalls.push({ + id: chunk.toolCallId as string, + name: chunk.toolName as string, + input: chunk.input, + }) + } + break + case "finish-step": + if ("finishReason" in chunk) { + responseData.finishReason = chunk.finishReason as string + } + if ("usage" in chunk) { + responseData.usage = chunk.usage + } + // Log trace when stream finishes + TraceLogger.updateTraceWithResponse(traceEntry, { + 
finishReason: responseData.finishReason, + usage: responseData.usage, + content: { + text: responseData.text ? [responseData.text] : undefined, + toolCalls: responseData.toolCalls.length > 0 ? responseData.toolCalls : undefined, + reasoning: responseData.reasoning.length > 0 ? responseData.reasoning : undefined, + }, + duration: Date.now() - startTime, + }) + await TraceLogger.logTrace(traceEntry) + break + } + } + yield chunk + } + } catch (error) { + // Log trace with error + if (error instanceof Error) { + TraceLogger.updateTraceWithResponse(traceEntry, { + error, + duration: Date.now() - startTime, + }) + await TraceLogger.logTrace(traceEntry) + } + throw error + } + })() + + // Create a proxy to wrap the stream result with our traced fullStream + return new Proxy(streamResult, { + get(target, prop) { + if (prop === "fullStream") { + return wrappedStream + } + return Reflect.get(target, prop) + }, + }) + } + + return streamResult } async function resolveTools(input: Pick) { diff --git a/packages/opencode/src/util/trace-logger.ts b/packages/opencode/src/util/trace-logger.ts new file mode 100644 index 00000000000..618e2c62488 --- /dev/null +++ b/packages/opencode/src/util/trace-logger.ts @@ -0,0 +1,303 @@ +import { Flag } from "@/flag/flag" +import { Log } from "./log" +import path from "path" +import fs from "fs/promises" + +export namespace TraceLogger { + const log = Log.create({ service: "trace-logger" }) + + let traceDir: string | undefined = undefined + let enabled = false + + /** + * Initialize the trace logger with the specified directory. + * Can be called from CLI options or will use environment variable. 
+ */ + export function init(directory?: string) { + traceDir = directory || Flag.OPENCODE_TRACE_DIR + enabled = !!traceDir + + if (enabled) { + log.info("trace logging enabled", { directory: traceDir }) + } + } + + /** + * Check if trace logging is enabled + */ + export function isEnabled(): boolean { + return enabled + } + + /** + * Get the trace directory path + */ + export function getDirectory(): string | undefined { + return traceDir + } + + export type TraceEntry = { + timestamp: string + sessionID: string + requestID: string + providerID: string + modelID: string + agent: string + duration?: number + request: { + model: string + messages: Array<{ + role: string + content: string | any[] + tool_calls?: any[] + tool_call_id?: string + reasoning_content?: string + }> + temperature?: number + top_p?: number + stream?: boolean + stream_options?: { + include_usage?: boolean + } + tools?: any[] + max_tokens?: number + } + response?: { + id?: string + object?: string + created?: number + model?: string + choices?: Array<{ + index: number + message: { + role: string + content: string | null + tool_calls?: any[] + reasoning_content?: string + refusal?: string | null + } + finish_reason: string + logprobs?: any + }> + usage?: { + prompt_tokens: number + total_tokens: number + completion_tokens: number + cache_read_tokens?: number + cache_write_tokens?: number + } + error?: Error + content?: { + text?: string[] + } + } + error?: { + name: string + message: string + stack?: string + } + system?: { + hostname?: string + platform?: string + release?: string + nodeVersion?: string + } + } + + /** + * Log a trace entry for an LLM request-response pair + */ + export async function logTrace(entry: TraceEntry): Promise<void> { + if (!enabled || !traceDir) { + return + } + + try { + // Ensure the trace directory exists + await fs.mkdir(traceDir, { recursive: true }) + + // Create a filename with timestamp, session ID, and request ID + const timestamp = new
Date().toISOString().replace(/[:.]/g, "-") + const filename = `${timestamp}_${entry.sessionID}_${entry.requestID}.json` + const filepath = path.join(traceDir, filename) + + // Write the trace entry as formatted JSON + await fs.writeFile(filepath, JSON.stringify(entry, null, 2), "utf-8") + + log.debug("trace logged", { filepath }) + } catch (error) { + log.error("failed to write trace", { error }) + } + } + + /** + * Create a trace entry from LLM stream input + */ + export function createTraceEntry(input: { + sessionID: string + providerID: string + modelID: string + agent: string + system: string[] + messages: any[] + tools: any[] | Record + parameters: { + temperature?: number + topP?: number + topK?: number + maxOutputTokens?: number + stream?: boolean + options?: any + } + }): TraceEntry { + // Merge system prompts into messages array as role "system" + const systemMessages = input.system.map((content) => ({ + role: "system", + content, + })) + + // Format tools - convert Record to array if needed + let formattedTools: any[] | undefined = undefined + if (input.tools) { + if (Array.isArray(input.tools)) { + formattedTools = input.tools.length > 0 ? input.tools : undefined + } else { + const toolsArray = Object.values(input.tools) + formattedTools = toolsArray.length > 0 ? toolsArray : undefined + } + } + + return { + timestamp: new Date().toISOString(), + sessionID: input.sessionID, + requestID: generateRequestID(), + providerID: input.providerID, + modelID: input.modelID, + agent: input.agent, + request: { + model: input.modelID, + messages: [...systemMessages, ...input.messages], + temperature: input.parameters.temperature, + top_p: input.parameters.topP, + stream: input.parameters.stream ?? 
true, + stream_options: { + include_usage: true, + }, + tools: formattedTools, + max_tokens: input.parameters.maxOutputTokens, + }, + } + } + + /** + * Generate a unique request ID for tracing + */ + function generateRequestID(): string { + const timestamp = Date.now().toString(36) + const random = Math.random().toString(36).substring(2, 10) + return `trace_${timestamp}_${random}` + } + + /** + * Update a trace entry with response data + */ + export function updateTraceWithResponse( + entry: TraceEntry, + response: { + id?: string + finishReason?: string + usage?: any + content?: { + text?: string[] + toolCalls?: Array<{ + id: string + name: string + input: any + }> + reasoning?: string[] + } + error?: Error + duration?: number + }, + ): void { + // Store duration if provided + if (response.duration !== undefined) { + entry.duration = response.duration + } + + if (response.error) { + entry.error = { + name: response.error.name, + message: response.error.message, + stack: response.error.stack, + } + // Add system information even on error + entry.system = { + hostname: process.env.HOSTNAME, + platform: process.platform, + release: process.release?.name, + nodeVersion: process.version, + } + return + } + + // Build message content + let messageContent: string | null = null + const toolCalls: any[] = [] + + if (response.content?.text && response.content.text.length > 0) { + messageContent = response.content.text.join("") + } + + if (response.content?.toolCalls && response.content.toolCalls.length > 0) { + response.content.toolCalls.forEach((tc) => { + toolCalls.push({ + id: tc.id, + type: "function", + function: { + name: tc.name, + arguments: JSON.stringify(tc.input), + }, + }) + }) + } + + entry.response = { + id: response.id || `chatcmpl-${Date.now().toString(36)}`, + object: "chat.completion", + created: Math.floor(Date.now() / 1000), + model: entry.request.model, + choices: [ + { + index: 0, + message: { + role: "assistant", + content: messageContent, + 
tool_calls: toolCalls.length > 0 ? toolCalls : undefined, + reasoning_content: response.content?.reasoning?.join("") || undefined, + refusal: null, + }, + finish_reason: response.finishReason || "stop", + logprobs: null, + }, + ], + usage: response.usage + ? { + prompt_tokens: response.usage.inputTokens || response.usage.promptTokens || 0, + total_tokens: response.usage.totalTokens || 0, + completion_tokens: response.usage.outputTokens || response.usage.completionTokens || 0, + cache_read_tokens: response.usage.cachedInputTokens || response.usage.cacheReadTokens, + cache_write_tokens: response.usage.cacheCreationTokens, + } + : undefined, + } + + // Add system information + entry.system = { + hostname: process.env.HOSTNAME, + platform: process.platform, + release: process.release?.name, + nodeVersion: process.version, + } + } +} diff --git a/packages/opencode/test/util/trace-logger.test.ts b/packages/opencode/test/util/trace-logger.test.ts new file mode 100644 index 00000000000..cc0ddd95548 --- /dev/null +++ b/packages/opencode/test/util/trace-logger.test.ts @@ -0,0 +1,257 @@ +import { describe, it, expect, beforeEach, afterEach } from "bun:test" +import { TraceLogger } from "../../src/util/trace-logger" +import fs from "fs/promises" +import path from "path" +import os from "os" + +describe("TraceLogger", () => { + let testDir: string + + beforeEach(async () => { + // Create a temporary directory for test traces + testDir = path.join(os.tmpdir(), `opencode-trace-test-${Date.now()}`) + await fs.mkdir(testDir, { recursive: true }) + }) + + afterEach(async () => { + // Clean up test directory + try { + await fs.rm(testDir, { recursive: true, force: true }) + } catch (e) { + // Ignore cleanup errors + } + }) + + it("should not be enabled by default", () => { + expect(TraceLogger.isEnabled()).toBe(false) + }) + + it("should enable tracing when initialized with a directory", () => { + TraceLogger.init(testDir) + expect(TraceLogger.isEnabled()).toBe(true) + 
expect(TraceLogger.getDirectory()).toBe(testDir) + }) + + it("should create a trace entry with correct structure", () => { + const entry = TraceLogger.createTraceEntry({ + sessionID: "test-session", + providerID: "openai", + modelID: "gpt-4", + agent: "default", + system: ["System prompt"], + messages: [{ role: "user", content: "Hello" }], + tools: { bash: { name: "bash" } }, + parameters: { + temperature: 0.7, + topP: 0.9, + maxOutputTokens: 32000, + }, + }) + + expect(entry.sessionID).toBe("test-session") + expect(entry.providerID).toBe("openai") + expect(entry.modelID).toBe("gpt-4") + expect(entry.agent).toBe("default") + expect(entry.request.messages).toEqual([ + { role: "system", content: "System prompt" }, + { role: "user", content: "Hello" }, + ]) + expect(entry.request.tools).toEqual([{ name: "bash" }]) + }) + + it("should update trace entry with response data", () => { + const entry = TraceLogger.createTraceEntry({ + sessionID: "test-session", + providerID: "openai", + modelID: "gpt-4", + agent: "default", + system: ["System prompt"], + messages: [], + tools: {}, + parameters: {}, + }) + + TraceLogger.updateTraceWithResponse(entry, { + finishReason: "stop", + usage: { + promptTokens: 100, + completionTokens: 50, + totalTokens: 150, + }, + content: { + text: ["Hello, world!"], + }, + duration: 1234, + }) + + expect(entry.response).toBeDefined() + expect(entry.response?.choices?.[0]?.finish_reason).toBe("stop") + expect(entry.response?.usage?.prompt_tokens).toBe(100) + expect(entry.response?.usage?.completion_tokens).toBe(50) + expect(entry.response?.usage?.total_tokens).toBe(150) + expect(entry.response?.choices?.[0]?.message.content).toBe("Hello, world!") + expect(entry.duration).toBe(1234) + }) + + it("should log trace to file when enabled", async () => { + TraceLogger.init(testDir) + + const entry = TraceLogger.createTraceEntry({ + sessionID: "test-session", + providerID: "openai", + modelID: "gpt-4", + agent: "default", + system: ["System prompt"], + 
messages: [{ role: "user", content: "Test message" }], + tools: {}, + parameters: {}, + }) + + TraceLogger.updateTraceWithResponse(entry, { + finishReason: "stop", + usage: { + promptTokens: 10, + completionTokens: 5, + totalTokens: 15, + }, + content: { + text: ["Response text"], + }, + duration: 500, + }) + + await TraceLogger.logTrace(entry) + + // Verify file was created + const files = await fs.readdir(testDir) + expect(files.length).toBe(1) + expect(files[0]).toMatch(/\.json$/) + + // Verify file contents + const filePath = path.join(testDir, files[0]) + const content = await fs.readFile(filePath, "utf-8") + const parsed = JSON.parse(content) + + expect(parsed.sessionID).toBe("test-session") + expect(parsed.providerID).toBe("openai") + expect(parsed.modelID).toBe("gpt-4") + expect(parsed.request.messages).toEqual([ + { role: "system", content: "System prompt" }, + { role: "user", content: "Test message" }, + ]) + expect(parsed.response.choices[0].message.content).toBe("Response text") + }) + + it("should not log trace when disabled", async () => { + // Don't initialize - tracing should be disabled + const entry = TraceLogger.createTraceEntry({ + sessionID: "test-session", + providerID: "openai", + modelID: "gpt-4", + agent: "default", + system: [], + messages: [], + tools: {}, + parameters: {}, + }) + + await TraceLogger.logTrace(entry) + + // Verify no files were created in test directory + const exists = await fs + .access(testDir) + .then(() => true) + .catch(() => false) + if (exists) { + const files = await fs.readdir(testDir) + expect(files.length).toBe(0) + } + }) + + it("should handle errors in trace entry", () => { + const entry = TraceLogger.createTraceEntry({ + sessionID: "test-session", + providerID: "openai", + modelID: "gpt-4", + agent: "default", + system: [], + messages: [], + tools: {}, + parameters: {}, + }) + + const error = new Error("Test error") + error.stack = "Error stack trace" + + TraceLogger.updateTraceWithResponse(entry, { + error, 
+ duration: 100, + }) + + expect(entry.error).toBeDefined() + expect(entry.error?.name).toBe("Error") + expect(entry.error?.message).toBe("Test error") + expect(entry.error?.stack).toBe("Error stack trace") + }) + + it("should include tool calls in response", () => { + const entry = TraceLogger.createTraceEntry({ + sessionID: "test-session", + providerID: "openai", + modelID: "gpt-4", + agent: "default", + system: [], + messages: [], + tools: {}, + parameters: {}, + }) + + TraceLogger.updateTraceWithResponse(entry, { + content: { + toolCalls: [ + { + id: "call_1", + name: "bash", + input: { command: "ls -la" }, + }, + { + id: "call_2", + name: "read_file", + input: { path: "test.txt" }, + }, + ], + }, + duration: 500, + }) + + expect(entry.response?.choices?.[0]?.message.tool_calls).toHaveLength(2) + expect(entry.response?.choices?.[0]?.message.tool_calls?.[0].function.name).toBe("bash") + expect(entry.response?.choices?.[0]?.message.tool_calls?.[1].function.name).toBe("read_file") + }) + + it("should merge multiple system messages into messages array", () => { + const entry = TraceLogger.createTraceEntry({ + sessionID: "test-session", + providerID: "openai", + modelID: "gpt-4", + agent: "default", + system: ["System prompt 1", "System prompt 2", "System prompt 3"], + messages: [ + { role: "user", content: "User message 1" }, + { role: "assistant", content: "Assistant response" }, + { role: "user", content: "User message 2" }, + ], + tools: {}, + parameters: {}, + }) + + // Verify system messages are at the beginning + expect(entry.request.messages).toHaveLength(6) + expect(entry.request.messages[0]).toEqual({ role: "system", content: "System prompt 1" }) + expect(entry.request.messages[1]).toEqual({ role: "system", content: "System prompt 2" }) + expect(entry.request.messages[2]).toEqual({ role: "system", content: "System prompt 3" }) + expect(entry.request.messages[3]).toEqual({ role: "user", content: "User message 1" }) + 
expect(entry.request.messages[4]).toEqual({ role: "assistant", content: "Assistant response" }) + expect(entry.request.messages[5]).toEqual({ role: "user", content: "User message 2" }) + }) +})