16 changes: 10 additions & 6 deletions packages/opencode/src/cli/cmd/run.ts
@@ -87,12 +87,19 @@ export const RunCommand = cmd({
type: "number",
describe: "port for the local server (defaults to random port if no value provided)",
})
.option("project-dir", {
type: "string",
describe: "project directory to run in (defaults to current working directory)",
})
.option("variant", {
type: "string",
describe: "model variant (provider-specific reasoning effort, e.g., high, max, minimal)",
})
},
handler: async (args) => {
// Use --project-dir if provided, otherwise fall back to cwd
const cwd = args.projectDir ? path.resolve(args.projectDir) : process.cwd()

let message = [...args.message, ...(args["--"] || [])]
.map((arg) => (arg.includes(" ") ? `"${arg.replace(/"/g, '\\"')}"` : arg))
.join(" ")
@@ -102,7 +109,7 @@ export const RunCommand = cmd({
const files = Array.isArray(args.file) ? args.file : [args.file]

for (const filePath of files) {
const resolvedPath = path.resolve(process.cwd(), filePath)
const resolvedPath = path.resolve(cwd, filePath)
const file = Bun.file(resolvedPath)
const stats = await file.stat().catch(() => {})
if (!stats) {
@@ -339,11 +346,8 @@ }
}

await bootstrap(process.cwd(), async () => {
const fetchFn = (async (input: RequestInfo | URL, init?: RequestInit) => {
const request = new Request(input, init)
return Server.App().fetch(request)
}) as typeof globalThis.fetch
const sdk = createOpencodeClient({ baseUrl: "http://opencode.internal", fetch: fetchFn })
const server = Server.listen({ port: args.port ?? 0, hostname: "127.0.0.1" })
const sdk = createOpencodeClient({ baseUrl: `http://${server.hostname}:${server.port}` })

if (args.command) {
const exists = await Command.get(args.command)
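Note: the run command now binds a real loopback server (`args.port ?? 0` lets the OS pick a free port) and points the SDK at it over HTTP, instead of routing requests through an in-process fetch wrapper. A minimal sketch of that ephemeral-port pattern with plain `Bun.serve` follows; opencode's `Server.listen` is assumed to wrap something equivalent.

```ts
// Minimal sketch (not opencode's Server.listen): passing port 0 asks the OS for a
// free port, and the bound address is read back from the returned server object.
const server = Bun.serve({
  hostname: "127.0.0.1",
  port: 0, // ephemeral port
  fetch: () => new Response("ok"),
})
console.log(`listening on http://${server.hostname}:${server.port}`)
server.stop()
```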
5 changes: 3 additions & 2 deletions packages/opencode/src/config/config.ts
@@ -125,8 +125,9 @@ export namespace Config {
}

const exists = existsSync(path.join(dir, "node_modules"))
const installing = installDependencies(dir)
if (!exists) await installing
if (!exists && !Flag.OPENCODE_DISABLE_PLUGIN_INSTALL) {
await installDependencies(dir)
}

result.command = mergeDeep(result.command ?? {}, await loadCommand(dir))
result.agent = mergeDeep(result.agent, await loadAgent(dir))
2 changes: 2 additions & 0 deletions packages/opencode/src/flag/flag.ts
@@ -9,6 +9,7 @@ export namespace Flag {
export const OPENCODE_DISABLE_TERMINAL_TITLE = truthy("OPENCODE_DISABLE_TERMINAL_TITLE")
export const OPENCODE_PERMISSION = process.env["OPENCODE_PERMISSION"]
export const OPENCODE_DISABLE_DEFAULT_PLUGINS = truthy("OPENCODE_DISABLE_DEFAULT_PLUGINS")
export const OPENCODE_DISABLE_PLUGIN_INSTALL = truthy("OPENCODE_DISABLE_PLUGIN_INSTALL")
export const OPENCODE_DISABLE_LSP_DOWNLOAD = truthy("OPENCODE_DISABLE_LSP_DOWNLOAD")
export const OPENCODE_ENABLE_EXPERIMENTAL_MODELS = truthy("OPENCODE_ENABLE_EXPERIMENTAL_MODELS")
export const OPENCODE_DISABLE_AUTOCOMPACT = truthy("OPENCODE_DISABLE_AUTOCOMPACT")
@@ -20,6 +21,7 @@
OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_SKILLS")
export const OPENCODE_FAKE_VCS = process.env["OPENCODE_FAKE_VCS"]
export const OPENCODE_CLIENT = process.env["OPENCODE_CLIENT"] ?? "cli"
export const OPENCODE_TRACE_DIR = process.env["OPENCODE_TRACE_DIR"]
export const OPENCODE_SERVER_PASSWORD = process.env["OPENCODE_SERVER_PASSWORD"]
export const OPENCODE_SERVER_USERNAME = process.env["OPENCODE_SERVER_USERNAME"]

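Note: the two new flags follow the existing `truthy(...)` pattern, so they are enabled through environment variables. The `truthy` helper itself is defined elsewhere in flag.ts and is not part of this diff; the sketch below is only a hypothetical stand-in showing the typical shape of such a check, and the real accepted values may differ.

```ts
// Hypothetical stand-in for flag.ts's truthy() helper, shown only to make the new
// env flags concrete; the actual implementation and accepted values may differ.
function truthy(name: string): boolean {
  const value = (process.env[name] ?? "").toLowerCase()
  return value === "1" || value === "true"
}

// With that shape, skipping plugin dependency installs would look like:
//   OPENCODE_DISABLE_PLUGIN_INSTALL=1 opencode run "hello"
```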
8 changes: 8 additions & 0 deletions packages/opencode/src/index.ts
@@ -26,6 +26,7 @@ import { EOL } from "os"
import { WebCommand } from "./cli/cmd/web"
import { PrCommand } from "./cli/cmd/pr"
import { SessionCommand } from "./cli/cmd/session"
import { TraceLogger } from "./util/trace-logger"

process.on("unhandledRejection", (e) => {
Log.Default.error("rejection", {
@@ -56,6 +57,10 @@ const cli = yargs(hideBin(process.argv))
type: "string",
choices: ["DEBUG", "INFO", "WARN", "ERROR"],
})
.option("trace-dir", {
describe: "directory to save request-response trace logs (also configurable via OPENCODE_TRACE_DIR env variable)",
type: "string",
})
.middleware(async (opts) => {
await Log.init({
print: process.argv.includes("--print-logs"),
@@ -70,6 +75,9 @@ const cli = yargs(hideBin(process.argv))
process.env.AGENT = "1"
process.env.OPENCODE = "1"

// Initialize trace logger from CLI option or environment variable
TraceLogger.init(opts.traceDir as string | undefined)

Log.Default.info("opencode", {
version: Installation.VERSION,
args: process.argv.slice(2),
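Note: `TraceLogger.init` receives the `--trace-dir` CLI option, and the option's help text says the directory can also come from `OPENCODE_TRACE_DIR`. The trace-logger module itself is not shown in this diff, so the sketch below is only a guess at how that precedence could be resolved (CLI option first, env variable as fallback); function names and behavior are assumptions.

```ts
// Hypothetical sketch of the precedence TraceLogger.init might apply; the real
// implementation lives in util/trace-logger.ts and is not part of this excerpt.
let traceDir: string | undefined

export function init(cliDir?: string) {
  // CLI option wins; OPENCODE_TRACE_DIR is the fallback; tracing stays off if neither is set.
  traceDir = cliDir ?? process.env["OPENCODE_TRACE_DIR"]
}

export function isEnabled(): boolean {
  return traceDir !== undefined
}
```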
26 changes: 20 additions & 6 deletions packages/opencode/src/provider/models.ts
@@ -2,10 +2,28 @@ import { Global } from "../global"
import { Log } from "../util/log"
import path from "path"
import z from "zod"
import { data } from "./models-macro" with { type: "macro" }
import { Installation } from "../installation"
import { Flag } from "../flag/flag"

// Inline fallback for fetching models data at runtime
// Previously used macro import: import { data } from "./models-macro" with { type: "macro" }
// Macros don't work correctly with `bun run --conditions=browser`
async function fetchModelsData(): Promise<string> {
const envPath = Bun.env.MODELS_DEV_API_JSON
if (envPath) {
const file = Bun.file(envPath)
if (await file.exists()) {
return await file.text()
}
}
const json = await fetch("https://models.dev/api.json", {
headers: {
"User-Agent": Installation.USER_AGENT,
},
}).then((x) => x.text())
return json
}

export namespace ModelsDev {
const log = Log.create({ service: "models.dev" })
const filepath = path.join(Global.Path.cache, "models.json")
@@ -81,11 +99,7 @@ export namespace ModelsDev {
const file = Bun.file(filepath)
const result = await file.json().catch(() => {})
if (result) return result as Record<string, Provider>
if (typeof data === "function") {
const json = await data()
return JSON.parse(json) as Record<string, Provider>
}
const json = await fetch("https://models.dev/api.json").then((x) => x.text())
const json = await fetchModelsData()
return JSON.parse(json) as Record<string, Provider>
}

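Note: with the macro import removed, model metadata is resolved at runtime in this order: the cached `models.json` in the global cache directory, then a file pointed to by `MODELS_DEV_API_JSON`, then a network fetch from models.dev. The snippet below is a hypothetical offline/test setup relying on that env override; the path is a placeholder.

```ts
// Hypothetical offline setup using the MODELS_DEV_API_JSON override added above.
// Bun.env mirrors process.env, and a cached models.json is still checked first.
process.env.MODELS_DEV_API_JSON = "/path/to/local/api.json" // placeholder path
```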
137 changes: 136 additions & 1 deletion packages/opencode/src/session/llm.ts
@@ -22,6 +22,7 @@ import type { MessageV2 } from "./message-v2"
import { Plugin } from "@/plugin"
import { SystemPrompt } from "./system"
import { Flag } from "@/flag/flag"
import { TraceLogger } from "@/util/trace-logger"
import { PermissionNext } from "@/permission/next"
import { Auth } from "@/auth"

@@ -162,11 +163,41 @@ export namespace LLM {
})
}

return streamText({
// Create trace entry if tracing is enabled
const traceEntry = TraceLogger.isEnabled()
? TraceLogger.createTraceEntry({
sessionID: input.sessionID,
providerID: input.model.providerID,
modelID: input.model.id,
agent: input.agent.name,
system: system,
messages: input.messages,
tools: tools,
parameters: {
temperature: params.temperature,
topP: params.topP,
topK: params.topK,
maxOutputTokens: maxOutputTokens,
options: params.options,
},
})
: undefined

const startTime = Date.now()

const streamResult = streamText({
onError(error) {
l.error("stream error", {
error,
})
// Log trace with error if tracing is enabled
if (traceEntry) {
TraceLogger.updateTraceWithResponse(traceEntry, {
error: error instanceof Error ? error : new Error(String(error)),
duration: Date.now() - startTime,
})
TraceLogger.logTrace(traceEntry)
}
},
async experimental_repairToolCall(failed) {
const lower = failed.toolCall.toolName.toLowerCase()
@@ -253,6 +284,110 @@ }
}),
experimental_telemetry: { isEnabled: cfg.experimental?.openTelemetry },
})

// Wrap the result to capture trace data if tracing is enabled
if (traceEntry) {
const originalFullStream = streamResult.fullStream

// Collect response data as stream progresses
const responseData: {
text: string
toolCalls: Array<{ id: string; name: string; input: any }>
reasoning: string[]
finishReason?: string
usage?: any
} = {
text: "",
toolCalls: [],
reasoning: [],
}

let currentReasoning = ""

// Wrap fullStream to collect data
const wrappedStream = (async function* () {
try {
for await (const chunk of originalFullStream) {
// Collect response data based on chunk type
if ("type" in chunk) {
switch (chunk.type) {
case "text-delta":
if ("text" in chunk && chunk.text && typeof chunk.text === "string") {
responseData.text += chunk.text
}
break
case "reasoning-start":
currentReasoning = ""
break
case "reasoning-delta":
if ("text" in chunk && chunk.text && typeof chunk.text === "string") {
currentReasoning += chunk.text
}
break
case "reasoning-end":
if (currentReasoning) {
responseData.reasoning.push(currentReasoning)
currentReasoning = ""
}
break
case "tool-call":
if ("toolCallId" in chunk && "toolName" in chunk && "input" in chunk) {
responseData.toolCalls.push({
id: chunk.toolCallId as string,
name: chunk.toolName as string,
input: chunk.input,
})
}
break
case "finish-step":
if ("finishReason" in chunk) {
responseData.finishReason = chunk.finishReason as string
}
if ("usage" in chunk) {
responseData.usage = chunk.usage
}
// Log trace when stream finishes
TraceLogger.updateTraceWithResponse(traceEntry, {
finishReason: responseData.finishReason,
usage: responseData.usage,
content: {
text: responseData.text ? [responseData.text] : undefined,
toolCalls: responseData.toolCalls.length > 0 ? responseData.toolCalls : undefined,
reasoning: responseData.reasoning.length > 0 ? responseData.reasoning : undefined,
},
duration: Date.now() - startTime,
})
await TraceLogger.logTrace(traceEntry)
break
}
}
yield chunk
}
} catch (error) {
// Log trace with error
if (error instanceof Error) {
TraceLogger.updateTraceWithResponse(traceEntry, {
error,
duration: Date.now() - startTime,
})
await TraceLogger.logTrace(traceEntry)
}
throw error
}
})()

// Create a proxy to wrap the stream result with our traced fullStream
return new Proxy(streamResult, {
get(target, prop) {
if (prop === "fullStream") {
return wrappedStream
}
return Reflect.get(target, prop)
},
})
}

return streamResult
}

async function resolveTools(input: Pick<StreamInput, "tools" | "agent" | "user">) {
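Note: the tracing wrapper leaves `streamText`'s return value intact and only swaps out `fullStream`: an async generator re-yields every chunk while accumulating text, reasoning, and tool calls, and a `Proxy` forwards every other property to the original result. Below is a self-contained sketch of that pattern, detached from the AI SDK types, to make the mechanism easier to follow.

```ts
// Standalone illustration of the wrap-and-observe pattern used above: re-yield every
// chunk from an async iterable while collecting data, and expose the wrapped stream
// through a Proxy so all other properties still come from the original object.
type Chunk = { type: "text-delta"; text: string } | { type: "finish" }

function traceStream<T extends { fullStream: AsyncIterable<Chunk> }>(result: T): T {
  const collected: string[] = []

  const wrapped = (async function* () {
    for await (const chunk of result.fullStream) {
      if (chunk.type === "text-delta") collected.push(chunk.text)
      if (chunk.type === "finish") console.log("traced text:", collected.join(""))
      yield chunk // pass every chunk through unchanged
    }
  })()

  return new Proxy(result, {
    get(target, prop) {
      if (prop === "fullStream") return wrapped
      return Reflect.get(target, prop)
    },
  })
}
```

Only `fullStream` is wrapped, so the trace is populated when the caller iterates that stream; every other accessor on the result passes through untouched.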