From 10843651a1c0cfd50a8f5bb49382e291f5fa59c8 Mon Sep 17 00:00:00 2001
From: Winn Cook <111204176+WinnCook@users.noreply.github.com>
Date: Sat, 14 Mar 2026 07:37:48 -0600
Subject: [PATCH 1/2] fix(cli): surface Codex meta-only output

---
 src/llm/cli.ts                      | 63 +++++++++++++++++++++++++++++
 tests/llm.cli.more-branches.test.ts | 46 +++++++++++++++++++++
 2 files changed, 109 insertions(+)

diff --git a/src/llm/cli.ts b/src/llm/cli.ts
index 09a8b6f3..cd09fd24 100644
--- a/src/llm/cli.ts
+++ b/src/llm/cli.ts
@@ -49,6 +49,66 @@ type CliRunResult = {
 const isNonEmptyString = (value: unknown): value is string =>
   typeof value === "string" && value.trim().length > 0;
 
+const CODEX_META_ONLY_OUTPUT_ERROR =
+  "Codex returned no assistant text; stdout only contained session/meta events.";
+
+const CODEX_FOOTER_LINE_PATTERN = /\bcli\/codex(?:\/\S+)?$/;
+
+function hasTextPayload(payload: Record<string, unknown>): boolean {
+  for (const key of ["result", "response", "output", "message", "text", "content"] as const) {
+    const value = payload[key];
+    if (typeof value === "string" && value.trim().length > 0) {
+      return true;
+    }
+    if (
+      value &&
+      typeof value === "object" &&
+      !Array.isArray(value) &&
+      hasTextPayload(value as Record<string, unknown>)
+    ) {
+      return true;
+    }
+  }
+  return false;
+}
+
+function parseJsonRecord(line: string): Record<string, unknown> | null {
+  if (!line.startsWith("{")) return null;
+  try {
+    const parsed = JSON.parse(line) as unknown;
+    if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) return null;
+    return parsed as Record<string, unknown>;
+  } catch {
+    return null;
+  }
+}
+
+function isCodexFooterLine(line: string): boolean {
+  return line.includes("·") && CODEX_FOOTER_LINE_PATTERN.test(line);
+}
+
+function isCodexMetaOnlyOutput(output: string): boolean {
+  const lines = output
+    .split(/\r?\n/)
+    .map((line) => line.trim())
+    .filter((line) => line.length > 0);
+  if (lines.length === 0) return false;
+  let sawMeta = false;
+  for (const line of lines) {
+    if (isCodexFooterLine(line)) {
+      sawMeta = true;
+      continue;
+    }
+    const payload = parseJsonRecord(line);
+    if (!payload) return false;
+    if (typeof payload.type !== "string" || hasTextPayload(payload)) {
+      return false;
+    }
+    sawMeta = true;
+  }
+  return sawMeta;
+}
+
 function getCliProviderConfig(
   provider: CliProvider,
   config: CliConfig | null | undefined,
@@ -186,6 +246,9 @@ export async function runCliModel({
   }
   const stdoutText = stdout.trim();
   if (stdoutText) {
+    if (isCodexMetaOnlyOutput(stdoutText)) {
+      throw new Error(CODEX_META_ONLY_OUTPUT_ERROR);
+    }
     return { text: stdoutText, usage, costUsd };
   }
   throw new Error("CLI returned empty output");
diff --git a/tests/llm.cli.more-branches.test.ts b/tests/llm.cli.more-branches.test.ts
index 025d221e..29d7e876 100644
--- a/tests/llm.cli.more-branches.test.ts
+++ b/tests/llm.cli.more-branches.test.ts
@@ -1,7 +1,13 @@
 import type { ChildProcess } from "node:child_process";
+import { writeFileSync } from "node:fs";
 import { describe, expect, it } from "vitest";
 import { runCliModel } from "../src/llm/cli.js";
 
+const CODEX_META_ONLY_STDOUT = [
+  '{"type":"thread.started","thread_id":"019cd2c2-0645-7312-b7f2-f10a3d41eb5c"}',
+  "2m 0s · 3.1k words · cli/codex/gpt-5.2",
+].join("\n");
+
 describe("llm/cli extra branches", () => {
   it("parses the last JSON object when stdout includes a preface", async () => {
     const result = await runCliModel({
@@ -58,4 +64,44 @@ describe("llm/cli extra branches", () => {
     expect(result.usage?.completionTokens).toBe(2);
     expect(result.usage?.totalTokens).toBe(3);
   });
+
+  it("throws when Codex last-message is empty and stdout only contains session metadata", async () => {
+    await expect(
+      runCliModel({
+        provider: "codex",
+        prompt: "hi",
+        model: "gpt-5.2",
+        allowTools: false,
+        timeoutMs: 1000,
+        env: {},
+        config: null,
+        execFileImpl: (_cmd, args, _opts, cb) => {
+          const outputIndex = args.indexOf("--output-last-message");
+          const outputPath = outputIndex >= 0 ? args[outputIndex + 1] : null;
+          if (!outputPath) throw new Error("missing output path");
+          writeFileSync(outputPath, " ", "utf8");
+          cb(null, CODEX_META_ONLY_STDOUT, "");
+          return { stdin: { write() {}, end() {} } } as unknown as ChildProcess;
+        },
+      }),
+    ).rejects.toThrow(/stdout only contained session\/meta events/i);
+  });
+
+  it("throws when Codex last-message is missing and stdout only contains session metadata", async () => {
+    await expect(
+      runCliModel({
+        provider: "codex",
+        prompt: "hi",
+        model: "gpt-5.2",
+        allowTools: false,
+        timeoutMs: 1000,
+        env: {},
+        config: null,
+        execFileImpl: (_cmd, _args, _opts, cb) => {
+          cb(null, CODEX_META_ONLY_STDOUT, "");
+          return { stdin: { write() {}, end() {} } } as unknown as ChildProcess;
+        },
+      }),
+    ).rejects.toThrow(/stdout only contained session\/meta events/i);
+  });
 });

From d52a413afb6e085367f64ca271ca141eea17a2da Mon Sep 17 00:00:00 2001
From: Winn Cook <111204176+WinnCook@users.noreply.github.com>
Date: Sat, 14 Mar 2026 08:56:03 -0600
Subject: [PATCH 2/2] fix(cli): honor array text in codex stdout fallback

---
 src/llm/cli.ts                      | 31 ++++++++++++++-------
 tests/llm.cli.more-branches.test.ts | 32 +++++++++++++++++++++++++++++
 2 files changed, 48 insertions(+), 15 deletions(-)

diff --git a/src/llm/cli.ts b/src/llm/cli.ts
index cd09fd24..b775c726 100644
--- a/src/llm/cli.ts
+++ b/src/llm/cli.ts
@@ -53,23 +53,24 @@ const CODEX_META_ONLY_OUTPUT_ERROR =
   "Codex returned no assistant text; stdout only contained session/meta events.";
 
 const CODEX_FOOTER_LINE_PATTERN = /\bcli\/codex(?:\/\S+)?$/;
+const CODEX_TEXT_PAYLOAD_KEYS = [
+  "result",
+  "response",
+  "output",
+  "message",
+  "text",
+  "content",
+] as const;
+
+function hasTextPayloadValue(value: unknown): boolean {
+  if (typeof value === "string") return value.trim().length > 0;
+  if (Array.isArray(value)) return value.some((entry) => hasTextPayloadValue(entry));
+  if (!value || typeof value !== "object") return false;
+  return hasTextPayload(value as Record<string, unknown>);
+}
 
 function hasTextPayload(payload: Record<string, unknown>): boolean {
-  for (const key of ["result", "response", "output", "message", "text", "content"] as const) {
-    const value = payload[key];
-    if (typeof value === "string" && value.trim().length > 0) {
-      return true;
-    }
-    if (
-      value &&
-      typeof value === "object" &&
-      !Array.isArray(value) &&
-      hasTextPayload(value as Record<string, unknown>)
-    ) {
-      return true;
-    }
-  }
-  return false;
+  return CODEX_TEXT_PAYLOAD_KEYS.some((key) => hasTextPayloadValue(payload[key]));
 }
 
 function parseJsonRecord(line: string): Record<string, unknown> | null {
diff --git a/tests/llm.cli.more-branches.test.ts b/tests/llm.cli.more-branches.test.ts
index 29d7e876..0ba276fc 100644
--- a/tests/llm.cli.more-branches.test.ts
+++ b/tests/llm.cli.more-branches.test.ts
@@ -8,6 +8,20 @@ const CODEX_META_ONLY_STDOUT = [
   "2m 0s · 3.1k words · cli/codex/gpt-5.2",
 ].join("\n");
 
+const CODEX_STDOUT_WITH_ARRAY_TEXT = [
+  JSON.stringify({
+    type: "response.completed",
+    response: {
+      output: [
+        {
+          content: [{ type: "output_text", text: "assistant text from array payload" }],
+        },
+      ],
+    },
+  }),
+  "2m 0s · 3.1k words · cli/codex/gpt-5.2",
+].join("\n");
+
 describe("llm/cli extra branches", () => {
   it("parses the last JSON object when stdout includes a preface", async () => {
     const result = await runCliModel({
@@ -104,4 +118,22 @@
       }),
     ).rejects.toThrow(/stdout only contained session\/meta events/i);
   });
+
+  it("keeps raw stdout fallback when Codex stdout includes nested array text", async () => {
+    const result = await runCliModel({
+      provider: "codex",
+      prompt: "hi",
+      model: "gpt-5.2",
+      allowTools: false,
+      timeoutMs: 1000,
+      env: {},
+      config: null,
+      execFileImpl: (_cmd, _args, _opts, cb) => {
+        cb(null, CODEX_STDOUT_WITH_ARRAY_TEXT, "");
+        return { stdin: { write() {}, end() {} } } as unknown as ChildProcess;
+      },
+    });
+
+    expect(result.text).toBe(CODEX_STDOUT_WITH_ARRAY_TEXT);
+  });
 });