
Commit d4d637f

refactor: derive compress graph from token buckets
1 parent 96062b0 commit d4d637f

File tree: 6 files changed, +239 -88 lines changed


lib/hooks.ts

Lines changed: 3 additions & 1 deletion
@@ -13,7 +13,7 @@ import { handleHelpCommand } from "./commands/help"
 import { handleSweepCommand } from "./commands/sweep"
 import { handleManualToggleCommand, handleManualTriggerCommand } from "./commands/manual"
 import { ensureSessionInitialized } from "./state/state"
-import { getCurrentParams } from "./strategies/utils"
+import { cacheSystemPromptTokens } from "./ui/utils"

 const INTERNAL_AGENT_SIGNATURES = [
   "You are a title generator",
@@ -107,6 +107,8 @@ export function createChatMessageTransformHandler(
     return
   }

+  cacheSystemPromptTokens(state, output.messages)
+
   syncToolCache(state, config, logger, output.messages)
   buildToolIdList(state, output.messages)
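Note: lib/ui/utils.ts, which defines cacheSystemPromptTokens and the new graph helpers, is presumably the sixth changed file and is not included in this excerpt. A minimal sketch of what the caching could look like, assuming the helper measures system-role messages once with the existing countTokens utility and stores the count on the new state.systemPromptTokens field; the message and part shapes here are assumptions, not the plugin's actual implementation:

// Hypothetical sketch only; not the actual lib/ui/utils.ts implementation.
import { countTokens } from "../strategies/utils"
import type { SessionState, WithParts } from "../state"

export function cacheSystemPromptTokens(state: SessionState, messages: WithParts[]): void {
  // Compute once per session; later calls reuse the cached value.
  if (state.systemPromptTokens !== undefined) return

  // Assumption: the system prompt arrives as system-role messages with text parts.
  const systemText = messages
    .filter((m) => m.info.role === "system")
    .flatMap((m) => m.parts)
    .filter((p): p is { type: "text"; text: string } => p.type === "text")
    .map((p) => p.text)
    .join("\n")

  if (systemText.length > 0) {
    state.systemPromptTokens = countTokens(systemText)
  }
}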

lib/state/state.ts

Lines changed: 2 additions & 0 deletions
@@ -82,6 +82,7 @@ export function createSessionState(): SessionState {
     currentTurn: 0,
     variant: undefined,
     modelContextLimit: undefined,
+    systemPromptTokens: undefined,
   }
 }

@@ -108,6 +109,7 @@ export function resetSessionState(state: SessionState): void {
   state.currentTurn = 0
   state.variant = undefined
   state.modelContextLimit = undefined
+  state.systemPromptTokens = undefined
 }

 export async function ensureSessionInitialized(

lib/state/types.ts

Lines changed: 1 addition & 0 deletions
@@ -53,4 +53,5 @@ export interface SessionState {
   currentTurn: number
   variant: string | undefined
   modelContextLimit: number | undefined
+  systemPromptTokens: number | undefined
 }

lib/tools/compress.ts

Lines changed: 4 additions & 19 deletions
@@ -8,6 +8,7 @@ import { getCurrentParams, countAllMessageTokens, countTokens } from "../strateg
 import type { AssistantMessage } from "@opencode-ai/sdk/v2"
 import { findStringInMessages, collectToolIdsInRange, collectMessageIdsInRange } from "./utils"
 import { sendCompressNotification } from "../ui/notification"
+import { cacheSystemPromptTokens } from "../ui/utils"
 import { prune as applyPruneTransforms } from "../messages/prune"
 import { clog, C } from "../compress-logger"

@@ -109,6 +110,8 @@ export function createCompressTool(ctx: ToolContext): ReturnType<typeof tool> {
       ctx.config.manualMode.enabled,
     )

+    cacheSystemPromptTokens(state, messages)
+
     clog.info(C.STATE, `State Snapshot (before boundary matching)`, {
       sessionId: state.sessionId,
       isSubAgent: state.isSubAgent,
@@ -406,20 +409,6 @@ export function createCompressTool(ctx: ToolContext): ReturnType<typeof tool> {
       compressedToolIds: compressedToolIds.length,
     })

-    // Build token weight map for all messages (for proportional bar graph)
-    const weights = new Map<string, number>()
-    let contentWeight = 0
-    for (const msg of messages) {
-      const w = countAllMessageTokens(msg)
-      weights.set(msg.info.id, w)
-      contentWeight += w
-    }
-
-    // System prompt = API total minus message content
-    const systemWeight = Math.max(0, totalSessionTokens - contentWeight)
-    const sessionIds = ["__system__", ...messages.map((m) => m.info.id)]
-    weights.set("__system__", systemWeight)
-
     await sendCompressNotification(
       client,
       logger,
@@ -431,11 +420,7 @@ export function createCompressTool(ctx: ToolContext): ReturnType<typeof tool> {
       topic,
       summary,
       summaryTokens,
-      totalSessionTokens,
-      estimatedCompressedTokens,
-      sessionIds,
-      weights,
-      messages.length,
+      messages,
       currentParams,
     )
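The weight map and "__system__" bucket removed above are not gone: they are now derived on the notification side from per-message token buckets (see lib/ui/notification.ts below). A rough sketch of how buildCompressionGraphData might assemble that data, assuming it reuses countAllMessageTokens, the cached state.systemPromptTokens, and the pruned-message bookkeeping in state.prune; only recentCompressedTokens and totalSessionTokens are confirmed by the diff, every other name here is illustrative:

// Hypothetical sketch; only recentCompressedTokens and totalSessionTokens are
// confirmed by the diff, the other names are invented for illustration.
import { countAllMessageTokens } from "../strategies/utils"
import type { SessionState, WithParts } from "../state"

export interface CompressionBucket {
  id: string
  tokens: number
  compressed: boolean          // pruned in an earlier pass
  recentlyCompressed: boolean  // pruned by the compression that just ran
}

export interface CompressionGraphData {
  buckets: CompressionBucket[]
  totalSessionTokens: number
  recentCompressedTokens: number
}

export function buildCompressionGraphData(
  state: SessionState,
  messages: WithParts[],
  newMessageIds: Set<string>,
  newToolIds: Set<string>, // could attribute pruned tool outputs too; unused in this sketch
): CompressionGraphData {
  const buckets: CompressionBucket[] = []

  // System prompt bucket, taken from the count cached by cacheSystemPromptTokens.
  buckets.push({
    id: "__system__",
    tokens: state.systemPromptTokens ?? 0,
    compressed: false,
    recentlyCompressed: false,
  })

  // One bucket per message; assumes state.prune.messages lists already-pruned ids.
  for (const msg of messages) {
    const id = msg.info.id
    buckets.push({
      id,
      tokens: countAllMessageTokens(msg),
      compressed: state.prune.messages.includes(id),
      recentlyCompressed: newMessageIds.has(id),
    })
  }

  const totalSessionTokens = buckets.reduce((sum, b) => sum + b.tokens, 0)
  const recentCompressedTokens = buckets
    .filter((b) => b.recentlyCompressed)
    .reduce((sum, b) => sum + b.tokens, 0)

  return { buckets, totalSessionTokens, recentCompressedTokens }
}

Deriving the graph from these buckets means the caller no longer has to pre-compute weights, ids, and totals before calling the notification, which matches the commit title.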

lib/ui/notification.ts

Lines changed: 18 additions & 23 deletions
@@ -1,12 +1,13 @@
 import type { Logger } from "../logger"
 import type { SessionState } from "../state"
 import {
+  buildCompressionGraphData,
+  formatCompressionGraph,
   formatPrunedItemsList,
   formatStatsHeader,
   formatTokenCount,
-  formatSessionMap,
 } from "./utils"
-import { ToolParameterEntry } from "../state"
+import { ToolParameterEntry, WithParts } from "../state"
 import { PluginConfig } from "../config"
 import { clog, C } from "../compress-logger"

@@ -139,11 +140,7 @@
   topic: string,
   summary: string,
   summaryTokens: number,
-  totalSessionTokens: number,
-  compressedTokens: number,
-  sessionMessageIds: string[],
-  weights: Map<string, number>,
-  totalMessages: number,
+  messages: WithParts[],
   params: any,
 ): Promise<boolean> {
   if (config.pruneNotification === "off") {
@@ -158,26 +155,24 @@
   } else {
     message = formatStatsHeader(state.stats.totalPruneTokens, state.stats.pruneTokenCounter)

-    const pruneTokenCounterStr = `~${formatTokenCount(compressedTokens)}`
+    const newIds = new Set(messageIds)
+    const newToolIds = new Set(toolIds)
+    const graphData = buildCompressionGraphData(state, messages, newIds, newToolIds)
+    const progressBar = formatCompressionGraph(graphData, 50)
+    const pruneTokenCounterStr = `~${formatTokenCount(graphData.recentCompressedTokens)}`
+    const reduction =
+      graphData.totalSessionTokens > 0
+        ? Math.round(
+            (graphData.recentCompressedTokens / graphData.totalSessionTokens) * 100,
+          )
+        : 0

-    clog.info(C.COMPRESS, `sendCompressNotification inputs`, {
-      totalSessionTokens,
-      compressedTokens,
+    clog.info(C.COMPRESS, `sendCompressNotification graph`, {
       summaryTokens,
-      ratio:
-        totalSessionTokens > 0 ? (compressedTokens / totalSessionTokens).toFixed(4) : "N/A",
+      reductionPercent: reduction,
+      ...graphData,
     })

-    const newIds = new Set(messageIds)
-    const progressBar = formatSessionMap(
-      sessionMessageIds,
-      state.prune.messages,
-      newIds,
-      weights,
-      50,
-    )
-    const reduction =
-      totalSessionTokens > 0 ? Math.round((compressedTokens / totalSessionTokens) * 100) : 0
     message += `\n\n${progressBar}`
     message += `\n▣ Compressing (${pruneTokenCounterStr} removed, ${reduction}% reduction)`
     message += `\n→ Topic: ${topic}`
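formatCompressionGraph(graphData, 50) takes over from formatSessionMap and renders the buckets into a fixed-width bar. One way it could work, assuming the CompressionGraphData shape sketched above; the glyphs and rounding are illustrative, not taken from the plugin:

// Hypothetical sketch, paired with the CompressionGraphData sketch above.
export function formatCompressionGraph(data: CompressionGraphData, width: number): string {
  if (data.totalSessionTokens <= 0) return "░".repeat(width)

  let bar = ""
  for (const bucket of data.buckets) {
    // Each bucket gets cells proportional to its share of the session's tokens,
    // with at least one cell so small non-empty messages stay visible.
    const cells = Math.round((bucket.tokens / data.totalSessionTokens) * width)
    const glyph = bucket.recentlyCompressed ? "▓" : bucket.compressed ? "▒" : "█"
    bar += glyph.repeat(Math.max(bucket.tokens > 0 ? 1 : 0, cells))
  }

  // Per-bucket rounding can drift, so clamp or pad to the requested width.
  return bar.length > width ? bar.slice(0, width) : bar.padEnd(width, "░")
}

The 50 passed in lib/ui/notification.ts above would be the width argument.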
