Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,9 @@ checkpoint, and status-only commits are intentionally omitted.
- Fixed live worker scheduling to filter GitHub Actions runs through supported
`workflowName` JSON fields instead of silently falling back to zero active
workers when `gh run list --workflow` is unavailable.
- Reduced repair live-capacity polling from one GitHub Actions API request per
active status to a single recent-runs request filtered locally, and avoided an
immediate duplicate capacity probe in the dispatch loop.
- Retried Codex edit workers after TPM/rate-limit exits and collapsed JSONL failure transcripts into concise repair status reasons.
- Added deterministic merged closing-PR provenance to issue close reports and
public close comments when GitHub exposes a high-confidence closing PR.
Expand Down
6 changes: 2 additions & 4 deletions src/repair/dispatch-jobs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ import {
activeRepairWorkflowRunForJob,
assertLiveWorkerCapacity,
currentProjectRepo,
liveWorkerCapacity,
parseArgs,
parseJob,
readMaxLiveWorkers,
Expand Down Expand Up @@ -87,10 +86,9 @@ while (!failed && index < jobs.length) {
requested: 1,
maxLiveWorkers,
});
const refreshed = liveWorkerCapacity({ repo, workflow, requested: 1, maxLiveWorkers });
batchSize = Math.min(batchSize, Math.max(1, refreshed.available || capacity.available || 1));
batchSize = Math.min(batchSize, Math.max(1, capacity.available || 1));
console.log(
`live worker capacity: ${refreshed.active}/${refreshed.max_live_workers} active; dispatching next ${batchSize} run(s)`,
`live worker capacity: ${capacity.active}/${capacity.max_live_workers} active; dispatching next ${batchSize} run(s)`,
);
}

Expand Down
44 changes: 27 additions & 17 deletions src/repair/live-worker-capacity.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ export const DEFAULT_REPAIR_RUN_NAME_PREFIX = "repair cluster ";
const DEFAULT_CAPACITY_POLL_MS = 30_000;
const DEFAULT_CAPACITY_TIMEOUT_MS = 30 * 60 * 1000;
const ACTIVE_WORKFLOW_STATUSES = ["queued", "in_progress", "waiting", "requested", "pending"];
const ACTIVE_WORKFLOW_STATUS_SET = new Set(ACTIVE_WORKFLOW_STATUSES);

export function readMaxLiveWorkers(args: LooseRecord = {}) {
return readMaxLiveWorkerLimit(
Expand Down Expand Up @@ -110,24 +111,16 @@ export function listActiveWorkflowRuns({
workflow = REPAIR_CLUSTER_WORKFLOW,
runNamePrefix = "",
excludeRunNamePrefix = "",
fetchWorkflowRuns = fetchRecentWorkflowRuns,
}: LooseRecord = {}) {
const runs: LooseRecord[] = [];
for (const status of ACTIVE_WORKFLOW_STATUSES) {
const workflowRuns = ghJson([
"api",
"--method",
"GET",
`repos/${repo}/actions/workflows/${encodeURIComponent(workflow)}/runs`,
"-f",
`status=${status}`,
"-f",
"per_page=100",
"--jq",
".workflow_runs",
]);
if (Array.isArray(workflowRuns))
runs.push(...workflowRuns.map((run: JsonValue) => normalizeWorkflowRun(run, status)));
}
const fetchRuns =
typeof fetchWorkflowRuns === "function" ? fetchWorkflowRuns : fetchRecentWorkflowRuns;
const workflowRuns = fetchRuns({ repo, workflow });
const runs = Array.isArray(workflowRuns)
? workflowRuns
.filter(isActiveWorkflowRun)
.map((run: JsonValue) => normalizeWorkflowRun(run, String(run.status ?? "")))
: [];
return [
...new Map(runs.map((run: JsonValue) => [String(run.databaseId ?? run.id), run])).values(),
]
Expand All @@ -138,6 +131,19 @@ export function listActiveWorkflowRuns({
);
}

// Fetch the 100 most recent runs of a workflow in one GitHub API request.
// Callers filter by status locally, replacing the previous one-request-per-
// active-status polling pattern. Returns the raw `.workflow_runs` payload
// (an array when the request succeeds).
function fetchRecentWorkflowRuns({ repo, workflow }: LooseRecord) {
  const runsEndpoint = `repos/${repo}/actions/workflows/${encodeURIComponent(workflow)}/runs`;
  const ghArgs = [
    "api",
    "--method",
    "GET",
    runsEndpoint,
    "-f",
    "per_page=100",
    "--jq",
    ".workflow_runs",
  ];
  return ghJson(ghArgs);
}

export function repairRunNamePrefixForJob(
jobPath: JsonValue,
automergeRunNamePrefix: JsonValue = DEFAULT_AUTOMERGE_REPAIR_RUN_NAME_PREFIX,
Expand Down Expand Up @@ -229,6 +235,10 @@ export function normalizeWorkflowRun(run: LooseRecord, fallbackStatus: string) {
};
}

// True when a workflow run's status marks it as still consuming live-worker
// capacity (queued/in_progress/waiting/requested/pending). A missing status
// coerces to "" and is treated as inactive.
function isActiveWorkflowRun(run: LooseRecord) {
  const status = String(run.status ?? "");
  return ACTIVE_WORKFLOW_STATUS_SET.has(status);
}

function joinRepairRunNamePrefix(prefix: JsonValue, jobPath: string) {
const text = String(prefix ?? "");
if (!text || !jobPath) return `${text}${jobPath}`;
Expand Down
52 changes: 52 additions & 0 deletions test/repair/live-worker-capacity.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import test from "node:test";

import {
MAX_LIVE_WORKERS,
listActiveWorkflowRuns,
normalizeWorkflowRun,
readMaxLiveWorkers,
repairRunNameForJob,
Expand Down Expand Up @@ -61,3 +62,54 @@ test("workflow run normalization prefers the human Actions URL", () => {
);
assert.equal(run.url, "https://github.com/openclaw/clawsweeper/actions/runs/123");
});

test("active workflow runs are filtered from one recent-runs fetch", () => {
  // Record every fetch invocation; the assertion below pins that exactly one
  // API-shaped call is made regardless of how many statuses are "active".
  // Annotated explicitly: a bare `const calls = []` is an implicit any[] and
  // fails to compile under `noImplicitAny`/strict.
  const calls: Array<Record<string, unknown>> = [];
  const runs = listActiveWorkflowRuns({
    repo: "openclaw/clawsweeper",
    workflow: "repair-cluster.yml",
    runNamePrefix: "repair cluster ",
    excludeRunNamePrefix: "repair cluster skip",
    fetchWorkflowRuns: ({ repo, workflow }: Record<string, unknown>) => {
      calls.push({ repo, workflow });
      return [
        // Dropped: terminal status, despite a matching name prefix.
        {
          id: 1,
          status: "completed",
          display_title: "repair cluster completed",
          created_at: "2026-05-05T00:04:00.000Z",
        },
        // Kept: active status + matching prefix (older of the two kept runs).
        {
          id: 2,
          status: "queued",
          display_title: "repair cluster older.md",
          created_at: "2026-05-05T00:01:00.000Z",
        },
        // Kept: active status + matching prefix (newer, so sorted first).
        {
          id: 3,
          status: "in_progress",
          display_title: "repair cluster newer.md",
          created_at: "2026-05-05T00:03:00.000Z",
        },
        // Dropped: matches the exclude prefix.
        {
          id: 4,
          status: "waiting",
          display_title: "repair cluster skip this.md",
          created_at: "2026-05-05T00:05:00.000Z",
        },
        // Dropped: active status but the title lacks the required prefix.
        {
          id: 5,
          status: "requested",
          display_title: "automerge repair jobs/openclaw/inbox/pr.md",
          created_at: "2026-05-05T00:02:00.000Z",
        },
      ];
    },
  });

  // Exactly one fetch — no per-status polling.
  assert.deepEqual(calls, [{ repo: "openclaw/clawsweeper", workflow: "repair-cluster.yml" }]);
  // Only the active, prefix-matching runs survive, newest first.
  assert.deepEqual(
    runs.map((run: Record<string, unknown>) => run.databaseId),
    [3, 2],
  );
});
Loading