From 569e8229249963fe5f4729bd2583bda109d12114 Mon Sep 17 00:00:00 2001
From: Valerie Gardner
Date: Fri, 6 Mar 2026 03:50:27 -0800
Subject: [PATCH] fix: memory leak - unbounded virtual memory growth during active usage

Addresses multiple reports of OpenCode processes growing to 100+ GB virtual memory, triggering OOM kills and system crashes (#13230, #11399, #7046, #9140).

Root cause: Bun/JSC reserves virtual memory regions that accumulate without bound during active LLM streaming and tool execution. Combined with several code-level leaks, the process grows at ~2.3 GB/min of virtual address space until the kernel OOM killer intervenes.

Fixes:

1. plugin/index.ts: Bus.subscribeAll() was called without storing the unsubscribe function. Each call to init() added a permanent wildcard subscriber, causing O(n) callback fan-out per bus event.

2. session/prompt.ts: The full conversation history (msgs array) was kept alive during processor.process() after model message conversion. Now released immediately after use. Added Bun.gc(true) after each processing turn and at loop exit to force JSC heap compaction.

3. session/processor.ts: Hoisted currentText/reasoningMap outside try block for proper cleanup on both success and error paths. Clear toolcalls record after processing. Added Bun.gc(false) between steps to curb growth during multi-tool-call sequences. Release stream references before retry.

4. lsp/client.ts: diagnostics Map and files object grew with every unique file path and were never cleared. Now cleared on shutdown().

5. lsp/index.ts: clients array, broken Set, and spawning Map were left populated after teardown, preventing GC. Now cleared.
--- packages/opencode/src/lsp/client.ts | 2 ++ packages/opencode/src/lsp/index.ts | 3 +++ packages/opencode/src/plugin/index.ts | 5 ++++- packages/opencode/src/session/processor.ts | 15 +++++++++++++-- packages/opencode/src/session/prompt.ts | 10 ++++++++++ 5 files changed, 32 insertions(+), 3 deletions(-) diff --git a/packages/opencode/src/lsp/client.ts b/packages/opencode/src/lsp/client.ts index 084ccf831ee..c5af356cb9f 100644 --- a/packages/opencode/src/lsp/client.ts +++ b/packages/opencode/src/lsp/client.ts @@ -237,6 +237,8 @@ export namespace LSPClient { }, async shutdown() { l.info("shutting down") + diagnostics.clear() + for (const key of Object.keys(files)) delete files[key] connection.end() connection.dispose() input.server.process.kill() diff --git a/packages/opencode/src/lsp/index.ts b/packages/opencode/src/lsp/index.ts index 9d7d30632ab..3a2af3b6287 100644 --- a/packages/opencode/src/lsp/index.ts +++ b/packages/opencode/src/lsp/index.ts @@ -140,6 +140,9 @@ export namespace LSP { }, async (state) => { await Promise.all(state.clients.map((client) => client.shutdown())) + state.clients.length = 0 + state.broken.clear() + state.spawning.clear() }, ) diff --git a/packages/opencode/src/plugin/index.ts b/packages/opencode/src/plugin/index.ts index e65d21bfd60..e69afa7166b 100644 --- a/packages/opencode/src/plugin/index.ts +++ b/packages/opencode/src/plugin/index.ts @@ -124,6 +124,8 @@ export namespace Plugin { return state().then((x) => x.hooks) } + let unsub: (() => void) | undefined + export async function init() { const hooks = await state().then((x) => x.hooks) const config = await Config.get() @@ -131,7 +133,8 @@ export namespace Plugin { // @ts-expect-error this is because we haven't moved plugin to sdk v2 await hook.config?.(config) } - Bus.subscribeAll(async (input) => { + unsub?.() + unsub = Bus.subscribeAll(async (input) => { const hooks = await state().then((x) => x.hooks) for (const hook of hooks) { hook["event"]?.({ diff --git 
a/packages/opencode/src/session/processor.ts b/packages/opencode/src/session/processor.ts index 67edc0ecfe3..9afc07ae697 100644 --- a/packages/opencode/src/session/processor.ts +++ b/packages/opencode/src/session/processor.ts @@ -47,9 +47,9 @@ export namespace SessionProcessor { needsCompaction = false const shouldBreak = (await Config.get()).experimental?.continue_loop_on_deny !== true while (true) { + let currentText: MessageV2.TextPart | undefined + let reasoningMap: Record<string, MessageV2.ReasoningPart> = {} try { - let currentText: MessageV2.TextPart | undefined - let reasoningMap: Record<string, MessageV2.ReasoningPart> = {} const stream = await LLM.stream(streamInput) for await (const value of stream.fullStream) { @@ -285,6 +285,9 @@ ) { needsCompaction = true } + // Incremental GC between steps to curb virtual memory growth + // during multi-tool-call sequences. + Bun.gc(false) break case "text-start": @@ -374,6 +377,9 @@ next: Date.now() + delay, }) await SessionRetry.sleep(delay, input.abort).catch(() => {}) + // Release stream references before retry + currentText = undefined + for (const key of Object.keys(reasoningMap)) delete reasoningMap[key] continue } input.assistantMessage.error = error @@ -384,6 +390,9 @@ SessionStatus.set(input.sessionID, { type: "idle" }) } } + // Release stream-scoped references + currentText = undefined + for (const key of Object.keys(reasoningMap)) delete reasoningMap[key] if (snapshot) { const patch = await Snapshot.patch(snapshot) if (patch.files.length) { @@ -417,6 +426,8 @@ } input.assistantMessage.time.completed = Date.now() await Session.updateMessage(input.assistantMessage) + // Release tool call references to allow GC of part data + for (const key of Object.keys(toolcalls)) delete toolcalls[key] if (needsCompaction) return "compact" if (blocked) return "stop" if (input.assistantMessage.error) return "stop" diff --git 
a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 4f77920cc98..be3cda8f1d6 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -677,6 +677,12 @@ export namespace SessionPrompt { toolChoice: format.type === "json_schema" ? "required" : undefined, }) + // Release conversation data and hint GC to reclaim memory. + // Bun/JSC reserves large virtual memory regions that accumulate + // without explicit collection, eventually triggering the OOM killer. + msgs = [] as any + Bun.gc(true) + // If structured output was captured, save it and exit immediately // This takes priority because the StructuredOutput tool was called successfully if (structuredOutput !== undefined) { @@ -714,6 +720,10 @@ export namespace SessionPrompt { continue } SessionCompaction.prune({ sessionID }) + + // Final GC pass to reclaim memory from the entire processing loop + Bun.gc(true) + for await (const item of MessageV2.stream(sessionID)) { if (item.info.role === "user") continue const queued = state()[sessionID]?.callbacks ?? []