diff --git a/packages/opencode/src/lsp/client.ts b/packages/opencode/src/lsp/client.ts index 084ccf831ee..c5af356cb9f 100644 --- a/packages/opencode/src/lsp/client.ts +++ b/packages/opencode/src/lsp/client.ts @@ -237,6 +237,8 @@ export namespace LSPClient { }, async shutdown() { l.info("shutting down") + diagnostics.clear() + for (const key of Object.keys(files)) delete files[key] connection.end() connection.dispose() input.server.process.kill() diff --git a/packages/opencode/src/lsp/index.ts b/packages/opencode/src/lsp/index.ts index 9d7d30632ab..3a2af3b6287 100644 --- a/packages/opencode/src/lsp/index.ts +++ b/packages/opencode/src/lsp/index.ts @@ -140,6 +140,9 @@ export namespace LSP { }, async (state) => { await Promise.all(state.clients.map((client) => client.shutdown())) + state.clients.length = 0 + state.broken.clear() + state.spawning.clear() }, ) diff --git a/packages/opencode/src/plugin/index.ts b/packages/opencode/src/plugin/index.ts index e65d21bfd60..e69afa7166b 100644 --- a/packages/opencode/src/plugin/index.ts +++ b/packages/opencode/src/plugin/index.ts @@ -124,6 +124,8 @@ export namespace Plugin { return state().then((x) => x.hooks) } + let unsub: (() => void) | undefined + export async function init() { const hooks = await state().then((x) => x.hooks) const config = await Config.get() @@ -131,7 +133,8 @@ export namespace Plugin { // @ts-expect-error this is because we haven't moved plugin to sdk v2 await hook.config?.(config) } - Bus.subscribeAll(async (input) => { + unsub?.() + unsub = Bus.subscribeAll(async (input) => { const hooks = await state().then((x) => x.hooks) for (const hook of hooks) { hook["event"]?.({ diff --git a/packages/opencode/src/session/processor.ts b/packages/opencode/src/session/processor.ts index 67edc0ecfe3..9afc07ae697 100644 --- a/packages/opencode/src/session/processor.ts +++ b/packages/opencode/src/session/processor.ts @@ -47,9 +47,9 @@ export namespace SessionProcessor { needsCompaction = false const shouldBreak = 
(await Config.get()).experimental?.continue_loop_on_deny !== true while (true) { + let currentText: MessageV2.TextPart | undefined + let reasoningMap: Record<string, MessageV2.ReasoningPart> = {} try { - let currentText: MessageV2.TextPart | undefined - let reasoningMap: Record<string, MessageV2.ReasoningPart> = {} const stream = await LLM.stream(streamInput) for await (const value of stream.fullStream) { @@ -285,6 +285,9 @@ export namespace SessionProcessor { ) { needsCompaction = true } + // Incremental GC between steps to curb virtual memory growth + // during multi-tool-call sequences. + Bun.gc(false) break case "text-start": @@ -374,6 +377,9 @@ export namespace SessionProcessor { next: Date.now() + delay, }) await SessionRetry.sleep(delay, input.abort).catch(() => {}) + // Release stream references before retry + currentText = undefined + for (const key of Object.keys(reasoningMap)) delete reasoningMap[key] continue } input.assistantMessage.error = error @@ -384,6 +390,9 @@ export namespace SessionProcessor { SessionStatus.set(input.sessionID, { type: "idle" }) } } + // Release stream-scoped references + currentText = undefined + for (const key of Object.keys(reasoningMap)) delete reasoningMap[key] if (snapshot) { const patch = await Snapshot.patch(snapshot) if (patch.files.length) { @@ -417,6 +426,8 @@ export namespace SessionProcessor { } input.assistantMessage.time.completed = Date.now() await Session.updateMessage(input.assistantMessage) + // Release tool call references to allow GC of part data + for (const key of Object.keys(toolcalls)) delete toolcalls[key] if (needsCompaction) return "compact" if (blocked) return "stop" if (input.assistantMessage.error) return "stop" diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 4f77920cc98..be3cda8f1d6 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -677,6 +677,12 @@ export namespace SessionPrompt { toolChoice: format.type === "json_schema" ? 
"required" : undefined, }) + // Release conversation data and hint GC to reclaim memory. + // Bun/JSC reserves large virtual memory regions that accumulate + // without explicit collection, eventually triggering the OOM killer. + msgs = [] as any + Bun.gc(true) + // If structured output was captured, save it and exit immediately // This takes priority because the StructuredOutput tool was called successfully if (structuredOutput !== undefined) { @@ -714,6 +720,10 @@ export namespace SessionPrompt { continue } SessionCompaction.prune({ sessionID }) + + // Final GC pass to reclaim memory from the entire processing loop + Bun.gc(true) + for await (const item of MessageV2.stream(sessionID)) { if (item.info.role === "user") continue const queued = state()[sessionID]?.callbacks ?? []