From 6ce481e95b7c5da814efcb379b8578552514e43e Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 10:09:14 -0400 Subject: [PATCH 001/120] move useful scripts to script folder --- packages/opencode/{ => script}/time.ts | 2 ++ packages/opencode/{ => script}/trace-imports.ts | 0 2 files changed, 2 insertions(+) rename packages/opencode/{ => script}/time.ts (88%) rename packages/opencode/{ => script}/trace-imports.ts (100%) diff --git a/packages/opencode/time.ts b/packages/opencode/script/time.ts similarity index 88% rename from packages/opencode/time.ts rename to packages/opencode/script/time.ts index c00936db26..0db795ed0a 100755 --- a/packages/opencode/time.ts +++ b/packages/opencode/script/time.ts @@ -1,3 +1,5 @@ +#!/usr/bin/env bun + import path from "path" const toDynamicallyImport = path.join(process.cwd(), process.argv[2]) await import(toDynamicallyImport) diff --git a/packages/opencode/trace-imports.ts b/packages/opencode/script/trace-imports.ts similarity index 100% rename from packages/opencode/trace-imports.ts rename to packages/opencode/script/trace-imports.ts From 8ab17f5ce0bb3e74a49aa661fc1beef8314b8e04 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 11:18:44 -0400 Subject: [PATCH 002/120] tui: fix path comparison in theme installer to handle different path formats --- packages/opencode/src/cli/cmd/tui/plugin/runtime.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index af37ffbd76..cdb778d146 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -194,7 +194,8 @@ function createThemeInstaller( } return } - if (prev?.dest === dest && prev.mtime === mtime && prev.size === size) return + if (path.normalize(prev?.dest ?? 
"") === path.normalize(dest) && prev.mtime === mtime && prev.size === size) + return } const text = await Filesystem.readText(src).catch((error) => { From 2b1696f1d174fde75f57abd9f498b53bf00e0d68 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 11:28:19 -0400 Subject: [PATCH 003/120] Revert "tui: fix path comparison in theme installer to handle different path formats" This reverts commit 8ab17f5ce0bb3e74a49aa661fc1beef8314b8e04. --- packages/opencode/src/cli/cmd/tui/plugin/runtime.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index cdb778d146..af37ffbd76 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -194,8 +194,7 @@ function createThemeInstaller( } return } - if (path.normalize(prev?.dest ?? "") === path.normalize(dest) && prev.mtime === mtime && prev.size === size) - return + if (prev?.dest === dest && prev.mtime === mtime && prev.size === size) return } const text = await Filesystem.readText(src).catch((error) => { From a200f6fb8b5e02aaf50a4f3e6c1a377f66e1c582 Mon Sep 17 00:00:00 2001 From: Frank Date: Thu, 16 Apr 2026 11:32:54 -0400 Subject: [PATCH 004/120] zen: opus 4.7 --- packages/web/src/content/docs/ar/zen.mdx | 2 ++ packages/web/src/content/docs/bs/zen.mdx | 2 ++ packages/web/src/content/docs/da/zen.mdx | 2 ++ packages/web/src/content/docs/de/zen.mdx | 2 ++ packages/web/src/content/docs/es/zen.mdx | 2 ++ packages/web/src/content/docs/fr/zen.mdx | 2 ++ packages/web/src/content/docs/it/zen.mdx | 2 ++ packages/web/src/content/docs/ja/zen.mdx | 2 ++ packages/web/src/content/docs/ko/zen.mdx | 2 ++ packages/web/src/content/docs/nb/zen.mdx | 2 ++ packages/web/src/content/docs/pl/zen.mdx | 2 ++ packages/web/src/content/docs/pt-br/zen.mdx | 2 ++ packages/web/src/content/docs/ru/zen.mdx | 2 ++ packages/web/src/content/docs/th/zen.mdx | 2 ++ 
packages/web/src/content/docs/tr/zen.mdx | 2 ++ packages/web/src/content/docs/zen.mdx | 2 ++ packages/web/src/content/docs/zh-cn/zen.mdx | 2 ++ packages/web/src/content/docs/zh-tw/zen.mdx | 2 ++ 18 files changed, 36 insertions(+) diff --git a/packages/web/src/content/docs/ar/zen.mdx b/packages/web/src/content/docs/ar/zen.mdx index 7a3931c85c..5d056c9b50 100644 --- a/packages/web/src/content/docs/ar/zen.mdx +++ b/packages/web/src/content/docs/ar/zen.mdx @@ -74,6 +74,7 @@ OpenCode Zen هي بوابة AI تتيح لك الوصول إلى هذه الن | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -128,6 +129,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/bs/zen.mdx b/packages/web/src/content/docs/bs/zen.mdx index 76a25e5f65..5f46290649 100644 --- a/packages/web/src/content/docs/bs/zen.mdx +++ b/packages/web/src/content/docs/bs/zen.mdx @@ -79,6 +79,7 @@ Našim modelima možete pristupiti i preko sljedećih API endpointa. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Podržavamo pay-as-you-go model. Ispod su cijene **po 1M tokena**. | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/da/zen.mdx b/packages/web/src/content/docs/da/zen.mdx index 146fa02803..456b98ef1a 100644 --- a/packages/web/src/content/docs/da/zen.mdx +++ b/packages/web/src/content/docs/da/zen.mdx @@ -79,6 +79,7 @@ Du kan også få adgang til vores modeller gennem følgende API-endpoints. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Vi understøtter en pay-as-you-go-model. Nedenfor er priserne **pr. 1M tokens**. | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/de/zen.mdx b/packages/web/src/content/docs/de/zen.mdx index bc9438672b..238047500b 100644 --- a/packages/web/src/content/docs/de/zen.mdx +++ b/packages/web/src/content/docs/de/zen.mdx @@ -70,6 +70,7 @@ Du kannst auch über die folgenden API-Endpunkte auf unsere Modelle zugreifen. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ Wir unterstützen ein Pay-as-you-go-Modell. Unten findest du die Preise **pro 1M | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/es/zen.mdx b/packages/web/src/content/docs/es/zen.mdx index 6fc0fb13e1..ed8f8b334f 100644 --- a/packages/web/src/content/docs/es/zen.mdx +++ b/packages/web/src/content/docs/es/zen.mdx @@ -79,6 +79,7 @@ También puedes acceder a nuestros modelos a través de los siguientes endpoints | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | 
claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Admitimos un modelo de pago por uso. A continuación se muestran los precios **p | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/fr/zen.mdx b/packages/web/src/content/docs/fr/zen.mdx index a71328a72e..e9fb209d21 100644 --- a/packages/web/src/content/docs/fr/zen.mdx +++ b/packages/web/src/content/docs/fr/zen.mdx @@ -70,6 +70,7 @@ Vous pouvez également accéder à nos modèles via les points de terminaison AP | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ Nous prenons en charge un modèle de paiement à l'utilisation. 
Vous trouverez c | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/it/zen.mdx b/packages/web/src/content/docs/it/zen.mdx index cdc2c9a45b..338fc289c8 100644 --- a/packages/web/src/content/docs/it/zen.mdx +++ b/packages/web/src/content/docs/it/zen.mdx @@ -79,6 +79,7 @@ Puoi anche accedere ai nostri modelli tramite i seguenti endpoint API. | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Supportiamo un modello pay-as-you-go. Qui sotto trovi i prezzi **per 1M token**. 
| GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/ja/zen.mdx b/packages/web/src/content/docs/ja/zen.mdx index b8da1308c1..970b883940 100644 --- a/packages/web/src/content/docs/ja/zen.mdx +++ b/packages/web/src/content/docs/ja/zen.mdx @@ -70,6 +70,7 @@ OpenCode Zen は、OpenCode のほかのプロバイダーと同じように動 | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/ko/zen.mdx b/packages/web/src/content/docs/ko/zen.mdx index af74b71afd..10abc4ba60 100644 --- a/packages/web/src/content/docs/ko/zen.mdx +++ b/packages/web/src/content/docs/ko/zen.mdx @@ -70,6 +70,7 @@ OpenCode Zen은 OpenCode의 다른 provider와 똑같이 작동합니다. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/nb/zen.mdx b/packages/web/src/content/docs/nb/zen.mdx index a216be1064..605dadee59 100644 --- a/packages/web/src/content/docs/nb/zen.mdx +++ b/packages/web/src/content/docs/nb/zen.mdx @@ -79,6 +79,7 @@ Du kan også få tilgang til modellene våre gjennom følgende API-endepunkter. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Vi støtter en pay-as-you-go-modell. Nedenfor er prisene **per 1M tokens**. | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/pl/zen.mdx b/packages/web/src/content/docs/pl/zen.mdx index ffbdb66fa9..aaefc179b3 100644 --- a/packages/web/src/content/docs/pl/zen.mdx +++ b/packages/web/src/content/docs/pl/zen.mdx @@ -79,6 +79,7 @@ Możesz też uzyskać dostęp do naszych modeli przez poniższe endpointy API. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Obsługujemy model pay-as-you-go. Poniżej znajdują się ceny **za 1M tokenów* | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/pt-br/zen.mdx b/packages/web/src/content/docs/pt-br/zen.mdx index d911e441f4..a44c4de600 100644 --- a/packages/web/src/content/docs/pt-br/zen.mdx +++ b/packages/web/src/content/docs/pt-br/zen.mdx @@ -70,6 +70,7 @@ Você também pode acessar nossos modelos pelos seguintes endpoints de API. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ Oferecemos um modelo pay-as-you-go. Abaixo estão os preços **por 1M tokens**. | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/ru/zen.mdx b/packages/web/src/content/docs/ru/zen.mdx index 8a92bf502e..555ca68524 100644 --- a/packages/web/src/content/docs/ru/zen.mdx +++ b/packages/web/src/content/docs/ru/zen.mdx @@ -79,6 +79,7 @@ OpenCode Zen работает как любой другой провайдер | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | 
`https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/th/zen.mdx b/packages/web/src/content/docs/th/zen.mdx index 2c82c1e07d..0e5ddcbfc7 100644 --- a/packages/web/src/content/docs/th/zen.mdx +++ b/packages/web/src/content/docs/th/zen.mdx @@ -72,6 +72,7 @@ OpenCode Zen ทำงานเหมือน provider อื่น ๆ ใน | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -126,6 +127,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/tr/zen.mdx b/packages/web/src/content/docs/tr/zen.mdx index 30aa2bb9d7..16f0fb0dd4 100644 --- a/packages/web/src/content/docs/tr/zen.mdx +++ 
b/packages/web/src/content/docs/tr/zen.mdx @@ -70,6 +70,7 @@ Modellerimize aşağıdaki API uç noktaları aracılığıyla da erişebilirsin | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ Kullandıkça öde modelini destekliyoruz. Aşağıda **1M token başına** fiya | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/zen.mdx b/packages/web/src/content/docs/zen.mdx index cf634a9c98..f4c4f51106 100644 --- a/packages/web/src/content/docs/zen.mdx +++ b/packages/web/src/content/docs/zen.mdx @@ -79,6 +79,7 @@ You can also access our models through the following API endpoints. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -130,6 +131,7 @@ We support a pay-as-you-go model. Below are the prices **per 1M tokens**. | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3.6 Plus | $0.50 | $3.00 | $0.05 | $0.625 | | Qwen3.5 Plus | $0.20 | $1.20 | $0.02 | $0.25 | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/zh-cn/zen.mdx b/packages/web/src/content/docs/zh-cn/zen.mdx index cd281a4b0c..22cffb9019 100644 --- a/packages/web/src/content/docs/zh-cn/zen.mdx +++ b/packages/web/src/content/docs/zh-cn/zen.mdx @@ -70,6 +70,7 @@ OpenCode Zen 的工作方式与 OpenCode 中的任何其他提供商相同。 | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 
| `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/zh-tw/zen.mdx b/packages/web/src/content/docs/zh-tw/zen.mdx index bdc3c07db6..75b97c82b6 100644 --- a/packages/web/src/content/docs/zh-tw/zen.mdx +++ b/packages/web/src/content/docs/zh-tw/zen.mdx @@ -74,6 +74,7 @@ OpenCode Zen 的運作方式和 OpenCode 中的其他供應商一樣。 | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -129,6 +130,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | From cc7acd90ab2fda54f06ff687a46d7364e479dc32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Thu, 16 Apr 2026 17:43:15 +0200 Subject: [PATCH 005/120] fix(nix): add 
shared package to bun install filters (#22665) --- nix/node_modules.nix | 1 + packages/shared/package.json | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/nix/node_modules.nix b/nix/node_modules.nix index e10e85d2fe..ba97405df9 100644 --- a/nix/node_modules.nix +++ b/nix/node_modules.nix @@ -55,6 +55,7 @@ stdenvNoCC.mkDerivation { --filter './packages/opencode' \ --filter './packages/desktop' \ --filter './packages/app' \ + --filter './packages/shared' \ --frozen-lockfile \ --ignore-scripts \ --no-progress diff --git a/packages/shared/package.json b/packages/shared/package.json index ac2d8f2097..bdfca12a93 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -17,10 +17,10 @@ }, "imports": {}, "devDependencies": { + "@tsconfig/bun": "catalog:", "@types/semver": "catalog:", "@types/bun": "catalog:", - "@types/npmcli__arborist": "6.3.3", - "@tsconfig/bun": "catalog:" + "@types/npmcli__arborist": "6.3.3" }, "dependencies": { "@effect/platform-node": "catalog:", From 378c05f202b0fda6561451a93639712a11400972 Mon Sep 17 00:00:00 2001 From: Graham Campbell Date: Thu, 16 Apr 2026 16:57:36 +0100 Subject: [PATCH 006/120] feat: Add support for claude opus 4.7 xhigh adaptive reasoning effort (#22833) Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> --- packages/opencode/src/provider/transform.ts | 21 +++-- .../opencode/test/provider/transform.test.ts | 92 +++++++++++++++++++ 2 files changed, 107 insertions(+), 6 deletions(-) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index c940b31c8c..92862b0ca6 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -389,12 +389,21 @@ export function topK(model: Provider.Model) { const WIDELY_SUPPORTED_EFFORTS = ["low", "medium", "high"] const OPENAI_EFFORTS = ["none", "minimal", ...WIDELY_SUPPORTED_EFFORTS, "xhigh"] +function 
anthropicAdaptiveEfforts(apiId: string): string[] | null { + if (["opus-4-7", "opus-4.7"].some((v) => apiId.includes(v))) { + return ["low", "medium", "high", "xhigh", "max"] + } + if (["opus-4-6", "opus-4.6", "sonnet-4-6", "sonnet-4.6"].some((v) => apiId.includes(v))) { + return ["low", "medium", "high", "max"] + } + return null +} + export function variants(model: Provider.Model): Record> { if (!model.capabilities.reasoning) return {} const id = model.id.toLowerCase() - const isAnthropicAdaptive = ["opus-4-6", "opus-4.6", "sonnet-4-6", "sonnet-4.6"].some((v) => model.api.id.includes(v)) - const adaptiveEfforts = ["low", "medium", "high", "max"] + const adaptiveEfforts = anthropicAdaptiveEfforts(model.api.id) if ( id.includes("deepseek") || id.includes("minimax") || @@ -429,7 +438,7 @@ export function variants(model: Provider.Model): Record [ effort, @@ -578,7 +587,7 @@ export function variants(model: Provider.Model): Record [ effort, @@ -609,7 +618,7 @@ export function variants(model: Provider.Model): Record [ effort, @@ -716,7 +725,7 @@ export function variants(model: Provider.Model): Record [ effort, diff --git a/packages/opencode/test/provider/transform.test.ts b/packages/opencode/test/provider/transform.test.ts index 0666d0f641..d53ce38b16 100644 --- a/packages/opencode/test/provider/transform.test.ts +++ b/packages/opencode/test/provider/transform.test.ts @@ -2246,6 +2246,46 @@ describe("ProviderTransform.variants", () => { }) }) + test("anthropic opus 4.7 models return adaptive thinking options with xhigh", () => { + const model = createMockModel({ + id: "anthropic/claude-opus-4-7", + providerID: "gateway", + api: { + id: "anthropic/claude-opus-4-7", + url: "https://gateway.ai", + npm: "@ai-sdk/gateway", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh", "max"]) + expect(result.xhigh).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "xhigh", + }) + 
expect(result.max).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "max", + }) + }) + + test("anthropic opus 4.7 dot-format models return adaptive thinking options with xhigh", () => { + const model = createMockModel({ + id: "anthropic/claude-opus-4-7", + providerID: "gateway", + api: { + id: "anthropic/claude-opus-4.7", + url: "https://gateway.ai", + npm: "@ai-sdk/gateway", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh", "max"]) + }) + test("anthropic models return anthropic thinking options", () => { const model = createMockModel({ id: "anthropic/claude-sonnet-4", @@ -2654,6 +2694,32 @@ describe("ProviderTransform.variants", () => { }) }) + test("opus 4.7 returns adaptive thinking options with xhigh", () => { + const model = createMockModel({ + id: "anthropic/claude-opus-4-7", + providerID: "anthropic", + api: { + id: "claude-opus-4-7", + url: "https://api.anthropic.com", + npm: "@ai-sdk/anthropic", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh", "max"]) + expect(result.xhigh).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "xhigh", + }) + expect(result.max).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "max", + }) + }) + test("returns high and max with thinking config", () => { const model = createMockModel({ id: "anthropic/claude-4", @@ -2702,6 +2768,32 @@ describe("ProviderTransform.variants", () => { }) }) + test("anthropic opus 4.7 returns adaptive reasoning options with xhigh", () => { + const model = createMockModel({ + id: "bedrock/anthropic-claude-opus-4-7", + providerID: "bedrock", + api: { + id: "anthropic.claude-opus-4-7", + url: "https://bedrock.amazonaws.com", + npm: "@ai-sdk/amazon-bedrock", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh", "max"]) + 
expect(result.xhigh).toEqual({ + reasoningConfig: { + type: "adaptive", + maxReasoningEffort: "xhigh", + }, + }) + expect(result.max).toEqual({ + reasoningConfig: { + type: "adaptive", + maxReasoningEffort: "max", + }, + }) + }) + test("returns WIDELY_SUPPORTED_EFFORTS with reasoningConfig", () => { const model = createMockModel({ id: "bedrock/llama-4", From 8c0205a84ab225e6901eff92e6a589e8fc88b679 Mon Sep 17 00:00:00 2001 From: Nacai <111849193+B67687@users.noreply.github.com> Date: Fri, 17 Apr 2026 00:01:35 +0800 Subject: [PATCH 007/120] fix: align stale bot message with actual 60-day threshold (#22842) Co-authored-by: opencode-agent[bot] Co-authored-by: rekram1-node --- script/github/close-issues.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/script/github/close-issues.ts b/script/github/close-issues.ts index 7b38bf6758..e8f0573ebb 100755 --- a/script/github/close-issues.ts +++ b/script/github/close-issues.ts @@ -2,8 +2,7 @@ const repo = "anomalyco/opencode" const days = 60 -const msg = - "To stay organized issues are automatically closed after 90 days of no activity. If the issue is still relevant please open a new one." +const msg = `To stay organized issues are automatically closed after ${days} days of no activity. 
If the issue is still relevant please open a new one.` const token = process.env.GITHUB_TOKEN if (!token) { From 305460b25fc673f707a238f180d93e58d80f1ee9 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 12:15:44 -0400 Subject: [PATCH 008/120] fix: add a few more tests for sync and session restore (#22837) --- packages/opencode/src/server/instance/sync.ts | 1 + packages/opencode/test/sync/index.test.ts | 48 +++ .../test/workspace/workspace-restore.test.ts | 280 ++++++++++++++++++ 3 files changed, 329 insertions(+) create mode 100644 packages/opencode/test/workspace/workspace-restore.test.ts diff --git a/packages/opencode/src/server/instance/sync.ts b/packages/opencode/src/server/instance/sync.ts index 633e77f10e..ac43b638eb 100644 --- a/packages/opencode/src/server/instance/sync.ts +++ b/packages/opencode/src/server/instance/sync.ts @@ -53,6 +53,7 @@ export const SyncRoutes = lazy(() => const body = c.req.valid("json") const events = body.events const source = events[0].aggregateID + log.info("sync replay requested", { sessionID: source, events: events.length, diff --git a/packages/opencode/test/sync/index.test.ts b/packages/opencode/test/sync/index.test.ts index 2ba716cac0..36429c3d84 100644 --- a/packages/opencode/test/sync/index.test.ts +++ b/packages/opencode/test/sync/index.test.ts @@ -187,5 +187,53 @@ describe("SyncEvent", () => { ).toThrow(/Unknown event type/) }), ) + + test( + "replayAll accepts later chunks after the first batch", + withInstance(() => { + const { Created } = setup() + const id = Identifier.descending("message") + + const one = SyncEvent.replayAll([ + { + id: "evt_1", + type: SyncEvent.versionedType(Created.type, Created.version), + seq: 0, + aggregateID: id, + data: { id, name: "first" }, + }, + { + id: "evt_2", + type: SyncEvent.versionedType(Created.type, Created.version), + seq: 1, + aggregateID: id, + data: { id, name: "second" }, + }, + ]) + + const two = SyncEvent.replayAll([ + { + id: "evt_3", + type: 
SyncEvent.versionedType(Created.type, Created.version), + seq: 2, + aggregateID: id, + data: { id, name: "third" }, + }, + { + id: "evt_4", + type: SyncEvent.versionedType(Created.type, Created.version), + seq: 3, + aggregateID: id, + data: { id, name: "fourth" }, + }, + ]) + + expect(one).toBe(id) + expect(two).toBe(id) + + const rows = Database.use((db) => db.select().from(EventTable).all()) + expect(rows.map((row) => row.seq)).toEqual([0, 1, 2, 3]) + }), + ) }) }) diff --git a/packages/opencode/test/workspace/workspace-restore.test.ts b/packages/opencode/test/workspace/workspace-restore.test.ts new file mode 100644 index 0000000000..ee9ad059f8 --- /dev/null +++ b/packages/opencode/test/workspace/workspace-restore.test.ts @@ -0,0 +1,280 @@ +import { afterEach, beforeEach, describe, expect, mock, spyOn, test } from "bun:test" +import fs from "node:fs/promises" +import path from "node:path" +import { GlobalBus } from "../../src/bus/global" +import { registerAdaptor } from "../../src/control-plane/adaptors" +import type { WorkspaceAdaptor } from "../../src/control-plane/types" +import { Workspace } from "../../src/control-plane/workspace" +import { AppRuntime } from "../../src/effect/app-runtime" +import { Flag } from "../../src/flag/flag" +import { ModelID, ProviderID } from "../../src/provider/schema" +import { Instance } from "../../src/project/instance" +import { Session as SessionNs } from "../../src/session" +import { MessageV2 } from "../../src/session/message-v2" +import { MessageID, PartID, type SessionID } from "../../src/session/schema" +import { Database, asc, eq } from "../../src/storage" +import { SyncEvent } from "../../src/sync" +import { EventTable } from "../../src/sync/event.sql" +import { Log } from "../../src/util" +import { resetDatabase } from "../fixture/db" +import { tmpdir } from "../fixture/fixture" + +void Log.init({ print: false }) + +const original = Flag.OPENCODE_EXPERIMENTAL_WORKSPACES + +beforeEach(() => { + Database.close() + // 
@ts-expect-error test override + Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = true +}) + +afterEach(async () => { + mock.restore() + await Instance.disposeAll() + // @ts-expect-error test override + Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = original + await resetDatabase() +}) + +function create(input?: SessionNs.CreateInput) { + return AppRuntime.runPromise(SessionNs.Service.use((svc) => svc.create(input))) +} + +function get(id: SessionID) { + return AppRuntime.runPromise(SessionNs.Service.use((svc) => svc.get(id))) +} + +function updateMessage(msg: T) { + return AppRuntime.runPromise(SessionNs.Service.use((svc) => svc.updateMessage(msg))) +} + +function updatePart(part: T) { + return AppRuntime.runPromise(SessionNs.Service.use((svc) => svc.updatePart(part))) +} + +async function user(sessionID: SessionID, text: string) { + const msg = await updateMessage({ + id: MessageID.ascending(), + role: "user", + sessionID, + agent: "build", + model: { providerID: ProviderID.make("test"), modelID: ModelID.make("test") }, + time: { created: Date.now() }, + }) + await updatePart({ + id: PartID.ascending(), + sessionID, + messageID: msg.id, + type: "text", + text, + }) +} + +function remote(dir: string, url: string): WorkspaceAdaptor { + return { + name: "remote", + description: "remote", + configure(info) { + return { + ...info, + directory: dir, + } + }, + async create() { + await fs.mkdir(dir, { recursive: true }) + }, + async remove() {}, + target() { + return { + type: "remote" as const, + url, + } + }, + } +} + +function local(dir: string): WorkspaceAdaptor { + return { + name: "local", + description: "local", + configure(info) { + return { + ...info, + directory: dir, + } + }, + async create() { + await fs.mkdir(dir, { recursive: true }) + }, + async remove() {}, + target() { + return { + type: "local" as const, + directory: dir, + } + }, + } +} + +function eventStreamResponse() { + return new Response(new ReadableStream({ start() {} }), { + status: 200, + headers: { + 
"content-type": "text/event-stream", + }, + }) +} + +describe("Workspace.sessionRestore", () => { + test("replays session events in batches of 10 and emits progress", async () => { + await using tmp = await tmpdir({ git: true }) + const dir = path.join(tmp.path, ".restore") + const seen: any[] = [] + const posts: Array<{ + path: string + body: { directory: string; events: Array<{ seq: number; aggregateID: string }> } + }> = [] + const on = (evt: any) => seen.push(evt) + GlobalBus.on("event", on) + + const raw = globalThis.fetch + spyOn(globalThis, "fetch").mockImplementation( + Object.assign( + async (input: URL | RequestInfo, init?: BunFetchRequestInit | RequestInit) => { + const url = new URL(typeof input === "string" || input instanceof URL ? input : input.url) + if (url.pathname !== "/base/sync/replay") { + return eventStreamResponse() + } + const body = JSON.parse(String(init?.body)) + posts.push({ + path: url.pathname, + body, + }) + return Response.json({ sessionID: body.events[0].aggregateID }) + }, + { + preconnect: raw.preconnect?.bind(raw), + }, + ) as typeof globalThis.fetch, + ) + + try { + const setup = await Instance.provide({ + directory: tmp.path, + fn: async () => { + registerAdaptor(Instance.project.id, "worktree", remote(dir, "https://workspace.test/base")) + const space = await Workspace.create({ + type: "worktree", + branch: null, + extra: null, + projectID: Instance.project.id, + }) + const session = await create({}) + for (let i = 0; i < 6; i++) { + await user(session.id, `msg ${i}`) + } + const rows = Database.use((db) => + db + .select({ seq: EventTable.seq }) + .from(EventTable) + .where(eq(EventTable.aggregate_id, session.id)) + .orderBy(asc(EventTable.seq)) + .all(), + ) + const result = await Workspace.sessionRestore({ + workspaceID: space.id, + sessionID: session.id, + }) + return { space, session, rows, result } + }, + }) + + expect(setup.rows).toHaveLength(13) + expect(setup.result).toEqual({ total: 2 }) + 
expect(posts).toHaveLength(2) + expect(posts[0]?.path).toBe("/base/sync/replay") + expect(posts[1]?.path).toBe("/base/sync/replay") + expect(posts[0]?.body.directory).toBe(dir) + expect(posts[1]?.body.directory).toBe(dir) + expect(posts[0]?.body.events).toHaveLength(10) + expect(posts[1]?.body.events).toHaveLength(4) + expect(posts.flatMap((item) => item.body.events.map((event) => event.seq))).toEqual([ + ...setup.rows.map((row) => row.seq), + setup.rows.at(-1)!.seq + 1, + ]) + expect(posts[1]?.body.events.at(-1)).toMatchObject({ + aggregateID: setup.session.id, + seq: setup.rows.at(-1)!.seq + 1, + type: SyncEvent.versionedType(SessionNs.Event.Updated.type, SessionNs.Event.Updated.version), + data: { + sessionID: setup.session.id, + info: { + workspaceID: setup.space.id, + }, + }, + }) + + const restore = seen.filter( + (evt) => evt.workspace === setup.space.id && evt.payload.type === Workspace.Event.Restore.type, + ) + expect(restore.map((evt) => evt.payload.properties.step)).toEqual([0, 1, 2]) + expect(restore.map((evt) => evt.payload.properties.total)).toEqual([2, 2, 2]) + expect(restore.map((evt) => evt.payload.properties.sessionID)).toEqual([ + setup.session.id, + setup.session.id, + setup.session.id, + ]) + } finally { + GlobalBus.off("event", on) + } + }) + + test("replays locally without posting to a server", async () => { + await using tmp = await tmpdir({ git: true }) + const dir = path.join(tmp.path, ".restore-local") + const seen: any[] = [] + const on = (evt: any) => seen.push(evt) + GlobalBus.on("event", on) + + const fetch = spyOn(globalThis, "fetch") + const replayAll = spyOn(SyncEvent, "replayAll") + + try { + const setup = await Instance.provide({ + directory: tmp.path, + fn: async () => { + registerAdaptor(Instance.project.id, "local-restore", local(dir)) + const space = await Workspace.create({ + type: "local-restore", + branch: null, + extra: null, + projectID: Instance.project.id, + }) + const session = await create({}) + for (let i = 0; i < 
6; i++) { + await user(session.id, `msg ${i}`) + } + const result = await Workspace.sessionRestore({ + workspaceID: space.id, + sessionID: session.id, + }) + const updated = await get(session.id) + return { space, session, result, updated } + }, + }) + + expect(setup.result).toEqual({ total: 2 }) + expect(fetch).not.toHaveBeenCalled() + expect(replayAll).toHaveBeenCalledTimes(2) + expect(setup.updated.workspaceID).toBe(setup.space.id) + + const restore = seen.filter( + (evt) => evt.workspace === setup.space.id && evt.payload.type === Workspace.Event.Restore.type, + ) + expect(restore.map((evt) => evt.payload.properties.step)).toEqual([0, 1, 2]) + } finally { + GlobalBus.off("event", on) + } + }) +}) From 06afd332913e1ad4b067a0f1a1c906ca8376bc45 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 12:24:40 -0400 Subject: [PATCH 009/120] refactor(tui): improve workspace management (#22691) --- .../dialog-session-delete-failed.tsx | 101 +++++++++++++ .../cmd/tui/component/dialog-session-list.tsx | 97 ++++++++++++- .../tui/component/dialog-workspace-create.tsx | 136 +++++++++++++++++- .../cli/cmd/tui/component/prompt/index.tsx | 4 +- .../opencode/src/cli/cmd/tui/context/sync.tsx | 17 ++- .../src/control-plane/workspace-context.ts | 6 +- packages/opencode/src/effect/bridge.ts | 3 +- packages/opencode/src/effect/instance-ref.ts | 3 +- packages/opencode/src/session/session.ts | 3 +- .../test/cli/tui/sync-provider.test.tsx | 16 +-- 10 files changed, 349 insertions(+), 37 deletions(-) create mode 100644 packages/opencode/src/cli/cmd/tui/component/dialog-session-delete-failed.tsx diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-session-delete-failed.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-session-delete-failed.tsx new file mode 100644 index 0000000000..4a22a0c492 --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-session-delete-failed.tsx @@ -0,0 +1,101 @@ +import { TextAttributes } from "@opentui/core" +import 
{ useTheme } from "../context/theme" +import { useDialog } from "../ui/dialog" +import { createStore } from "solid-js/store" +import { For } from "solid-js" +import { useKeyboard } from "@opentui/solid" + +export function DialogSessionDeleteFailed(props: { + session: string + workspace: string + onDelete?: () => boolean | void | Promise + onRestore?: () => boolean | void | Promise + onDone?: () => void +}) { + const dialog = useDialog() + const { theme } = useTheme() + const [store, setStore] = createStore({ + active: "delete" as "delete" | "restore", + }) + + const options = [ + { + id: "delete" as const, + title: "Delete workspace", + description: "Delete the workspace and all sessions attached to it.", + run: props.onDelete, + }, + { + id: "restore" as const, + title: "Restore to new workspace", + description: "Try to restore this session into a new workspace.", + run: props.onRestore, + }, + ] + + async function confirm() { + const result = await options.find((item) => item.id === store.active)?.run?.() + if (result === false) return + props.onDone?.() + if (!props.onDone) dialog.clear() + } + + useKeyboard((evt) => { + if (evt.name === "return") { + void confirm() + } + if (evt.name === "left" || evt.name === "up") { + setStore("active", "delete") + } + if (evt.name === "right" || evt.name === "down") { + setStore("active", "restore") + } + }) + + return ( + + + + Failed to Delete Session + + dialog.clear()}> + esc + + + + {`The session "${props.session}" could not be deleted because the workspace "${props.workspace}" is not available.`} + + + Choose how you want to recover this broken workspace session. 
+ + + + {(item) => ( + { + setStore("active", item.id) + void confirm() + }} + > + + {item.title} + + + {item.description} + + + )} + + + + ) +} diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx index f58b73c9a7..75c79dcdd8 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx @@ -13,8 +13,10 @@ import { DialogSessionRename } from "./dialog-session-rename" import { Keybind } from "@/util" import { createDebouncedSignal } from "../util/signal" import { useToast } from "../ui/toast" -import { DialogWorkspaceCreate, openWorkspaceSession } from "./dialog-workspace-create" +import { DialogWorkspaceCreate, openWorkspaceSession, restoreWorkspaceSession } from "./dialog-workspace-create" import { Spinner } from "./spinner" +import { errorMessage } from "@/util/error" +import { DialogSessionDeleteFailed } from "./dialog-session-delete-failed" type WorkspaceStatus = "connected" | "connecting" | "disconnected" | "error" @@ -30,7 +32,7 @@ export function DialogSessionList() { const [toDelete, setToDelete] = createSignal() const [search, setSearch] = createDebouncedSignal("", 150) - const [searchResults] = createResource(search, async (query) => { + const [searchResults, { refetch }] = createResource(search, async (query) => { if (!query) return undefined const result = await sdk.client.session.list({ search: query, limit: 30 }) return result.data ?? [] @@ -56,6 +58,57 @@ export function DialogSessionList() { )) } + function recover(session: NonNullable[number]>) { + const workspace = project.workspace.get(session.workspaceID!) + const list = () => dialog.replace(() => ) + dialog.replace(() => ( + { + const current = currentSessionID() + const info = current ? 
sync.data.session.find((item) => item.id === current) : undefined + const result = await sdk.client.experimental.workspace.remove({ id: session.workspaceID! }) + if (result.error) { + toast.show({ + variant: "error", + title: "Failed to delete workspace", + message: errorMessage(result.error), + }) + return false + } + await project.workspace.sync() + await sync.session.refresh() + if (search()) await refetch() + if (info?.workspaceID === session.workspaceID) { + route.navigate({ type: "home" }) + } + return true + }} + onRestore={() => { + dialog.replace(() => ( + + restoreWorkspaceSession({ + dialog, + sdk, + sync, + project, + toast, + workspaceID, + sessionID: session.id, + done: list, + }) + } + /> + )) + return false + }} + /> + )) + } + const options = createMemo(() => { const today = new Date().toDateString() return sessions() @@ -145,9 +198,43 @@ export function DialogSessionList() { title: "delete", onTrigger: async (option) => { if (toDelete() === option.value) { - void sdk.client.session.delete({ - sessionID: option.value, - }) + const session = sessions().find((item) => item.id === option.value) + const status = session?.workspaceID ? 
project.workspace.status(session.workspaceID) : undefined + + try { + const result = await sdk.client.session.delete({ + sessionID: option.value, + }) + if (result.error) { + if (session?.workspaceID) { + recover(session) + } else { + toast.show({ + variant: "error", + title: "Failed to delete session", + message: errorMessage(result.error), + }) + } + setToDelete(undefined) + return + } + } catch (err) { + if (session?.workspaceID) { + recover(session) + } else { + toast.show({ + variant: "error", + title: "Failed to delete session", + message: errorMessage(err), + }) + } + setToDelete(undefined) + return + } + if (status && status !== "connected") { + await sync.session.refresh() + } + if (search()) await refetch() setToDelete(undefined) return } diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx index 447a1c3258..ca504d864d 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx @@ -6,6 +6,8 @@ import { useSync } from "@tui/context/sync" import { useProject } from "@tui/context/project" import { createMemo, createSignal, onMount } from "solid-js" import { setTimeout as sleep } from "node:timers/promises" +import { errorData, errorMessage } from "@/util/error" +import * as Log from "@/util/log" import { useSDK } from "../context/sdk" import { useToast } from "../ui/toast" @@ -15,6 +17,8 @@ type Adaptor = { description: string } +const log = Log.Default.clone().tag("service", "tui-workspace") + function scoped(sdk: ReturnType, sync: ReturnType, workspaceID: string) { return createOpencodeClient({ baseUrl: sdk.url, @@ -33,8 +37,20 @@ export async function openWorkspaceSession(input: { workspaceID: string }) { const client = scoped(input.sdk, input.sync, input.workspaceID) + log.info("workspace session create requested", { + workspaceID: 
input.workspaceID, + }) + + console.log("opening!") while (true) { - const result = await client.session.create({ workspaceID: input.workspaceID }).catch(() => undefined) + console.log("creating") + const result = await client.session.create({ workspace: input.workspaceID }).catch((err) => { + log.error("workspace session create request failed", { + workspaceID: input.workspaceID, + error: errorData(err), + }) + return undefined + }) if (!result) { input.toast.show({ message: "Failed to create workspace session", @@ -42,26 +58,113 @@ export async function openWorkspaceSession(input: { }) return } - if (result.response.status >= 500 && result.response.status < 600) { + log.info("workspace session create response", { + workspaceID: input.workspaceID, + status: result.response?.status, + sessionID: result.data?.id, + }) + if (result.response?.status && result.response.status >= 500 && result.response.status < 600) { + log.warn("workspace session create retrying after server error", { + workspaceID: input.workspaceID, + status: result.response.status, + }) await sleep(1000) continue } if (!result.data) { + log.error("workspace session create returned no data", { + workspaceID: input.workspaceID, + status: result.response?.status, + }) input.toast.show({ message: "Failed to create workspace session", variant: "error", }) return } + input.route.navigate({ type: "session", sessionID: result.data.id, }) + log.info("workspace session create complete", { + workspaceID: input.workspaceID, + sessionID: result.data.id, + }) input.dialog.clear() return } } +export async function restoreWorkspaceSession(input: { + dialog: ReturnType + sdk: ReturnType + sync: ReturnType + project: ReturnType + toast: ReturnType + workspaceID: string + sessionID: string + done?: () => void +}) { + log.info("session restore requested", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + }) + const result = await input.sdk.client.experimental.workspace + .sessionRestore({ id: 
input.workspaceID, sessionID: input.sessionID }) + .catch((err) => { + log.error("session restore request failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + error: errorData(err), + }) + return undefined + }) + if (!result?.data) { + log.error("session restore failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + status: result?.response?.status, + error: result?.error ? errorData(result.error) : undefined, + }) + input.toast.show({ + message: `Failed to restore session: ${errorMessage(result?.error ?? "no response")}`, + variant: "error", + }) + return + } + + log.info("session restore response", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + status: result.response?.status, + total: result.data.total, + }) + + await Promise.all([input.project.workspace.sync(), input.sync.session.refresh()]).catch((err) => { + log.error("session restore refresh failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + error: errorData(err), + }) + throw err + }) + + log.info("session restore complete", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + total: result.data.total, + }) + + input.toast.show({ + message: "Session restored into the new workspace", + variant: "success", + }) + input.done?.() + if (input.done) return + input.dialog.clear() +} + export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) => Promise | void }) { const dialog = useDialog() const sync = useSync() @@ -123,18 +226,43 @@ export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) = const create = async (type: string) => { if (creating()) return setCreating(type) + log.info("workspace create requested", { + type, + }) + + const result = await sdk.client.experimental.workspace.create({ type, branch: null }).catch((err) => { + log.error("workspace create request failed", { + type, + error: errorData(err), + }) + return undefined + }) - const result = await 
sdk.client.experimental.workspace.create({ type, branch: null }).catch(() => undefined) const workspace = result?.data if (!workspace) { setCreating(undefined) + log.error("workspace create failed", { + type, + status: result?.response.status, + error: result?.error ? errorData(result.error) : undefined, + }) toast.show({ - message: "Failed to create workspace", + message: `Failed to create workspace: ${errorMessage(result?.error ?? "no response")}`, variant: "error", }) return } + log.info("workspace create response", { + type, + workspaceID: workspace.id, + status: result.response?.status, + }) + await project.workspace.sync() + log.info("workspace create synced", { + type, + workspaceID: workspace.id, + }) await props.onSelect(workspace.id) setCreating(undefined) } diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index b4ab82729f..e64a16eb8a 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -617,9 +617,7 @@ export function Prompt(props: PromptProps) { let sessionID = props.sessionID if (sessionID == null) { - const res = await sdk.client.session.create({ - workspaceID: props.workspaceID, - }) + const res = await sdk.client.session.create({ workspace: props.workspaceID }) if (res.error) { console.log("Creating a session failed:", res.error) diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 46227e28aa..38b4457445 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -474,6 +474,13 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ if (match.found) return store.session[match.index] return undefined }, + async refresh() { + const start = Date.now() - 30 * 24 * 60 * 60 * 1000 + const list = await sdk.client.session + 
.list({ start }) + .then((x) => (x.data ?? []).toSorted((a, b) => a.id.localeCompare(b.id))) + setStore("session", reconcile(list)) + }, status(sessionID: string) { const session = result.session.get(sessionID) if (!session) return "idle" @@ -485,13 +492,13 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return last.time.completed ? "idle" : "working" }, async sync(sessionID: string) { + console.log('YO', sessionID, fullSyncedSessions.has(sessionID)) if (fullSyncedSessions.has(sessionID)) return - const workspace = project.workspace.current() const [session, messages, todo, diff] = await Promise.all([ - sdk.client.session.get({ sessionID, workspace }, { throwOnError: true }), - sdk.client.session.messages({ sessionID, limit: 100, workspace }), - sdk.client.session.todo({ sessionID, workspace }), - sdk.client.session.diff({ sessionID, workspace }), + sdk.client.session.get({ sessionID }, { throwOnError: true }), + sdk.client.session.messages({ sessionID, limit: 100 }), + sdk.client.session.todo({ sessionID }), + sdk.client.session.diff({ sessionID }), ]) setStore( produce((draft) => { diff --git a/packages/opencode/src/control-plane/workspace-context.ts b/packages/opencode/src/control-plane/workspace-context.ts index 565472a24f..3d4fa5baef 100644 --- a/packages/opencode/src/control-plane/workspace-context.ts +++ b/packages/opencode/src/control-plane/workspace-context.ts @@ -2,17 +2,17 @@ import { LocalContext } from "../util" import type { WorkspaceID } from "../control-plane/schema" export interface WorkspaceContext { - workspaceID: string + workspaceID: WorkspaceID } const context = LocalContext.create("instance") export const WorkspaceContext = { async provide(input: { workspaceID: WorkspaceID; fn: () => R }): Promise { - return context.provide({ workspaceID: input.workspaceID as string }, () => input.fn()) + return context.provide({ workspaceID: input.workspaceID }, () => input.fn()) }, - restore(workspaceID: string, fn: () => 
R): R { + restore(workspaceID: WorkspaceID, fn: () => R): R { return context.provide({ workspaceID }, fn) }, diff --git a/packages/opencode/src/effect/bridge.ts b/packages/opencode/src/effect/bridge.ts index d79fc74f47..03e5aefd23 100644 --- a/packages/opencode/src/effect/bridge.ts +++ b/packages/opencode/src/effect/bridge.ts @@ -1,6 +1,7 @@ import { Effect, Fiber } from "effect" import { WorkspaceContext } from "@/control-plane/workspace-context" import { Instance, type InstanceContext } from "@/project/instance" +import type { WorkspaceID } from "@/control-plane/schema" import { LocalContext } from "@/util" import { InstanceRef, WorkspaceRef } from "./instance-ref" import { attachWith } from "./run-service" @@ -10,7 +11,7 @@ export interface Shape { readonly fork: (effect: Effect.Effect) => Fiber.Fiber } -function restore(instance: InstanceContext | undefined, workspace: string | undefined, fn: () => R): R { +function restore(instance: InstanceContext | undefined, workspace: WorkspaceID | undefined, fn: () => R): R { if (instance && workspace !== undefined) { return WorkspaceContext.restore(workspace, () => Instance.restore(instance, fn)) } diff --git a/packages/opencode/src/effect/instance-ref.ts b/packages/opencode/src/effect/instance-ref.ts index 301316c771..effc560c58 100644 --- a/packages/opencode/src/effect/instance-ref.ts +++ b/packages/opencode/src/effect/instance-ref.ts @@ -1,10 +1,11 @@ import { Context } from "effect" import type { InstanceContext } from "@/project/instance" +import type { WorkspaceID } from "@/control-plane/schema" export const InstanceRef = Context.Reference("~opencode/InstanceRef", { defaultValue: () => undefined, }) -export const WorkspaceRef = Context.Reference("~opencode/WorkspaceRef", { +export const WorkspaceRef = Context.Reference("~opencode/WorkspaceRef", { defaultValue: () => undefined, }) diff --git a/packages/opencode/src/session/session.ts b/packages/opencode/src/session/session.ts index e288aec73a..8c5fc29e4a 100644 --- 
a/packages/opencode/src/session/session.ts +++ b/packages/opencode/src/session/session.ts @@ -519,12 +519,13 @@ export const layer: Layer.Layer = workspaceID?: WorkspaceID }) { const directory = yield* InstanceState.directory + const workspace = yield* InstanceState.workspaceID return yield* createNext({ parentID: input?.parentID, directory, title: input?.title, permission: input?.permission, - workspaceID: input?.workspaceID, + workspaceID: workspace, }) }) diff --git a/packages/opencode/test/cli/tui/sync-provider.test.tsx b/packages/opencode/test/cli/tui/sync-provider.test.tsx index 3ef126ef4c..e75e186199 100644 --- a/packages/opencode/test/cli/tui/sync-provider.test.tsx +++ b/packages/opencode/test/cli/tui/sync-provider.test.tsx @@ -264,27 +264,15 @@ describe("SyncProvider", () => { log.length = 0 await sync.session.sync("ses_1") + expect(log.filter((item) => item.path === "/session/ses_1")).toHaveLength(1) - expect(log.filter((item) => item.path === "/session/ses_1" && item.workspace === "ws_a")).toHaveLength(1) - expect(sync.data.todo.ses_1[0]?.content).toBe("todo-ws_a") - expect(sync.data.message.ses_1[0]?.id).toBe("msg_1") - expect(sync.data.part.msg_1[0]).toMatchObject({ type: "text", text: "part-ws_a" }) - expect(sync.data.session_diff.ses_1[0]?.file).toBe("ws_a.ts") - - log.length = 0 project.workspace.set("ws_b") await waitBoot(log, "ws_b") expect(project.workspace.current()).toBe("ws_b") log.length = 0 await sync.session.sync("ses_1") - await wait(() => log.some((item) => item.path === "/session/ses_1" && item.workspace === "ws_b")) - - expect(log.filter((item) => item.path === "/session/ses_1" && item.workspace === "ws_b")).toHaveLength(1) - expect(sync.data.todo.ses_1[0]?.content).toBe("todo-ws_b") - expect(sync.data.message.ses_1[0]?.id).toBe("msg_1") - expect(sync.data.part.msg_1[0]).toMatchObject({ type: "text", text: "part-ws_b" }) - expect(sync.data.session_diff.ses_1[0]?.file).toBe("ws_b.ts") + expect(log.filter((item) => item.path === 
"/session/ses_1")).toHaveLength(1) } finally { app.renderer.destroy() } From d82bc3a421c04e3abbade123344dc40d81e03395 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 16:26:12 +0000 Subject: [PATCH 010/120] chore: generate --- packages/opencode/src/cli/cmd/tui/context/sync.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 38b4457445..10b70d50ac 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -492,7 +492,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return last.time.completed ? "idle" : "working" }, async sync(sessionID: string) { - console.log('YO', sessionID, fullSyncedSessions.has(sessionID)) + console.log("YO", sessionID, fullSyncedSessions.has(sessionID)) if (fullSyncedSessions.has(sessionID)) return const [session, messages, todo, diff] = await Promise.all([ sdk.client.session.get({ sessionID }, { throwOnError: true }), From b28956f0dbc22d786fab24b2a34fd07fba6d27ec Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 12:35:37 -0400 Subject: [PATCH 011/120] fix(core): better global sync event structure (#22858) --- packages/opencode/src/control-plane/workspace.ts | 3 +-- packages/opencode/src/sync/sync-event.ts | 12 ++++-------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index b43fe848ba..d870eb6360 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -460,8 +460,7 @@ export namespace Workspace { if (!("payload" in evt)) return if (evt.payload.type === "sync") { - // This name -> type is temporary - SyncEvent.replay({ ...evt.payload, type: evt.payload.name } as SyncEvent.SerializedEvent) + 
SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) } GlobalBus.emit("event", { diff --git a/packages/opencode/src/sync/sync-event.ts b/packages/opencode/src/sync/sync-event.ts index db487ddd24..94c889d917 100644 --- a/packages/opencode/src/sync/sync-event.ts +++ b/packages/opencode/src/sync/sync-event.ts @@ -155,8 +155,10 @@ function process(def: Def, event: Event, options: { workspace: WorkspaceContext.workspaceID, payload: { type: "sync", - name: versionedType(def.type, def.version), - ...event, + syncEvent: { + type: versionedType(def.type, def.version), + ...event, + }, }, }) } @@ -164,12 +166,6 @@ function process(def: Def, event: Event, options: { }) } -// TODO: -// -// * Support applying multiple events at one time. One transaction, -// and it validets all the sequence ids -// * when loading events from db, apply zod validation to ensure shape - export function replay(event: SerializedEvent, options?: { publish: boolean }) { const def = registry.get(event.type) if (!def) { From bfffc3c2c6349d9199dd1a73260612b5ec2da88d Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 12:19:43 -0400 Subject: [PATCH 012/120] tui: ensure TUI plugins load with proper project context when multiple directories are open Fixes potential plugin resolution issues when switching between projects by wrapping plugin loading in Instance.provide(). This ensures each plugin resolves dependencies relative to its correct project directory instead of inheriting context from whatever instance happened to be active. Also reorganizes config loading code into focused modules (command.ts, managed.ts, plugin.ts) to make the codebase easier to maintain and test. 
--- .../src/cli/cmd/tui/plugin/runtime.ts | 70 +++--- packages/opencode/src/config/command.ts | 76 ++++++ packages/opencode/src/config/config.ts | 233 +++--------------- packages/opencode/src/config/index.ts | 2 + packages/opencode/src/config/managed.ts | 71 ++++++ packages/opencode/src/config/paths.ts | 5 +- packages/opencode/test/config/config.test.ts | 29 ++- packages/shared/src/npm.ts | 43 ++-- 8 files changed, 262 insertions(+), 267 deletions(-) create mode 100644 packages/opencode/src/config/command.ts create mode 100644 packages/opencode/src/config/managed.ts diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index af37ffbd76..ac1c0fc3b8 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -16,6 +16,7 @@ import { TuiConfig } from "@/cli/cmd/tui/config/tui" import { Log } from "@/util" import { errorData, errorMessage } from "@/util/error" import { isRecord } from "@/util/record" +import { Instance } from "@/project/instance" import { readPackageThemes, readPluginId, @@ -789,7 +790,13 @@ async function addPluginBySpec(state: RuntimeState | undefined, raw: string) { state.pending.delete(spec) return true } - const ready = await resolveExternalPlugins([cfg], () => TuiConfig.waitForDependencies()) + const ready = await Instance.provide({ + directory: state.directory, + fn: () => resolveExternalPlugins([cfg], () => TuiConfig.waitForDependencies()), + }).catch((error) => { + fail("failed to add tui plugin", { path: next, error }) + return [] as PluginLoad[] + }) if (!ready.length) { return false } @@ -980,37 +987,42 @@ export namespace TuiPluginRuntime { } runtime = next try { - const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? 
[]) - if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { - log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) - } + await Instance.provide({ + directory: cwd, + fn: async () => { + const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? []) + if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { + log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) + } - for (const item of INTERNAL_TUI_PLUGINS) { - log.info("loading internal tui plugin", { id: item.id }) - const entry = loadInternalPlugin(item) - const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) - addPluginEntry(next, { - id: entry.id, - load: entry, - meta, - themes: {}, - plugin: entry.module.tui, - enabled: true, - }) - } + for (const item of INTERNAL_TUI_PLUGINS) { + log.info("loading internal tui plugin", { id: item.id }) + const entry = loadInternalPlugin(item) + const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) + addPluginEntry(next, { + id: entry.id, + load: entry, + meta, + themes: {}, + plugin: entry.module.tui, + enabled: true, + }) + } - const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) - await addExternalPluginEntries(next, ready) + const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) + await addExternalPluginEntries(next, ready) - applyInitialPluginEnabledState(next, config) - for (const plugin of next.plugins) { - if (!plugin.enabled) continue - // Keep plugin execution sequential for deterministic side effects: - // command registration order affects keybind/command precedence, - // route registration is last-wins when ids collide, - // and hook chains rely on stable plugin ordering. 
- await activatePluginEntry(next, plugin, false) - } + applyInitialPluginEnabledState(next, config) + for (const plugin of next.plugins) { + if (!plugin.enabled) continue + // Keep plugin execution sequential for deterministic side effects: + // command registration order affects keybind/command precedence, + // route registration is last-wins when ids collide, + // and hook chains rely on stable plugin ordering. + await activatePluginEntry(next, plugin, false) + } + }, + }) } catch (error) { fail("failed to load tui plugins", { directory: cwd, error }) } diff --git a/packages/opencode/src/config/command.ts b/packages/opencode/src/config/command.ts new file mode 100644 index 0000000000..4b2d58f3ff --- /dev/null +++ b/packages/opencode/src/config/command.ts @@ -0,0 +1,76 @@ +import { Log } from "../util" +import path from "path" +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" +import { Glob } from "@opencode-ai/shared/util/glob" +import { Bus } from "@/bus" +import * as ConfigMarkdown from "./markdown" +import { InvalidError } from "./paths" + +const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) + +const log = Log.create({ service: "config" }) + +function rel(item: string, patterns: string[]) { + const normalizedItem = item.replaceAll("\\", "/") + for (const pattern of patterns) { + const index = normalizedItem.indexOf(pattern) + if (index === -1) continue + return normalizedItem.slice(index + pattern.length) + } +} + +function trim(file: string) { + const ext = path.extname(file) + return ext.length ? 
file.slice(0, -ext.length) : file +} + +export namespace ConfigCommand { + export const Info = z.object({ + template: z.string(), + description: z.string().optional(), + agent: z.string().optional(), + model: ModelId.optional(), + subtask: z.boolean().optional(), + }) + + export type Info = z.infer + + export async function load(dir: string) { + const result: Record = {} + for (const item of await Glob.scan("{command,commands}/**/*.md", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + const md = await ConfigMarkdown.parse(item).catch(async (err) => { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? err.data.message + : `Failed to parse command ${item}` + const { Session } = await import("@/session") + void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load command", { command: item, err }) + return undefined + }) + if (!md) continue + + const patterns = ["/.opencode/command/", "/.opencode/commands/", "/command/", "/commands/"] + const file = rel(item, patterns) ?? 
path.basename(item) + const name = trim(file) + + const config = { + name, + ...md.data, + template: md.content.trim(), + } + const parsed = Info.safeParse(config) + if (parsed.success) { + result[config.name] = parsed.data + continue + } + throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) + } + return result + } +} diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 97e7a662d0..3922357f2e 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -2,9 +2,8 @@ import { Log } from "../util" import path from "path" import { pathToFileURL } from "url" import os from "os" -import { Process } from "../util" import z from "zod" -import { mergeDeep, pipe, unique } from "remeda" +import { mergeDeep, pipe } from "remeda" import { Global } from "../global" import fsNode from "fs/promises" import { NamedError } from "@opencode-ai/shared/util/error" @@ -35,10 +34,11 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { InstanceState } from "@/effect" import { Context, Duration, Effect, Exit, Fiber, Layer, Option } from "effect" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" - -import { isPathPluginSpec, parsePluginSpecifier, resolvePathPluginTarget } from "@/plugin/shared" import { InstanceRef } from "@/effect/instance-ref" import { Npm } from "@opencode-ai/shared/npm" +import { ConfigPlugin } from "./plugin" +import { ConfigManaged } from "./managed" +import { ConfigCommand } from "./command" const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) const PluginOptions = z.record(z.string(), z.unknown()) @@ -55,78 +55,6 @@ export type PluginOrigin = { const log = Log.create({ service: "config" }) -// Managed settings directory for enterprise deployments (highest priority, admin-controlled) -// These settings override all user and project settings -function 
systemManagedConfigDir(): string { - switch (process.platform) { - case "darwin": - return "/Library/Application Support/opencode" - case "win32": - return path.join(process.env.ProgramData || "C:\\ProgramData", "opencode") - default: - return "/etc/opencode" - } -} - -export function managedConfigDir() { - return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir() -} - -const managedDir = managedConfigDir() - -const MANAGED_PLIST_DOMAIN = "ai.opencode.managed" - -// Keys injected by macOS/MDM into the managed plist that are not OpenCode config -const PLIST_META = new Set([ - "PayloadDisplayName", - "PayloadIdentifier", - "PayloadType", - "PayloadUUID", - "PayloadVersion", - "_manualProfile", -]) - -/** - * Parse raw JSON (from plutil conversion of a managed plist) into OpenCode config. - * Strips MDM metadata keys before parsing through the config schema. - * Pure function — no OS interaction, safe to unit test directly. - */ -export function parseManagedPlist(json: string, source: string): Info { - const raw = JSON.parse(json) - for (const key of Object.keys(raw)) { - if (PLIST_META.has(key)) delete raw[key] - } - return parseConfig(JSON.stringify(raw), source) -} - -/** - * Read macOS managed preferences deployed via .mobileconfig / MDM (Jamf, Kandji, etc). - * MDM-installed profiles write to /Library/Managed Preferences/ which is only writable by root. - * User-scoped plists are checked first, then machine-scoped. 
- */ -async function readManagedPreferences(): Promise { - if (process.platform !== "darwin") return {} - - const domain = MANAGED_PLIST_DOMAIN - const user = os.userInfo().username - const paths = [ - path.join("/Library/Managed Preferences", user, `${domain}.plist`), - path.join("/Library/Managed Preferences", `${domain}.plist`), - ] - - for (const plist of paths) { - if (!existsSync(plist)) continue - log.info("reading macOS managed preferences", { path: plist }) - const result = await Process.run(["plutil", "-convert", "json", "-o", "-", plist], { nothrow: true }) - if (result.code !== 0) { - log.warn("failed to convert managed preferences plist", { path: plist }) - continue - } - return parseManagedPlist(result.stdout.toString(), `mobileconfig:${plist}`) - } - return {} -} - // Custom merge function that concatenates array fields instead of replacing them function mergeConfigConcatArrays(target: Info, source: Info): Info { const merged = mergeDeep(target, source) @@ -154,44 +82,6 @@ function trim(file: string) { return ext.length ? file.slice(0, -ext.length) : file } -async function loadCommand(dir: string) { - const result: Record = {} - for (const item of await Glob.scan("{command,commands}/**/*.md", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - const md = await ConfigMarkdown.parse(item).catch(async (err) => { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse command ${item}` - const { Session } = await import("@/session") - void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load command", { command: item, err }) - return undefined - }) - if (!md) continue - - const patterns = ["/.opencode/command/", "/.opencode/commands/", "/command/", "/commands/"] - const file = rel(item, patterns) ?? 
path.basename(item) - const name = trim(file) - - const config = { - name, - ...md.data, - template: md.content.trim(), - } - const parsed = Command.safeParse(config) - if (parsed.success) { - result[config.name] = parsed.data - continue - } - throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) - } - return result -} - async function loadAgent(dir: string) { const result: Record = {} @@ -267,60 +157,6 @@ async function loadMode(dir: string) { return result } -async function loadPlugin(dir: string) { - const plugins: PluginSpec[] = [] - - for (const item of await Glob.scan("{plugin,plugins}/*.{ts,js}", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - plugins.push(pathToFileURL(item).href) - } - return plugins -} - -export function pluginSpecifier(plugin: PluginSpec): string { - return Array.isArray(plugin) ? plugin[0] : plugin -} - -export function pluginOptions(plugin: PluginSpec): PluginOptions | undefined { - return Array.isArray(plugin) ? plugin[1] : undefined -} - -export async function resolvePluginSpec(plugin: PluginSpec, configFilepath: string): Promise { - const spec = pluginSpecifier(plugin) - if (!isPathPluginSpec(spec)) return plugin - - const base = path.dirname(configFilepath) - const file = (() => { - if (spec.startsWith("file://")) return spec - if (path.isAbsolute(spec) || /^[A-Za-z]:[\\/]/.test(spec)) return pathToFileURL(spec).href - return pathToFileURL(path.resolve(base, spec)).href - })() - - const resolved = await resolvePathPluginTarget(file).catch(() => file) - - if (Array.isArray(plugin)) return [resolved, plugin[1]] - return resolved -} - -export function deduplicatePluginOrigins(plugins: PluginOrigin[]): PluginOrigin[] { - const seen = new Set() - const list: PluginOrigin[] = [] - - for (const plugin of plugins.toReversed()) { - const spec = pluginSpecifier(plugin.spec) - const name = spec.startsWith("file://") ? 
spec : parsePluginSpecifier(spec).pkg - if (seen.has(name)) continue - seen.add(name) - list.push(plugin) - } - - return list.toReversed() -} - export const McpLocal = z .object({ type: z.literal("local").describe("Type of MCP server connection"), @@ -453,15 +289,6 @@ export const Permission = z }) export type Permission = z.infer -export const Command = z.object({ - template: z.string(), - description: z.string().optional(), - agent: z.string().optional(), - model: ModelId.optional(), - subtask: z.boolean().optional(), -}) -export type Command = z.infer - export const Skills = z.object({ paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), urls: z @@ -854,7 +681,7 @@ export const Info = z logLevel: Log.Level.optional().describe("Log level"), server: Server.optional().describe("Server configuration for opencode serve and web commands"), command: z - .record(z.string(), Command) + .record(z.string(), ConfigCommand.Info) .optional() .describe("Command configuration, see https://opencode.ai/docs/commands"), skills: Skills.optional().describe("Additional skill folder paths"), @@ -1095,7 +922,7 @@ function writable(info: Info) { return next } -function parseConfig(text: string, filepath: string): Info { +export function parseConfig(text: string, filepath: string): Info { const errors: JsoncParseError[] = [] const data = parseJsonc(text, errors, { allowTrailingComma: true }) if (errors.length) { @@ -1193,7 +1020,7 @@ export const layer = Layer.effect( if (data.plugin && isFile) { const list = data.plugin for (let i = 0; i < list.length; i++) { - list[i] = yield* Effect.promise(() => resolvePluginSpec(list[i], options.path)) + list[i] = yield* Effect.promise(() => ConfigPlugin.resolvePluginSpec(list[i], options.path)) } } return data @@ -1253,7 +1080,7 @@ export const layer = Layer.effect( return yield* cachedGlobal }) - const setupConfigDir = Effect.fnUntraced(function* (dir: string) { + const ensureGitignore = 
Effect.fn("Config.ensureGitignore")(function* (dir: string) { const gitignore = path.join(dir, ".gitignore") const hasIgnore = yield* fs.existsSafe(gitignore) if (!hasIgnore) { @@ -1262,9 +1089,6 @@ export const layer = Layer.effect( ["node_modules", "package.json", "package-lock.json", "bun.lock", ".gitignore"].join("\n"), ) } - yield* npmSvc.install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], - }) }) const loadInstanceState = Effect.fn("Config.loadInstanceState")(function* (ctx: InstanceContext) { @@ -1284,7 +1108,7 @@ export const layer = Layer.effect( const track = Effect.fnUntraced(function* (source: string, list: PluginSpec[] | undefined, kind?: PluginScope) { if (!list?.length) return const hit = kind ?? (yield* scope(source)) - const plugins = deduplicatePluginOrigins([ + const plugins = ConfigPlugin.deduplicatePluginOrigins([ ...(result.plugin_origins ?? []), ...list.map((spec) => ({ spec, source, scope: hit })), ]) @@ -1347,7 +1171,7 @@ export const layer = Layer.effect( const deps: Fiber.Fiber[] = [] - for (const dir of unique(directories)) { + for (const dir of directories) { if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) { for (const file of ["opencode.json", "opencode.jsonc"]) { const source = path.join(dir, file) @@ -1359,24 +1183,30 @@ export const layer = Layer.effect( } } - const dep = yield* setupConfigDir(dir).pipe( - Effect.exit, - Effect.tap((exit) => - Exit.isFailure(exit) - ? Effect.sync(() => { - log.warn("background dependency install failed", { dir, error: String(exit.cause) }) - }) - : Effect.void, - ), - Effect.asVoid, - Effect.forkScoped, - ) + yield* ensureGitignore(dir).pipe(Effect.forkScoped) + + const dep = yield* npmSvc + .install(dir, { + add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], + }) + .pipe( + Effect.exit, + Effect.tap((exit) => + Exit.isFailure(exit) + ? 
Effect.sync(() => { + log.warn("background dependency install failed", { dir, error: String(exit.cause) }) + }) + : Effect.void, + ), + Effect.asVoid, + Effect.forkDetach, + ) deps.push(dep) - result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => loadCommand(dir))) + result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) result.agent = mergeDeep(result.agent, yield* Effect.promise(() => loadAgent(dir))) result.agent = mergeDeep(result.agent, yield* Effect.promise(() => loadMode(dir))) - const list = yield* Effect.promise(() => loadPlugin(dir)) + const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) yield* track(dir, list) } @@ -1429,6 +1259,7 @@ export const layer = Layer.effect( ) } + const managedDir = ConfigManaged.managedConfigDir() if (existsSync(managedDir)) { for (const file of ["opencode.json", "opencode.jsonc"]) { const source = path.join(managedDir, file) @@ -1437,7 +1268,7 @@ export const layer = Layer.effect( } // macOS managed preferences (.mobileconfig deployed via MDM) override everything - result = mergeConfigConcatArrays(result, yield* Effect.promise(() => readManagedPreferences())) + result = mergeConfigConcatArrays(result, yield* Effect.promise(() => ConfigManaged.readManagedPreferences())) for (const [name, mode] of Object.entries(result.mode ?? {})) { result.agent = mergeDeep(result.agent ?? 
{}, { diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index fbcca1aa9a..8380d370d8 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -1,3 +1,5 @@ export * as Config from "./config" +export * as ConfigCommand from "./command" +export { ConfigManaged } from "./managed" export * as ConfigMarkdown from "./markdown" export * as ConfigPaths from "./paths" diff --git a/packages/opencode/src/config/managed.ts b/packages/opencode/src/config/managed.ts new file mode 100644 index 0000000000..61c535185f --- /dev/null +++ b/packages/opencode/src/config/managed.ts @@ -0,0 +1,71 @@ +import { existsSync } from "fs" +import os from "os" +import path from "path" +import { type Info, parseConfig } from "./config" +import { Log, Process } from "../util" + +const log = Log.create({ service: "config" }) + +const MANAGED_PLIST_DOMAIN = "ai.opencode.managed" + +// Keys injected by macOS/MDM into the managed plist that are not OpenCode config +const PLIST_META = new Set([ + "PayloadDisplayName", + "PayloadIdentifier", + "PayloadType", + "PayloadUUID", + "PayloadVersion", + "_manualProfile", +]) + +function systemManagedConfigDir(): string { + switch (process.platform) { + case "darwin": + return "/Library/Application Support/opencode" + case "win32": + return path.join(process.env.ProgramData || "C:\\ProgramData", "opencode") + default: + return "/etc/opencode" + } +} + +function managedConfigDir() { + return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir() +} + +function parseManagedPlist(json: string, source: string): Info { + const raw = JSON.parse(json) + for (const key of Object.keys(raw)) { + if (PLIST_META.has(key)) delete raw[key] + } + return parseConfig(JSON.stringify(raw), source) +} + +async function readManagedPreferences(): Promise { + if (process.platform !== "darwin") return {} + + const user = os.userInfo().username + const paths = [ + 
path.join("/Library/Managed Preferences", user, `${MANAGED_PLIST_DOMAIN}.plist`), + path.join("/Library/Managed Preferences", `${MANAGED_PLIST_DOMAIN}.plist`), + ] + + for (const plist of paths) { + if (!existsSync(plist)) continue + log.info("reading macOS managed preferences", { path: plist }) + const result = await Process.run(["plutil", "-convert", "json", "-o", "-", plist], { nothrow: true }) + if (result.code !== 0) { + log.warn("failed to convert managed preferences plist", { path: plist }) + continue + } + return parseManagedPlist(result.stdout.toString(), `mobileconfig:${plist}`) + } + + return {} +} + +export const ConfigManaged = { + managedConfigDir, + parseManagedPlist, + readManagedPreferences, +} diff --git a/packages/opencode/src/config/paths.ts b/packages/opencode/src/config/paths.ts index eeb9d62d3f..fabd3fd5f8 100644 --- a/packages/opencode/src/config/paths.ts +++ b/packages/opencode/src/config/paths.ts @@ -6,13 +6,14 @@ import { NamedError } from "@opencode-ai/shared/util/error" import { Filesystem } from "@/util" import { Flag } from "@/flag/flag" import { Global } from "@/global" +import { unique } from "remeda" export async function projectFiles(name: string, directory: string, worktree?: string) { return Filesystem.findUp([`${name}.json`, `${name}.jsonc`], directory, worktree, { rootFirst: true }) } export async function directories(directory: string, worktree?: string) { - return [ + return unique([ Global.Path.config, ...(!Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? await Array.fromAsync( @@ -31,7 +32,7 @@ export async function directories(directory: string, worktree?: string) { }), )), ...(Flag.OPENCODE_CONFIG_DIR ? 
[Flag.OPENCODE_CONFIG_DIR] : []), - ] + ]) } export function fileInDirectory(dir: string, name: string) { diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index 1f36312447..303fa8ba08 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -1,7 +1,7 @@ -import { test, expect, describe, mock, afterEach, beforeEach, spyOn } from "bun:test" -import { Deferred, Effect, Fiber, Layer, Option } from "effect" +import { test, expect, describe, mock, afterEach, beforeEach } from "bun:test" +import { Effect, Layer, Option } from "effect" import { NodeFileSystem, NodePath } from "@effect/platform-node" -import { Config } from "../../src/config" +import { Config, ConfigManaged } from "../../src/config" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" import { Instance } from "../../src/project/instance" @@ -10,7 +10,7 @@ import { AccessToken, Account, AccountID, OrgID } from "../../src/account" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { Env } from "../../src/env" import { provideTmpdirInstance } from "../fixture/fixture" -import { tmpdir, tmpdirScoped } from "../fixture/fixture" +import { tmpdir } from "../fixture/fixture" import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner" import { testEffect } from "../lib/effect" @@ -24,7 +24,6 @@ import { pathToFileURL } from "url" import { Global } from "../../src/global" import { ProjectID } from "../../src/project/schema" import { Filesystem } from "../../src/util" -import * as Network from "../../src/util/network" import { ConfigPlugin } from "@/config/plugin" import { Npm } from "@opencode-ai/shared/npm" @@ -1860,14 +1859,14 @@ describe("resolvePluginSpec", () => { }) const file = path.join(tmp.path, "opencode.json") - const hit = await Config.resolvePluginSpec("./plugin", file) - expect(Config.pluginSpecifier(hit)).toBe(pathToFileURL(path.join(tmp.path, 
"plugin", "index.ts")).href) + const hit = await ConfigPlugin.resolvePluginSpec("./plugin", file) + expect(ConfigPlugin.pluginSpecifier(hit)).toBe(pathToFileURL(path.join(tmp.path, "plugin", "index.ts")).href) }) }) describe("deduplicatePluginOrigins", () => { const dedupe = (plugins: Config.PluginSpec[]) => - Config.deduplicatePluginOrigins( + ConfigPlugin.deduplicatePluginOrigins( plugins.map((spec) => ({ spec, source: "", @@ -1937,8 +1936,8 @@ describe("deduplicatePluginOrigins", () => { const config = await load() const plugins = config.plugin ?? [] - expect(plugins.some((p) => Config.pluginSpecifier(p) === "my-plugin@1.0.0")).toBe(true) - expect(plugins.some((p) => Config.pluginSpecifier(p).startsWith("file://"))).toBe(true) + expect(plugins.some((p) => ConfigPlugin.pluginSpecifier(p) === "my-plugin@1.0.0")).toBe(true) + expect(plugins.some((p) => ConfigPlugin.pluginSpecifier(p).startsWith("file://"))).toBe(true) }, }) }) @@ -2209,7 +2208,7 @@ describe("OPENCODE_CONFIG_CONTENT token substitution", () => { // parseManagedPlist unit tests — pure function, no OS interaction test("parseManagedPlist strips MDM metadata keys", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( JSON.stringify({ PayloadDisplayName: "OpenCode Managed", PayloadIdentifier: "ai.opencode.managed.test", @@ -2231,7 +2230,7 @@ test("parseManagedPlist strips MDM metadata keys", async () => { }) test("parseManagedPlist parses server settings", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( JSON.stringify({ $schema: "https://opencode.ai/config.json", server: { hostname: "127.0.0.1", mdns: false }, @@ -2245,7 +2244,7 @@ test("parseManagedPlist parses server settings", async () => { }) test("parseManagedPlist parses permission rules", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( 
JSON.stringify({ $schema: "https://opencode.ai/config.json", permission: { @@ -2269,7 +2268,7 @@ test("parseManagedPlist parses permission rules", async () => { }) test("parseManagedPlist parses enabled_providers", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( JSON.stringify({ $schema: "https://opencode.ai/config.json", enabled_providers: ["anthropic", "google"], @@ -2280,7 +2279,7 @@ test("parseManagedPlist parses enabled_providers", async () => { }) test("parseManagedPlist handles empty config", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( JSON.stringify({ $schema: "https://opencode.ai/config.json" }), "test:mobileconfig", ) diff --git a/packages/shared/src/npm.ts b/packages/shared/src/npm.ts index 955cafa190..e4f42227de 100644 --- a/packages/shared/src/npm.ts +++ b/packages/shared/src/npm.ts @@ -142,7 +142,7 @@ export namespace Npm { yield* flock.acquire(`npm-install:${dir}`) - const reify = Effect.fnUntraced(function* () { + const reify = Effect.fn("Npm.reify")(function* () { const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) const arb = new Arborist({ path: dir, @@ -176,28 +176,31 @@ export namespace Npm { const pkgAny = pkg as any const lockAny = lock as any - const declared = new Set([ - ...Object.keys(pkgAny?.dependencies || {}), - ...Object.keys(pkgAny?.devDependencies || {}), - ...Object.keys(pkgAny?.peerDependencies || {}), - ...Object.keys(pkgAny?.optionalDependencies || {}), - ...(input?.add || []), - ]) + yield* Effect.gen(function* () { + const declared = new Set([ + ...Object.keys(pkgAny?.dependencies || {}), + ...Object.keys(pkgAny?.devDependencies || {}), + ...Object.keys(pkgAny?.peerDependencies || {}), + ...Object.keys(pkgAny?.optionalDependencies || {}), + ...(input?.add || []), + ]) - const root = lockAny?.packages?.[""] || {} - const locked = new Set([ - 
...Object.keys(root?.dependencies || {}), - ...Object.keys(root?.devDependencies || {}), - ...Object.keys(root?.peerDependencies || {}), - ...Object.keys(root?.optionalDependencies || {}), - ]) + const root = lockAny?.packages?.[""] || {} + const locked = new Set([ + ...Object.keys(root?.dependencies || {}), + ...Object.keys(root?.devDependencies || {}), + ...Object.keys(root?.peerDependencies || {}), + ...Object.keys(root?.optionalDependencies || {}), + ]) - for (const name of declared) { - if (!locked.has(name)) { - yield* reify() - return + for (const name of declared) { + if (!locked.has(name)) { + yield* reify() + return + } } - } + }).pipe(Effect.withSpan("Npm.checkDirty")) + return }, Effect.scoped) const which = Effect.fn("Npm.which")(function* (pkg: string) { From 33bb847a1dfb5e79b4815813739671a40afa0e51 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 12:40:16 -0400 Subject: [PATCH 013/120] config: refactor --- packages/opencode/src/acp/agent.ts | 3 +- packages/opencode/src/cli/cmd/mcp.ts | 9 +- packages/opencode/src/config/agent.ts | 171 ++++++++ packages/opencode/src/config/command.ts | 102 ++--- packages/opencode/src/config/config.ts | 401 ++---------------- packages/opencode/src/config/entry-name.ts | 16 + packages/opencode/src/config/index.ts | 4 + packages/opencode/src/config/mcp.ts | 70 +++ packages/opencode/src/config/model-id.ts | 3 + packages/opencode/src/config/permission.ts | 68 +++ packages/opencode/src/mcp/mcp.ts | 25 +- .../opencode/src/permission/permission.ts | 4 +- packages/opencode/src/server/instance/mcp.ts | 3 +- packages/opencode/test/config/config.test.ts | 5 +- 14 files changed, 449 insertions(+), 435 deletions(-) create mode 100644 packages/opencode/src/config/agent.ts create mode 100644 packages/opencode/src/config/entry-name.ts create mode 100644 packages/opencode/src/config/mcp.ts create mode 100644 packages/opencode/src/config/model-id.ts create mode 100644 packages/opencode/src/config/permission.ts diff --git 
a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts index 53bc7ed5fb..9388c87f12 100644 --- a/packages/opencode/src/acp/agent.ts +++ b/packages/opencode/src/acp/agent.ts @@ -44,6 +44,7 @@ import { AppRuntime } from "@/effect/app-runtime" import { Installation } from "@/installation" import { MessageV2 } from "@/session/message-v2" import { Config } from "@/config" +import { ConfigMCP } from "@/config/mcp" import { Todo } from "@/session/todo" import { z } from "zod" import { LoadAPIKeyError } from "ai" @@ -1213,7 +1214,7 @@ export namespace ACP { description: "compact the session", }) - const mcpServers: Record = {} + const mcpServers: Record = {} for (const server of params.mcpServers) { if ("type" in server) { mcpServers[server.name] = { diff --git a/packages/opencode/src/cli/cmd/mcp.ts b/packages/opencode/src/cli/cmd/mcp.ts index dc6d5e8896..a5751ce836 100644 --- a/packages/opencode/src/cli/cmd/mcp.ts +++ b/packages/opencode/src/cli/cmd/mcp.ts @@ -8,6 +8,7 @@ import { MCP } from "../../mcp" import { McpAuth } from "../../mcp/auth" import { McpOAuthProvider } from "../../mcp/oauth-provider" import { Config } from "../../config" +import { ConfigMCP } from "../../config/mcp" import { Instance } from "../../project/instance" import { Installation } from "../../installation" import { InstallationVersion } from "../../installation/version" @@ -43,7 +44,7 @@ function getAuthStatusText(status: MCP.AuthStatus): string { type McpEntry = NonNullable[string] -type McpConfigured = Config.Mcp +type McpConfigured = ConfigMCP.Info function isMcpConfigured(config: McpEntry): config is McpConfigured { return typeof config === "object" && config !== null && "type" in config } @@ -426,7 +427,7 @@ async function resolveConfigPath(baseDir: string, global = false) { return candidates[0] } -async function addMcpToConfig(name: string, mcpConfig: Config.Mcp, configPath: string) { +async function addMcpToConfig(name: string, mcpConfig: ConfigMCP.Info, configPath: 
string) { let text = "{}" if (await Filesystem.exists(configPath)) { text = await Filesystem.readText(configPath) @@ -514,7 +515,7 @@ export const McpAddCommand = cmd({ }) if (prompts.isCancel(command)) throw new UI.CancelledError() - const mcpConfig: Config.Mcp = { + const mcpConfig: ConfigMCP.Info = { type: "local", command: command.split(" "), } @@ -544,7 +545,7 @@ export const McpAddCommand = cmd({ }) if (prompts.isCancel(useOAuth)) throw new UI.CancelledError() - let mcpConfig: Config.Mcp + let mcpConfig: ConfigMCP.Info if (useOAuth) { const hasClientId = await prompts.confirm({ diff --git a/packages/opencode/src/config/agent.ts b/packages/opencode/src/config/agent.ts new file mode 100644 index 0000000000..3819368e82 --- /dev/null +++ b/packages/opencode/src/config/agent.ts @@ -0,0 +1,171 @@ +export * as ConfigAgent from "./agent" + +import { Log } from "../util" +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" +import { Glob } from "@opencode-ai/shared/util/glob" +import { Bus } from "@/bus" +import { configEntryNameFromPath } from "./entry-name" +import * as ConfigMarkdown from "./markdown" +import { ConfigModelID } from "./model-id" +import { InvalidError } from "./paths" +import { ConfigPermission } from "./permission" + +const log = Log.create({ service: "config" }) + +export const Info = z + .object({ + model: ConfigModelID.optional(), + variant: z + .string() + .optional() + .describe("Default model variant for this agent (applies only when using the agent's configured model)."), + temperature: z.number().optional(), + top_p: z.number().optional(), + prompt: z.string().optional(), + tools: z.record(z.string(), z.boolean()).optional().describe("@deprecated Use 'permission' field instead"), + disable: z.boolean().optional(), + description: z.string().optional().describe("Description of when to use the agent"), + mode: z.enum(["subagent", "primary", "all"]).optional(), + hidden: z + .boolean() + .optional() + 
.describe("Hide this subagent from the @ autocomplete menu (default: false, only applies to mode: subagent)"), + options: z.record(z.string(), z.any()).optional(), + color: z + .union([ + z.string().regex(/^#[0-9a-fA-F]{6}$/, "Invalid hex color format"), + z.enum(["primary", "secondary", "accent", "success", "warning", "error", "info"]), + ]) + .optional() + .describe("Hex color code (e.g., #FF5733) or theme color (e.g., primary)"), + steps: z + .number() + .int() + .positive() + .optional() + .describe("Maximum number of agentic iterations before forcing text-only response"), + maxSteps: z.number().int().positive().optional().describe("@deprecated Use 'steps' field instead."), + permission: ConfigPermission.Info.optional(), + }) + .catchall(z.any()) + .transform((agent, _ctx) => { + const knownKeys = new Set([ + "name", + "model", + "variant", + "prompt", + "description", + "temperature", + "top_p", + "mode", + "hidden", + "color", + "steps", + "maxSteps", + "options", + "permission", + "disable", + "tools", + ]) + + const options: Record = { ...agent.options } + for (const [key, value] of Object.entries(agent)) { + if (!knownKeys.has(key)) options[key] = value + } + + const permission: ConfigPermission.Info = {} + for (const [tool, enabled] of Object.entries(agent.tools ?? {})) { + const action = enabled ? "allow" : "deny" + if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { + permission.edit = action + continue + } + permission[tool] = action + } + Object.assign(permission, agent.permission) + + const steps = agent.steps ?? 
agent.maxSteps + + return { ...agent, options, permission, steps } as typeof agent & { + options?: Record + permission?: ConfigPermission.Info + steps?: number + } + }) + .meta({ + ref: "AgentConfig", + }) +export type Info = z.infer + +export async function load(dir: string) { + const result: Record = {} + for (const item of await Glob.scan("{agent,agents}/**/*.md", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + const md = await ConfigMarkdown.parse(item).catch(async (err) => { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? err.data.message + : `Failed to parse agent ${item}` + const { Session } = await import("@/session") + void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load agent", { agent: item, err }) + return undefined + }) + if (!md) continue + + const patterns = ["/.opencode/agent/", "/.opencode/agents/", "/agent/", "/agents/"] + const name = configEntryNameFromPath(item, patterns) + + const config = { + name, + ...md.data, + prompt: md.content.trim(), + } + const parsed = Info.safeParse(config) + if (parsed.success) { + result[config.name] = parsed.data + continue + } + throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) + } + return result +} + +export async function loadMode(dir: string) { + const result: Record = {} + for (const item of await Glob.scan("{mode,modes}/*.md", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + const md = await ConfigMarkdown.parse(item).catch(async (err) => { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? 
err.data.message + : `Failed to parse mode ${item}` + const { Session } = await import("@/session") + void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load mode", { mode: item, err }) + return undefined + }) + if (!md) continue + + const config = { + name: configEntryNameFromPath(item, []), + ...md.data, + prompt: md.content.trim(), + } + const parsed = Info.safeParse(config) + if (parsed.success) { + result[config.name] = { + ...parsed.data, + mode: "primary" as const, + } + } + } + return result +} diff --git a/packages/opencode/src/config/command.ts b/packages/opencode/src/config/command.ts index 4b2d58f3ff..5606bdd4c7 100644 --- a/packages/opencode/src/config/command.ts +++ b/packages/opencode/src/config/command.ts @@ -1,76 +1,60 @@ +export * as ConfigCommand from "./command" + import { Log } from "../util" -import path from "path" import z from "zod" import { NamedError } from "@opencode-ai/shared/util/error" import { Glob } from "@opencode-ai/shared/util/glob" import { Bus } from "@/bus" +import { configEntryNameFromPath } from "./entry-name" import * as ConfigMarkdown from "./markdown" +import { ConfigModelID } from "./model-id" import { InvalidError } from "./paths" -const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) - const log = Log.create({ service: "config" }) -function rel(item: string, patterns: string[]) { - const normalizedItem = item.replaceAll("\\", "/") - for (const pattern of patterns) { - const index = normalizedItem.indexOf(pattern) - if (index === -1) continue - return normalizedItem.slice(index + pattern.length) - } -} +export const Info = z.object({ + template: z.string(), + description: z.string().optional(), + agent: z.string().optional(), + model: ConfigModelID.optional(), + subtask: z.boolean().optional(), +}) -function trim(file: string) { - const ext = path.extname(file) - return ext.length ? 
file.slice(0, -ext.length) : file -} +export type Info = z.infer -export namespace ConfigCommand { - export const Info = z.object({ - template: z.string(), - description: z.string().optional(), - agent: z.string().optional(), - model: ModelId.optional(), - subtask: z.boolean().optional(), - }) +export async function load(dir: string) { + const result: Record = {} + for (const item of await Glob.scan("{command,commands}/**/*.md", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + const md = await ConfigMarkdown.parse(item).catch(async (err) => { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? err.data.message + : `Failed to parse command ${item}` + const { Session } = await import("@/session") + void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load command", { command: item, err }) + return undefined + }) + if (!md) continue - export type Info = z.infer + const patterns = ["/.opencode/command/", "/.opencode/commands/", "/command/", "/commands/"] + const name = configEntryNameFromPath(item, patterns) - export async function load(dir: string) { - const result: Record = {} - for (const item of await Glob.scan("{command,commands}/**/*.md", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - const md = await ConfigMarkdown.parse(item).catch(async (err) => { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse command ${item}` - const { Session } = await import("@/session") - void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load command", { command: item, err }) - return undefined - }) - if (!md) continue - - const patterns = ["/.opencode/command/", "/.opencode/commands/", "/command/", "/commands/"] - const file = rel(item, patterns) ?? 
path.basename(item) - const name = trim(file) - - const config = { - name, - ...md.data, - template: md.content.trim(), - } - const parsed = Info.safeParse(config) - if (parsed.success) { - result[config.name] = parsed.data - continue - } - throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) + const config = { + name, + ...md.data, + template: md.content.trim(), } - return result + const parsed = Info.safeParse(config) + if (parsed.success) { + result[config.name] = parsed.data + continue + } + throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) } + return result } diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 3922357f2e..92d66cf2bb 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -20,12 +20,9 @@ import { import { Instance, type InstanceContext } from "../project/instance" import * as LSPServer from "../lsp/server" import { InstallationLocal, InstallationVersion } from "@/installation/version" -import * as ConfigMarkdown from "./markdown" import { existsSync } from "fs" -import { Bus } from "@/bus" import { GlobalBus } from "@/bus/global" import { Event } from "../server/event" -import { Glob } from "@opencode-ai/shared/util/glob" import { Account } from "@/account" import { isRecord } from "@/util/record" import * as ConfigPaths from "./paths" @@ -36,22 +33,13 @@ import { Context, Duration, Effect, Exit, Fiber, Layer, Option } from "effect" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" import { InstanceRef } from "@/effect/instance-ref" import { Npm } from "@opencode-ai/shared/npm" +import { ConfigAgent } from "./agent" +import { ConfigMCP } from "./mcp" +import { ConfigModelID } from "./model-id" import { ConfigPlugin } from "./plugin" import { ConfigManaged } from "./managed" import { ConfigCommand } from "./command" - -const ModelId = z.string().meta({ 
$ref: "https://models.dev/model-schema.json#/$defs/Model" }) -const PluginOptions = z.record(z.string(), z.unknown()) -export const PluginSpec = z.union([z.string(), z.tuple([z.string(), PluginOptions])]) - -export type PluginOptions = z.infer -export type PluginSpec = z.infer -export type PluginScope = "global" | "local" -export type PluginOrigin = { - spec: PluginSpec - source: string - scope: PluginScope -} +import { ConfigPermission } from "./permission" const log = Log.create({ service: "config" }) @@ -64,231 +52,6 @@ function mergeConfigConcatArrays(target: Info, source: Info): Info { return merged } -export type InstallInput = { - waitTick?: (input: { dir: string; attempt: number; delay: number; waited: number }) => void | Promise -} - -function rel(item: string, patterns: string[]) { - const normalizedItem = item.replaceAll("\\", "/") - for (const pattern of patterns) { - const index = normalizedItem.indexOf(pattern) - if (index === -1) continue - return normalizedItem.slice(index + pattern.length) - } -} - -function trim(file: string) { - const ext = path.extname(file) - return ext.length ? file.slice(0, -ext.length) : file -} - -async function loadAgent(dir: string) { - const result: Record = {} - - for (const item of await Glob.scan("{agent,agents}/**/*.md", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - const md = await ConfigMarkdown.parse(item).catch(async (err) => { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse agent ${item}` - const { Session } = await import("@/session") - void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load agent", { agent: item, err }) - return undefined - }) - if (!md) continue - - const patterns = ["/.opencode/agent/", "/.opencode/agents/", "/agent/", "/agents/"] - const file = rel(item, patterns) ?? 
path.basename(item) - const agentName = trim(file) - - const config = { - name: agentName, - ...md.data, - prompt: md.content.trim(), - } - const parsed = Agent.safeParse(config) - if (parsed.success) { - result[config.name] = parsed.data - continue - } - throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) - } - return result -} - -async function loadMode(dir: string) { - const result: Record = {} - for (const item of await Glob.scan("{mode,modes}/*.md", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - const md = await ConfigMarkdown.parse(item).catch(async (err) => { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse mode ${item}` - const { Session } = await import("@/session") - void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load mode", { mode: item, err }) - return undefined - }) - if (!md) continue - - const config = { - name: path.basename(item, ".md"), - ...md.data, - prompt: md.content.trim(), - } - const parsed = Agent.safeParse(config) - if (parsed.success) { - result[config.name] = { - ...parsed.data, - mode: "primary" as const, - } - continue - } - } - return result -} - -export const McpLocal = z - .object({ - type: z.literal("local").describe("Type of MCP server connection"), - command: z.string().array().describe("Command and arguments to run the MCP server"), - environment: z - .record(z.string(), z.string()) - .optional() - .describe("Environment variables to set when running the MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpLocalConfig", - }) - -export const McpOAuth = z - .object({ - clientId: z - .string() - .optional() - .describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."), - clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), - scope: z.string().optional().describe("OAuth scopes to request during authorization"), - redirectUri: z - .string() - .optional() - .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), - }) - .strict() - .meta({ - ref: "McpOAuthConfig", - }) -export type McpOAuth = z.infer - -export const McpRemote = z - .object({ - type: z.literal("remote").describe("Type of MCP server connection"), - url: z.string().describe("URL of the remote MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), - oauth: z - .union([McpOAuth, z.literal(false)]) - .optional() - .describe("OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection."), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpRemoteConfig", - }) - -export const Mcp = z.discriminatedUnion("type", [McpLocal, McpRemote]) -export type Mcp = z.infer - -export const PermissionAction = z.enum(["ask", "allow", "deny"]).meta({ - ref: "PermissionActionConfig", -}) -export type PermissionAction = z.infer - -export const PermissionObject = z.record(z.string(), PermissionAction).meta({ - ref: "PermissionObjectConfig", -}) -export type PermissionObject = z.infer - -export const PermissionRule = z.union([PermissionAction, PermissionObject]).meta({ - ref: "PermissionRuleConfig", -}) -export type PermissionRule = z.infer - -// Capture original key order before zod reorders, then rebuild in original order -const permissionPreprocess = (val: unknown) => { - if (typeof val === "object" && val !== null && !Array.isArray(val)) { - return { __originalKeys: Object.keys(val), ...val } - } - return val -} - -const permissionTransform = (x: unknown): Record => { - if (typeof x === "string") return { "*": x as PermissionAction } - const obj = x as { __originalKeys?: string[] } & Record - const { __originalKeys, ...rest } = obj - if (!__originalKeys) return rest as Record - const result: Record = {} - for (const key of __originalKeys) { - if (key in rest) result[key] = rest[key] as PermissionRule - } - return result -} - -export const Permission = z - .preprocess( - permissionPreprocess, - z - .object({ - __originalKeys: z.string().array().optional(), - read: PermissionRule.optional(), - edit: PermissionRule.optional(), - glob: PermissionRule.optional(), - grep: PermissionRule.optional(), - list: PermissionRule.optional(), - bash: PermissionRule.optional(), - task: PermissionRule.optional(), - external_directory: PermissionRule.optional(), - todowrite: PermissionAction.optional(), - question: PermissionAction.optional(), - webfetch: PermissionAction.optional(), - websearch: PermissionAction.optional(), - codesearch: 
PermissionAction.optional(), - lsp: PermissionRule.optional(), - doom_loop: PermissionAction.optional(), - skill: PermissionRule.optional(), - }) - .catchall(PermissionRule) - .or(PermissionAction), - ) - .transform(permissionTransform) - .meta({ - ref: "PermissionConfig", - }) -export type Permission = z.infer - export const Skills = z.object({ paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), urls: z @@ -298,95 +61,6 @@ export const Skills = z.object({ }) export type Skills = z.infer -export const Agent = z - .object({ - model: ModelId.optional(), - variant: z - .string() - .optional() - .describe("Default model variant for this agent (applies only when using the agent's configured model)."), - temperature: z.number().optional(), - top_p: z.number().optional(), - prompt: z.string().optional(), - tools: z.record(z.string(), z.boolean()).optional().describe("@deprecated Use 'permission' field instead"), - disable: z.boolean().optional(), - description: z.string().optional().describe("Description of when to use the agent"), - mode: z.enum(["subagent", "primary", "all"]).optional(), - hidden: z - .boolean() - .optional() - .describe("Hide this subagent from the @ autocomplete menu (default: false, only applies to mode: subagent)"), - options: z.record(z.string(), z.any()).optional(), - color: z - .union([ - z.string().regex(/^#[0-9a-fA-F]{6}$/, "Invalid hex color format"), - z.enum(["primary", "secondary", "accent", "success", "warning", "error", "info"]), - ]) - .optional() - .describe("Hex color code (e.g., #FF5733) or theme color (e.g., primary)"), - steps: z - .number() - .int() - .positive() - .optional() - .describe("Maximum number of agentic iterations before forcing text-only response"), - maxSteps: z.number().int().positive().optional().describe("@deprecated Use 'steps' field instead."), - permission: Permission.optional(), - }) - .catchall(z.any()) - .transform((agent, _ctx) => { - const knownKeys = new Set([ - "name", - 
"model", - "variant", - "prompt", - "description", - "temperature", - "top_p", - "mode", - "hidden", - "color", - "steps", - "maxSteps", - "options", - "permission", - "disable", - "tools", - ]) - - // Extract unknown properties into options - const options: Record = { ...agent.options } - for (const [key, value] of Object.entries(agent)) { - if (!knownKeys.has(key)) options[key] = value - } - - // Convert legacy tools config to permissions - const permission: Permission = {} - for (const [tool, enabled] of Object.entries(agent.tools ?? {})) { - const action = enabled ? "allow" : "deny" - // write, edit, patch, multiedit all map to edit permission - if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { - permission.edit = action - } else { - permission[tool] = action - } - } - Object.assign(permission, agent.permission) - - // Convert legacy maxSteps to steps - const steps = agent.steps ?? agent.maxSteps - - return { ...agent, options, permission, steps } as typeof agent & { - options?: Record - permission?: Permission - steps?: number - } - }) - .meta({ - ref: "AgentConfig", - }) -export type Agent = z.infer - export const Keybinds = z .object({ leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"), @@ -696,7 +370,7 @@ export const Info = z .describe( "Enable or disable snapshot tracking. When false, filesystem snapshots are not recorded and undoing or reverting will not undo/redo file changes. Defaults to true.", ), - plugin: PluginSpec.array().optional(), + plugin: ConfigPlugin.Spec.array().optional(), share: z .enum(["manual", "auto", "disabled"]) .optional() @@ -718,8 +392,8 @@ export const Info = z .array(z.string()) .optional() .describe("When set, ONLY these providers will be enabled. 
All other providers will be ignored"), - model: ModelId.describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(), - small_model: ModelId.describe( + model: ConfigModelID.describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(), + small_model: ConfigModelID.describe( "Small model to use for tasks like title generation in the format of provider/model", ).optional(), default_agent: z @@ -731,26 +405,26 @@ export const Info = z username: z.string().optional().describe("Custom username to display in conversations instead of system username"), mode: z .object({ - build: Agent.optional(), - plan: Agent.optional(), + build: ConfigAgent.Info.optional(), + plan: ConfigAgent.Info.optional(), }) - .catchall(Agent) + .catchall(ConfigAgent.Info) .optional() .describe("@deprecated Use `agent` field instead."), agent: z .object({ // primary - plan: Agent.optional(), - build: Agent.optional(), + plan: ConfigAgent.Info.optional(), + build: ConfigAgent.Info.optional(), // subagent - general: Agent.optional(), - explore: Agent.optional(), + general: ConfigAgent.Info.optional(), + explore: ConfigAgent.Info.optional(), // specialized - title: Agent.optional(), - summary: Agent.optional(), - compaction: Agent.optional(), + title: ConfigAgent.Info.optional(), + summary: ConfigAgent.Info.optional(), + compaction: ConfigAgent.Info.optional(), }) - .catchall(Agent) + .catchall(ConfigAgent.Info) .optional() .describe("Agent configuration, see https://opencode.ai/docs/agents"), provider: z.record(z.string(), Provider).optional().describe("Custom provider configurations and model overrides"), @@ -758,7 +432,7 @@ export const Info = z .record( z.string(), z.union([ - Mcp, + ConfigMCP.Info, z .object({ enabled: z.boolean(), @@ -820,7 +494,7 @@ export const Info = z ), instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"), layout: Layout.optional().describe("@deprecated Always 
uses stretch layout."), - permission: Permission.optional(), + permission: ConfigPermission.Info.optional(), tools: z.record(z.string(), z.boolean()).optional(), enterprise: z .object({ @@ -867,7 +541,7 @@ export const Info = z }) export type Info = z.output & { - plugin_origins?: PluginOrigin[] + plugin_origins?: ConfigPlugin.Origin[] } type State = { @@ -1084,10 +758,17 @@ export const layer = Layer.effect( const gitignore = path.join(dir, ".gitignore") const hasIgnore = yield* fs.existsSafe(gitignore) if (!hasIgnore) { - yield* fs.writeFileString( - gitignore, - ["node_modules", "package.json", "package-lock.json", "bun.lock", ".gitignore"].join("\n"), - ) + yield* fs + .writeFileString( + gitignore, + ["node_modules", "package.json", "package-lock.json", "bun.lock", ".gitignore"].join("\n"), + ) + .pipe( + Effect.catchIf( + (e) => e.reason._tag === "PermissionDenied", + () => Effect.void, + ), + ) } }) @@ -1105,7 +786,11 @@ export const layer = Layer.effect( return "global" }) - const track = Effect.fnUntraced(function* (source: string, list: PluginSpec[] | undefined, kind?: PluginScope) { + const track = Effect.fnUntraced(function* ( + source: string, + list: ConfigPlugin.Spec[] | undefined, + kind?: ConfigPlugin.Scope, + ) { if (!list?.length) return const hit = kind ?? 
(yield* scope(source)) const plugins = ConfigPlugin.deduplicatePluginOrigins([ @@ -1116,7 +801,7 @@ export const layer = Layer.effect( result.plugin_origins = plugins }) - const merge = (source: string, next: Info, kind?: PluginScope) => { + const merge = (source: string, next: Info, kind?: ConfigPlugin.Scope) => { result = mergeConfigConcatArrays(result, next) return track(source, next.plugin, kind) } @@ -1183,7 +868,7 @@ export const layer = Layer.effect( } } - yield* ensureGitignore(dir).pipe(Effect.forkScoped) + yield* ensureGitignore(dir).pipe(Effect.orDie) const dep = yield* npmSvc .install(dir, { @@ -1204,8 +889,8 @@ export const layer = Layer.effect( deps.push(dep) result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) - result.agent = mergeDeep(result.agent, yield* Effect.promise(() => loadAgent(dir))) - result.agent = mergeDeep(result.agent, yield* Effect.promise(() => loadMode(dir))) + result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.load(dir))) + result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.loadMode(dir))) const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) yield* track(dir, list) } @@ -1284,9 +969,9 @@ export const layer = Layer.effect( } if (result.tools) { - const perms: Record = {} + const perms: Record = {} for (const [tool, enabled] of Object.entries(result.tools)) { - const action: PermissionAction = enabled ? "allow" : "deny" + const action: ConfigPermission.Action = enabled ? 
"allow" : "deny" if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { perms.edit = action continue diff --git a/packages/opencode/src/config/entry-name.ts b/packages/opencode/src/config/entry-name.ts new file mode 100644 index 0000000000..a553152c97 --- /dev/null +++ b/packages/opencode/src/config/entry-name.ts @@ -0,0 +1,16 @@ +import path from "path" + +function sliceAfterMatch(filePath: string, searchRoots: string[]) { + const normalizedPath = filePath.replaceAll("\\", "/") + for (const searchRoot of searchRoots) { + const index = normalizedPath.indexOf(searchRoot) + if (index === -1) continue + return normalizedPath.slice(index + searchRoot.length) + } +} + +export function configEntryNameFromPath(filePath: string, searchRoots: string[]) { + const candidate = sliceAfterMatch(filePath, searchRoots) ?? path.basename(filePath) + const ext = path.extname(candidate) + return ext.length ? candidate.slice(0, -ext.length) : candidate +} diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index 8380d370d8..f1af71867d 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -1,5 +1,9 @@ export * as Config from "./config" +export * as ConfigAgent from "./agent" export * as ConfigCommand from "./command" export { ConfigManaged } from "./managed" export * as ConfigMarkdown from "./markdown" +export * as ConfigMCP from "./mcp" +export { ConfigModelID } from "./model-id" +export * as ConfigPermission from "./permission" export * as ConfigPaths from "./paths" diff --git a/packages/opencode/src/config/mcp.ts b/packages/opencode/src/config/mcp.ts new file mode 100644 index 0000000000..fb8f8caa41 --- /dev/null +++ b/packages/opencode/src/config/mcp.ts @@ -0,0 +1,70 @@ +import z from "zod" + +export namespace ConfigMCP { + export const Local = z + .object({ + type: z.literal("local").describe("Type of MCP server connection"), + command: 
z.string().array().describe("Command and arguments to run the MCP server"), + environment: z + .record(z.string(), z.string()) + .optional() + .describe("Environment variables to set when running the MCP server"), + enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), + timeout: z + .number() + .int() + .positive() + .optional() + .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), + }) + .strict() + .meta({ + ref: "McpLocalConfig", + }) + + export const OAuth = z + .object({ + clientId: z + .string() + .optional() + .describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."), + clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), + scope: z.string().optional().describe("OAuth scopes to request during authorization"), + redirectUri: z + .string() + .optional() + .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), + }) + .strict() + .meta({ + ref: "McpOAuthConfig", + }) + export type OAuth = z.infer + + export const Remote = z + .object({ + type: z.literal("remote").describe("Type of MCP server connection"), + url: z.string().describe("URL of the remote MCP server"), + enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), + headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), + oauth: z + .union([OAuth, z.literal(false)]) + .optional() + .describe( + "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", + ), + timeout: z + .number() + .int() + .positive() + .optional() + .describe("Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified."), + }) + .strict() + .meta({ + ref: "McpRemoteConfig", + }) + + export const Info = z.discriminatedUnion("type", [Local, Remote]) + export type Info = z.infer +} diff --git a/packages/opencode/src/config/model-id.ts b/packages/opencode/src/config/model-id.ts new file mode 100644 index 0000000000..909e9aa929 --- /dev/null +++ b/packages/opencode/src/config/model-id.ts @@ -0,0 +1,3 @@ +import z from "zod" + +export const ConfigModelID = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) diff --git a/packages/opencode/src/config/permission.ts b/packages/opencode/src/config/permission.ts new file mode 100644 index 0000000000..af01f6f2a3 --- /dev/null +++ b/packages/opencode/src/config/permission.ts @@ -0,0 +1,68 @@ +export * as ConfigPermission from "./permission" +import z from "zod" + +const permissionPreprocess = (val: unknown) => { + if (typeof val === "object" && val !== null && !Array.isArray(val)) { + return { __originalKeys: globalThis.Object.keys(val), ...val } + } + return val +} + +export const Action = z.enum(["ask", "allow", "deny"]).meta({ + ref: "PermissionActionConfig", +}) +export type Action = z.infer + +export const Object = z.record(z.string(), Action).meta({ + ref: "PermissionObjectConfig", +}) +export type Object = z.infer + +export const Rule = z.union([Action, Object]).meta({ + ref: "PermissionRuleConfig", +}) +export type Rule = z.infer + +const transform = (x: unknown): Record => { + if (typeof x === "string") return { "*": x as Action } + const obj = x as { __originalKeys?: string[] } & Record + const { __originalKeys, ...rest } = obj + if (!__originalKeys) return rest as Record + const result: Record = {} + for (const key of __originalKeys) { + if (key in rest) result[key] = rest[key] as Rule + } + return result +} + +export const Info = z + .preprocess( + permissionPreprocess, + z + .object({ + __originalKeys: z.string().array().optional(), + read: Rule.optional(), 
+ edit: Rule.optional(), + glob: Rule.optional(), + grep: Rule.optional(), + list: Rule.optional(), + bash: Rule.optional(), + task: Rule.optional(), + external_directory: Rule.optional(), + todowrite: Action.optional(), + question: Action.optional(), + webfetch: Action.optional(), + websearch: Action.optional(), + codesearch: Action.optional(), + lsp: Rule.optional(), + doom_loop: Action.optional(), + skill: Rule.optional(), + }) + .catchall(Rule) + .or(Action), + ) + .transform(transform) + .meta({ + ref: "PermissionConfig", + }) +export type Info = z.infer diff --git a/packages/opencode/src/mcp/mcp.ts b/packages/opencode/src/mcp/mcp.ts index 1f1022538f..6666e0854f 100644 --- a/packages/opencode/src/mcp/mcp.ts +++ b/packages/opencode/src/mcp/mcp.ts @@ -10,6 +10,7 @@ import { ToolListChangedNotificationSchema, } from "@modelcontextprotocol/sdk/types.js" import { Config } from "../config" +import { ConfigMCP } from "../config/mcp" import { Log } from "../util" import { NamedError } from "@opencode-ai/shared/util/error" import z from "zod/v4" @@ -123,7 +124,7 @@ type PromptInfo = Awaited>["prompts"][numbe type ResourceInfo = Awaited>["resources"][number] type McpEntry = NonNullable[string] -function isMcpConfigured(entry: McpEntry): entry is Config.Mcp { +function isMcpConfigured(entry: McpEntry): entry is ConfigMCP.Info { return typeof entry === "object" && entry !== null && "type" in entry } @@ -224,7 +225,7 @@ export interface Interface { readonly tools: () => Effect.Effect> readonly prompts: () => Effect.Effect> readonly resources: () => Effect.Effect> - readonly add: (name: string, mcp: Config.Mcp) => Effect.Effect<{ status: Record | Status }> + readonly add: (name: string, mcp: ConfigMCP.Info) => Effect.Effect<{ status: Record | Status }> readonly connect: (name: string) => Effect.Effect readonly disconnect: (name: string) => Effect.Effect readonly getPrompt: ( @@ -276,7 +277,10 @@ export const layer = Layer.effect( const DISABLED_RESULT: CreateResult = { 
status: { status: "disabled" } } - const connectRemote = Effect.fn("MCP.connectRemote")(function* (key: string, mcp: Config.Mcp & { type: "remote" }) { + const connectRemote = Effect.fn("MCP.connectRemote")(function* ( + key: string, + mcp: ConfigMCP.Info & { type: "remote" }, + ) { const oauthDisabled = mcp.oauth === false const oauthConfig = typeof mcp.oauth === "object" ? mcp.oauth : undefined let authProvider: McpOAuthProvider | undefined @@ -382,7 +386,10 @@ export const layer = Layer.effect( } }) - const connectLocal = Effect.fn("MCP.connectLocal")(function* (key: string, mcp: Config.Mcp & { type: "local" }) { + const connectLocal = Effect.fn("MCP.connectLocal")(function* ( + key: string, + mcp: ConfigMCP.Info & { type: "local" }, + ) { const [cmd, ...args] = mcp.command const cwd = Instance.directory const transport = new StdioClientTransport({ @@ -414,7 +421,7 @@ export const layer = Layer.effect( ) }) - const create = Effect.fn("MCP.create")(function* (key: string, mcp: Config.Mcp) { + const create = Effect.fn("MCP.create")(function* (key: string, mcp: ConfigMCP.Info) { if (mcp.enabled === false) { log.info("mcp server disabled", { key }) return DISABLED_RESULT @@ -424,8 +431,8 @@ export const layer = Layer.effect( const { client: mcpClient, status } = mcp.type === "remote" - ? yield* connectRemote(key, mcp as Config.Mcp & { type: "remote" }) - : yield* connectLocal(key, mcp as Config.Mcp & { type: "local" }) + ? 
yield* connectRemote(key, mcp as ConfigMCP.Info & { type: "remote" }) + : yield* connectLocal(key, mcp as ConfigMCP.Info & { type: "local" }) if (!mcpClient) { return { status } satisfies CreateResult @@ -588,7 +595,7 @@ export const layer = Layer.effect( return s.clients }) - const createAndStore = Effect.fn("MCP.createAndStore")(function* (name: string, mcp: Config.Mcp) { + const createAndStore = Effect.fn("MCP.createAndStore")(function* (name: string, mcp: ConfigMCP.Info) { const s = yield* InstanceState.get(state) const result = yield* create(name, mcp) @@ -602,7 +609,7 @@ export const layer = Layer.effect( return yield* storeClient(s, name, result.mcpClient, result.defs!, mcp.timeout) }) - const add = Effect.fn("MCP.add")(function* (name: string, mcp: Config.Mcp) { + const add = Effect.fn("MCP.add")(function* (name: string, mcp: ConfigMCP.Info) { yield* createAndStore(name, mcp) const s = yield* InstanceState.get(state) return { status: s.status } diff --git a/packages/opencode/src/permission/permission.ts b/packages/opencode/src/permission/permission.ts index fe7fb85455..44dac3b1db 100644 --- a/packages/opencode/src/permission/permission.ts +++ b/packages/opencode/src/permission/permission.ts @@ -1,6 +1,6 @@ import { Bus } from "@/bus" import { BusEvent } from "@/bus/bus-event" -import { Config } from "@/config" +import { ConfigPermission } from "@/config/permission" import { InstanceState } from "@/effect" import { ProjectID } from "@/project/schema" import { MessageID, SessionID } from "@/session/schema" @@ -289,7 +289,7 @@ function expand(pattern: string): string { return pattern } -export function fromConfig(permission: Config.Permission) { +export function fromConfig(permission: ConfigPermission.Info) { const ruleset: Ruleset = [] for (const [key, value] of Object.entries(permission)) { if (typeof value === "string") { diff --git a/packages/opencode/src/server/instance/mcp.ts b/packages/opencode/src/server/instance/mcp.ts index 695008fc4e..f6e6f1eddb 
100644 --- a/packages/opencode/src/server/instance/mcp.ts +++ b/packages/opencode/src/server/instance/mcp.ts @@ -3,6 +3,7 @@ import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" import { MCP } from "../../mcp" import { Config } from "../../config" +import { ConfigMCP } from "../../config/mcp" import { AppRuntime } from "../../effect/app-runtime" import { errors } from "../error" import { lazy } from "../../util/lazy" @@ -53,7 +54,7 @@ export const McpRoutes = lazy(() => "json", z.object({ name: z.string(), - config: Config.Mcp, + config: ConfigMCP.Info, }), ), async (c) => { diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index 303fa8ba08..21d6e3e93d 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -845,6 +845,9 @@ test("installs dependencies in writable OPENCODE_CONFIG_DIR", async () => { }, }) + // TODO: this is a hack to wait for backgrounded gitignore + await new Promise((resolve) => setTimeout(resolve, 1000)) + expect(await Filesystem.exists(path.join(tmp.extra, ".gitignore"))).toBe(true) expect(await Filesystem.readText(path.join(tmp.extra, ".gitignore"))).toContain("package-lock.json") } finally { @@ -1865,7 +1868,7 @@ describe("resolvePluginSpec", () => { }) describe("deduplicatePluginOrigins", () => { - const dedupe = (plugins: Config.PluginSpec[]) => + const dedupe = (plugins: ConfigPlugin.Spec[]) => ConfigPlugin.deduplicatePluginOrigins( plugins.map((spec) => ({ spec,
packages/opencode/src/config/provider.ts | 120 +++++++++ packages/opencode/src/config/skills.ts | 13 + 5 files changed, 296 insertions(+), 443 deletions(-) create mode 100644 packages/opencode/src/config/provider.ts create mode 100644 packages/opencode/src/config/skills.ts diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 92d66cf2bb..7df5dbe2ff 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -40,6 +40,8 @@ import { ConfigPlugin } from "./plugin" import { ConfigManaged } from "./managed" import { ConfigCommand } from "./command" import { ConfigPermission } from "./permission" +import { ConfigProvider } from "./provider" +import { ConfigSkills } from "./skills" const log = Log.create({ service: "config" }) @@ -52,168 +54,6 @@ function mergeConfigConcatArrays(target: Info, source: Info): Info { return merged } -export const Skills = z.object({ - paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), - urls: z - .array(z.string()) - .optional() - .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), -}) -export type Skills = z.infer - -export const Keybinds = z - .object({ - leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"), - app_exit: z.string().optional().default("ctrl+c,ctrl+d,q").describe("Exit the application"), - editor_open: z.string().optional().default("e").describe("Open external editor"), - theme_list: z.string().optional().default("t").describe("List available themes"), - sidebar_toggle: z.string().optional().default("b").describe("Toggle sidebar"), - scrollbar_toggle: z.string().optional().default("none").describe("Toggle session scrollbar"), - username_toggle: z.string().optional().default("none").describe("Toggle username visibility"), - status_view: z.string().optional().default("s").describe("View status"), - session_export: 
z.string().optional().default("x").describe("Export session to editor"), - session_new: z.string().optional().default("n").describe("Create a new session"), - session_list: z.string().optional().default("l").describe("List all sessions"), - session_timeline: z.string().optional().default("g").describe("Show session timeline"), - session_fork: z.string().optional().default("none").describe("Fork session from message"), - session_rename: z.string().optional().default("ctrl+r").describe("Rename session"), - session_delete: z.string().optional().default("ctrl+d").describe("Delete session"), - stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"), - model_provider_list: z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"), - model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"), - session_share: z.string().optional().default("none").describe("Share current session"), - session_unshare: z.string().optional().default("none").describe("Unshare current session"), - session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"), - session_compact: z.string().optional().default("c").describe("Compact the session"), - messages_page_up: z.string().optional().default("pageup,ctrl+alt+b").describe("Scroll messages up by one page"), - messages_page_down: z - .string() - .optional() - .default("pagedown,ctrl+alt+f") - .describe("Scroll messages down by one page"), - messages_line_up: z.string().optional().default("ctrl+alt+y").describe("Scroll messages up by one line"), - messages_line_down: z.string().optional().default("ctrl+alt+e").describe("Scroll messages down by one line"), - messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"), - messages_half_page_down: z.string().optional().default("ctrl+alt+d").describe("Scroll messages down by half page"), - messages_first: 
z.string().optional().default("ctrl+g,home").describe("Navigate to first message"), - messages_last: z.string().optional().default("ctrl+alt+g,end").describe("Navigate to last message"), - messages_next: z.string().optional().default("none").describe("Navigate to next message"), - messages_previous: z.string().optional().default("none").describe("Navigate to previous message"), - messages_last_user: z.string().optional().default("none").describe("Navigate to last user message"), - messages_copy: z.string().optional().default("y").describe("Copy message"), - messages_undo: z.string().optional().default("u").describe("Undo message"), - messages_redo: z.string().optional().default("r").describe("Redo message"), - messages_toggle_conceal: z - .string() - .optional() - .default("h") - .describe("Toggle code block concealment in messages"), - tool_details: z.string().optional().default("none").describe("Toggle tool details visibility"), - model_list: z.string().optional().default("m").describe("List available models"), - model_cycle_recent: z.string().optional().default("f2").describe("Next recently used model"), - model_cycle_recent_reverse: z.string().optional().default("shift+f2").describe("Previous recently used model"), - model_cycle_favorite: z.string().optional().default("none").describe("Next favorite model"), - model_cycle_favorite_reverse: z.string().optional().default("none").describe("Previous favorite model"), - command_list: z.string().optional().default("ctrl+p").describe("List available commands"), - agent_list: z.string().optional().default("a").describe("List agents"), - agent_cycle: z.string().optional().default("tab").describe("Next agent"), - agent_cycle_reverse: z.string().optional().default("shift+tab").describe("Previous agent"), - variant_cycle: z.string().optional().default("ctrl+t").describe("Cycle model variants"), - variant_list: z.string().optional().default("none").describe("List model variants"), - input_clear: 
z.string().optional().default("ctrl+c").describe("Clear input field"), - input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"), - input_submit: z.string().optional().default("return").describe("Submit input"), - input_newline: z - .string() - .optional() - .default("shift+return,ctrl+return,alt+return,ctrl+j") - .describe("Insert newline in input"), - input_move_left: z.string().optional().default("left,ctrl+b").describe("Move cursor left in input"), - input_move_right: z.string().optional().default("right,ctrl+f").describe("Move cursor right in input"), - input_move_up: z.string().optional().default("up").describe("Move cursor up in input"), - input_move_down: z.string().optional().default("down").describe("Move cursor down in input"), - input_select_left: z.string().optional().default("shift+left").describe("Select left in input"), - input_select_right: z.string().optional().default("shift+right").describe("Select right in input"), - input_select_up: z.string().optional().default("shift+up").describe("Select up in input"), - input_select_down: z.string().optional().default("shift+down").describe("Select down in input"), - input_line_home: z.string().optional().default("ctrl+a").describe("Move to start of line in input"), - input_line_end: z.string().optional().default("ctrl+e").describe("Move to end of line in input"), - input_select_line_home: z.string().optional().default("ctrl+shift+a").describe("Select to start of line in input"), - input_select_line_end: z.string().optional().default("ctrl+shift+e").describe("Select to end of line in input"), - input_visual_line_home: z.string().optional().default("alt+a").describe("Move to start of visual line in input"), - input_visual_line_end: z.string().optional().default("alt+e").describe("Move to end of visual line in input"), - input_select_visual_line_home: z - .string() - .optional() - .default("alt+shift+a") - .describe("Select to start of visual line in input"), - 
input_select_visual_line_end: z - .string() - .optional() - .default("alt+shift+e") - .describe("Select to end of visual line in input"), - input_buffer_home: z.string().optional().default("home").describe("Move to start of buffer in input"), - input_buffer_end: z.string().optional().default("end").describe("Move to end of buffer in input"), - input_select_buffer_home: z - .string() - .optional() - .default("shift+home") - .describe("Select to start of buffer in input"), - input_select_buffer_end: z.string().optional().default("shift+end").describe("Select to end of buffer in input"), - input_delete_line: z.string().optional().default("ctrl+shift+d").describe("Delete line in input"), - input_delete_to_line_end: z.string().optional().default("ctrl+k").describe("Delete to end of line in input"), - input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"), - input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"), - input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"), - input_undo: z.string().optional().default("ctrl+-,super+z").describe("Undo in input"), - input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"), - input_word_forward: z - .string() - .optional() - .default("alt+f,alt+right,ctrl+right") - .describe("Move word forward in input"), - input_word_backward: z - .string() - .optional() - .default("alt+b,alt+left,ctrl+left") - .describe("Move word backward in input"), - input_select_word_forward: z - .string() - .optional() - .default("alt+shift+f,alt+shift+right") - .describe("Select word forward in input"), - input_select_word_backward: z - .string() - .optional() - .default("alt+shift+b,alt+shift+left") - .describe("Select word backward in input"), - input_delete_word_forward: z - .string() - .optional() - .default("alt+d,alt+delete,ctrl+delete") - 
.describe("Delete word forward in input"), - input_delete_word_backward: z - .string() - .optional() - .default("ctrl+w,ctrl+backspace,alt+backspace") - .describe("Delete word backward in input"), - history_previous: z.string().optional().default("up").describe("Previous history item"), - history_next: z.string().optional().default("down").describe("Next history item"), - session_child_first: z.string().optional().default("down").describe("Go to first child session"), - session_child_cycle: z.string().optional().default("right").describe("Go to next child session"), - session_child_cycle_reverse: z.string().optional().default("left").describe("Go to previous child session"), - session_parent: z.string().optional().default("up").describe("Go to parent session"), - terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"), - terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"), - tips_toggle: z.string().optional().default("h").describe("Toggle tips on home screen"), - plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"), - display_thinking: z.string().optional().default("none").describe("Toggle thinking blocks visibility"), - }) - .strict() - .meta({ - ref: "KeybindsConfig", - }) - export const Server = z .object({ port: z.number().int().positive().optional().describe("Port to listen on"), @@ -232,123 +72,6 @@ export const Layout = z.enum(["auto", "stretch"]).meta({ }) export type Layout = z.infer -export const Model = z - .object({ - id: z.string(), - name: z.string(), - family: z.string().optional(), - release_date: z.string(), - attachment: z.boolean(), - reasoning: z.boolean(), - temperature: z.boolean(), - tool_call: z.boolean(), - interleaved: z - .union([ - z.literal(true), - z - .object({ - field: z.enum(["reasoning_content", "reasoning_details"]), - }) - .strict(), - ]) - .optional(), - cost: z - .object({ - input: z.number(), - output: z.number(), 
- cache_read: z.number().optional(), - cache_write: z.number().optional(), - context_over_200k: z - .object({ - input: z.number(), - output: z.number(), - cache_read: z.number().optional(), - cache_write: z.number().optional(), - }) - .optional(), - }) - .optional(), - limit: z.object({ - context: z.number(), - input: z.number().optional(), - output: z.number(), - }), - modalities: z - .object({ - input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - }) - .optional(), - experimental: z.boolean().optional(), - status: z.enum(["alpha", "beta", "deprecated"]).optional(), - provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(), - options: z.record(z.string(), z.any()), - headers: z.record(z.string(), z.string()).optional(), - variants: z - .record( - z.string(), - z - .object({ - disabled: z.boolean().optional().describe("Disable this variant for the model"), - }) - .catchall(z.any()), - ) - .optional() - .describe("Variant-specific configuration"), - }) - .partial() - -export const Provider = z - .object({ - api: z.string().optional(), - name: z.string(), - env: z.array(z.string()), - id: z.string(), - npm: z.string().optional(), - whitelist: z.array(z.string()).optional(), - blacklist: z.array(z.string()).optional(), - options: z - .object({ - apiKey: z.string().optional(), - baseURL: z.string().optional(), - enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"), - setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"), - timeout: z - .union([ - z - .number() - .int() - .positive() - .describe( - "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). 
Set to false to disable timeout.", - ), - z.literal(false).describe("Disable timeout for this provider entirely."), - ]) - .optional() - .describe( - "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", - ), - chunkTimeout: z - .number() - .int() - .positive() - .optional() - .describe( - "Timeout in milliseconds between streamed SSE chunks for this provider. If no chunk arrives within this window, the request is aborted.", - ), - }) - .catchall(z.any()) - .optional(), - models: z.record(z.string(), Model).optional(), - }) - .partial() - .strict() - .meta({ - ref: "ProviderConfig", - }) - -export type Provider = z.infer - export const Info = z .object({ $schema: z.string().optional().describe("JSON schema reference for configuration validation"), @@ -358,7 +81,7 @@ export const Info = z .record(z.string(), ConfigCommand.Info) .optional() .describe("Command configuration, see https://opencode.ai/docs/commands"), - skills: Skills.optional().describe("Additional skill folder paths"), + skills: ConfigSkills.Info.optional().describe("Additional skill folder paths"), watcher: z .object({ ignore: z.array(z.string()).optional(), @@ -427,7 +150,10 @@ export const Info = z .catchall(ConfigAgent.Info) .optional() .describe("Agent configuration, see https://opencode.ai/docs/agents"), - provider: z.record(z.string(), Provider).optional().describe("Custom provider configurations and model overrides"), + provider: z + .record(z.string(), ConfigProvider.Info) + .optional() + .describe("Custom provider configurations and model overrides"), mcp: z .record( z.string(), diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index f1af71867d..37665d8c67 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -7,3 +7,5 @@ export * as ConfigMCP from "./mcp" export { ConfigModelID } from "./model-id" export * as ConfigPermission from 
"./permission" export * as ConfigPaths from "./paths" +export * as ConfigProvider from "./provider" +export * as ConfigSkills from "./skills" diff --git a/packages/opencode/src/config/keybinds.ts b/packages/opencode/src/config/keybinds.ts index 9b8d9e2834..cb146b7cae 100644 --- a/packages/opencode/src/config/keybinds.ts +++ b/packages/opencode/src/config/keybinds.ts @@ -1,164 +1,156 @@ +export * as ConfigKeybinds from "./keybinds" + import z from "zod" -export namespace ConfigKeybinds { - export const Keybinds = z - .object({ - leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"), - app_exit: z.string().optional().default("ctrl+c,ctrl+d,q").describe("Exit the application"), - editor_open: z.string().optional().default("e").describe("Open external editor"), - theme_list: z.string().optional().default("t").describe("List available themes"), - sidebar_toggle: z.string().optional().default("b").describe("Toggle sidebar"), - scrollbar_toggle: z.string().optional().default("none").describe("Toggle session scrollbar"), - username_toggle: z.string().optional().default("none").describe("Toggle username visibility"), - status_view: z.string().optional().default("s").describe("View status"), - session_export: z.string().optional().default("x").describe("Export session to editor"), - session_new: z.string().optional().default("n").describe("Create a new session"), - session_list: z.string().optional().default("l").describe("List all sessions"), - session_timeline: z.string().optional().default("g").describe("Show session timeline"), - session_fork: z.string().optional().default("none").describe("Fork session from message"), - session_rename: z.string().optional().default("ctrl+r").describe("Rename session"), - session_delete: z.string().optional().default("ctrl+d").describe("Delete session"), - stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"), - model_provider_list: 
z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"), - model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"), - session_share: z.string().optional().default("none").describe("Share current session"), - session_unshare: z.string().optional().default("none").describe("Unshare current session"), - session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"), - session_compact: z.string().optional().default("c").describe("Compact the session"), - messages_page_up: z.string().optional().default("pageup,ctrl+alt+b").describe("Scroll messages up by one page"), - messages_page_down: z - .string() - .optional() - .default("pagedown,ctrl+alt+f") - .describe("Scroll messages down by one page"), - messages_line_up: z.string().optional().default("ctrl+alt+y").describe("Scroll messages up by one line"), - messages_line_down: z.string().optional().default("ctrl+alt+e").describe("Scroll messages down by one line"), - messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"), - messages_half_page_down: z - .string() - .optional() - .default("ctrl+alt+d") - .describe("Scroll messages down by half page"), - messages_first: z.string().optional().default("ctrl+g,home").describe("Navigate to first message"), - messages_last: z.string().optional().default("ctrl+alt+g,end").describe("Navigate to last message"), - messages_next: z.string().optional().default("none").describe("Navigate to next message"), - messages_previous: z.string().optional().default("none").describe("Navigate to previous message"), - messages_last_user: z.string().optional().default("none").describe("Navigate to last user message"), - messages_copy: z.string().optional().default("y").describe("Copy message"), - messages_undo: z.string().optional().default("u").describe("Undo message"), - messages_redo: 
z.string().optional().default("r").describe("Redo message"), - messages_toggle_conceal: z - .string() - .optional() - .default("h") - .describe("Toggle code block concealment in messages"), - tool_details: z.string().optional().default("none").describe("Toggle tool details visibility"), - model_list: z.string().optional().default("m").describe("List available models"), - model_cycle_recent: z.string().optional().default("f2").describe("Next recently used model"), - model_cycle_recent_reverse: z.string().optional().default("shift+f2").describe("Previous recently used model"), - model_cycle_favorite: z.string().optional().default("none").describe("Next favorite model"), - model_cycle_favorite_reverse: z.string().optional().default("none").describe("Previous favorite model"), - command_list: z.string().optional().default("ctrl+p").describe("List available commands"), - agent_list: z.string().optional().default("a").describe("List agents"), - agent_cycle: z.string().optional().default("tab").describe("Next agent"), - agent_cycle_reverse: z.string().optional().default("shift+tab").describe("Previous agent"), - variant_cycle: z.string().optional().default("ctrl+t").describe("Cycle model variants"), - variant_list: z.string().optional().default("none").describe("List model variants"), - input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"), - input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"), - input_submit: z.string().optional().default("return").describe("Submit input"), - input_newline: z - .string() - .optional() - .default("shift+return,ctrl+return,alt+return,ctrl+j") - .describe("Insert newline in input"), - input_move_left: z.string().optional().default("left,ctrl+b").describe("Move cursor left in input"), - input_move_right: z.string().optional().default("right,ctrl+f").describe("Move cursor right in input"), - input_move_up: z.string().optional().default("up").describe("Move cursor up in input"), - 
input_move_down: z.string().optional().default("down").describe("Move cursor down in input"), - input_select_left: z.string().optional().default("shift+left").describe("Select left in input"), - input_select_right: z.string().optional().default("shift+right").describe("Select right in input"), - input_select_up: z.string().optional().default("shift+up").describe("Select up in input"), - input_select_down: z.string().optional().default("shift+down").describe("Select down in input"), - input_line_home: z.string().optional().default("ctrl+a").describe("Move to start of line in input"), - input_line_end: z.string().optional().default("ctrl+e").describe("Move to end of line in input"), - input_select_line_home: z - .string() - .optional() - .default("ctrl+shift+a") - .describe("Select to start of line in input"), - input_select_line_end: z.string().optional().default("ctrl+shift+e").describe("Select to end of line in input"), - input_visual_line_home: z.string().optional().default("alt+a").describe("Move to start of visual line in input"), - input_visual_line_end: z.string().optional().default("alt+e").describe("Move to end of visual line in input"), - input_select_visual_line_home: z - .string() - .optional() - .default("alt+shift+a") - .describe("Select to start of visual line in input"), - input_select_visual_line_end: z - .string() - .optional() - .default("alt+shift+e") - .describe("Select to end of visual line in input"), - input_buffer_home: z.string().optional().default("home").describe("Move to start of buffer in input"), - input_buffer_end: z.string().optional().default("end").describe("Move to end of buffer in input"), - input_select_buffer_home: z - .string() - .optional() - .default("shift+home") - .describe("Select to start of buffer in input"), - input_select_buffer_end: z.string().optional().default("shift+end").describe("Select to end of buffer in input"), - input_delete_line: z.string().optional().default("ctrl+shift+d").describe("Delete line in 
input"), - input_delete_to_line_end: z.string().optional().default("ctrl+k").describe("Delete to end of line in input"), - input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"), - input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"), - input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"), - input_undo: z.string().optional().default("ctrl+-,super+z").describe("Undo in input"), - input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"), - input_word_forward: z - .string() - .optional() - .default("alt+f,alt+right,ctrl+right") - .describe("Move word forward in input"), - input_word_backward: z - .string() - .optional() - .default("alt+b,alt+left,ctrl+left") - .describe("Move word backward in input"), - input_select_word_forward: z - .string() - .optional() - .default("alt+shift+f,alt+shift+right") - .describe("Select word forward in input"), - input_select_word_backward: z - .string() - .optional() - .default("alt+shift+b,alt+shift+left") - .describe("Select word backward in input"), - input_delete_word_forward: z - .string() - .optional() - .default("alt+d,alt+delete,ctrl+delete") - .describe("Delete word forward in input"), - input_delete_word_backward: z - .string() - .optional() - .default("ctrl+w,ctrl+backspace,alt+backspace") - .describe("Delete word backward in input"), - history_previous: z.string().optional().default("up").describe("Previous history item"), - history_next: z.string().optional().default("down").describe("Next history item"), - session_child_first: z.string().optional().default("down").describe("Go to first child session"), - session_child_cycle: z.string().optional().default("right").describe("Go to next child session"), - session_child_cycle_reverse: z.string().optional().default("left").describe("Go to previous child session"), - 
session_parent: z.string().optional().default("up").describe("Go to parent session"), - terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"), - terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"), - tips_toggle: z.string().optional().default("h").describe("Toggle tips on home screen"), - plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"), - display_thinking: z.string().optional().default("none").describe("Toggle thinking blocks visibility"), - }) - .strict() - .meta({ - ref: "KeybindsConfig", - }) -} +export const Keybinds = z + .object({ + leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"), + app_exit: z.string().optional().default("ctrl+c,ctrl+d,q").describe("Exit the application"), + editor_open: z.string().optional().default("e").describe("Open external editor"), + theme_list: z.string().optional().default("t").describe("List available themes"), + sidebar_toggle: z.string().optional().default("b").describe("Toggle sidebar"), + scrollbar_toggle: z.string().optional().default("none").describe("Toggle session scrollbar"), + username_toggle: z.string().optional().default("none").describe("Toggle username visibility"), + status_view: z.string().optional().default("s").describe("View status"), + session_export: z.string().optional().default("x").describe("Export session to editor"), + session_new: z.string().optional().default("n").describe("Create a new session"), + session_list: z.string().optional().default("l").describe("List all sessions"), + session_timeline: z.string().optional().default("g").describe("Show session timeline"), + session_fork: z.string().optional().default("none").describe("Fork session from message"), + session_rename: z.string().optional().default("ctrl+r").describe("Rename session"), + session_delete: z.string().optional().default("ctrl+d").describe("Delete session"), + 
stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"), + model_provider_list: z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"), + model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"), + session_share: z.string().optional().default("none").describe("Share current session"), + session_unshare: z.string().optional().default("none").describe("Unshare current session"), + session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"), + session_compact: z.string().optional().default("c").describe("Compact the session"), + messages_page_up: z.string().optional().default("pageup,ctrl+alt+b").describe("Scroll messages up by one page"), + messages_page_down: z + .string() + .optional() + .default("pagedown,ctrl+alt+f") + .describe("Scroll messages down by one page"), + messages_line_up: z.string().optional().default("ctrl+alt+y").describe("Scroll messages up by one line"), + messages_line_down: z.string().optional().default("ctrl+alt+e").describe("Scroll messages down by one line"), + messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"), + messages_half_page_down: z.string().optional().default("ctrl+alt+d").describe("Scroll messages down by half page"), + messages_first: z.string().optional().default("ctrl+g,home").describe("Navigate to first message"), + messages_last: z.string().optional().default("ctrl+alt+g,end").describe("Navigate to last message"), + messages_next: z.string().optional().default("none").describe("Navigate to next message"), + messages_previous: z.string().optional().default("none").describe("Navigate to previous message"), + messages_last_user: z.string().optional().default("none").describe("Navigate to last user message"), + messages_copy: z.string().optional().default("y").describe("Copy message"), + messages_undo: 
z.string().optional().default("u").describe("Undo message"), + messages_redo: z.string().optional().default("r").describe("Redo message"), + messages_toggle_conceal: z + .string() + .optional() + .default("h") + .describe("Toggle code block concealment in messages"), + tool_details: z.string().optional().default("none").describe("Toggle tool details visibility"), + model_list: z.string().optional().default("m").describe("List available models"), + model_cycle_recent: z.string().optional().default("f2").describe("Next recently used model"), + model_cycle_recent_reverse: z.string().optional().default("shift+f2").describe("Previous recently used model"), + model_cycle_favorite: z.string().optional().default("none").describe("Next favorite model"), + model_cycle_favorite_reverse: z.string().optional().default("none").describe("Previous favorite model"), + command_list: z.string().optional().default("ctrl+p").describe("List available commands"), + agent_list: z.string().optional().default("a").describe("List agents"), + agent_cycle: z.string().optional().default("tab").describe("Next agent"), + agent_cycle_reverse: z.string().optional().default("shift+tab").describe("Previous agent"), + variant_cycle: z.string().optional().default("ctrl+t").describe("Cycle model variants"), + variant_list: z.string().optional().default("none").describe("List model variants"), + input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"), + input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"), + input_submit: z.string().optional().default("return").describe("Submit input"), + input_newline: z + .string() + .optional() + .default("shift+return,ctrl+return,alt+return,ctrl+j") + .describe("Insert newline in input"), + input_move_left: z.string().optional().default("left,ctrl+b").describe("Move cursor left in input"), + input_move_right: z.string().optional().default("right,ctrl+f").describe("Move cursor right in input"), + 
input_move_up: z.string().optional().default("up").describe("Move cursor up in input"), + input_move_down: z.string().optional().default("down").describe("Move cursor down in input"), + input_select_left: z.string().optional().default("shift+left").describe("Select left in input"), + input_select_right: z.string().optional().default("shift+right").describe("Select right in input"), + input_select_up: z.string().optional().default("shift+up").describe("Select up in input"), + input_select_down: z.string().optional().default("shift+down").describe("Select down in input"), + input_line_home: z.string().optional().default("ctrl+a").describe("Move to start of line in input"), + input_line_end: z.string().optional().default("ctrl+e").describe("Move to end of line in input"), + input_select_line_home: z.string().optional().default("ctrl+shift+a").describe("Select to start of line in input"), + input_select_line_end: z.string().optional().default("ctrl+shift+e").describe("Select to end of line in input"), + input_visual_line_home: z.string().optional().default("alt+a").describe("Move to start of visual line in input"), + input_visual_line_end: z.string().optional().default("alt+e").describe("Move to end of visual line in input"), + input_select_visual_line_home: z + .string() + .optional() + .default("alt+shift+a") + .describe("Select to start of visual line in input"), + input_select_visual_line_end: z + .string() + .optional() + .default("alt+shift+e") + .describe("Select to end of visual line in input"), + input_buffer_home: z.string().optional().default("home").describe("Move to start of buffer in input"), + input_buffer_end: z.string().optional().default("end").describe("Move to end of buffer in input"), + input_select_buffer_home: z + .string() + .optional() + .default("shift+home") + .describe("Select to start of buffer in input"), + input_select_buffer_end: z.string().optional().default("shift+end").describe("Select to end of buffer in input"), + input_delete_line: 
z.string().optional().default("ctrl+shift+d").describe("Delete line in input"), + input_delete_to_line_end: z.string().optional().default("ctrl+k").describe("Delete to end of line in input"), + input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"), + input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"), + input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"), + input_undo: z.string().optional().default("ctrl+-,super+z").describe("Undo in input"), + input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"), + input_word_forward: z + .string() + .optional() + .default("alt+f,alt+right,ctrl+right") + .describe("Move word forward in input"), + input_word_backward: z + .string() + .optional() + .default("alt+b,alt+left,ctrl+left") + .describe("Move word backward in input"), + input_select_word_forward: z + .string() + .optional() + .default("alt+shift+f,alt+shift+right") + .describe("Select word forward in input"), + input_select_word_backward: z + .string() + .optional() + .default("alt+shift+b,alt+shift+left") + .describe("Select word backward in input"), + input_delete_word_forward: z + .string() + .optional() + .default("alt+d,alt+delete,ctrl+delete") + .describe("Delete word forward in input"), + input_delete_word_backward: z + .string() + .optional() + .default("ctrl+w,ctrl+backspace,alt+backspace") + .describe("Delete word backward in input"), + history_previous: z.string().optional().default("up").describe("Previous history item"), + history_next: z.string().optional().default("down").describe("Next history item"), + session_child_first: z.string().optional().default("down").describe("Go to first child session"), + session_child_cycle: z.string().optional().default("right").describe("Go to next child session"), + session_child_cycle_reverse: 
z.string().optional().default("left").describe("Go to previous child session"), + session_parent: z.string().optional().default("up").describe("Go to parent session"), + terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"), + terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"), + tips_toggle: z.string().optional().default("h").describe("Toggle tips on home screen"), + plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"), + display_thinking: z.string().optional().default("none").describe("Toggle thinking blocks visibility"), + }) + .strict() + .meta({ + ref: "KeybindsConfig", + }) diff --git a/packages/opencode/src/config/provider.ts b/packages/opencode/src/config/provider.ts new file mode 100644 index 0000000000..09efedf497 --- /dev/null +++ b/packages/opencode/src/config/provider.ts @@ -0,0 +1,120 @@ +import z from "zod" + +export namespace ConfigProvider { + export const Model = z + .object({ + id: z.string(), + name: z.string(), + family: z.string().optional(), + release_date: z.string(), + attachment: z.boolean(), + reasoning: z.boolean(), + temperature: z.boolean(), + tool_call: z.boolean(), + interleaved: z + .union([ + z.literal(true), + z + .object({ + field: z.enum(["reasoning_content", "reasoning_details"]), + }) + .strict(), + ]) + .optional(), + cost: z + .object({ + input: z.number(), + output: z.number(), + cache_read: z.number().optional(), + cache_write: z.number().optional(), + context_over_200k: z + .object({ + input: z.number(), + output: z.number(), + cache_read: z.number().optional(), + cache_write: z.number().optional(), + }) + .optional(), + }) + .optional(), + limit: z.object({ + context: z.number(), + input: z.number().optional(), + output: z.number(), + }), + modalities: z + .object({ + input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), + output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), + 
}) + .optional(), + experimental: z.boolean().optional(), + status: z.enum(["alpha", "beta", "deprecated"]).optional(), + provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(), + options: z.record(z.string(), z.any()), + headers: z.record(z.string(), z.string()).optional(), + variants: z + .record( + z.string(), + z + .object({ + disabled: z.boolean().optional().describe("Disable this variant for the model"), + }) + .catchall(z.any()), + ) + .optional() + .describe("Variant-specific configuration"), + }) + .partial() + + export const Info = z + .object({ + api: z.string().optional(), + name: z.string(), + env: z.array(z.string()), + id: z.string(), + npm: z.string().optional(), + whitelist: z.array(z.string()).optional(), + blacklist: z.array(z.string()).optional(), + options: z + .object({ + apiKey: z.string().optional(), + baseURL: z.string().optional(), + enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"), + setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"), + timeout: z + .union([ + z + .number() + .int() + .positive() + .describe( + "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", + ), + z.literal(false).describe("Disable timeout for this provider entirely."), + ]) + .optional() + .describe( + "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", + ), + chunkTimeout: z + .number() + .int() + .positive() + .optional() + .describe( + "Timeout in milliseconds between streamed SSE chunks for this provider. 
If no chunk arrives within this window, the request is aborted.", + ), + }) + .catchall(z.any()) + .optional(), + models: z.record(z.string(), Model).optional(), + }) + .partial() + .strict() + .meta({ + ref: "ProviderConfig", + }) + + export type Info = z.infer +} diff --git a/packages/opencode/src/config/skills.ts b/packages/opencode/src/config/skills.ts new file mode 100644 index 0000000000..bdc63f5d6a --- /dev/null +++ b/packages/opencode/src/config/skills.ts @@ -0,0 +1,13 @@ +import z from "zod" + +export namespace ConfigSkills { + export const Info = z.object({ + paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), + urls: z + .array(z.string()) + .optional() + .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), + }) + + export type Info = z.infer +} From 8b1f0e2d90c03fc5de6077f868af1548485cc466 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 12:55:40 -0400 Subject: [PATCH 015/120] core: add documentation comments to plugin configuration merge logic Adds explanatory comments to config.ts and plugin.ts clarifying: - How plugin specs are stored and normalized during config loading - Why plugin_origins tracks provenance for location-sensitive decisions - Why path-like specs are resolved early to prevent reinterpretation during merges - How plugin deduplication works while keeping origin metadata for writes and diagnostics --- packages/opencode/src/config/config.ts | 23 ++++++++++++++++++----- packages/opencode/src/config/plugin.ts | 11 ++++++++++- 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 7df5dbe2ff..ed3be88082 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -93,6 +93,7 @@ export const Info = z .describe( "Enable or disable snapshot tracking. 
When false, filesystem snapshots are not recorded and undoing or reverting will not undo/redo file changes. Defaults to true.", ), + // User-facing plugin config is stored as Specs; provenance gets attached later while configs are merged. plugin: ConfigPlugin.Spec.array().optional(), share: z .enum(["manual", "auto", "disabled"]) @@ -267,6 +268,8 @@ export const Info = z }) export type Info = z.output & { + // plugin_origins is derived state, not a persisted config field. It keeps each winning plugin spec together + // with the file and scope it came from so later runtime code can make location-sensitive decisions. plugin_origins?: ConfigPlugin.Origin[] } @@ -420,6 +423,8 @@ export const layer = Layer.effect( if (data.plugin && isFile) { const list = data.plugin for (let i = 0; i < list.length; i++) { + // Normalize path-like plugin specs while we still know which config file declared them. + // This prevents `./plugin.ts` from being reinterpreted relative to some later merge location. list[i] = yield* Effect.promise(() => ConfigPlugin.resolvePluginSpec(list[i], options.path)) } } @@ -505,20 +510,26 @@ export const layer = Layer.effect( const consoleManagedProviders = new Set() let activeOrgName: string | undefined - const scope = Effect.fnUntraced(function* (source: string) { + const pluginScopeForSource = Effect.fnUntraced(function* (source: string) { if (source.startsWith("http://") || source.startsWith("https://")) return "global" if (source === "OPENCODE_CONFIG_CONTENT") return "local" if (yield* InstanceRef.use((ctx) => Effect.succeed(Instance.containsPath(source, ctx)))) return "local" return "global" }) - const track = Effect.fnUntraced(function* ( + const mergePluginOrigins = Effect.fnUntraced(function* ( source: string, + // mergePluginOrigins receives raw Specs from one config source, before provenance for this merge step + // is attached. 
list: ConfigPlugin.Spec[] | undefined, + // Scope can be inferred from the source path, but some callers already know whether the config should + // behave as global or local and can pass that explicitly. kind?: ConfigPlugin.Scope, ) { if (!list?.length) return - const hit = kind ?? (yield* scope(source)) + const hit = kind ?? (yield* pluginScopeForSource(source)) + // Merge newly seen plugin origins with previously collected ones, then dedupe by plugin identity while + // keeping the winning source/scope metadata for downstream installs, writes, and diagnostics. const plugins = ConfigPlugin.deduplicatePluginOrigins([ ...(result.plugin_origins ?? []), ...list.map((spec) => ({ spec, source, scope: hit })), @@ -529,7 +540,7 @@ export const layer = Layer.effect( const merge = (source: string, next: Info, kind?: ConfigPlugin.Scope) => { result = mergeConfigConcatArrays(result, next) - return track(source, next.plugin, kind) + return mergePluginOrigins(source, next.plugin, kind) } for (const [key, value] of Object.entries(auth)) { @@ -617,8 +628,10 @@ export const layer = Layer.effect( result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.load(dir))) result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.loadMode(dir))) + // Auto-discovered plugins under `.opencode/plugin(s)` are already local files, so ConfigPlugin.load + // returns normalized Specs and we only need to attach origin metadata here. 
const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) - yield* track(dir, list) + yield* mergePluginOrigins(dir, list) } if (process.env.OPENCODE_CONFIG_CONTENT) { diff --git a/packages/opencode/src/config/plugin.ts b/packages/opencode/src/config/plugin.ts index d13a9d5adc..3a10c0a715 100644 --- a/packages/opencode/src/config/plugin.ts +++ b/packages/opencode/src/config/plugin.ts @@ -8,11 +8,16 @@ export namespace ConfigPlugin { const Options = z.record(z.string(), z.unknown()) export type Options = z.infer + // Spec is the user-config value: either just a plugin identifier, or the identifier plus inline options. + // It answers "what should we load?" but says nothing about where that value came from. export const Spec = z.union([z.string(), z.tuple([z.string(), Options])]) export type Spec = z.infer export type Scope = "global" | "local" + // Origin keeps the original config provenance attached to a spec. + // After multiple config files are merged, callers still need to know which file declared the plugin + // and whether it should behave like a global or project-local plugin. export type Origin = { spec: Spec source: string @@ -33,7 +38,7 @@ export namespace ConfigPlugin { return plugins } - export function pluginSpecifier(plugin: ConfigPlugin.Spec): string { + export function pluginSpecifier(plugin: Spec): string { return Array.isArray(plugin) ? plugin[0] : plugin } @@ -41,6 +46,8 @@ export namespace ConfigPlugin { return Array.isArray(plugin) ? plugin[1] : undefined } + // Path-like specs are resolved relative to the config file that declared them so merges later on do not + // accidentally reinterpret `./plugin.ts` relative to some other directory. 
export async function resolvePluginSpec(plugin: Spec, configFilepath: string): Promise { const spec = pluginSpecifier(plugin) if (!isPathPluginSpec(spec)) return plugin @@ -58,6 +65,8 @@ export namespace ConfigPlugin { return resolved } + // Dedupe on the load identity (package name for npm specs, exact file URL for local specs), but keep the + // full Origin so downstream code still knows which config file won and where follow-up writes should go. export function deduplicatePluginOrigins(plugins: Origin[]): Origin[] { const seen = new Set() const list: Origin[] = [] From c5deeee8c7b2e5b3927d28958d2ceb9ebddeb256 Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 12:19:01 -0500 Subject: [PATCH 016/120] fix: ensure azure has store = true by default (#22764) --- packages/opencode/src/provider/transform.ts | 4 ++++ .../opencode/test/provider/transform.test.ts | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 92862b0ca6..a294c568d7 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -793,6 +793,10 @@ export function options(input: { result["store"] = false } + if (input.model.api.npm === "@ai-sdk/azure") { + result["store"] = true + } + if (input.model.api.npm === "@openrouter/ai-sdk-provider") { result["usage"] = { include: true, diff --git a/packages/opencode/test/provider/transform.test.ts b/packages/opencode/test/provider/transform.test.ts index d53ce38b16..f92b448cf3 100644 --- a/packages/opencode/test/provider/transform.test.ts +++ b/packages/opencode/test/provider/transform.test.ts @@ -100,6 +100,24 @@ describe("ProviderTransform.options - setCacheKey", () => { }) expect(result.store).toBe(false) }) + + test("should set store=true for azure provider by default", () => { + const azureModel = { + ...mockModel, + providerID: 
"azure", + api: { + id: "gpt-4", + url: "https://azure.com", + npm: "@ai-sdk/azure", + }, + } + const result = ProviderTransform.options({ + model: azureModel, + sessionID, + providerOptions: {}, + }) + expect(result.store).toBe(true) + }) }) describe("ProviderTransform.options - zai/zhipuai thinking", () => { From 03e20e6ac125b6a792c567451c2f904c6ed6941c Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 13:29:00 -0400 Subject: [PATCH 017/120] core: modularize config parsing to improve maintainability Extract error handling, parsing logic, and variable substitution into dedicated modules. This reduces duplication between tui.json and opencode.json parsing and makes the config system easier to extend for future config formats. --- .../opencode/src/cli/cmd/tui/config/tui.ts | 45 +++--- packages/opencode/src/config/agent.ts | 2 +- packages/opencode/src/config/command.ts | 2 +- packages/opencode/src/config/config.ts | 146 +++++++----------- packages/opencode/src/config/error.ts | 21 +++ packages/opencode/src/config/index.ts | 3 + packages/opencode/src/config/managed.ts | 16 +- packages/opencode/src/config/parse.ts | 80 ++++++++++ packages/opencode/src/config/paths.ts | 123 +-------------- packages/opencode/src/config/variable.ts | 84 ++++++++++ packages/opencode/test/config/config.test.ts | 86 ++++++----- 11 files changed, 335 insertions(+), 273 deletions(-) create mode 100644 packages/opencode/src/config/error.ts create mode 100644 packages/opencode/src/config/parse.ts create mode 100644 packages/opencode/src/config/variable.ts diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index 6f2c161fb5..e8eb9ff5d3 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -1,6 +1,7 @@ import z from "zod" import { mergeDeep, unique } from "remeda" import { Context, Effect, Fiber, Layer } from "effect" +import { ConfigParse } from "@/config/parse" 
import * as ConfigPaths from "@/config/paths" import { migrateTuiConfig } from "./tui-migrate" import { TuiInfo } from "./tui-schema" @@ -68,6 +69,14 @@ export namespace TuiConfig { } } + async function resolvePlugins(config: Info, configFilepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) + } + return config + } + async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { const data = await loadFile(file) acc.result = mergeDeep(acc.result, data) @@ -183,26 +192,22 @@ export namespace TuiConfig { } async function load(text: string, configFilepath: string): Promise { - const raw = await ConfigPaths.parseText(text, configFilepath, "empty") - if (!isRecord(raw)) return {} + return ConfigParse.load(Info, text, { + type: "path", + path: configFilepath, + missing: "empty", + normalize: (data) => { + if (!isRecord(data)) return {} - // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json - // (mirroring the old opencode.json shape) still get their settings applied. - const normalized = normalize(raw) - - const parsed = Info.safeParse(normalized) - if (!parsed.success) { - log.warn("invalid tui config", { path: configFilepath, issues: parsed.error.issues }) - return {} - } - - const data = parsed.data - if (data.plugin) { - for (let i = 0; i < data.plugin.length; i++) { - data.plugin[i] = await ConfigPlugin.resolvePluginSpec(data.plugin[i], configFilepath) - } - } - - return data + // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json + // (mirroring the old opencode.json shape) still get their settings applied. 
+ return normalize(data) + }, + }) + .then((data) => resolvePlugins(data, configFilepath)) + .catch((error) => { + log.warn("invalid tui config", { path: configFilepath, error }) + return {} + }) } } diff --git a/packages/opencode/src/config/agent.ts b/packages/opencode/src/config/agent.ts index 3819368e82..f754f009d4 100644 --- a/packages/opencode/src/config/agent.ts +++ b/packages/opencode/src/config/agent.ts @@ -6,9 +6,9 @@ import { NamedError } from "@opencode-ai/shared/util/error" import { Glob } from "@opencode-ai/shared/util/glob" import { Bus } from "@/bus" import { configEntryNameFromPath } from "./entry-name" +import { InvalidError } from "./error" import * as ConfigMarkdown from "./markdown" import { ConfigModelID } from "./model-id" -import { InvalidError } from "./paths" import { ConfigPermission } from "./permission" const log = Log.create({ service: "config" }) diff --git a/packages/opencode/src/config/command.ts b/packages/opencode/src/config/command.ts index 5606bdd4c7..9799250567 100644 --- a/packages/opencode/src/config/command.ts +++ b/packages/opencode/src/config/command.ts @@ -6,9 +6,9 @@ import { NamedError } from "@opencode-ai/shared/util/error" import { Glob } from "@opencode-ai/shared/util/glob" import { Bus } from "@/bus" import { configEntryNameFromPath } from "./entry-name" +import { InvalidError } from "./error" import * as ConfigMarkdown from "./markdown" import { ConfigModelID } from "./model-id" -import { InvalidError } from "./paths" const log = Log.create({ service: "config" }) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index ed3be88082..6b6d74ed82 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -10,13 +10,7 @@ import { NamedError } from "@opencode-ai/shared/util/error" import { Flag } from "../flag/flag" import { Auth } from "../auth" import { Env } from "../env" -import { - type ParseError as JsoncParseError, - applyEdits, - 
modify, - parse as parseJsonc, - printParseErrorCode, -} from "jsonc-parser" +import { applyEdits, modify } from "jsonc-parser" import { Instance, type InstanceContext } from "../project/instance" import * as LSPServer from "../lsp/server" import { InstallationLocal, InstallationVersion } from "@/installation/version" @@ -25,6 +19,7 @@ import { GlobalBus } from "@/bus/global" import { Event } from "../server/event" import { Account } from "@/account" import { isRecord } from "@/util/record" +import { InvalidError, JsonError } from "./error" import * as ConfigPaths from "./paths" import type { ConsoleState } from "./console-state" import { AppFileSystem } from "@opencode-ai/shared/filesystem" @@ -39,6 +34,7 @@ import { ConfigModelID } from "./model-id" import { ConfigPlugin } from "./plugin" import { ConfigManaged } from "./managed" import { ConfigCommand } from "./command" +import { ConfigParse } from "./parse" import { ConfigPermission } from "./permission" import { ConfigProvider } from "./provider" import { ConfigSkills } from "./skills" @@ -54,6 +50,28 @@ function mergeConfigConcatArrays(target: Info, source: Info): Info { return merged } +function normalizeLoadedConfig(data: unknown, source: string) { + if (!isRecord(data)) return data + const copy = { ...data } + const hadLegacy = "theme" in copy || "keybinds" in copy || "tui" in copy + if (!hadLegacy) return copy + delete copy.theme + delete copy.keybinds + delete copy.tui + log.warn("tui keys in opencode config are deprecated; move them to tui.json", { path: source }) + return copy +} + +async function resolveLoadedPlugins(config: T, filepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + // Normalize path-like plugin specs while we still know which config file declared them. + // This prevents `./plugin.ts` from being reinterpreted relative to some later merge location. 
+ config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], filepath) + } + return config +} + export const Server = z .object({ port: z.number().int().positive().optional().describe("Port to listen on"), @@ -325,42 +343,6 @@ function writable(info: Info) { return next } -export function parseConfig(text: string, filepath: string): Info { - const errors: JsoncParseError[] = [] - const data = parseJsonc(text, errors, { allowTrailingComma: true }) - if (errors.length) { - const lines = text.split("\n") - const errorDetails = errors - .map((e) => { - const beforeOffset = text.substring(0, e.offset).split("\n") - const line = beforeOffset.length - const column = beforeOffset[beforeOffset.length - 1].length + 1 - const problemLine = lines[line - 1] - - const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}` - if (!problemLine) return error - - return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^` - }) - .join("\n") - - throw new JsonError({ - path: filepath, - message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${errorDetails}\n--- End ---`, - }) - } - - const parsed = Info.safeParse(data) - if (parsed.success) return parsed.data - - throw new InvalidError({ - path: filepath, - issues: parsed.error.issues, - }) -} - -export const { JsonError, InvalidError } = ConfigPaths - export const ConfigDirectoryTypoError = NamedError.create( "ConfigDirectoryTypoError", z.object({ @@ -393,48 +375,31 @@ export const layer = Layer.effect( text: string, options: { path: string } | { dir: string; source: string }, ) { - const original = text - const source = "path" in options ? options.path : options.source - const isFile = "path" in options - const data = yield* Effect.promise(() => - ConfigPaths.parseText(text, "path" in options ? 
options.path : { source: options.source, dir: options.dir }), - ) - - const normalized = (() => { - if (!data || typeof data !== "object" || Array.isArray(data)) return data - const copy = { ...(data as Record) } - const hadLegacy = "theme" in copy || "keybinds" in copy || "tui" in copy - if (!hadLegacy) return copy - delete copy.theme - delete copy.keybinds - delete copy.tui - log.warn("tui keys in opencode config are deprecated; move them to tui.json", { path: source }) - return copy - })() - - const parsed = Info.safeParse(normalized) - if (parsed.success) { - if (!parsed.data.$schema && isFile) { - parsed.data.$schema = "https://opencode.ai/config.json" - const updated = original.replace(/^\s*\{/, '{\n "$schema": "https://opencode.ai/config.json",') - yield* fs.writeFileString(options.path, updated).pipe(Effect.catch(() => Effect.void)) - } - const data = parsed.data - if (data.plugin && isFile) { - const list = data.plugin - for (let i = 0; i < list.length; i++) { - // Normalize path-like plugin specs while we still know which config file declared them. - // This prevents `./plugin.ts` from being reinterpreted relative to some later merge location. 
- list[i] = yield* Effect.promise(() => ConfigPlugin.resolvePluginSpec(list[i], options.path)) - } - } - return data + if (!("path" in options)) { + return yield* Effect.promise(() => + ConfigParse.load(Info, text, { + type: "virtual", + dir: options.dir, + source: options.source, + normalize: normalizeLoadedConfig, + }), + ) } - throw new InvalidError({ - path: source, - issues: parsed.error.issues, - }) + const data = yield* Effect.promise(() => + ConfigParse.load(Info, text, { + type: "path", + path: options.path, + normalize: normalizeLoadedConfig, + }), + ) + yield* Effect.promise(() => resolveLoadedPlugins(data, options.path)) + if (!data.$schema) { + data.$schema = "https://opencode.ai/config.json" + const updated = text.replace(/^\s*\{/, '{\n "$schema": "https://opencode.ai/config.json",') + yield* fs.writeFileString(options.path, updated).pipe(Effect.catch(() => Effect.void)) + } + return data }) const loadFile = Effect.fnUntraced(function* (filepath: string) { @@ -692,7 +657,16 @@ export const layer = Layer.effect( } // macOS managed preferences (.mobileconfig deployed via MDM) override everything - result = mergeConfigConcatArrays(result, yield* Effect.promise(() => ConfigManaged.readManagedPreferences())) + const managed = yield* Effect.promise(() => ConfigManaged.readManagedPreferences()) + if (managed) { + result = mergeConfigConcatArrays( + result, + yield* loadConfig(managed.text, { + dir: path.dirname(managed.source), + source: managed.source, + }), + ) + } for (const [name, mode] of Object.entries(result.mode ?? {})) { result.agent = mergeDeep(result.agent ?? 
{}, { @@ -803,13 +777,13 @@ export const layer = Layer.effect( let next: Info if (!file.endsWith(".jsonc")) { - const existing = parseConfig(before, file) + const existing = ConfigParse.parse(Info, before, file) const merged = mergeDeep(writable(existing), input) yield* fs.writeFileString(file, JSON.stringify(merged, null, 2)).pipe(Effect.orDie) next = merged } else { const updated = patchJsonc(before, input) - next = parseConfig(updated, file) + next = ConfigParse.parse(Info, updated, file) yield* fs.writeFileString(file, updated).pipe(Effect.orDie) } diff --git a/packages/opencode/src/config/error.ts b/packages/opencode/src/config/error.ts new file mode 100644 index 0000000000..06f549fd85 --- /dev/null +++ b/packages/opencode/src/config/error.ts @@ -0,0 +1,21 @@ +export * as ConfigError from "./error" + +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" + +export const JsonError = NamedError.create( + "ConfigJsonError", + z.object({ + path: z.string(), + message: z.string().optional(), + }), +) + +export const InvalidError = NamedError.create( + "ConfigInvalidError", + z.object({ + path: z.string(), + issues: z.custom().optional(), + message: z.string().optional(), + }), +) diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index 37665d8c67..c4a1c608b1 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -1,10 +1,13 @@ export * as Config from "./config" export * as ConfigAgent from "./agent" export * as ConfigCommand from "./command" +export * as ConfigError from "./error" +export * as ConfigVariable from "./variable" export { ConfigManaged } from "./managed" export * as ConfigMarkdown from "./markdown" export * as ConfigMCP from "./mcp" export { ConfigModelID } from "./model-id" +export * as ConfigParse from "./parse" export * as ConfigPermission from "./permission" export * as ConfigPaths from "./paths" export * as ConfigProvider from "./provider" 
diff --git a/packages/opencode/src/config/managed.ts b/packages/opencode/src/config/managed.ts index 61c535185f..19b048ffce 100644 --- a/packages/opencode/src/config/managed.ts +++ b/packages/opencode/src/config/managed.ts @@ -1,7 +1,6 @@ import { existsSync } from "fs" import os from "os" import path from "path" -import { type Info, parseConfig } from "./config" import { Log, Process } from "../util" const log = Log.create({ service: "config" }) @@ -33,16 +32,16 @@ function managedConfigDir() { return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir() } -function parseManagedPlist(json: string, source: string): Info { +function parseManagedPlist(json: string): string { const raw = JSON.parse(json) for (const key of Object.keys(raw)) { if (PLIST_META.has(key)) delete raw[key] } - return parseConfig(JSON.stringify(raw), source) + return JSON.stringify(raw) } -async function readManagedPreferences(): Promise { - if (process.platform !== "darwin") return {} +async function readManagedPreferences() { + if (process.platform !== "darwin") return const user = os.userInfo().username const paths = [ @@ -58,10 +57,13 @@ async function readManagedPreferences(): Promise { log.warn("failed to convert managed preferences plist", { path: plist }) continue } - return parseManagedPlist(result.stdout.toString(), `mobileconfig:${plist}`) + return { + source: `mobileconfig:${plist}`, + text: parseManagedPlist(result.stdout.toString()), + } } - return {} + return } export const ConfigManaged = { diff --git a/packages/opencode/src/config/parse.ts b/packages/opencode/src/config/parse.ts new file mode 100644 index 0000000000..65cc483859 --- /dev/null +++ b/packages/opencode/src/config/parse.ts @@ -0,0 +1,80 @@ +export * as ConfigParse from "./parse" + +import { type ParseError as JsoncParseError, parse as parseJsonc, printParseErrorCode } from "jsonc-parser" +import z from "zod" +import { ConfigVariable } from "./variable" +import { InvalidError, JsonError } from 
"./error" + +type Schema = z.ZodType +type VariableMode = "error" | "empty" + +export type LoadOptions = + | { + type: "path" + path: string + missing?: VariableMode + normalize?: (data: unknown, source: string) => unknown + } + | { + type: "virtual" + dir: string + source: string + missing?: VariableMode + normalize?: (data: unknown, source: string) => unknown + } + +function issues(text: string, errors: JsoncParseError[]) { + const lines = text.split("\n") + return errors + .map((e) => { + const beforeOffset = text.substring(0, e.offset).split("\n") + const line = beforeOffset.length + const column = beforeOffset[beforeOffset.length - 1].length + 1 + const problemLine = lines[line - 1] + + const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}` + if (!problemLine) return error + + return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^` + }) + .join("\n") +} + +export function parse(schema: Schema, text: string, filepath: string): T { + const errors: JsoncParseError[] = [] + const data = parseJsonc(text, errors, { allowTrailingComma: true }) + if (errors.length) { + throw new JsonError({ + path: filepath, + message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${issues(text, errors)}\n--- End ---`, + }) + } + + const parsed = schema.safeParse(data) + if (parsed.success) return parsed.data + + throw new InvalidError({ + path: filepath, + issues: parsed.error.issues, + }) +} + +export async function load(schema: Schema, text: string, options: LoadOptions): Promise { + const source = options.type === "path" ? options.path : options.source + const expanded = await ConfigVariable.substitute( + text, + options.type === "path" ? { type: "path", path: options.path } : options, + options.missing, + ) + const data = parse(z.unknown(), expanded, source) + const normalized = options.normalize ? 
options.normalize(data, source) : data + const parsed = schema.safeParse(normalized) + if (!parsed.success) { + throw new InvalidError({ + path: source, + issues: parsed.error.issues, + }) + } + + return parsed.data +} diff --git a/packages/opencode/src/config/paths.ts b/packages/opencode/src/config/paths.ts index fabd3fd5f8..faf585d9b2 100644 --- a/packages/opencode/src/config/paths.ts +++ b/packages/opencode/src/config/paths.ts @@ -1,12 +1,9 @@ import path from "path" -import os from "os" -import z from "zod" -import { type ParseError as JsoncParseError, parse as parseJsonc, printParseErrorCode } from "jsonc-parser" -import { NamedError } from "@opencode-ai/shared/util/error" import { Filesystem } from "@/util" import { Flag } from "@/flag/flag" import { Global } from "@/global" import { unique } from "remeda" +import { JsonError } from "./error" export async function projectFiles(name: string, directory: string, worktree?: string) { return Filesystem.findUp([`${name}.json`, `${name}.jsonc`], directory, worktree, { rootFirst: true }) @@ -39,23 +36,6 @@ export function fileInDirectory(dir: string, name: string) { return [path.join(dir, `${name}.json`), path.join(dir, `${name}.jsonc`)] } -export const JsonError = NamedError.create( - "ConfigJsonError", - z.object({ - path: z.string(), - message: z.string().optional(), - }), -) - -export const InvalidError = NamedError.create( - "ConfigInvalidError", - z.object({ - path: z.string(), - issues: z.custom().optional(), - message: z.string().optional(), - }), -) - /** Read a config file, returning undefined for missing files and throwing JsonError for other failures. 
*/ export async function readFile(filepath: string) { return Filesystem.readText(filepath).catch((err: NodeJS.ErrnoException) => { @@ -63,104 +43,3 @@ export async function readFile(filepath: string) { throw new JsonError({ path: filepath }, { cause: err }) }) } - -type ParseSource = string | { source: string; dir: string } - -function source(input: ParseSource) { - return typeof input === "string" ? input : input.source -} - -function dir(input: ParseSource) { - return typeof input === "string" ? path.dirname(input) : input.dir -} - -/** Apply {env:VAR} and {file:path} substitutions to config text. */ -async function substitute(text: string, input: ParseSource, missing: "error" | "empty" = "error") { - text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => { - return process.env[varName] || "" - }) - - const fileMatches = Array.from(text.matchAll(/\{file:[^}]+\}/g)) - if (!fileMatches.length) return text - - const configDir = dir(input) - const configSource = source(input) - let out = "" - let cursor = 0 - - for (const match of fileMatches) { - const token = match[0] - const index = match.index! - out += text.slice(cursor, index) - - const lineStart = text.lastIndexOf("\n", index - 1) + 1 - const prefix = text.slice(lineStart, index).trimStart() - if (prefix.startsWith("//")) { - out += token - cursor = index + token.length - continue - } - - let filePath = token.replace(/^\{file:/, "").replace(/\}$/, "") - if (filePath.startsWith("~/")) { - filePath = path.join(os.homedir(), filePath.slice(2)) - } - - const resolvedPath = path.isAbsolute(filePath) ? 
filePath : path.resolve(configDir, filePath) - const fileContent = ( - await Filesystem.readText(resolvedPath).catch((error: NodeJS.ErrnoException) => { - if (missing === "empty") return "" - - const errMsg = `bad file reference: "${token}"` - if (error.code === "ENOENT") { - throw new InvalidError( - { - path: configSource, - message: errMsg + ` ${resolvedPath} does not exist`, - }, - { cause: error }, - ) - } - throw new InvalidError({ path: configSource, message: errMsg }, { cause: error }) - }) - ).trim() - - out += JSON.stringify(fileContent).slice(1, -1) - cursor = index + token.length - } - - out += text.slice(cursor) - return out -} - -/** Substitute and parse JSONC text, throwing JsonError on syntax errors. */ -export async function parseText(text: string, input: ParseSource, missing: "error" | "empty" = "error") { - const configSource = source(input) - text = await substitute(text, input, missing) - - const errors: JsoncParseError[] = [] - const data = parseJsonc(text, errors, { allowTrailingComma: true }) - if (errors.length) { - const lines = text.split("\n") - const errorDetails = errors - .map((e) => { - const beforeOffset = text.substring(0, e.offset).split("\n") - const line = beforeOffset.length - const column = beforeOffset[beforeOffset.length - 1].length + 1 - const problemLine = lines[line - 1] - - const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}` - if (!problemLine) return error - - return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^` - }) - .join("\n") - - throw new JsonError({ - path: configSource, - message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${errorDetails}\n--- End ---`, - }) - } - - return data -} diff --git a/packages/opencode/src/config/variable.ts b/packages/opencode/src/config/variable.ts new file mode 100644 index 0000000000..e016e33a21 --- /dev/null +++ b/packages/opencode/src/config/variable.ts @@ -0,0 +1,84 @@ +export * as ConfigVariable from "./variable" + 
+import path from "path" +import os from "os" +import { Filesystem } from "@/util" +import { InvalidError } from "./error" + +type ParseSource = + | { + type: "path" + path: string + } + | { + type: "virtual" + source: string + dir: string + } + +function source(input: ParseSource) { + return input.type === "path" ? input.path : input.source +} + +function dir(input: ParseSource) { + return input.type === "path" ? path.dirname(input.path) : input.dir +} + +/** Apply {env:VAR} and {file:path} substitutions to config text. */ +export async function substitute(text: string, input: ParseSource, missing: "error" | "empty" = "error") { + text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => { + return process.env[varName] || "" + }) + + const fileMatches = Array.from(text.matchAll(/\{file:[^}]+\}/g)) + if (!fileMatches.length) return text + + const configDir = dir(input) + const configSource = source(input) + let out = "" + let cursor = 0 + + for (const match of fileMatches) { + const token = match[0] + const index = match.index! + out += text.slice(cursor, index) + + const lineStart = text.lastIndexOf("\n", index - 1) + 1 + const prefix = text.slice(lineStart, index).trimStart() + if (prefix.startsWith("//")) { + out += token + cursor = index + token.length + continue + } + + let filePath = token.replace(/^\{file:/, "").replace(/\}$/, "") + if (filePath.startsWith("~/")) { + filePath = path.join(os.homedir(), filePath.slice(2)) + } + + const resolvedPath = path.isAbsolute(filePath) ? 
filePath : path.resolve(configDir, filePath) + const fileContent = ( + await Filesystem.readText(resolvedPath).catch((error: NodeJS.ErrnoException) => { + if (missing === "empty") return "" + + const errMsg = `bad file reference: "${token}"` + if (error.code === "ENOENT") { + throw new InvalidError( + { + path: configSource, + message: errMsg + ` ${resolvedPath} does not exist`, + }, + { cause: error }, + ) + } + throw new InvalidError({ path: configSource, message: errMsg }, { cause: error }) + }) + ).trim() + + out += JSON.stringify(fileContent).slice(1, -1) + cursor = index + token.length + } + + out += text.slice(cursor) + return out +} diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index 21d6e3e93d..c41f395e51 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -2,6 +2,7 @@ import { test, expect, describe, mock, afterEach, beforeEach } from "bun:test" import { Effect, Layer, Option } from "effect" import { NodeFileSystem, NodePath } from "@effect/platform-node" import { Config, ConfigManaged } from "../../src/config" +import { ConfigParse } from "../../src/config/parse" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" import { Instance } from "../../src/project/instance" @@ -2211,17 +2212,20 @@ describe("OPENCODE_CONFIG_CONTENT token substitution", () => { // parseManagedPlist unit tests — pure function, no OS interaction test("parseManagedPlist strips MDM metadata keys", async () => { - const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ - PayloadDisplayName: "OpenCode Managed", - PayloadIdentifier: "ai.opencode.managed.test", - PayloadType: "ai.opencode.managed", - PayloadUUID: "AAAA-BBBB-CCCC", - PayloadVersion: 1, - _manualProfile: true, - share: "disabled", - model: "mdm/model", - }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist( + JSON.stringify({ + 
PayloadDisplayName: "OpenCode Managed", + PayloadIdentifier: "ai.opencode.managed.test", + PayloadType: "ai.opencode.managed", + PayloadUUID: "AAAA-BBBB-CCCC", + PayloadVersion: 1, + _manualProfile: true, + share: "disabled", + model: "mdm/model", + }), + ), "test:mobileconfig", ) expect(config.share).toBe("disabled") @@ -2233,12 +2237,15 @@ test("parseManagedPlist strips MDM metadata keys", async () => { }) test("parseManagedPlist parses server settings", async () => { - const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ - $schema: "https://opencode.ai/config.json", - server: { hostname: "127.0.0.1", mdns: false }, - autoupdate: true, - }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist( + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + server: { hostname: "127.0.0.1", mdns: false }, + autoupdate: true, + }), + ), "test:mobileconfig", ) expect(config.server?.hostname).toBe("127.0.0.1") @@ -2247,18 +2254,21 @@ test("parseManagedPlist parses server settings", async () => { }) test("parseManagedPlist parses permission rules", async () => { - const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ - $schema: "https://opencode.ai/config.json", - permission: { - "*": "ask", - bash: { "*": "ask", "rm -rf *": "deny", "curl *": "deny" }, - grep: "allow", - glob: "allow", - webfetch: "ask", - "~/.ssh/*": "deny", - }, - }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist( + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + permission: { + "*": "ask", + bash: { "*": "ask", "rm -rf *": "deny", "curl *": "deny" }, + grep: "allow", + glob: "allow", + webfetch: "ask", + "~/.ssh/*": "deny", + }, + }), + ), "test:mobileconfig", ) expect(config.permission?.["*"]).toBe("ask") @@ -2271,19 +2281,23 @@ test("parseManagedPlist parses permission rules", async () => { }) test("parseManagedPlist parses enabled_providers", async () => { 
- const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ - $schema: "https://opencode.ai/config.json", - enabled_providers: ["anthropic", "google"], - }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist( + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + enabled_providers: ["anthropic", "google"], + }), + ), "test:mobileconfig", ) expect(config.enabled_providers).toEqual(["anthropic", "google"]) }) test("parseManagedPlist handles empty config", async () => { - const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist(JSON.stringify({ $schema: "https://opencode.ai/config.json" })), "test:mobileconfig", ) expect(config.$schema).toBe("https://opencode.ai/config.json") From cefa7f04c66fce1fb586736222a2d6b8a5609ded Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 13:32:22 -0400 Subject: [PATCH 018/120] core: reorganize ConfigPaths module export for cleaner dependency management --- packages/opencode/src/config/config.ts | 2 +- packages/opencode/src/config/paths.ts | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 6b6d74ed82..a738ebf130 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -20,7 +20,6 @@ import { Event } from "../server/event" import { Account } from "@/account" import { isRecord } from "@/util/record" import { InvalidError, JsonError } from "./error" -import * as ConfigPaths from "./paths" import type { ConsoleState } from "./console-state" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { InstanceState } from "@/effect" @@ -38,6 +37,7 @@ import { ConfigParse } from "./parse" import { ConfigPermission } from "./permission" import { 
ConfigProvider } from "./provider" import { ConfigSkills } from "./skills" +import { ConfigPaths } from "./paths" const log = Log.create({ service: "config" }) diff --git a/packages/opencode/src/config/paths.ts b/packages/opencode/src/config/paths.ts index faf585d9b2..dcf0c940f2 100644 --- a/packages/opencode/src/config/paths.ts +++ b/packages/opencode/src/config/paths.ts @@ -1,3 +1,5 @@ +export * as ConfigPaths from "./paths" + import path from "path" import { Filesystem } from "@/util" import { Flag } from "@/flag/flag" From bee5f919fc5ae915aad0a4b5138e1e7f8274b9de Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 13:32:22 -0400 Subject: [PATCH 019/120] core: reorganize ConfigPaths module export for cleaner dependency management --- packages/opencode/src/config/managed.ts | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/packages/opencode/src/config/managed.ts b/packages/opencode/src/config/managed.ts index 19b048ffce..a53fb70af3 100644 --- a/packages/opencode/src/config/managed.ts +++ b/packages/opencode/src/config/managed.ts @@ -1,7 +1,10 @@ +export * as ConfigManaged from "./managed" + import { existsSync } from "fs" import os from "os" import path from "path" import { Log, Process } from "../util" +import { warn } from "console" const log = Log.create({ service: "config" }) @@ -28,11 +31,11 @@ function systemManagedConfigDir(): string { } } -function managedConfigDir() { +export function managedConfigDir() { return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir() } -function parseManagedPlist(json: string): string { +export function parseManagedPlist(json: string): string { const raw = JSON.parse(json) for (const key of Object.keys(raw)) { if (PLIST_META.has(key)) delete raw[key] @@ -40,7 +43,7 @@ function parseManagedPlist(json: string): string { return JSON.stringify(raw) } -async function readManagedPreferences() { +export async function readManagedPreferences() { if (process.platform !== 
"darwin") return const user = os.userInfo().username @@ -65,9 +68,3 @@ async function readManagedPreferences() { return } - -export const ConfigManaged = { - managedConfigDir, - parseManagedPlist, - readManagedPreferences, -} From c60862fc9e0a3378fe7be7c5079545c0d5c8d405 Mon Sep 17 00:00:00 2001 From: Thomas Butler <58192340+trbutler4@users.noreply.github.com> Date: Thu, 16 Apr 2026 13:21:04 -0500 Subject: [PATCH 020/120] fix: add missing glob dependency (#22851) --- bun.lock | 1 + nix/hashes.json | 2 +- packages/shared/package.json | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/bun.lock b/bun.lock index 644de37f2e..3d82a64af9 100644 --- a/bun.lock +++ b/bun.lock @@ -516,6 +516,7 @@ "@effect/platform-node": "catalog:", "@npmcli/arborist": "catalog:", "effect": "catalog:", + "glob": "13.0.5", "mime-types": "3.0.2", "minimatch": "10.2.5", "semver": "catalog:", diff --git a/nix/hashes.json b/nix/hashes.json index e9795e5c37..c844031be0 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,6 +1,6 @@ { "nodeModules": { - "x86_64-linux": "sha256-NJAK+cPjwn+2ojDLyyDmBQyx2pD+rILetp7VCylgjek=", + "x86_64-linux": "sha256-b9tsgqQDXd2uM/j+rZnvkoXbXzB4iYCEasXsy9kgIl4=", "aarch64-linux": "sha256-q8NTtFQJoyM7TTvErGA6RtmUscxoZKD/mj9N6S5YhkA=", "aarch64-darwin": "sha256-/ccoSZNLef6j9j14HzpVqhKCR+czM3mhPKPH51mHO24=", "x86_64-darwin": "sha256-6Pd10sMHL/5ZoWNvGPwPn4/AIs1TKjt/3gFyrVpBaE0=" diff --git a/packages/shared/package.json b/packages/shared/package.json index bdfca12a93..4d10a30a36 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -26,6 +26,7 @@ "@effect/platform-node": "catalog:", "@npmcli/arborist": "catalog:", "effect": "catalog:", + "glob": "13.0.5", "mime-types": "3.0.2", "minimatch": "10.2.5", "semver": "catalog:", From 143817d44e1ededeccbaff3e61690dfea4fe4109 Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 13:28:20 -0500 Subject: [PATCH 021/120] chore: 
bump ai sdk deps for opus 4.7 (#22869) --- bun.lock | 30 ++++++++++++++---------------- packages/opencode/package.json | 8 ++++---- 2 files changed, 18 insertions(+), 20 deletions(-) diff --git a/bun.lock b/bun.lock index 3d82a64af9..7225c0a9a0 100644 --- a/bun.lock +++ b/bun.lock @@ -146,7 +146,7 @@ "name": "@opencode-ai/console-function", "version": "1.4.6", "dependencies": { - "@ai-sdk/anthropic": "3.0.64", + "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/openai": "3.0.48", "@ai-sdk/openai-compatible": "2.0.37", "@hono/zod-validator": "catalog:", @@ -322,15 +322,15 @@ "@actions/github": "6.0.1", "@agentclientprotocol/sdk": "0.16.1", "@ai-sdk/alibaba": "1.0.17", - "@ai-sdk/amazon-bedrock": "4.0.93", - "@ai-sdk/anthropic": "3.0.67", + "@ai-sdk/amazon-bedrock": "4.0.94", + "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/azure": "3.0.49", "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", - "@ai-sdk/gateway": "3.0.97", + "@ai-sdk/gateway": "3.0.102", "@ai-sdk/google": "3.0.63", - "@ai-sdk/google-vertex": "4.0.109", + "@ai-sdk/google-vertex": "4.0.111", "@ai-sdk/groq": "3.0.31", "@ai-sdk/mistral": "3.0.27", "@ai-sdk/openai": "3.0.53", @@ -738,9 +738,9 @@ "@ai-sdk/alibaba": ["@ai-sdk/alibaba@1.0.17", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZbE+U5bWz2JBc5DERLowx5+TKbjGBE93LqKZAWvuEn7HOSQMraxFMZuc0ST335QZJAyfBOzh7m1mPQ+y7EaaoA=="], - "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.93", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hcXDU8QDwpAzLVTuY932TQVlIij9+iaVTxc5mPGY6yb//JMAAC5hMVhg93IrxlrxWLvMgjezNgoZGwquR+SGnw=="], + "@ai-sdk/amazon-bedrock": 
["@ai-sdk/amazon-bedrock@4.0.94", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XKE7wAjXejsIfNQvn3onvGUByhGHVM6W+xlL+1DAQLmjEb+ue4sOJIRehJ96rEvTXVVHRVyA6bSXx7ayxXfn5A=="], - "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-rwLi/Rsuj2pYniQXIrvClHvXDzgM4UQHHnvHTWEF14efnlKclG/1ghpNC+adsRujAbCTr6gRsSbDE2vEqriV7g=="], + "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], "@ai-sdk/azure": ["@ai-sdk/azure@3.0.49", "", { "dependencies": { "@ai-sdk/openai": "3.0.48", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-wskgAL+OmrHG7by/iWIxEBQCEdc1mDudha/UZav46i0auzdFfsDB/k2rXZaC4/3nWSgMZkxr0W3ncyouEGX/eg=="], @@ -758,11 +758,11 @@ "@ai-sdk/fireworks": ["@ai-sdk/fireworks@2.0.46", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XRKR0zgRyegdmtK5CDUEjlyRp0Fo+XVCdoG+301U1SGtgRIAYG3ObVtgzVJBVpJdHFSLHuYeLTnNiQoUxD7+FQ=="], - "@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.97", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ERHmVGX30YKTwxObuHQzNqoOf8Nb5WwYMDBn34e3TGGVn0vLEXwMimo7uRVTbhhi4gfu9WtwYTE4x1+csZok1w=="], + 
"@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.102", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-GrwDpaYJiVafrsA1MTbZtXPcQUI67g5AXiJo7Y1F8b+w+SiYHLk3ZIn1YmpQVoVAh2bjvxjj+Vo0AvfskuGH4g=="], "@ai-sdk/google": ["@ai-sdk/google@3.0.63", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-RfOZWVMYSPu2sPRfGajrauWAZ9BSaRopSn+AszkKWQ1MFj8nhaXvCqRHB5pBQUaHTfZKagvOmMpNfa/s3gPLgQ=="], - "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.109", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/google": "3.0.63", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-QzQ+DgOoSYlkU4mK0H+iaCaW1bl5zOimH9X2E2oylcVyUtAdCuduQ959Uw1ygW3l09J2K/ceEDtK8OUPHyOA7g=="], + "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.111", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/google": "3.0.64", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-5gILpAWWI5idfal/MfoH3tlQeSnOJ9jfL8JB8m2fdc3ue/9xoXkYDpXpDL/nyJImFjMCi6eR0Fpvlo/IKEWDIg=="], "@ai-sdk/groq": ["@ai-sdk/groq@3.0.31", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XbbugpnFmXGu2TlXiq8KUJskP6/VVbuFcnFIGDzDIB/Chg6XHsNnqrTF80Zxkh0Pd3+NvbM+2Uqrtsndk6bDAg=="], @@ -5152,9 +5152,9 @@ "@ai-sdk/alibaba/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": 
"^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], - "@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="], + "@ai-sdk/amazon-bedrock/@smithy/eventstream-codec": ["@smithy/eventstream-codec@4.2.13", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@smithy/types": "^4.14.0", "@smithy/util-hex-encoding": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-vYahwBAtRaAcFbOmE9aLr12z7RiHYDSLcnogSdxfm7kKfsNa3wH+NU5r7vTeB5rKvLsWyPjVX8iH94brP7umiQ=="], - "@ai-sdk/anthropic/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], + "@ai-sdk/amazon-bedrock/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], "@ai-sdk/azure/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], @@ -5166,7 +5166,7 @@ "@ai-sdk/fireworks/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } 
}, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], - "@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="], + "@ai-sdk/google-vertex/@ai-sdk/google": ["@ai-sdk/google@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-CbR82EgGPNrj/6q0HtclwuCqe0/pDShyv3nWDP/A9DroujzWXnLMlUJVrgPOsg4b40zQCwwVs2XSKCxvt/4QaA=="], "@ai-sdk/google-vertex/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], @@ -5684,6 +5684,8 @@ "ai/@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.95", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZmUNNbZl3V42xwQzPaNUi+s8eqR2lnrxf0bvB6YbLXpLjHYv0k2Y78t12cNOfY0bxGeuVVTLyk856uLuQIuXEQ=="], + "ai-gateway-provider/@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.93", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hcXDU8QDwpAzLVTuY932TQVlIij9+iaVTxc5mPGY6yb//JMAAC5hMVhg93IrxlrxWLvMgjezNgoZGwquR+SGnw=="], + "ai-gateway-provider/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", 
"@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="], "ai-gateway-provider/@ai-sdk/google": ["@ai-sdk/google@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-uz8tIlkDgQJG9Js2Wh9JHzd4kI9+hYJqf9XXJLx60vyN5mRIqhr49iwR5zGP5Gl8odp2PeR3Gh2k+5bh3Z1HHw=="], @@ -5900,8 +5902,6 @@ "nypm/tinyexec": ["tinyexec@1.1.1", "", {}, "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg=="], - "opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.67", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-FFX4P5Fd6lcQJc2OLngZQkbbJHa0IDDZi087Edb8qRZx6h90krtM61ArbMUL8us/7ZUwojCXnyJ/wQ2Eflx2jQ=="], - "opencode/@ai-sdk/openai": ["@ai-sdk/openai@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-Wld+Rbc05KaUn08uBt06eEuwcgalcIFtIl32Yp+GxuZXUQwOb6YeAuq+C6da4ch6BurFoqEaLemJVwjBb7x+PQ=="], "opencode/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], @@ -6094,8 +6094,6 @@ "@actions/github/@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="], - "@ai-sdk/anthropic/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, 
"sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], - "@ai-sdk/azure/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], "@ai-sdk/cerebras/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 7ed33ebe09..3a98d0eb93 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -79,15 +79,15 @@ "@actions/github": "6.0.1", "@agentclientprotocol/sdk": "0.16.1", "@ai-sdk/alibaba": "1.0.17", - "@ai-sdk/amazon-bedrock": "4.0.93", - "@ai-sdk/anthropic": "3.0.67", + "@ai-sdk/amazon-bedrock": "4.0.94", + "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/azure": "3.0.49", "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", - "@ai-sdk/gateway": "3.0.97", + "@ai-sdk/gateway": "3.0.102", "@ai-sdk/google": "3.0.63", - "@ai-sdk/google-vertex": "4.0.109", + "@ai-sdk/google-vertex": "4.0.111", "@ai-sdk/groq": "3.0.31", "@ai-sdk/mistral": "3.0.27", "@ai-sdk/openai": "3.0.53", From 370770122c355f50cfc98193a25b1150e0288f31 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 18:29:57 +0000 Subject: [PATCH 022/120] chore: generate --- bun.lock | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/bun.lock b/bun.lock index 7225c0a9a0..e236f3f491 100644 --- a/bun.lock +++ b/bun.lock @@ -146,7 +146,7 @@ "name": "@opencode-ai/console-function", "version": "1.4.6", "dependencies": { - "@ai-sdk/anthropic": "3.0.70", + "@ai-sdk/anthropic": "3.0.64", "@ai-sdk/openai": "3.0.48", "@ai-sdk/openai-compatible": "2.0.37", "@hono/zod-validator": "catalog:", @@ -740,7 +740,7 @@ "@ai-sdk/amazon-bedrock": 
["@ai-sdk/amazon-bedrock@4.0.94", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XKE7wAjXejsIfNQvn3onvGUByhGHVM6W+xlL+1DAQLmjEb+ue4sOJIRehJ96rEvTXVVHRVyA6bSXx7ayxXfn5A=="], - "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-rwLi/Rsuj2pYniQXIrvClHvXDzgM4UQHHnvHTWEF14efnlKclG/1ghpNC+adsRujAbCTr6gRsSbDE2vEqriV7g=="], "@ai-sdk/azure": ["@ai-sdk/azure@3.0.49", "", { "dependencies": { "@ai-sdk/openai": "3.0.48", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-wskgAL+OmrHG7by/iWIxEBQCEdc1mDudha/UZav46i0auzdFfsDB/k2rXZaC4/3nWSgMZkxr0W3ncyouEGX/eg=="], @@ -5152,10 +5152,14 @@ "@ai-sdk/alibaba/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], + "@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + 
"@ai-sdk/amazon-bedrock/@smithy/eventstream-codec": ["@smithy/eventstream-codec@4.2.13", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@smithy/types": "^4.14.0", "@smithy/util-hex-encoding": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-vYahwBAtRaAcFbOmE9aLr12z7RiHYDSLcnogSdxfm7kKfsNa3wH+NU5r7vTeB5rKvLsWyPjVX8iH94brP7umiQ=="], "@ai-sdk/amazon-bedrock/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], + "@ai-sdk/anthropic/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], + "@ai-sdk/azure/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], "@ai-sdk/cerebras/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], @@ -5166,6 +5170,8 @@ "@ai-sdk/fireworks/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], + "@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "@ai-sdk/google-vertex/@ai-sdk/google": ["@ai-sdk/google@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-CbR82EgGPNrj/6q0HtclwuCqe0/pDShyv3nWDP/A9DroujzWXnLMlUJVrgPOsg4b40zQCwwVs2XSKCxvt/4QaA=="], "@ai-sdk/google-vertex/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], @@ -5902,6 +5908,8 @@ "nypm/tinyexec": ["tinyexec@1.1.1", "", {}, "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg=="], + "opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "opencode/@ai-sdk/openai": ["@ai-sdk/openai@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-Wld+Rbc05KaUn08uBt06eEuwcgalcIFtIl32Yp+GxuZXUQwOb6YeAuq+C6da4ch6BurFoqEaLemJVwjBb7x+PQ=="], "opencode/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": 
"4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], @@ -6094,6 +6102,8 @@ "@actions/github/@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="], + "@ai-sdk/anthropic/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], + "@ai-sdk/azure/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], "@ai-sdk/cerebras/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], From 9afbdc102c2e7e449c98a08082b382cd2696715e Mon Sep 17 00:00:00 2001 From: Dax Date: Thu, 16 Apr 2026 14:45:17 -0400 Subject: [PATCH 023/120] fix(test): make plugin loader theme source path separator-safe (#22870) --- packages/opencode/test/cli/tui/plugin-loader.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/test/cli/tui/plugin-loader.test.ts b/packages/opencode/test/cli/tui/plugin-loader.test.ts index dc64fb3365..f5b04ff434 100644 --- a/packages/opencode/test/cli/tui/plugin-loader.test.ts +++ b/packages/opencode/test/cli/tui/plugin-loader.test.ts @@ -331,7 +331,7 @@ export default { const localOpts = { fn_marker: tmp.extra.fnMarker, marker: tmp.extra.localMarker, - source: tmp.extra.localDest.replace(".opencode/themes/", ""), + source: path.join(tmp.path, tmp.extra.localThemeFile), dest: tmp.extra.localDest, theme_path: `./${tmp.extra.localThemeFile}`, theme_name: 
tmp.extra.localThemeName, From bf4c1078290a5bf7e580141b17e7b37d905de311 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 15:07:02 -0400 Subject: [PATCH 024/120] fix: remove 7 unnecessary `as any` casts in opencode core (#22840) --- .../src/cli/cmd/tui/routes/session/index.tsx | 2 +- packages/opencode/src/storage/json-migration.ts | 12 ++++++------ packages/opencode/src/util/defer.ts | 6 ++---- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx index 1a64c21d00..5b4308d593 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx @@ -597,7 +597,7 @@ export function Session() { { title: conceal() ? "Disable code concealment" : "Enable code concealment", value: "session.toggle.conceal", - keybind: "messages_toggle_conceal" as any, + keybind: "messages_toggle_conceal", category: "Session", onSelect: (dialog) => { setConceal((prev) => !prev) diff --git a/packages/opencode/src/storage/json-migration.ts b/packages/opencode/src/storage/json-migration.ts index 4803d452fe..12133ce435 100644 --- a/packages/opencode/src/storage/json-migration.ts +++ b/packages/opencode/src/storage/json-migration.ts @@ -95,7 +95,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase[0], label: string) { if (values.length === 0) return 0 try { db.insert(table).values(values).onConflictDoNothing().run() @@ -152,7 +152,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase() - const projectValues = [] as any[] + const projectValues: unknown[] = [] for (let i = 0; i < projectFiles.length; i += batchSize) { const end = Math.min(i + batchSize, projectFiles.length) const batch = await read(projectFiles, i, end) @@ -186,7 +186,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase path.basename(path.dirname(file))) const 
sessionIds = new Set() - const sessionValues = [] as any[] + const sessionValues: unknown[] = [] for (let i = 0; i < sessionFiles.length; i += batchSize) { const end = Math.min(i + batchSize, sessionFiles.length) const batch = await read(sessionFiles, i, end) @@ -314,7 +314,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase | NodeSQLiteDatabase path.basename(file, ".json")) - const permValues = [] as any[] + const permValues: unknown[] = [] for (let i = 0; i < permFiles.length; i += batchSize) { const end = Math.min(i + batchSize, permFiles.length) const batch = await read(permFiles, i, end) @@ -376,7 +376,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase path.basename(file, ".json")) - const shareValues = [] as any[] + const shareValues: unknown[] = [] for (let i = 0; i < shareFiles.length; i += batchSize) { const end = Math.min(i + batchSize, shareFiles.length) const batch = await read(shareFiles, i, end) diff --git a/packages/opencode/src/util/defer.ts b/packages/opencode/src/util/defer.ts index d1c9edc66a..33eb4d74d2 100644 --- a/packages/opencode/src/util/defer.ts +++ b/packages/opencode/src/util/defer.ts @@ -1,6 +1,4 @@ -export function defer void | Promise>( - fn: T, -): T extends () => Promise ? 
{ [Symbol.asyncDispose]: () => Promise } : { [Symbol.dispose]: () => void } { +export function defer(fn: () => void | Promise): AsyncDisposable & Disposable { return { [Symbol.dispose]() { void fn() @@ -8,5 +6,5 @@ export function defer void | Promise>( [Symbol.asyncDispose]() { return Promise.resolve(fn()) }, - } as any + } } From 47e0e2342cbd9fd335864a403dbac9b3ec9a19af Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 14:12:43 -0500 Subject: [PATCH 025/120] tweak: set display 'summarized' by default for opus 4.7 thorugh messages api (#22873) --- packages/opencode/src/provider/transform.ts | 3 +++ packages/opencode/test/provider/transform.test.ts | 2 ++ 2 files changed, 5 insertions(+) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index a294c568d7..66e87fb3b8 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -594,6 +594,9 @@ export function variants(model: Provider.Model): Record { expect(result.xhigh).toEqual({ thinking: { type: "adaptive", + display: "summarized", }, effort: "xhigh", }) expect(result.max).toEqual({ thinking: { type: "adaptive", + display: "summarized", }, effort: "max", }) From 7c1b30291c8f8bde6e3ca7b257259384728a1eef Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 19:19:52 +0000 Subject: [PATCH 026/120] chore: update nix node_modules hashes --- nix/hashes.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nix/hashes.json b/nix/hashes.json index c844031be0..239b72fd70 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,8 +1,8 @@ { "nodeModules": { - "x86_64-linux": "sha256-b9tsgqQDXd2uM/j+rZnvkoXbXzB4iYCEasXsy9kgIl4=", - "aarch64-linux": "sha256-q8NTtFQJoyM7TTvErGA6RtmUscxoZKD/mj9N6S5YhkA=", - "aarch64-darwin": "sha256-/ccoSZNLef6j9j14HzpVqhKCR+czM3mhPKPH51mHO24=", - "x86_64-darwin": 
"sha256-6Pd10sMHL/5ZoWNvGPwPn4/AIs1TKjt/3gFyrVpBaE0=" + "x86_64-linux": "sha256-tYAb5Mo39UW1VEejYuo0jW0jzH2OyY/HrqgiZL3rmjY=", + "aarch64-linux": "sha256-3zGKV5UwokXpmY0nT1mry3IhNf2EQYLKT7ac+/trmQA=", + "aarch64-darwin": "sha256-oKXAut7eu/eW5a43OT8+aFuH1F1tuIldTs+7PUXSCv4=", + "x86_64-darwin": "sha256-Az+9X1scOEhw3aOO8laKJoZjiuz3qlLTIk1bx25P/z4=" } } From 219b473e660994ac69a0c5d753ac65da951f3bf8 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 15:24:24 -0400 Subject: [PATCH 027/120] refactor: unwrap BashArity namespace to flat exports + self-reexport (#22874) --- .../specs/effect/namespace-treeshake.md | 609 +++++------------- packages/opencode/src/permission/arity.ts | 296 ++++----- 2 files changed, 309 insertions(+), 596 deletions(-) diff --git a/packages/opencode/specs/effect/namespace-treeshake.md b/packages/opencode/specs/effect/namespace-treeshake.md index 5d1fbd07e5..ac4d3987de 100644 --- a/packages/opencode/specs/effect/namespace-treeshake.md +++ b/packages/opencode/specs/effect/namespace-treeshake.md @@ -1,499 +1,212 @@ -# Namespace → flat export migration +# Namespace → self-reexport migration -Migrate `export namespace` to the `export * as` / flat-export pattern used by -effect-smol. Primary goal: tree-shakeability. Secondary: consistency with Effect -conventions, LLM-friendliness for future migrations. - -## What changes and what doesn't - -The **consumer API stays the same**. You still write `Provider.ModelNotFoundError`, -`Config.JsonError`, `Bus.publish`, etc. The namespace ergonomics are preserved. - -What changes is **how** the namespace is constructed — the TypeScript -`export namespace` keyword is replaced by `export * as` in a barrel file. This -is a mechanical change: unwrap the namespace body into flat exports, add a -one-line barrel. Consumers that import `{ Provider }` don't notice. - -Import paths actually get **nicer**. Today most consumers import from the -explicit file (`"../provider/provider"`). 
After the migration, each module has a -barrel `index.ts`, so imports become `"../provider"` or `"@/provider"`: +Migrate every `export namespace Foo { ... }` to flat top-level exports plus a +single self-reexport line at the bottom of the same file: ```ts -// BEFORE — points at the file directly -import { Provider } from "../provider/provider" - -// AFTER — resolves to provider/index.ts, same Provider namespace -import { Provider } from "../provider" +export * as Foo from "./foo" ``` -## Why this matters right now +No barrel `index.ts` files. No cross-directory indirection. Consumers keep the +exact same `import { Foo } from "../foo/foo"` ergonomics. -The CLI binary startup time (TOI) is too slow. Profiling shows we're loading -massive dependency graphs that are never actually used at runtime — because -bundlers cannot tree-shake TypeScript `export namespace` bodies. +## Why this pattern -### The problem in one sentence - -`cli/error.ts` needs 6 lightweight `.isInstance()` checks on error classes, but -importing `{ Provider }` from `provider.ts` forces the bundler to include **all -20+ `@ai-sdk/*` packages**, `@aws-sdk/credential-providers`, -`google-auth-library`, and every other top-level import in that 1709-line file. - -### Why `export namespace` defeats tree-shaking - -TypeScript compiles `export namespace Foo { ... }` to an IIFE: - -```js -// TypeScript output -export var Provider; -(function (Provider) { - Provider.ModelNotFoundError = NamedError.create(...) - // ... 1600 more lines of assignments ... -})(Provider || (Provider = {})) -``` - -This is **opaque to static analysis**. The bundler sees one big function call -whose return value populates an object. It cannot determine which properties are -used downstream, so it keeps everything. Every `import` statement at the top of -`provider.ts` executes unconditionally — that's 20+ AI SDK packages loaded into -memory just so the CLI can check `Provider.ModelNotFoundError.isInstance(x)`. 
- -### What `export * as` does differently - -`export * as Provider from "./provider"` compiles to a static re-export. The -bundler knows the exact shape of `Provider` at compile time — it's the named -export list of `./provider.ts`. When it sees `Provider.ModelNotFoundError` used -but `Provider.layer` unused, it can trace that `ModelNotFoundError` doesn't -reference `createAnthropic` or any AI SDK import, and drop them. The namespace -object still exists at runtime — same API — but the bundler can see inside it. - -### Concrete impact - -The worst import chain in the codebase: +We tested three options against Bun, esbuild, Rollup (what Vite uses under the +hood), Bun's runtime, and Node's native TypeScript runner. ``` -src/index.ts (entry point) + heavy.ts loaded? + A. namespace B. barrel C. self-reexport +Bun bundler YES YES no +esbuild YES YES no +Rollup (Vite) YES YES no +Bun runtime YES YES no +Node --experimental-strip-types SYNTAX ERROR YES no +``` + +- **`export namespace`** compiles to an IIFE. Bundlers see one opaque function + call and can't analyze what's used. Node's native TS runner rejects the + syntax outright: `SyntaxError: TypeScript namespace declaration is not +supported in strip-only mode`. +- **Barrel `index.ts`** files (`export * as Foo from "./foo"` in a separate + file) force every re-exported sibling to evaluate when you import one name. + Siblings with side effects (top-level imports of SDKs, etc.) always load. +- **Self-reexport** keeps the file as plain ESM. Bundlers see static named + exports. The module is only pulled in when something actually imports from + it. There is no barrel hop, so no sibling contamination and no circular + import hazard. + +Bundle overhead for the self-reexport wrapper is roughly 240 bytes per module +(`Object.defineProperty` namespace proxy). At ~100 modules that's ~24KB — +negligible for a CLI binary. 
+ +## The pattern + +### Before + +```ts +// src/permission/arity.ts +export namespace BashArity { + export function prefix(tokens: string[]) { ... } +} +``` + +### After + +```ts +// src/permission/arity.ts +export function prefix(tokens: string[]) { ... } + +export * as BashArity from "./arity" +``` + +Consumers don't change at all: + +```ts +import { BashArity } from "@/permission/arity" +BashArity.prefix(...) // still works +``` + +Editors still auto-import `BashArity` like any named export, because the file +does have a named `BashArity` export at the module top level. + +### Odd but harmless + +`BashArity.BashArity.BashArity.prefix(...)` compiles and runs because the +namespace contains a re-export of itself. Nobody would write that. Not a +problem. + +## Why this is different from what we tried first + +An earlier pass used sibling barrel files (`index.ts` with `export * as ...`). +That turned out to be wrong for our constraints: + +1. The barrel file always loads all its sibling modules when you import + through it, even if you only need one. For our CLI this is exactly the + cost we're trying to avoid. +2. Barrel + sibling imports made it very easy to accidentally create circular + imports that only surface as `ReferenceError` at runtime, not at + typecheck. + +The self-reexport has none of those issues. There is no indirection. The +file and the namespace are the same unit. + +## Why this matters for startup + +The worst import chain in the codebase looks like: + +``` +src/index.ts └── FormatError from src/cli/error.ts - ├── { Provider } from provider/provider.ts (1709 lines) + ├── { Provider } from provider/provider.ts (~1700 lines) │ ├── 20+ @ai-sdk/* packages │ ├── @aws-sdk/credential-providers │ ├── google-auth-library - │ ├── gitlab-ai-provider, venice-ai-sdk-provider - │ └── fuzzysort, remeda, etc. 
- ├── { Config } from config/config.ts (1663 lines) - │ ├── jsonc-parser - │ ├── LSPServer (all server definitions) - │ └── Plugin, Auth, Env, Account, etc. - └── { MCP } from mcp/index.ts (930 lines) - ├── @modelcontextprotocol/sdk (3 transports) - └── open (browser launcher) + │ └── more + ├── { Config } from config/config.ts (~1600 lines) + └── { MCP } from mcp/mcp.ts (~900 lines) ``` -All of this gets pulled in to check `.isInstance()` on 6 error classes — code -that needs maybe 200 bytes total. This inflates the binary, increases startup -memory, and slows down initial module evaluation. - -### Why this also hurts memory - -Every module-level import is eagerly evaluated. Even with Bun's fast module -loader, evaluating 20+ AI SDK factory functions, the AWS credential chain, and -Google's auth library allocates objects, closures, and prototype chains that -persist for the lifetime of the process. Most CLI commands never use a provider -at all. - -## What effect-smol does - -effect-smol achieves tree-shakeable namespaced APIs via three structural choices. - -### 1. Each module is a separate file with flat named exports - -```ts -// Effect.ts — no namespace wrapper, just flat exports -export const gen: { ... } = internal.gen -export const fail: (error: E) => Effect = internal.fail -export const succeed: (value: A) => Effect = internal.succeed -// ... 230+ individual named exports -``` - -### 2. Barrel file uses `export * as` (not `export namespace`) - -```ts -// index.ts -export * as Effect from "./Effect.ts" -export * as Schema from "./Schema.ts" -export * as Stream from "./Stream.ts" -// ~134 modules -``` - -This creates a namespace-like API (`Effect.gen`, `Schema.parse`) but the -bundler knows the **exact shape** at compile time — it's the static export list -of that file. It can trace property accesses (`Effect.gen` → keep `gen`, -drop `timeout` if unused). With `export namespace`, the IIFE is opaque and -nothing can be dropped. - -### 3. 
`sideEffects: []` and deep imports - -```jsonc -// package.json -{ "sideEffects": [] } -``` - -Plus `"./*": "./src/*.ts"` in the exports map, enabling -`import * as Effect from "effect/Effect"` to bypass the barrel entirely. - -### 4. Errors as flat exports, not class declarations - -```ts -// Cause.ts -export const NoSuchElementErrorTypeId = core.NoSuchElementErrorTypeId -export interface NoSuchElementError extends YieldableError { ... } -export const NoSuchElementError: new(msg?: string) => NoSuchElementError = core.NoSuchElementError -export const isNoSuchElementError: (u: unknown) => u is NoSuchElementError = core.isNoSuchElementError -``` - -Each error is 4 independent exports: TypeId, interface, constructor (as const), -type guard. All individually shakeable. - -## The plan - -The core migration is **Phase 1** — convert `export namespace` to -`export * as`. Once that's done, the bundler can tree-shake individual exports -within each module. You do NOT need to break things into subfiles for -tree-shaking to work — the bundler traces which exports you actually access on -the namespace object and drops the rest, including their transitive imports. - -Splitting errors/schemas into separate files (Phase 0) is optional — it's a -lower-risk warmup step that can be done before or after the main conversion, and -it provides extra resilience against bundler edge cases. But the big win comes -from Phase 1. - -### Phase 0 (optional): Pre-split errors into subfiles - -This is a low-risk warmup that provides immediate benefit even before the full -`export * as` conversion. It's optional because Phase 1 alone is sufficient for -tree-shaking. But it's a good starting point if you want incremental progress: - -**For each namespace that defines errors** (15 files, ~30 error classes total): - -1. Create a sibling `errors.ts` file (e.g. 
`provider/errors.ts`) with the error - definitions as top-level named exports: - - ```ts - // provider/errors.ts - import z from "zod" - import { NamedError } from "@opencode-ai/shared/util/error" - import { ProviderID, ModelID } from "./schema" - - export const ModelNotFoundError = NamedError.create( - "ProviderModelNotFoundError", - z.object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, - suggestions: z.array(z.string()).optional(), - }), - ) - - export const InitError = NamedError.create("ProviderInitError", z.object({ providerID: ProviderID.zod })) - ``` - -2. In the namespace file, re-export from the errors file to maintain backward - compatibility: - - ```ts - // provider/provider.ts — inside the namespace - export { ModelNotFoundError, InitError } from "./errors" - ``` - -3. Update `cli/error.ts` (and any other light consumers) to import directly: - - ```ts - // BEFORE - import { Provider } from "../provider/provider" - Provider.ModelNotFoundError.isInstance(input) - - // AFTER - import { ModelNotFoundError as ProviderModelNotFoundError } from "../provider/errors" - ProviderModelNotFoundError.isInstance(input) - ``` - -**Files to split (Phase 0):** - -| Current file | New errors file | Errors to extract | -| ----------------------- | ------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | -| `provider/provider.ts` | `provider/errors.ts` | ModelNotFoundError, InitError | -| `provider/auth.ts` | `provider/auth-errors.ts` | OauthMissing, OauthCodeMissing, OauthCallbackFailed, ValidationFailed | -| `config/config.ts` | (already has `config/paths.ts`) | ConfigDirectoryTypoError → move to paths.ts | -| `config/markdown.ts` | `config/markdown-errors.ts` | FrontmatterError | -| `mcp/index.ts` | `mcp/errors.ts` | Failed | -| `session/message-v2.ts` | `session/message-errors.ts` | OutputLengthError, AbortedError, StructuredOutputError, AuthError, APIError, 
ContextOverflowError | -| `session/message.ts` | (shares with message-v2) | OutputLengthError, AuthError | -| `cli/ui.ts` | `cli/ui-errors.ts` | CancelledError | -| `skill/index.ts` | `skill/errors.ts` | InvalidError, NameMismatchError | -| `worktree/index.ts` | `worktree/errors.ts` | NotGitError, NameGenerationFailedError, CreateFailedError, StartCommandFailedError, RemoveFailedError, ResetFailedError | -| `storage/storage.ts` | `storage/errors.ts` | NotFoundError | -| `npm/index.ts` | `npm/errors.ts` | InstallFailedError | -| `ide/index.ts` | `ide/errors.ts` | AlreadyInstalledError, InstallFailedError | -| `lsp/client.ts` | `lsp/errors.ts` | InitializeError | - -### Phase 1: The real migration — `export namespace` → `export * as` - -This is the phase that actually fixes tree-shaking. For each module: - -1. **Unwrap** the `export namespace Foo { ... }` — remove the namespace wrapper, - keep all the members as top-level `export const` / `export function` / etc. -2. **Rename** the file if it's currently `index.ts` (e.g. `bus/index.ts` → - `bus/bus.ts`), so the barrel can take `index.ts`. -3. **Create the barrel** `index.ts` with one line: `export * as Foo from "./foo"` - -The file structure change for a module that's currently a single file: - -``` -# BEFORE -provider/ - provider.ts ← 1709-line file with `export namespace Provider { ... }` - -# AFTER -provider/ - index.ts ← NEW: `export * as Provider from "./provider"` - provider.ts ← SAME file, same name, just unwrap the namespace -``` - -And the code change is purely removing the wrapper: - -```ts -// BEFORE: provider/provider.ts -export namespace Provider { - export class Service extends Context.Service<...>()("@opencode/Provider") {} - export const layer = Layer.effect(Service, ...) - export const ModelNotFoundError = NamedError.create(...) - export function parseModel(model: string) { ... 
} -} - -// AFTER: provider/provider.ts — identical exports, no namespace keyword -export class Service extends Context.Service<...>()("@opencode/Provider") {} -export const layer = Layer.effect(Service, ...) -export const ModelNotFoundError = NamedError.create(...) -export function parseModel(model: string) { ... } -``` - -```ts -// NEW: provider/index.ts -export * as Provider from "./provider" -``` - -Consumer code barely changes — import path gets shorter: - -```ts -// BEFORE -import { Provider } from "../provider/provider" - -// AFTER — resolves to provider/index.ts, same Provider object -import { Provider } from "../provider" -``` - -All access like `Provider.ModelNotFoundError`, `Provider.Service`, -`Provider.layer` works exactly as before. The difference is invisible to -consumers but lets the bundler see inside the namespace. - -**Once this is done, you don't need to break anything into subfiles for -tree-shaking.** The bundler traces that `Provider.ModelNotFoundError` only -depends on `NamedError` + `zod` + the schema file, and drops -`Provider.layer` + all 20 AI SDK imports when they're unused. This works because -`export * as` gives the bundler a static export list it can do inner-graph -analysis on — it knows which exports reference which imports. - -**Order of conversion** (by risk / size, do small modules first): - -1. Tiny utilities: `Archive`, `Color`, `Token`, `Rpc`, `LocalContext` (~7-66 lines each) -2. Small services: `Auth`, `Env`, `BusEvent`, `SessionStatus`, `SessionRunState`, `Editor`, `Selection` (~25-91 lines) -3. Medium services: `Bus`, `Format`, `FileTime`, `FileWatcher`, `Command`, `Question`, `Permission`, `Vcs`, `Project` -4. Large services: `Config`, `Provider`, `MCP`, `Session`, `SessionProcessor`, `SessionPrompt`, `ACP` - -### Phase 2: Build configuration - -After the module structure supports tree-shaking: - -1. 
Add `"sideEffects": []` to `packages/opencode/package.json` (or - `"sideEffects": false`) — this is safe because our services use explicit - layer composition, not import-time side effects. -2. Verify Bun's bundler respects the new structure. If Bun's tree-shaking is - insufficient, evaluate whether the compiled binary path needs an esbuild - pre-pass. -3. Consider adding `/*#__PURE__*/` annotations to `NamedError.create(...)` calls - — these are factory functions that return classes, and bundlers may not know - they're side-effect-free without the annotation. +All of that currently gets pulled in just to do `.isInstance()` on a handful +of error classes. The namespace IIFE shape is the main reason bundlers cannot +strip the unused parts. Self-reexport + flat ESM fixes it. ## Automation -The transformation is scripted. From `packages/opencode`: +From `packages/opencode`: ```bash bun script/unwrap-namespace.ts [--dry-run] ``` -The script uses ast-grep for accurate AST-based namespace boundary detection -(no false matches from braces in strings/templates/comments), then: +The script: -1. Removes the `export namespace Foo {` line and its closing `}` -2. Dedents the body by one indent level (2 spaces) -3. If the file is `index.ts`, renames it to `.ts` and creates a new - `index.ts` barrel -4. If the file is NOT `index.ts`, rewrites it in place and creates `index.ts` -5. Prints the exact commands to find and rewrite import paths +1. Uses ast-grep to locate the `export namespace Foo { ... }` block accurately. +2. Removes the `export namespace Foo {` line and the matching closing `}`. +3. Dedents the body by one indent level (2 spaces). +4. Rewrites `Foo.Bar` self-references inside the file to just `Bar`. +5. Appends `export * as Foo from "./"` at the bottom of the file. +6. Never creates a barrel `index.ts`. -### Walkthrough: converting a module - -Using `Provider` as an example: +### Typical flow for one file ```bash -# 1. 
Preview what will change -bun script/unwrap-namespace.ts src/provider/provider.ts --dry-run +# 1. Preview +bun script/unwrap-namespace.ts src/permission/arity.ts --dry-run -# 2. Apply the transformation -bun script/unwrap-namespace.ts src/provider/provider.ts +# 2. Apply +bun script/unwrap-namespace.ts src/permission/arity.ts -# 3. Rewrite import paths (script prints the exact command) -rg -l 'from.*provider/provider' src/ | xargs sed -i '' 's|provider/provider"|provider"|g' - -# 4. Verify -bun typecheck -bun run test +# 3. Verify +cd packages/opencode +bunx --bun tsgo --noEmit +bun run --conditions=browser ./src/index.ts generate +bun run test ``` -**What changes on disk:** +### Consumer imports usually don't need to change -``` -# BEFORE -provider/ - provider.ts ← 1709 lines, `export namespace Provider { ... }` - -# AFTER -provider/ - index.ts ← NEW: `export * as Provider from "./provider"` - provider.ts ← same file, namespace unwrapped to flat exports -``` - -**What changes in consumer code:** +Most consumers already import straight from the file, e.g.: ```ts -// BEFORE -import { Provider } from "../provider/provider" - -// AFTER — shorter path, same Provider object -import { Provider } from "../provider" +import { BashArity } from "@/permission/arity" +import { Config } from "@/config/config" ``` -All property access (`Provider.Service`, `Provider.ModelNotFoundError`, etc.) -stays identical. +Because the file itself now does `export * as Foo from "./foo"`, those imports +keep working with zero edits. -### Two cases the script handles - -**Case A: file is NOT `index.ts`** (e.g. `provider/provider.ts`) - -- Rewrites the file in place (unwrap + dedent) -- Creates `provider/index.ts` as the barrel -- Import paths change: `"../provider/provider"` → `"../provider"` - -**Case B: file IS `index.ts`** (e.g. 
`bus/index.ts`) - -- Renames `index.ts` → `bus.ts` (kebab-case of namespace name) -- Creates new `index.ts` as the barrel -- **No import rewrites needed** — `"@/bus"` already resolves to `bus/index.ts` - -## Do I need to split errors/schemas into subfiles? - -**No.** Once you do the `export * as` conversion, the bundler can tree-shake -individual exports within the file. If `cli/error.ts` only accesses -`Provider.ModelNotFoundError`, the bundler traces that `ModelNotFoundError` -doesn't reference `createAnthropic` and drops the AI SDK imports. - -Splitting into subfiles (errors.ts, schema.ts) is still a fine idea for **code -organization** — smaller files are easier to read and review. But it's not -required for tree-shaking. The `export * as` conversion alone is sufficient. - -The one case where subfile splitting provides extra tree-shake value is if an -imported package has module-level side effects that the bundler can't prove are -unused. In practice this is rare — most npm packages are side-effect-free — and -adding `"sideEffects": []` to package.json handles the common cases. - -## Scope - -| Metric | Count | -| ----------------------------------------------- | --------------- | -| Files with `export namespace` | 106 | -| Total namespace declarations | 118 (12 nested) | -| Files with `NamedError.create` inside namespace | 15 | -| Total error classes to extract | ~30 | -| Files using `export * as` today | 0 | - -Phase 1 (the `export * as` conversion) is the main change. It's mechanical and -LLM-friendly but touches every import site, so it should be done module by -module with type-checking between each step. Each module is an independent PR. - -## Rules for new code - -Going forward: - -- **No new `export namespace`**. Use a file with flat named exports and - `export * as` in the barrel. -- Keep the service, layer, errors, schemas, and runtime wiring together in one - file if you want — that's fine now. 
The `export * as` barrel makes everything - individually shakeable regardless of file structure. -- If a file grows large enough that it's hard to navigate, split by concern - (errors.ts, schema.ts, etc.) for readability. Not for tree-shaking — the - bundler handles that. - -## Circular import rules - -Barrel files (`index.ts` with `export * as`) introduce circular import risks. -These cause `ReferenceError: Cannot access 'X' before initialization` at -runtime — not caught by the type checker. - -### Rule 1: Sibling files never import through their own barrel - -Files in the same directory must import directly from the source file, never -through `"."` or `"@/"`: +The only edits needed are when a consumer was importing through a previous +barrel (`"@/config"` or `"../config"` resolving to `config/index.ts`). In +that case, repoint it at the file: ```ts -// BAD — circular: index.ts re-exports both files, so A → index → B → index → A -import { Sibling } from "." +// before +import { Config } from "@/config" -// GOOD — direct, no cycle -import * as Sibling from "./sibling" +// after +import { Config } from "@/config/config" ``` -### Rule 2: Cross-directory imports must not form cycles through barrels +### Dynamic imports in tests -If `src/lsp/lsp.ts` imports `Config` from `"../config"`, and -`src/config/config.ts` imports `LSPServer` from `"../lsp"`, that's a cycle: +If a test did `const { Foo } = await import("../../src/x/y")`, the destructure +still works because of the self-reexport. No change required. -``` -lsp/lsp.ts → config/index.ts → config/config.ts → lsp/index.ts → lsp/lsp.ts 💥 -``` +## Verification checklist (per PR) -Fix by importing the specific file, breaking the cycle: - -```ts -// In config/config.ts — import directly, not through the lsp barrel -import * as LSPServer from "../lsp/server" -``` - -### Why the type checker doesn't catch this - -TypeScript resolves types lazily — it doesn't evaluate module-scope -expressions. 
The `ReferenceError` only happens at runtime when a module-scope -`const` or function call accesses a value from a circular dependency that -hasn't finished initializing. The SDK build step (`bun run --conditions=browser -./src/index.ts generate`) is the reliable way to catch these because it -evaluates all modules eagerly. - -### How to verify - -After any namespace conversion, run: +Run all of these locally before pushing: ```bash cd packages/opencode +bunx --bun tsgo --noEmit bun run --conditions=browser ./src/index.ts generate +bun run test ``` -If this completes without `ReferenceError`, the module graph is safe. +Also do a quick grep in `src/`, `test/`, and `script/` to make sure no +consumer is still importing the namespace from an old barrel path that no +longer exports it. + +The SDK build step (`bun run --conditions=browser ./src/index.ts generate`) +evaluates every module eagerly and is the most reliable way to catch circular +import regressions at runtime — the typechecker does not catch these. + +## Rules for new code + +- No new `export namespace`. +- Every module file that wants a namespace gets a self-reexport at the + bottom: + `export * as Foo from "./foo"` +- Consumers import from the file itself: + `import { Foo } from "../path/to/foo"` +- No new barrel `index.ts` files for internal code. +- If a file needs a sibling, import the sibling file directly: + `import * as Sibling from "./sibling"`, not `from "."`. + +## Scope + +There are still dozens of `export namespace` files left across the codebase. +Each one is its own small PR. Do them one at a time, verified locally, rather +than batching by directory. 
diff --git a/packages/opencode/src/permission/arity.ts b/packages/opencode/src/permission/arity.ts index 948841c8e7..cd4b0a7d58 100644 --- a/packages/opencode/src/permission/arity.ts +++ b/packages/opencode/src/permission/arity.ts @@ -1,15 +1,14 @@ -export namespace BashArity { - export function prefix(tokens: string[]) { - for (let len = tokens.length; len > 0; len--) { - const prefix = tokens.slice(0, len).join(" ") - const arity = ARITY[prefix] - if (arity !== undefined) return tokens.slice(0, arity) - } - if (tokens.length === 0) return [] - return tokens.slice(0, 1) +export function prefix(tokens: string[]) { + for (let len = tokens.length; len > 0; len--) { + const prefix = tokens.slice(0, len).join(" ") + const arity = ARITY[prefix] + if (arity !== undefined) return tokens.slice(0, arity) } + if (tokens.length === 0) return [] + return tokens.slice(0, 1) +} - /* Generated with following prompt: +/* Generated with following prompt: You are generating a dictionary of command-prefix arities for bash-style commands. This dictionary is used to identify the "human-understandable command" from an input shell command.### **RULES (follow strictly)**1. Each entry maps a **command prefix string → number**, representing how many **tokens** define the command. 2. **Flags NEVER count as tokens**. Only subcommands count. 
@@ -22,142 +21,143 @@ This dictionary is used to identify the "human-understandable command" from an i * `npm run dev` → `npm run dev` (because `npm run` has arity 3) * `python script.py` → `python script.py` (default: whole input, not in dictionary)### **Now generate the dictionary.** */ - const ARITY: Record = { - cat: 1, // cat file.txt - cd: 1, // cd /path/to/dir - chmod: 1, // chmod 755 script.sh - chown: 1, // chown user:group file.txt - cp: 1, // cp source.txt dest.txt - echo: 1, // echo "hello world" - env: 1, // env - export: 1, // export PATH=/usr/bin - grep: 1, // grep pattern file.txt - kill: 1, // kill 1234 - killall: 1, // killall process - ln: 1, // ln -s source target - ls: 1, // ls -la - mkdir: 1, // mkdir new-dir - mv: 1, // mv old.txt new.txt - ps: 1, // ps aux - pwd: 1, // pwd - rm: 1, // rm file.txt - rmdir: 1, // rmdir empty-dir - sleep: 1, // sleep 5 - source: 1, // source ~/.bashrc - tail: 1, // tail -f log.txt - touch: 1, // touch file.txt - unset: 1, // unset VAR - which: 1, // which node - aws: 3, // aws s3 ls - az: 3, // az storage blob list - bazel: 2, // bazel build - brew: 2, // brew install node - bun: 2, // bun install - "bun run": 3, // bun run dev - "bun x": 3, // bun x vite - cargo: 2, // cargo build - "cargo add": 3, // cargo add tokio - "cargo run": 3, // cargo run main - cdk: 2, // cdk deploy - cf: 2, // cf push app - cmake: 2, // cmake build - composer: 2, // composer require laravel - consul: 2, // consul members - "consul kv": 3, // consul kv get config/app - crictl: 2, // crictl ps - deno: 2, // deno run server.ts - "deno task": 3, // deno task dev - doctl: 3, // doctl kubernetes cluster list - docker: 2, // docker run nginx - "docker builder": 3, // docker builder prune - "docker compose": 3, // docker compose up - "docker container": 3, // docker container ls - "docker image": 3, // docker image prune - "docker network": 3, // docker network inspect - "docker volume": 3, // docker volume ls - eksctl: 2, // eksctl get 
clusters - "eksctl create": 3, // eksctl create cluster - firebase: 2, // firebase deploy - flyctl: 2, // flyctl deploy - gcloud: 3, // gcloud compute instances list - gh: 3, // gh pr list - git: 2, // git checkout main - "git config": 3, // git config user.name - "git remote": 3, // git remote add origin - "git stash": 3, // git stash pop - go: 2, // go build - gradle: 2, // gradle build - helm: 2, // helm install mychart - heroku: 2, // heroku logs - hugo: 2, // hugo new site blog - ip: 2, // ip link show - "ip addr": 3, // ip addr show - "ip link": 3, // ip link set eth0 up - "ip netns": 3, // ip netns exec foo bash - "ip route": 3, // ip route add default via 1.1.1.1 - kind: 2, // kind delete cluster - "kind create": 3, // kind create cluster - kubectl: 2, // kubectl get pods - "kubectl kustomize": 3, // kubectl kustomize overlays/dev - "kubectl rollout": 3, // kubectl rollout restart deploy/api - kustomize: 2, // kustomize build . - make: 2, // make build - mc: 2, // mc ls myminio - "mc admin": 3, // mc admin info myminio - minikube: 2, // minikube start - mongosh: 2, // mongosh test - mysql: 2, // mysql -u root - mvn: 2, // mvn compile - ng: 2, // ng generate component home - npm: 2, // npm install - "npm exec": 3, // npm exec vite - "npm init": 3, // npm init vue - "npm run": 3, // npm run dev - "npm view": 3, // npm view react version - nvm: 2, // nvm use 18 - nx: 2, // nx build - openssl: 2, // openssl genrsa 2048 - "openssl req": 3, // openssl req -new -key key.pem - "openssl x509": 3, // openssl x509 -in cert.pem - pip: 2, // pip install numpy - pipenv: 2, // pipenv install flask - pnpm: 2, // pnpm install - "pnpm dlx": 3, // pnpm dlx create-next-app - "pnpm exec": 3, // pnpm exec vite - "pnpm run": 3, // pnpm run dev - poetry: 2, // poetry add requests - podman: 2, // podman run alpine - "podman container": 3, // podman container ls - "podman image": 3, // podman image prune - psql: 2, // psql -d mydb - pulumi: 2, // pulumi up - "pulumi stack": 3, // 
pulumi stack output - pyenv: 2, // pyenv install 3.11 - python: 2, // python -m venv env - rake: 2, // rake db:migrate - rbenv: 2, // rbenv install 3.2.0 - "redis-cli": 2, // redis-cli ping - rustup: 2, // rustup update - serverless: 2, // serverless invoke - sfdx: 3, // sfdx force:org:list - skaffold: 2, // skaffold dev - sls: 2, // sls deploy - sst: 2, // sst deploy - swift: 2, // swift build - systemctl: 2, // systemctl restart nginx - terraform: 2, // terraform apply - "terraform workspace": 3, // terraform workspace select prod - tmux: 2, // tmux new -s dev - turbo: 2, // turbo run build - ufw: 2, // ufw allow 22 - vault: 2, // vault login - "vault auth": 3, // vault auth list - "vault kv": 3, // vault kv get secret/api - vercel: 2, // vercel deploy - volta: 2, // volta install node - wp: 2, // wp plugin install - yarn: 2, // yarn add react - "yarn dlx": 3, // yarn dlx create-react-app - "yarn run": 3, // yarn run dev - } +const ARITY: Record = { + cat: 1, // cat file.txt + cd: 1, // cd /path/to/dir + chmod: 1, // chmod 755 script.sh + chown: 1, // chown user:group file.txt + cp: 1, // cp source.txt dest.txt + echo: 1, // echo "hello world" + env: 1, // env + export: 1, // export PATH=/usr/bin + grep: 1, // grep pattern file.txt + kill: 1, // kill 1234 + killall: 1, // killall process + ln: 1, // ln -s source target + ls: 1, // ls -la + mkdir: 1, // mkdir new-dir + mv: 1, // mv old.txt new.txt + ps: 1, // ps aux + pwd: 1, // pwd + rm: 1, // rm file.txt + rmdir: 1, // rmdir empty-dir + sleep: 1, // sleep 5 + source: 1, // source ~/.bashrc + tail: 1, // tail -f log.txt + touch: 1, // touch file.txt + unset: 1, // unset VAR + which: 1, // which node + aws: 3, // aws s3 ls + az: 3, // az storage blob list + bazel: 2, // bazel build + brew: 2, // brew install node + bun: 2, // bun install + "bun run": 3, // bun run dev + "bun x": 3, // bun x vite + cargo: 2, // cargo build + "cargo add": 3, // cargo add tokio + "cargo run": 3, // cargo run main + cdk: 2, // cdk 
deploy + cf: 2, // cf push app + cmake: 2, // cmake build + composer: 2, // composer require laravel + consul: 2, // consul members + "consul kv": 3, // consul kv get config/app + crictl: 2, // crictl ps + deno: 2, // deno run server.ts + "deno task": 3, // deno task dev + doctl: 3, // doctl kubernetes cluster list + docker: 2, // docker run nginx + "docker builder": 3, // docker builder prune + "docker compose": 3, // docker compose up + "docker container": 3, // docker container ls + "docker image": 3, // docker image prune + "docker network": 3, // docker network inspect + "docker volume": 3, // docker volume ls + eksctl: 2, // eksctl get clusters + "eksctl create": 3, // eksctl create cluster + firebase: 2, // firebase deploy + flyctl: 2, // flyctl deploy + gcloud: 3, // gcloud compute instances list + gh: 3, // gh pr list + git: 2, // git checkout main + "git config": 3, // git config user.name + "git remote": 3, // git remote add origin + "git stash": 3, // git stash pop + go: 2, // go build + gradle: 2, // gradle build + helm: 2, // helm install mychart + heroku: 2, // heroku logs + hugo: 2, // hugo new site blog + ip: 2, // ip link show + "ip addr": 3, // ip addr show + "ip link": 3, // ip link set eth0 up + "ip netns": 3, // ip netns exec foo bash + "ip route": 3, // ip route add default via 1.1.1.1 + kind: 2, // kind delete cluster + "kind create": 3, // kind create cluster + kubectl: 2, // kubectl get pods + "kubectl kustomize": 3, // kubectl kustomize overlays/dev + "kubectl rollout": 3, // kubectl rollout restart deploy/api + kustomize: 2, // kustomize build . 
+ make: 2, // make build + mc: 2, // mc ls myminio + "mc admin": 3, // mc admin info myminio + minikube: 2, // minikube start + mongosh: 2, // mongosh test + mysql: 2, // mysql -u root + mvn: 2, // mvn compile + ng: 2, // ng generate component home + npm: 2, // npm install + "npm exec": 3, // npm exec vite + "npm init": 3, // npm init vue + "npm run": 3, // npm run dev + "npm view": 3, // npm view react version + nvm: 2, // nvm use 18 + nx: 2, // nx build + openssl: 2, // openssl genrsa 2048 + "openssl req": 3, // openssl req -new -key key.pem + "openssl x509": 3, // openssl x509 -in cert.pem + pip: 2, // pip install numpy + pipenv: 2, // pipenv install flask + pnpm: 2, // pnpm install + "pnpm dlx": 3, // pnpm dlx create-next-app + "pnpm exec": 3, // pnpm exec vite + "pnpm run": 3, // pnpm run dev + poetry: 2, // poetry add requests + podman: 2, // podman run alpine + "podman container": 3, // podman container ls + "podman image": 3, // podman image prune + psql: 2, // psql -d mydb + pulumi: 2, // pulumi up + "pulumi stack": 3, // pulumi stack output + pyenv: 2, // pyenv install 3.11 + python: 2, // python -m venv env + rake: 2, // rake db:migrate + rbenv: 2, // rbenv install 3.2.0 + "redis-cli": 2, // redis-cli ping + rustup: 2, // rustup update + serverless: 2, // serverless invoke + sfdx: 3, // sfdx force:org:list + skaffold: 2, // skaffold dev + sls: 2, // sls deploy + sst: 2, // sst deploy + swift: 2, // swift build + systemctl: 2, // systemctl restart nginx + terraform: 2, // terraform apply + "terraform workspace": 3, // terraform workspace select prod + tmux: 2, // tmux new -s dev + turbo: 2, // turbo run build + ufw: 2, // ufw allow 22 + vault: 2, // vault login + "vault auth": 3, // vault auth list + "vault kv": 3, // vault kv get secret/api + vercel: 2, // vercel deploy + volta: 2, // volta install node + wp: 2, // wp plugin install + yarn: 2, // yarn add react + "yarn dlx": 3, // yarn dlx create-react-app + "yarn run": 3, // yarn run dev } + +export * 
as BashArity from "./arity" From 2fe9d9447070f6967d80b0a8f74239e1969d9e1c Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 15:27:53 -0400 Subject: [PATCH 028/120] fix: remove 8 more unnecessary `as any` casts in opencode core (#22877) --- packages/opencode/src/acp/agent.ts | 2 +- packages/opencode/src/cli/cmd/providers.ts | 4 +++- packages/opencode/src/cli/cmd/tui/win32.ts | 5 +++-- packages/opencode/src/lsp/lsp.ts | 6 +++--- packages/opencode/src/mcp/mcp.ts | 2 +- .../copilot/responses/openai-responses-language-model.ts | 4 ++-- packages/opencode/src/provider/transform.ts | 2 +- packages/opencode/src/util/effect-zod.ts | 4 ++-- 8 files changed, 16 insertions(+), 13 deletions(-) diff --git a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts index 9388c87f12..5d8c723ea7 100644 --- a/packages/opencode/src/acp/agent.ts +++ b/packages/opencode/src/acp/agent.ts @@ -178,7 +178,7 @@ export namespace ACP { }) for await (const event of events.stream) { if (this.eventAbort.signal.aborted) return - const payload = (event as any)?.payload + const payload = event?.payload if (!payload) continue await this.handleEvent(payload as Event).catch((error) => { log.error("failed to handle event", { error, type: payload.type }) diff --git a/packages/opencode/src/cli/cmd/providers.ts b/packages/opencode/src/cli/cmd/providers.ts index 4bc3f0ea6c..e2eb0b65a3 100644 --- a/packages/opencode/src/cli/cmd/providers.ts +++ b/packages/opencode/src/cli/cmd/providers.ts @@ -297,7 +297,9 @@ export const ProvidersLoginCommand = cmd({ prompts.intro("Add credential") if (args.url) { const url = args.url.replace(/\/+$/, "") - const wellknown = await fetch(`${url}/.well-known/opencode`).then((x) => x.json() as any) + const wellknown = (await fetch(`${url}/.well-known/opencode`).then((x) => x.json())) as { + auth: { command: string[]; env: string } + } prompts.log.info(`Running \`${wellknown.auth.command.join(" ")}\``) const proc = 
Process.spawn(wellknown.auth.command, { stdout: "pipe", diff --git a/packages/opencode/src/cli/cmd/tui/win32.ts b/packages/opencode/src/cli/cmd/tui/win32.ts index 23e9f44857..1aaa80aecd 100644 --- a/packages/opencode/src/cli/cmd/tui/win32.ts +++ b/packages/opencode/src/cli/cmd/tui/win32.ts @@ -1,4 +1,5 @@ import { dlopen, ptr } from "bun:ffi" +import type { ReadStream } from "node:tty" const STD_INPUT_HANDLE = -10 const ENABLE_PROCESSED_INPUT = 0x0001 @@ -71,7 +72,7 @@ export function win32InstallCtrlCGuard() { if (!load()) return if (unhook) return unhook - const stdin = process.stdin as any + const stdin = process.stdin as ReadStream const original = stdin.setRawMode const handle = k32!.symbols.GetStdHandle(STD_INPUT_HANDLE) @@ -93,7 +94,7 @@ export function win32InstallCtrlCGuard() { setImmediate(enforce) } - let wrapped: ((mode: boolean) => unknown) | undefined + let wrapped: ReadStream["setRawMode"] | undefined if (typeof original === "function") { wrapped = (mode: boolean) => { diff --git a/packages/opencode/src/lsp/lsp.ts b/packages/opencode/src/lsp/lsp.ts index 2c0982eca5..d4d1e75634 100644 --- a/packages/opencode/src/lsp/lsp.ts +++ b/packages/opencode/src/lsp/lsp.ts @@ -465,12 +465,12 @@ export const layer = Layer.effect( direction: "callHierarchy/incomingCalls" | "callHierarchy/outgoingCalls", ) { const results = yield* run(input.file, async (client) => { - const items = (await client.connection - .sendRequest("textDocument/prepareCallHierarchy", { + const items = await client.connection + .sendRequest("textDocument/prepareCallHierarchy", { textDocument: { uri: pathToFileURL(input.file).href }, position: { line: input.line, character: input.character }, }) - .catch(() => [])) as any[] + .catch(() => [] as unknown[]) if (!items?.length) return [] return client.connection.sendRequest(direction, { item: items[0] }).catch(() => []) }) diff --git a/packages/opencode/src/mcp/mcp.ts b/packages/opencode/src/mcp/mcp.ts index 6666e0854f..61201ce76d 100644 --- 
a/packages/opencode/src/mcp/mcp.ts +++ b/packages/opencode/src/mcp/mcp.ts @@ -531,7 +531,7 @@ export const layer = Layer.effect( Object.values(s.clients), (client) => Effect.gen(function* () { - const pid = (client.transport as any)?.pid + const pid = client.transport instanceof StdioClientTransport ? client.transport.pid : null if (typeof pid === "number") { const pids = yield* descendants(pid) for (const dpid of pids) { diff --git a/packages/opencode/src/provider/sdk/copilot/responses/openai-responses-language-model.ts b/packages/opencode/src/provider/sdk/copilot/responses/openai-responses-language-model.ts index 92c8fd857b..250d1f6f34 100644 --- a/packages/opencode/src/provider/sdk/copilot/responses/openai-responses-language-model.ts +++ b/packages/opencode/src/provider/sdk/copilot/responses/openai-responses-language-model.ts @@ -354,7 +354,7 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 { details: "flex processing is only available for o3, o4-mini, and gpt-5 models", }) // Remove from args if not supported - delete (baseArgs as any).service_tier + baseArgs.service_tier = undefined } // Validate priority processing support @@ -366,7 +366,7 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 { "priority processing is only available for supported models (gpt-4, gpt-5, gpt-5-mini, o3, o4-mini) and requires Enterprise access. 
gpt-5-nano is not supported", }) // Remove from args if not supported - delete (baseArgs as any).service_tier + baseArgs.service_tier = undefined } const { diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 66e87fb3b8..e527251b0f 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -193,7 +193,7 @@ function normalizeMessages( providerOptions: { ...msg.providerOptions, openaiCompatible: { - ...(msg.providerOptions as any)?.openaiCompatible, + ...msg.providerOptions?.openaiCompatible, [field]: reasoningText, }, }, diff --git a/packages/opencode/src/util/effect-zod.ts b/packages/opencode/src/util/effect-zod.ts index 553d7a0650..6e99fd4688 100644 --- a/packages/opencode/src/util/effect-zod.ts +++ b/packages/opencode/src/util/effect-zod.ts @@ -77,8 +77,8 @@ function union(ast: SchemaAST.Union): z.ZodTypeAny { if (items.length === 1) return items[0] if (items.length < 2) return fail(ast) - const discriminator = (ast as any).annotations?.discriminator - if (discriminator) { + const discriminator = ast.annotations?.discriminator + if (typeof discriminator === "string") { return z.discriminatedUnion(discriminator, items as [z.ZodObject, z.ZodObject, ...z.ZodObject[]]) } From 6c3b28db64e47895553949880f296bae74691f4a Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 14:38:39 -0500 Subject: [PATCH 029/120] fix: ensure that double pasting doesnt happen after tui perf commit was merged (#22880) --- .../opencode/src/cli/cmd/tui/component/prompt/index.tsx | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index e64a16eb8a..82c4a7222f 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ 
b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -1031,6 +1031,10 @@ export function Prompt(props: PromptProps) { return } + // Once we cross an async boundary below, the terminal may perform its + // default paste unless we suppress it first and handle insertion ourselves. + event.preventDefault() + const filepath = iife(() => { const raw = pastedContent.replace(/^['"]+|['"]+$/g, "") if (raw.startsWith("file://")) { @@ -1048,7 +1052,6 @@ export function Prompt(props: PromptProps) { const filename = path.basename(filepath) // Handle SVG as raw text content, not as base64 image if (mime === "image/svg+xml") { - event.preventDefault() const content = await Filesystem.readText(filepath).catch(() => {}) if (content) { pasteText(content, `[SVG: ${filename ?? "image"}]`) @@ -1056,7 +1059,6 @@ export function Prompt(props: PromptProps) { } } if (mime.startsWith("image/") || mime === "application/pdf") { - event.preventDefault() const content = await Filesystem.readArrayBuffer(filepath) .then((buffer) => Buffer.from(buffer).toString("base64")) .catch(() => {}) @@ -1078,11 +1080,12 @@ export function Prompt(props: PromptProps) { (lineCount >= 3 || pastedContent.length > 150) && !sync.data.config.experimental?.disable_paste_summary ) { - event.preventDefault() pasteText(pastedContent, `[Pasted ~${lineCount} lines]`) return } + input.insertText(normalizedText) + // Force layout update and render for the pasted content setTimeout(() => { // setTimeout is a workaround and needs to be addressed properly From 76275fc3ab8f39cd02ae7eed87c47679e1f4c28e Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 15:49:21 -0400 Subject: [PATCH 030/120] refactor: move Pty into pty/index.ts with self-reexport (#22881) --- .../specs/effect/namespace-treeshake.md | 54 ++- packages/opencode/src/pty/index.ts | 365 +++++++++++++++++- packages/opencode/src/pty/service.ts | 362 ----------------- 3 files changed, 413 insertions(+), 368 deletions(-) delete mode 100644 
packages/opencode/src/pty/service.ts diff --git a/packages/opencode/specs/effect/namespace-treeshake.md b/packages/opencode/specs/effect/namespace-treeshake.md index ac4d3987de..ef78c762bb 100644 --- a/packages/opencode/specs/effect/namespace-treeshake.md +++ b/packages/opencode/specs/effect/namespace-treeshake.md @@ -196,15 +196,59 @@ import regressions at runtime — the typechecker does not catch these. ## Rules for new code - No new `export namespace`. -- Every module file that wants a namespace gets a self-reexport at the +- Every module directory has a single canonical file — typically + `dir/index.ts` — with flat top-level exports and a self-reexport at the bottom: - `export * as Foo from "./foo"` -- Consumers import from the file itself: - `import { Foo } from "../path/to/foo"` -- No new barrel `index.ts` files for internal code. + `export * as Foo from "."` +- Consumers import from the directory: + `import { Foo } from "@/dir"` or `import { Foo } from "../dir"`. +- No sibling barrel files. If a directory has multiple independent + namespaces, they each get their own file (e.g. `config/config.ts`, + `config/plugin.ts`) and their own self-reexport; the `index.ts` in that + directory stays minimal or does not exist. - If a file needs a sibling, import the sibling file directly: `import * as Sibling from "./sibling"`, not `from "."`. +### Why `dir/index.ts` + `"."` is fine for us + +A single-file module (e.g. `pty/`) can live entirely in `dir/index.ts` +with `export * as Foo from "."` at the bottom. Consumers write the +short form: + +```ts +import { Pty } from "@/pty" +``` + +This works in Bun runtime, Bun build, esbuild, and Rollup. It does NOT +work under Node's `--experimental-strip-types` runner: + +``` +node --experimental-strip-types entry.ts + ERR_UNSUPPORTED_DIR_IMPORT: Directory import '/.../pty' is not supported +``` + +Node requires an explicit file or a `package.json#exports` map for ESM. 
+We don't care about that target right now because the opencode CLI is +built with Bun and the web apps are built with Vite/Rollup. If we ever +want to run raw `.ts` through Node, we'll need to either use explicit +`.ts` extensions everywhere or add per-directory `package.json` exports +maps. + +### When NOT to collapse to `index.ts` + +Some directories contain multiple independent namespaces where +`dir/index.ts` would be misleading. Examples: + +- `config/` has `Config`, `ConfigPaths`, `ConfigMarkdown`, `ConfigPlugin`, + `ConfigKeybinds`. Each lives in its own file with its own self-reexport + (`config/config.ts`, `config/plugin.ts`, etc.). Consumers import the + specific one: `import { ConfigPlugin } from "@/config/plugin"`. +- Same shape for `session/`, `server/`, etc. + +Collapsing one of those into `index.ts` would mean picking a single +"canonical" namespace for the directory, which breaks the symmetry and +hides the other files. + ## Scope There are still dozens of `export namespace` files left across the codebase. 
diff --git a/packages/opencode/src/pty/index.ts b/packages/opencode/src/pty/index.ts index 37cb4e49a8..3d00de596a 100644 --- a/packages/opencode/src/pty/index.ts +++ b/packages/opencode/src/pty/index.ts @@ -1 +1,364 @@ -export * as Pty from "./service" +import { BusEvent } from "@/bus/bus-event" +import { Bus } from "@/bus" +import { InstanceState } from "@/effect" +import { Instance } from "@/project/instance" +import type { Proc } from "#pty" +import z from "zod" +import { Log } from "../util" +import { lazy } from "@opencode-ai/shared/util/lazy" +import { Shell } from "@/shell/shell" +import { Plugin } from "@/plugin" +import { PtyID } from "./schema" +import { Effect, Layer, Context } from "effect" +import { EffectBridge } from "@/effect" + +const log = Log.create({ service: "pty" }) + +const BUFFER_LIMIT = 1024 * 1024 * 2 +const BUFFER_CHUNK = 64 * 1024 +const encoder = new TextEncoder() + +type Socket = { + readyState: number + data?: unknown + send: (data: string | Uint8Array | ArrayBuffer) => void + close: (code?: number, reason?: string) => void +} + +const sock = (ws: Socket) => (ws.data && typeof ws.data === "object" ? ws.data : ws) + +type Active = { + info: Info + process: Proc + buffer: string + bufferCursor: number + cursor: number + subscribers: Map +} + +type State = { + dir: string + sessions: Map +} + +// WebSocket control frame: 0x00 + UTF-8 JSON. 
+const meta = (cursor: number) => { + const json = JSON.stringify({ cursor }) + const bytes = encoder.encode(json) + const out = new Uint8Array(bytes.length + 1) + out[0] = 0 + out.set(bytes, 1) + return out +} + +const pty = lazy(() => import("#pty")) + +export const Info = z + .object({ + id: PtyID.zod, + title: z.string(), + command: z.string(), + args: z.array(z.string()), + cwd: z.string(), + status: z.enum(["running", "exited"]), + pid: z.number(), + }) + .meta({ ref: "Pty" }) + +export type Info = z.infer + +export const CreateInput = z.object({ + command: z.string().optional(), + args: z.array(z.string()).optional(), + cwd: z.string().optional(), + title: z.string().optional(), + env: z.record(z.string(), z.string()).optional(), +}) + +export type CreateInput = z.infer + +export const UpdateInput = z.object({ + title: z.string().optional(), + size: z + .object({ + rows: z.number(), + cols: z.number(), + }) + .optional(), +}) + +export type UpdateInput = z.infer + +export const Event = { + Created: BusEvent.define("pty.created", z.object({ info: Info })), + Updated: BusEvent.define("pty.updated", z.object({ info: Info })), + Exited: BusEvent.define("pty.exited", z.object({ id: PtyID.zod, exitCode: z.number() })), + Deleted: BusEvent.define("pty.deleted", z.object({ id: PtyID.zod })), +} + +export interface Interface { + readonly list: () => Effect.Effect + readonly get: (id: PtyID) => Effect.Effect + readonly create: (input: CreateInput) => Effect.Effect + readonly update: (id: PtyID, input: UpdateInput) => Effect.Effect + readonly remove: (id: PtyID) => Effect.Effect + readonly resize: (id: PtyID, cols: number, rows: number) => Effect.Effect + readonly write: (id: PtyID, data: string) => Effect.Effect + readonly connect: ( + id: PtyID, + ws: Socket, + cursor?: number, + ) => Effect.Effect<{ onMessage: (message: string | ArrayBuffer) => void; onClose: () => void } | undefined> +} + +export class Service extends Context.Service()("@opencode/Pty") {} + +export 
const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + const plugin = yield* Plugin.Service + function teardown(session: Active) { + try { + session.process.kill() + } catch {} + for (const [sub, ws] of session.subscribers.entries()) { + try { + if (sock(ws) === sub) ws.close() + } catch {} + } + session.subscribers.clear() + } + + const state = yield* InstanceState.make( + Effect.fn("Pty.state")(function* (ctx) { + const state = { + dir: ctx.directory, + sessions: new Map(), + } + + yield* Effect.addFinalizer(() => + Effect.sync(() => { + for (const session of state.sessions.values()) { + teardown(session) + } + state.sessions.clear() + }), + ) + + return state + }), + ) + + const remove = Effect.fn("Pty.remove")(function* (id: PtyID) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (!session) return + s.sessions.delete(id) + log.info("removing session", { id }) + teardown(session) + yield* bus.publish(Event.Deleted, { id: session.info.id }) + }) + + const list = Effect.fn("Pty.list")(function* () { + const s = yield* InstanceState.get(state) + return Array.from(s.sessions.values()).map((session) => session.info) + }) + + const get = Effect.fn("Pty.get")(function* (id: PtyID) { + const s = yield* InstanceState.get(state) + return s.sessions.get(id)?.info + }) + + const create = Effect.fn("Pty.create")(function* (input: CreateInput) { + const s = yield* InstanceState.get(state) + const bridge = yield* EffectBridge.make() + const id = PtyID.ascending() + const command = input.command || Shell.preferred() + const args = input.args || [] + if (Shell.login(command)) { + args.push("-l") + } + + const cwd = input.cwd || s.dir + const shell = yield* plugin.trigger("shell.env", { cwd }, { env: {} }) + const env = { + ...process.env, + ...input.env, + ...shell.env, + TERM: "xterm-256color", + OPENCODE_TERMINAL: "1", + } as Record + + if (process.platform === "win32") { + env.LC_ALL = 
"C.UTF-8" + env.LC_CTYPE = "C.UTF-8" + env.LANG = "C.UTF-8" + } + log.info("creating session", { id, cmd: command, args, cwd }) + + const { spawn } = yield* Effect.promise(() => pty()) + const proc = yield* Effect.sync(() => + spawn(command, args, { + name: "xterm-256color", + cwd, + env, + }), + ) + + const info = { + id, + title: input.title || `Terminal ${id.slice(-4)}`, + command, + args, + cwd, + status: "running", + pid: proc.pid, + } as const + const session: Active = { + info, + process: proc, + buffer: "", + bufferCursor: 0, + cursor: 0, + subscribers: new Map(), + } + s.sessions.set(id, session) + proc.onData( + Instance.bind((chunk) => { + session.cursor += chunk.length + + for (const [key, ws] of session.subscribers.entries()) { + if (ws.readyState !== 1) { + session.subscribers.delete(key) + continue + } + if (sock(ws) !== key) { + session.subscribers.delete(key) + continue + } + try { + ws.send(chunk) + } catch { + session.subscribers.delete(key) + } + } + + session.buffer += chunk + if (session.buffer.length <= BUFFER_LIMIT) return + const excess = session.buffer.length - BUFFER_LIMIT + session.buffer = session.buffer.slice(excess) + session.bufferCursor += excess + }), + ) + proc.onExit( + Instance.bind(({ exitCode }) => { + if (session.info.status === "exited") return + log.info("session exited", { id, exitCode }) + session.info.status = "exited" + bridge.fork(bus.publish(Event.Exited, { id, exitCode })) + bridge.fork(remove(id)) + }), + ) + yield* bus.publish(Event.Created, { info }) + return info + }) + + const update = Effect.fn("Pty.update")(function* (id: PtyID, input: UpdateInput) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (!session) return + if (input.title) { + session.info.title = input.title + } + if (input.size) { + session.process.resize(input.size.cols, input.size.rows) + } + yield* bus.publish(Event.Updated, { info: session.info }) + return session.info + }) + + const resize = 
Effect.fn("Pty.resize")(function* (id: PtyID, cols: number, rows: number) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (session && session.info.status === "running") { + session.process.resize(cols, rows) + } + }) + + const write = Effect.fn("Pty.write")(function* (id: PtyID, data: string) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (session && session.info.status === "running") { + session.process.write(data) + } + }) + + const connect = Effect.fn("Pty.connect")(function* (id: PtyID, ws: Socket, cursor?: number) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (!session) { + ws.close() + return + } + log.info("client connected to session", { id }) + + const sub = sock(ws) + session.subscribers.delete(sub) + session.subscribers.set(sub, ws) + + const cleanup = () => { + session.subscribers.delete(sub) + } + + const start = session.bufferCursor + const end = session.cursor + const from = + cursor === -1 ? end : typeof cursor === "number" && Number.isSafeInteger(cursor) ? Math.max(0, cursor) : 0 + + const data = (() => { + if (!session.buffer) return "" + if (from >= end) return "" + const offset = Math.max(0, from - start) + if (offset >= session.buffer.length) return "" + return session.buffer.slice(offset) + })() + + if (data) { + try { + for (let i = 0; i < data.length; i += BUFFER_CHUNK) { + ws.send(data.slice(i, i + BUFFER_CHUNK)) + } + } catch { + cleanup() + ws.close() + return + } + } + + try { + ws.send(meta(end)) + } catch { + cleanup() + ws.close() + return + } + + return { + onMessage: (message: string | ArrayBuffer) => { + session.process.write(typeof message === "string" ? 
message : new TextDecoder().decode(message)) + }, + onClose: () => { + log.info("client disconnected from session", { id }) + cleanup() + }, + } + }) + + return Service.of({ list, get, create, update, remove, resize, write, connect }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Plugin.defaultLayer)) + +export * as Pty from "." diff --git a/packages/opencode/src/pty/service.ts b/packages/opencode/src/pty/service.ts deleted file mode 100644 index 0c810be88f..0000000000 --- a/packages/opencode/src/pty/service.ts +++ /dev/null @@ -1,362 +0,0 @@ -import { BusEvent } from "@/bus/bus-event" -import { Bus } from "@/bus" -import { InstanceState } from "@/effect" -import { Instance } from "@/project/instance" -import type { Proc } from "#pty" -import z from "zod" -import { Log } from "../util" -import { lazy } from "@opencode-ai/shared/util/lazy" -import { Shell } from "@/shell/shell" -import { Plugin } from "@/plugin" -import { PtyID } from "./schema" -import { Effect, Layer, Context } from "effect" -import { EffectBridge } from "@/effect" - -const log = Log.create({ service: "pty" }) - -const BUFFER_LIMIT = 1024 * 1024 * 2 -const BUFFER_CHUNK = 64 * 1024 -const encoder = new TextEncoder() - -type Socket = { - readyState: number - data?: unknown - send: (data: string | Uint8Array | ArrayBuffer) => void - close: (code?: number, reason?: string) => void -} - -const sock = (ws: Socket) => (ws.data && typeof ws.data === "object" ? ws.data : ws) - -type Active = { - info: Info - process: Proc - buffer: string - bufferCursor: number - cursor: number - subscribers: Map -} - -type State = { - dir: string - sessions: Map -} - -// WebSocket control frame: 0x00 + UTF-8 JSON. 
-const meta = (cursor: number) => { - const json = JSON.stringify({ cursor }) - const bytes = encoder.encode(json) - const out = new Uint8Array(bytes.length + 1) - out[0] = 0 - out.set(bytes, 1) - return out -} - -const pty = lazy(() => import("#pty")) - -export const Info = z - .object({ - id: PtyID.zod, - title: z.string(), - command: z.string(), - args: z.array(z.string()), - cwd: z.string(), - status: z.enum(["running", "exited"]), - pid: z.number(), - }) - .meta({ ref: "Pty" }) - -export type Info = z.infer - -export const CreateInput = z.object({ - command: z.string().optional(), - args: z.array(z.string()).optional(), - cwd: z.string().optional(), - title: z.string().optional(), - env: z.record(z.string(), z.string()).optional(), -}) - -export type CreateInput = z.infer - -export const UpdateInput = z.object({ - title: z.string().optional(), - size: z - .object({ - rows: z.number(), - cols: z.number(), - }) - .optional(), -}) - -export type UpdateInput = z.infer - -export const Event = { - Created: BusEvent.define("pty.created", z.object({ info: Info })), - Updated: BusEvent.define("pty.updated", z.object({ info: Info })), - Exited: BusEvent.define("pty.exited", z.object({ id: PtyID.zod, exitCode: z.number() })), - Deleted: BusEvent.define("pty.deleted", z.object({ id: PtyID.zod })), -} - -export interface Interface { - readonly list: () => Effect.Effect - readonly get: (id: PtyID) => Effect.Effect - readonly create: (input: CreateInput) => Effect.Effect - readonly update: (id: PtyID, input: UpdateInput) => Effect.Effect - readonly remove: (id: PtyID) => Effect.Effect - readonly resize: (id: PtyID, cols: number, rows: number) => Effect.Effect - readonly write: (id: PtyID, data: string) => Effect.Effect - readonly connect: ( - id: PtyID, - ws: Socket, - cursor?: number, - ) => Effect.Effect<{ onMessage: (message: string | ArrayBuffer) => void; onClose: () => void } | undefined> -} - -export class Service extends Context.Service()("@opencode/Pty") {} - -export 
const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const plugin = yield* Plugin.Service - function teardown(session: Active) { - try { - session.process.kill() - } catch {} - for (const [sub, ws] of session.subscribers.entries()) { - try { - if (sock(ws) === sub) ws.close() - } catch {} - } - session.subscribers.clear() - } - - const state = yield* InstanceState.make( - Effect.fn("Pty.state")(function* (ctx) { - const state = { - dir: ctx.directory, - sessions: new Map(), - } - - yield* Effect.addFinalizer(() => - Effect.sync(() => { - for (const session of state.sessions.values()) { - teardown(session) - } - state.sessions.clear() - }), - ) - - return state - }), - ) - - const remove = Effect.fn("Pty.remove")(function* (id: PtyID) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (!session) return - s.sessions.delete(id) - log.info("removing session", { id }) - teardown(session) - yield* bus.publish(Event.Deleted, { id: session.info.id }) - }) - - const list = Effect.fn("Pty.list")(function* () { - const s = yield* InstanceState.get(state) - return Array.from(s.sessions.values()).map((session) => session.info) - }) - - const get = Effect.fn("Pty.get")(function* (id: PtyID) { - const s = yield* InstanceState.get(state) - return s.sessions.get(id)?.info - }) - - const create = Effect.fn("Pty.create")(function* (input: CreateInput) { - const s = yield* InstanceState.get(state) - const bridge = yield* EffectBridge.make() - const id = PtyID.ascending() - const command = input.command || Shell.preferred() - const args = input.args || [] - if (Shell.login(command)) { - args.push("-l") - } - - const cwd = input.cwd || s.dir - const shell = yield* plugin.trigger("shell.env", { cwd }, { env: {} }) - const env = { - ...process.env, - ...input.env, - ...shell.env, - TERM: "xterm-256color", - OPENCODE_TERMINAL: "1", - } as Record - - if (process.platform === "win32") { - env.LC_ALL = 
"C.UTF-8" - env.LC_CTYPE = "C.UTF-8" - env.LANG = "C.UTF-8" - } - log.info("creating session", { id, cmd: command, args, cwd }) - - const { spawn } = yield* Effect.promise(() => pty()) - const proc = yield* Effect.sync(() => - spawn(command, args, { - name: "xterm-256color", - cwd, - env, - }), - ) - - const info = { - id, - title: input.title || `Terminal ${id.slice(-4)}`, - command, - args, - cwd, - status: "running", - pid: proc.pid, - } as const - const session: Active = { - info, - process: proc, - buffer: "", - bufferCursor: 0, - cursor: 0, - subscribers: new Map(), - } - s.sessions.set(id, session) - proc.onData( - Instance.bind((chunk) => { - session.cursor += chunk.length - - for (const [key, ws] of session.subscribers.entries()) { - if (ws.readyState !== 1) { - session.subscribers.delete(key) - continue - } - if (sock(ws) !== key) { - session.subscribers.delete(key) - continue - } - try { - ws.send(chunk) - } catch { - session.subscribers.delete(key) - } - } - - session.buffer += chunk - if (session.buffer.length <= BUFFER_LIMIT) return - const excess = session.buffer.length - BUFFER_LIMIT - session.buffer = session.buffer.slice(excess) - session.bufferCursor += excess - }), - ) - proc.onExit( - Instance.bind(({ exitCode }) => { - if (session.info.status === "exited") return - log.info("session exited", { id, exitCode }) - session.info.status = "exited" - bridge.fork(bus.publish(Event.Exited, { id, exitCode })) - bridge.fork(remove(id)) - }), - ) - yield* bus.publish(Event.Created, { info }) - return info - }) - - const update = Effect.fn("Pty.update")(function* (id: PtyID, input: UpdateInput) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (!session) return - if (input.title) { - session.info.title = input.title - } - if (input.size) { - session.process.resize(input.size.cols, input.size.rows) - } - yield* bus.publish(Event.Updated, { info: session.info }) - return session.info - }) - - const resize = 
Effect.fn("Pty.resize")(function* (id: PtyID, cols: number, rows: number) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (session && session.info.status === "running") { - session.process.resize(cols, rows) - } - }) - - const write = Effect.fn("Pty.write")(function* (id: PtyID, data: string) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (session && session.info.status === "running") { - session.process.write(data) - } - }) - - const connect = Effect.fn("Pty.connect")(function* (id: PtyID, ws: Socket, cursor?: number) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (!session) { - ws.close() - return - } - log.info("client connected to session", { id }) - - const sub = sock(ws) - session.subscribers.delete(sub) - session.subscribers.set(sub, ws) - - const cleanup = () => { - session.subscribers.delete(sub) - } - - const start = session.bufferCursor - const end = session.cursor - const from = - cursor === -1 ? end : typeof cursor === "number" && Number.isSafeInteger(cursor) ? Math.max(0, cursor) : 0 - - const data = (() => { - if (!session.buffer) return "" - if (from >= end) return "" - const offset = Math.max(0, from - start) - if (offset >= session.buffer.length) return "" - return session.buffer.slice(offset) - })() - - if (data) { - try { - for (let i = 0; i < data.length; i += BUFFER_CHUNK) { - ws.send(data.slice(i, i + BUFFER_CHUNK)) - } - } catch { - cleanup() - ws.close() - return - } - } - - try { - ws.send(meta(end)) - } catch { - cleanup() - ws.close() - return - } - - return { - onMessage: (message: string | ArrayBuffer) => { - session.process.write(typeof message === "string" ? 
message : new TextDecoder().decode(message)) - }, - onClose: () => { - log.info("client disconnected from session", { id }) - cleanup() - }, - } - }) - - return Service.of({ list, get, create, update, remove, resize, write, connect }) - }), -) - -export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Plugin.defaultLayer)) From 5e650fd9e2c8ebaedb49af2ff771af9721782d98 Mon Sep 17 00:00:00 2001 From: Kobi Hudson Date: Thu, 16 Apr 2026 13:01:21 -0700 Subject: [PATCH 031/120] fix(opencode): drop max_tokens for OpenAI reasoning models on Cloudflare AI Gateway (#22864) Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> --- packages/opencode/src/plugin/cloudflare.ts | 11 +++ .../opencode/test/plugin/cloudflare.test.ts | 68 +++++++++++++++++++ 2 files changed, 79 insertions(+) create mode 100644 packages/opencode/test/plugin/cloudflare.test.ts diff --git a/packages/opencode/src/plugin/cloudflare.ts b/packages/opencode/src/plugin/cloudflare.ts index 2ccf5168d8..c4bf6bb8e7 100644 --- a/packages/opencode/src/plugin/cloudflare.ts +++ b/packages/opencode/src/plugin/cloudflare.ts @@ -61,5 +61,16 @@ export async function CloudflareAIGatewayAuthPlugin(_input: PluginInput): Promis }, ], }, + "chat.params": async (input, output) => { + if (input.model.providerID !== "cloudflare-ai-gateway") return + // The unified gateway routes through @ai-sdk/openai-compatible, which + // always emits max_tokens. OpenAI reasoning models (gpt-5.x, o-series) + // reject that field and require max_completion_tokens instead, and the + // compatible SDK has no way to rename it. Drop the cap so OpenAI falls + // back to the model's default output budget. 
+ if (!input.model.api.id.toLowerCase().startsWith("openai/")) return + if (!input.model.capabilities.reasoning) return + output.maxOutputTokens = undefined + }, } } diff --git a/packages/opencode/test/plugin/cloudflare.test.ts b/packages/opencode/test/plugin/cloudflare.test.ts new file mode 100644 index 0000000000..5fa4106835 --- /dev/null +++ b/packages/opencode/test/plugin/cloudflare.test.ts @@ -0,0 +1,68 @@ +import { expect, test } from "bun:test" +import { CloudflareAIGatewayAuthPlugin } from "@/plugin/cloudflare" + +const pluginInput = { + client: {} as never, + project: {} as never, + directory: "", + worktree: "", + experimental_workspace: { + register() {}, + }, + serverUrl: new URL("https://example.com"), + $: {} as never, +} + +function makeHookInput(overrides: { providerID?: string; apiId?: string; reasoning?: boolean }) { + return { + sessionID: "s", + agent: "a", + provider: {} as never, + message: {} as never, + model: { + providerID: overrides.providerID ?? "cloudflare-ai-gateway", + api: { id: overrides.apiId ?? "openai/gpt-5.2-codex", url: "", npm: "ai-gateway-provider" }, + capabilities: { + reasoning: overrides.reasoning ?? 
true, + temperature: false, + attachment: true, + toolcall: true, + input: { text: true, audio: false, image: false, video: false, pdf: false }, + output: { text: true, audio: false, image: false, video: false, pdf: false }, + interleaved: false, + }, + } as never, + } +} + +function makeHookOutput() { + return { temperature: 0, topP: 1, topK: 0, maxOutputTokens: 32_000 as number | undefined, options: {} } +} + +test("omits maxOutputTokens for openai reasoning models on cloudflare-ai-gateway", async () => { + const hooks = await CloudflareAIGatewayAuthPlugin(pluginInput) + const out = makeHookOutput() + await hooks["chat.params"]!(makeHookInput({ apiId: "openai/gpt-5.2-codex", reasoning: true }), out) + expect(out.maxOutputTokens).toBeUndefined() +}) + +test("keeps maxOutputTokens for openai non-reasoning models", async () => { + const hooks = await CloudflareAIGatewayAuthPlugin(pluginInput) + const out = makeHookOutput() + await hooks["chat.params"]!(makeHookInput({ apiId: "openai/gpt-4-turbo", reasoning: false }), out) + expect(out.maxOutputTokens).toBe(32_000) +}) + +test("keeps maxOutputTokens for non-openai reasoning models on cloudflare-ai-gateway", async () => { + const hooks = await CloudflareAIGatewayAuthPlugin(pluginInput) + const out = makeHookOutput() + await hooks["chat.params"]!(makeHookInput({ apiId: "anthropic/claude-sonnet-4-5", reasoning: true }), out) + expect(out.maxOutputTokens).toBe(32_000) +}) + +test("ignores non-cloudflare-ai-gateway providers", async () => { + const hooks = await CloudflareAIGatewayAuthPlugin(pluginInput) + const out = makeHookOutput() + await hooks["chat.params"]!(makeHookInput({ providerID: "openai", apiId: "gpt-5.2-codex", reasoning: true }), out) + expect(out.maxOutputTokens).toBe(32_000) +}) From 1c33b866ba962ed7a4c147c316ad807886a0045e Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:11:05 -0400 Subject: [PATCH 032/120] fix: remove 10 more unnecessary `as any` casts in opencode core (#22882) --- 
.../src/cli/cmd/tui/routes/session/index.tsx | 35 +++++++++++-------- packages/opencode/src/config/config.ts | 2 +- packages/opencode/src/lsp/server.ts | 8 +++-- .../src/server/instance/experimental.ts | 4 +-- packages/opencode/src/session/prompt.ts | 7 ++-- .../test/session/structured-output.test.ts | 10 ------ 6 files changed, 30 insertions(+), 36 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx index 5b4308d593..b0514bf1b1 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx @@ -44,6 +44,8 @@ import type { GrepTool } from "@/tool/grep" import type { EditTool } from "@/tool/edit" import type { ApplyPatchTool } from "@/tool/apply_patch" import type { WebFetchTool } from "@/tool/webfetch" +import type { CodeSearchTool } from "@/tool/codesearch" +import type { WebSearchTool } from "@/tool/websearch" import type { TaskTool } from "@/tool/task" import type { QuestionTool } from "@/tool/question" import type { SkillTool } from "@/tool/skill" @@ -1934,28 +1936,26 @@ function Grep(props: ToolProps) { function WebFetch(props: ToolProps) { return ( - - WebFetch {(props.input as any).url} + + WebFetch {props.input.url} ) } -function CodeSearch(props: ToolProps) { - const input = props.input as any - const metadata = props.metadata as any +function CodeSearch(props: ToolProps) { + const metadata = props.metadata as { results?: number } return ( - - Exa Code Search "{input.query}" ({metadata.results} results) + + Exa Code Search "{props.input.query}" ({metadata.results} results) ) } -function WebSearch(props: ToolProps) { - const input = props.input as any - const metadata = props.metadata as any +function WebSearch(props: ToolProps) { + const metadata = props.metadata as { numResults?: number } return ( - - Exa Web Search "{input.query}" ({metadata.numResults} results) + + Exa Web Search 
"{props.input.query}" ({metadata.numResults} results) ) } @@ -1979,7 +1979,9 @@ function Task(props: ToolProps) { ) }) - const current = createMemo(() => tools().findLast((x) => (x.state as any).title)) + const current = createMemo(() => + tools().findLast((x) => (x.state.status === "running" || x.state.status === "completed") && x.state.title), + ) const isRunning = createMemo(() => props.part.state.status === "running") @@ -1996,8 +1998,11 @@ function Task(props: ToolProps) { if (isRunning() && tools().length > 0) { // content[0] += ` · ${tools().length} toolcalls` - if (current()) content.push(`↳ ${Locale.titlecase(current()!.tool)} ${(current()!.state as any).title}`) - else content.push(`↳ ${tools().length} toolcalls`) + if (current()) { + const state = current()!.state + const title = state.status === "running" || state.status === "completed" ? state.title : undefined + content.push(`↳ ${Locale.titlecase(current()!.tool)} ${title}`) + } else content.push(`↳ ${tools().length} toolcalls`) } if (props.part.state.status === "completed") { diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index a738ebf130..3cbc539600 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -517,7 +517,7 @@ export const layer = Layer.effect( if (!response.ok) { throw new Error(`failed to fetch remote config from ${url}: ${response.status}`) } - const wellknown = (yield* Effect.promise(() => response.json())) as any + const wellknown = (yield* Effect.promise(() => response.json())) as { config?: Record } const remoteConfig = wellknown.config ?? 
{} if (!remoteConfig.$schema) remoteConfig.$schema = "https://opencode.ai/config.json" const source = `${url}/.well-known/opencode` diff --git a/packages/opencode/src/lsp/server.ts b/packages/opencode/src/lsp/server.ts index 390c5f2428..760e8eaba0 100644 --- a/packages/opencode/src/lsp/server.ts +++ b/packages/opencode/src/lsp/server.ts @@ -611,7 +611,9 @@ export const Zls: Info = { return } - const release = (await releaseResponse.json()) as any + const release = (await releaseResponse.json()) as { + assets?: { name?: string; browser_download_url?: string }[] + } const platform = process.platform const arch = process.arch @@ -646,8 +648,8 @@ export const Zls: Info = { return } - const asset = release.assets.find((a: any) => a.name === assetName) - if (!asset) { + const asset = release.assets?.find((a) => a.name === assetName) + if (!asset?.browser_download_url) { log.error(`Could not find asset ${assetName} in latest zls release`) return } diff --git a/packages/opencode/src/server/instance/experimental.ts b/packages/opencode/src/server/instance/experimental.ts index fe80173a8b..4f8887a43c 100644 --- a/packages/opencode/src/server/instance/experimental.ts +++ b/packages/opencode/src/server/instance/experimental.ts @@ -12,7 +12,6 @@ import { Config } from "../../config" import { ConsoleState } from "../../config/console-state" import { Account, AccountID, OrgID } from "../../account" import { AppRuntime } from "../../effect/app-runtime" -import { zodToJsonSchema } from "zod-to-json-schema" import { errors } from "../error" import { lazy } from "../../util/lazy" import { Effect, Option } from "effect" @@ -226,8 +225,7 @@ export const ExperimentalRoutes = lazy(() => tools.map((t) => ({ id: t.id, description: t.description, - // Handle both Zod schemas and plain JSON schemas - parameters: (t.parameters as any)?._def ? 
zodToJsonSchema(t.parameters as any) : t.parameters, + parameters: z.toJSONSchema(t.parameters), })), ) }, diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 4b8b95baa8..004ee19abe 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -10,6 +10,7 @@ import { Agent } from "../agent/agent" import { Provider } from "../provider" import { ModelID, ProviderID } from "../provider/schema" import { type Tool as AITool, tool, jsonSchema, type ToolExecutionOptions, asSchema } from "ai" +import type { JSONSchema7 } from "@ai-sdk/provider" import { SessionCompaction } from "./compaction" import { Bus } from "../bus" import { ProviderTransform } from "../provider" @@ -407,9 +408,8 @@ NOTE: At any point in time through this workflow you should feel free to ask the })) { const schema = ProviderTransform.schema(input.model, z.toJSONSchema(item.parameters)) tools[item.id] = tool({ - id: item.id as any, description: item.description, - inputSchema: jsonSchema(schema as any), + inputSchema: jsonSchema(schema), execute(args, options) { return run.promise( Effect.gen(function* () { @@ -1827,9 +1827,8 @@ NOTE: At any point in time through this workflow you should feel free to ask the const { $schema: _, ...toolSchema } = input.schema return tool({ - id: "StructuredOutput" as any, description: STRUCTURED_OUTPUT_DESCRIPTION, - inputSchema: jsonSchema(toolSchema as any), + inputSchema: jsonSchema(toolSchema as JSONSchema7), async execute(args) { // AI SDK validates args against inputSchema before calling execute() input.onSuccess(args) diff --git a/packages/opencode/test/session/structured-output.test.ts b/packages/opencode/test/session/structured-output.test.ts index db3f8cfded..2debfb76d5 100644 --- a/packages/opencode/test/session/structured-output.test.ts +++ b/packages/opencode/test/session/structured-output.test.ts @@ -157,16 +157,6 @@ describe("structured-output.AssistantMessage", () => 
{ }) describe("structured-output.createStructuredOutputTool", () => { - test("creates tool with correct id", () => { - const tool = SessionPrompt.createStructuredOutputTool({ - schema: { type: "object", properties: { name: { type: "string" } } }, - onSuccess: () => {}, - }) - - // AI SDK tool type doesn't expose id, but we set it internally - expect((tool as any).id).toBe("StructuredOutput") - }) - test("creates tool with description", () => { const tool = SessionPrompt.createStructuredOutputTool({ schema: { type: "object" }, From e0d71f124ef52f557387753ee19abe0f04f0faeb Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:12:46 -0400 Subject: [PATCH 033/120] tooling: add collapse-barrel.ts for single-namespace barrel migration (#22887) --- packages/opencode/script/collapse-barrel.ts | 161 ++++++++++++++++++++ 1 file changed, 161 insertions(+) create mode 100644 packages/opencode/script/collapse-barrel.ts diff --git a/packages/opencode/script/collapse-barrel.ts b/packages/opencode/script/collapse-barrel.ts new file mode 100644 index 0000000000..05bb11589c --- /dev/null +++ b/packages/opencode/script/collapse-barrel.ts @@ -0,0 +1,161 @@ +#!/usr/bin/env bun +/** + * Collapse a single-namespace barrel directory into a dir/index.ts module. + * + * Given a directory `src/foo/` that contains: + * + * - `index.ts` (exactly `export * as Foo from "./foo"`) + * - `foo.ts` (the real implementation) + * - zero or more sibling files + * + * this script: + * + * 1. Deletes the old `index.ts` barrel. + * 2. `git mv`s `foo.ts` → `index.ts` so the implementation IS the directory entry. + * 3. Appends `export * as Foo from "."` to the new `index.ts`. + * 4. Rewrites any same-directory sibling `*.ts` files that imported + * `./foo` (with or without the namespace name) to import `"."` instead. + * + * Consumer files outside the directory keep importing from the directory + * (`"@/foo"` / `"../foo"` / etc.) 
and continue to work, because + * `dir/index.ts` now provides the `Foo` named export directly. + * + * Usage: + * + * bun script/collapse-barrel.ts src/bus + * bun script/collapse-barrel.ts src/bus --dry-run + * + * Notes: + * + * - Only works on directories whose barrel is a single + * `export * as Name from "./file"` line. Refuses otherwise. + * - Refuses if the implementation file name already conflicts with + * `index.ts`. + * - Safe to run repeatedly: a second run on an already-collapsed dir + * will exit with a clear message. + */ + +import fs from "node:fs" +import path from "node:path" +import { spawnSync } from "node:child_process" + +const args = process.argv.slice(2) +const dryRun = args.includes("--dry-run") +const targetArg = args.find((a) => !a.startsWith("--")) + +if (!targetArg) { + console.error("Usage: bun script/collapse-barrel.ts [--dry-run]") + process.exit(1) +} + +const dir = path.resolve(targetArg) +const indexPath = path.join(dir, "index.ts") + +if (!fs.existsSync(dir) || !fs.statSync(dir).isDirectory()) { + console.error(`Not a directory: ${dir}`) + process.exit(1) +} +if (!fs.existsSync(indexPath)) { + console.error(`No index.ts in ${dir}`) + process.exit(1) +} + +// Validate barrel shape. 
+const indexContent = fs.readFileSync(indexPath, "utf-8").trim() +const match = indexContent.match(/^export\s+\*\s+as\s+(\w+)\s+from\s+["']\.\/([^"']+)["']\s*;?\s*$/) +if (!match) { + console.error(`Not a simple single-namespace barrel:\n${indexContent}`) + process.exit(1) +} +const namespaceName = match[1] +const implRel = match[2].replace(/\.ts$/, "") +const implPath = path.join(dir, `${implRel}.ts`) + +if (!fs.existsSync(implPath)) { + console.error(`Implementation file not found: ${implPath}`) + process.exit(1) +} + +if (implRel === "index") { + console.error(`Nothing to do — impl file is already index.ts`) + process.exit(0) +} + +console.log(`Collapsing ${path.relative(process.cwd(), dir)}`) +console.log(` namespace: ${namespaceName}`) +console.log(` impl file: ${implRel}.ts → index.ts`) + +// Figure out which sibling files need rewriting. +const siblings = fs + .readdirSync(dir) + .filter((f) => f.endsWith(".ts") || f.endsWith(".tsx")) + .filter((f) => f !== "index.ts" && f !== `${implRel}.ts`) + .map((f) => path.join(dir, f)) + +type SiblingEdit = { file: string; content: string } +const siblingEdits: SiblingEdit[] = [] + +for (const sibling of siblings) { + const content = fs.readFileSync(sibling, "utf-8") + // Match any import or re-export referring to "./" inside this directory. 
+ const siblingRegex = new RegExp(`(from\\s*["'])\\.\\/${implRel.replace(/[-\\^$*+?.()|[\]{}]/g, "\\$&")}(["'])`, "g") + if (!siblingRegex.test(content)) continue + const updated = content.replace(siblingRegex, `$1.$2`) + siblingEdits.push({ file: sibling, content: updated }) +} + +if (siblingEdits.length > 0) { + console.log(` sibling rewrites: ${siblingEdits.length}`) + for (const edit of siblingEdits) { + console.log(` ${path.relative(process.cwd(), edit.file)}`) + } +} else { + console.log(` sibling rewrites: none`) +} + +if (dryRun) { + console.log(`\n(dry run) would:`) + console.log(` - delete ${path.relative(process.cwd(), indexPath)}`) + console.log(` - git mv ${path.relative(process.cwd(), implPath)} ${path.relative(process.cwd(), indexPath)}`) + console.log(` - append \`export * as ${namespaceName} from "."\` to the new index.ts`) + for (const edit of siblingEdits) { + console.log(` - rewrite sibling: ${path.relative(process.cwd(), edit.file)}`) + } + process.exit(0) +} + +// Apply: remove the old barrel, git-mv the impl onto it, then rewrite content. +// We can't git-mv on top of an existing tracked file, so we remove the barrel first. +function runGit(...cmd: string[]) { + const res = spawnSync("git", cmd, { stdio: "inherit" }) + if (res.status !== 0) { + console.error(`git ${cmd.join(" ")} failed`) + process.exit(res.status ?? 1) + } +} + +// Step 1: remove the barrel +runGit("rm", "-f", indexPath) + +// Step 2: rename the impl file into index.ts +runGit("mv", implPath, indexPath) + +// Step 3: append the self-reexport to the new index.ts +const newContent = fs.readFileSync(indexPath, "utf-8") +const trimmed = newContent.endsWith("\n") ? 
newContent : newContent + "\n" +fs.writeFileSync(indexPath, `${trimmed}\nexport * as ${namespaceName} from "."\n`) +console.log(` appended: export * as ${namespaceName} from "."`) + +// Step 4: rewrite siblings +for (const edit of siblingEdits) { + fs.writeFileSync(edit.file, edit.content) +} +if (siblingEdits.length > 0) { + console.log(` rewrote ${siblingEdits.length} sibling file(s)`) +} + +console.log(`\nDone. Verify with:`) +console.log(` cd packages/opencode`) +console.log(` bunx --bun tsgo --noEmit`) +console.log(` bun run --conditions=browser ./src/index.ts generate`) +console.log(` bun run test`) From 25a9de301ad83ac7f6c8ec5ed67d81ee4d2a0221 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 16:21:47 -0400 Subject: [PATCH 034/120] core: eager load config on startup for better traces and refactor npm install for improved error reporting Config is now loaded eagerly during project bootstrap so users can see config loading in traces during startup. This helps diagnose configuration issues earlier in the initialization flow. NPM installation logic has been refactored with a unified reify function and improved InstallFailedError that includes both the packages being installed and the target directory. This provides users with complete context when package installations fail, making it easier to identify which dependency or project directory caused the issue. 
--- .../opencode/src/effect/bootstrap-runtime.ts | 2 + packages/opencode/src/project/bootstrap.ts | 4 + packages/shared/src/npm.ts | 126 ++++++++---------- 3 files changed, 60 insertions(+), 72 deletions(-) diff --git a/packages/opencode/src/effect/bootstrap-runtime.ts b/packages/opencode/src/effect/bootstrap-runtime.ts index 89cc071561..62b71e58b1 100644 --- a/packages/opencode/src/effect/bootstrap-runtime.ts +++ b/packages/opencode/src/effect/bootstrap-runtime.ts @@ -10,9 +10,11 @@ import { File } from "@/file" import { Vcs } from "@/project" import { Snapshot } from "@/snapshot" import { Bus } from "@/bus" +import { Config } from "@/config" import * as Observability from "./observability" export const BootstrapLayer = Layer.mergeAll( + Config.defaultLayer, Plugin.defaultLayer, ShareNext.defaultLayer, Format.defaultLayer, diff --git a/packages/opencode/src/project/bootstrap.ts b/packages/opencode/src/project/bootstrap.ts index e506d2feda..a7c071a9f8 100644 --- a/packages/opencode/src/project/bootstrap.ts +++ b/packages/opencode/src/project/bootstrap.ts @@ -12,9 +12,13 @@ import { Log } from "@/util" import { FileWatcher } from "@/file/watcher" import { ShareNext } from "@/share" import * as Effect from "effect/Effect" +import { Config } from "@/config" export const InstanceBootstrap = Effect.gen(function* () { Log.Default.info("bootstrapping", { directory: Instance.directory }) + // everything depends on config so eager load it for nice traces + yield* Config.Service.use((svc) => svc.get()) + // Plugin can mutate config so it has to be initialized before anything else. 
yield* Plugin.Service.use((svc) => svc.init()) yield* Effect.all( [ diff --git a/packages/shared/src/npm.ts b/packages/shared/src/npm.ts index e4f42227de..865e827b31 100644 --- a/packages/shared/src/npm.ts +++ b/packages/shared/src/npm.ts @@ -8,7 +8,8 @@ import { EffectFlock } from "./util/effect-flock" export namespace Npm { export class InstallFailedError extends Schema.TaggedErrorClass()("NpmInstallFailedError", { - pkg: Schema.String, + add: Schema.Array(Schema.String).pipe(Schema.optional), + dir: Schema.String, cause: Schema.optional(Schema.Defect), }) {} @@ -19,7 +20,10 @@ export namespace Npm { export interface Interface { readonly add: (pkg: string) => Effect.Effect - readonly install: (dir: string, input?: { add: string[] }) => Effect.Effect + readonly install: ( + dir: string, + input?: { add: string[] }, + ) => Effect.Effect readonly outdated: (pkg: string, cachedVersion: string) => Effect.Effect readonly which: (pkg: string) => Effect.Effect> } @@ -55,6 +59,37 @@ export namespace Npm { interface ArboristTree { edgesOut: Map } + + const reify = (input: { dir: string; add?: string[] }) => + Effect.gen(function* () { + const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) + const arborist = new Arborist({ + path: input.dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + return yield* Effect.tryPromise({ + try: () => + arborist.reify({ + add: input?.add || [], + save: true, + saveType: "prod", + }), + catch: (cause) => + new InstallFailedError({ + cause, + add: input?.add, + dir: input.dir, + }), + }) as Effect.Effect + }).pipe( + Effect.withSpan("Npm.reify", { + attributes: input, + }), + ) + export const layer = Layer.effect( Service, Effect.gen(function* () { @@ -91,45 +126,12 @@ export namespace Npm { }) const add = Effect.fn("Npm.add")(function* (pkg: string) { - const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) const dir = directory(pkg) yield* 
flock.acquire(`npm-install:${dir}`) - const arborist = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - - const tree = yield* Effect.tryPromise({ - try: () => arborist.loadVirtual().catch(() => undefined), - catch: () => undefined, - }).pipe(Effect.orElseSucceed(() => undefined)) as Effect.Effect - - if (tree) { - const first = tree.edgesOut.values().next().value?.to - if (first) { - return resolveEntryPoint(first.name, first.path) - } - } - - const result = yield* Effect.tryPromise({ - try: () => - arborist.reify({ - add: [pkg], - save: true, - saveType: "prod", - }), - catch: (cause) => new InstallFailedError({ pkg, cause }), - }) as Effect.Effect - - const first = result.edgesOut.values().next().value?.to - if (!first) { - return yield* new InstallFailedError({ pkg }) - } - + const tree = yield* reify({ dir, add: [pkg] }) + const first = tree.edgesOut.values().next().value?.to + if (!first) return yield* new InstallFailedError({ add: [pkg], dir }) return resolveEntryPoint(first.name, first.path) }, Effect.scoped) @@ -142,41 +144,20 @@ export namespace Npm { yield* flock.acquire(`npm-install:${dir}`) - const reify = Effect.fn("Npm.reify")(function* () { - const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) - const arb = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - yield* Effect.tryPromise({ - try: () => - arb - .reify({ - add: input?.add || [], - save: true, - saveType: "prod", - }) - .catch(() => {}), - catch: () => {}, - }).pipe(Effect.orElseSucceed(() => {})) - }) - - const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) - if (!nodeModulesExists) { - yield* reify() - return - } - - const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) - const lock = yield* afs.readJson(path.join(dir, 
"package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) - - const pkgAny = pkg as any - const lockAny = lock as any + yield* Effect.gen(function* () { + const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) + if (!nodeModulesExists) { + yield* reify({ add: input?.add, dir }) + return + } + }).pipe(Effect.withSpan("Npm.checkNodeModules")) yield* Effect.gen(function* () { + const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) + const lock = yield* afs.readJson(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) + + const pkgAny = pkg as any + const lockAny = lock as any const declared = new Set([ ...Object.keys(pkgAny?.dependencies || {}), ...Object.keys(pkgAny?.devDependencies || {}), @@ -195,11 +176,12 @@ export namespace Npm { for (const name of declared) { if (!locked.has(name)) { - yield* reify() + yield* reify({ dir, add: input?.add }) return } } }).pipe(Effect.withSpan("Npm.checkDirty")) + return }, Effect.scoped) From 26af77cd1e0b34de2bc171a665c2cc7819c15110 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 16:26:33 -0400 Subject: [PATCH 035/120] fix(core): fix detection of local installation channel (#22899) --- packages/opencode/src/installation/version.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/installation/version.ts b/packages/opencode/src/installation/version.ts index f1668d2646..25d9cd99aa 100644 --- a/packages/opencode/src/installation/version.ts +++ b/packages/opencode/src/installation/version.ts @@ -5,4 +5,4 @@ declare global { export const InstallationVersion = typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "local" export const InstallationChannel = typeof OPENCODE_CHANNEL === "string" ? 
OPENCODE_CHANNEL : "local" -export const InstallationLocal = InstallationVersion === "local" +export const InstallationLocal = InstallationChannel === "local" From 1045a43603436d2328dcdaccc9160bd945e8b765 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:29:51 -0400 Subject: [PATCH 036/120] refactor: collapse format barrel into format/index.ts (#22898) --- packages/opencode/src/format/format.ts | 192 ------------------------ packages/opencode/src/format/index.ts | 195 ++++++++++++++++++++++++- 2 files changed, 194 insertions(+), 193 deletions(-) delete mode 100644 packages/opencode/src/format/format.ts diff --git a/packages/opencode/src/format/format.ts b/packages/opencode/src/format/format.ts deleted file mode 100644 index 40855636f9..0000000000 --- a/packages/opencode/src/format/format.ts +++ /dev/null @@ -1,192 +0,0 @@ -import { Effect, Layer, Context } from "effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { InstanceState } from "@/effect" -import path from "path" -import { mergeDeep } from "remeda" -import z from "zod" -import { Config } from "../config" -import { Log } from "../util" -import * as Formatter from "./formatter" - -const log = Log.create({ service: "format" }) - -export const Status = z - .object({ - name: z.string(), - extensions: z.string().array(), - enabled: z.boolean(), - }) - .meta({ - ref: "FormatterStatus", - }) -export type Status = z.infer - -export interface Interface { - readonly init: () => Effect.Effect - readonly status: () => Effect.Effect - readonly file: (filepath: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Format") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const config = yield* Config.Service - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - - const state = yield* InstanceState.make( - 
Effect.fn("Format.state")(function* (_ctx) { - const commands: Record = {} - const formatters: Record = {} - - const cfg = yield* config.get() - - if (cfg.formatter !== false) { - for (const item of Object.values(Formatter)) { - formatters[item.name] = item - } - for (const [name, item] of Object.entries(cfg.formatter ?? {})) { - // Ruff and uv are both the same formatter, so disabling either should disable both. - if (["ruff", "uv"].includes(name) && (cfg.formatter?.ruff?.disabled || cfg.formatter?.uv?.disabled)) { - // TODO combine formatters so shared backends like Ruff/uv don't need linked disable handling here. - delete formatters.ruff - delete formatters.uv - continue - } - if (item.disabled) { - delete formatters[name] - continue - } - const info = mergeDeep(formatters[name] ?? {}, { - extensions: [], - ...item, - }) - - formatters[name] = { - ...info, - name, - enabled: async () => info.command ?? false, - } - } - } else { - log.info("all formatters are disabled") - } - - async function getCommand(item: Formatter.Info) { - let cmd = commands[item.name] - if (cmd === false || cmd === undefined) { - cmd = await item.enabled() - commands[item.name] = cmd - } - return cmd - } - - async function isEnabled(item: Formatter.Info) { - const cmd = await getCommand(item) - return cmd !== false - } - - async function getFormatter(ext: string) { - const matching = Object.values(formatters).filter((item) => item.extensions.includes(ext)) - const checks = await Promise.all( - matching.map(async (item) => { - log.info("checking", { name: item.name, ext }) - const cmd = await getCommand(item) - if (cmd) { - log.info("enabled", { name: item.name, ext }) - } - return { - item, - cmd, - } - }), - ) - return checks.filter((x) => x.cmd).map((x) => ({ item: x.item, cmd: x.cmd! 
})) - } - - function formatFile(filepath: string) { - return Effect.gen(function* () { - log.info("formatting", { file: filepath }) - const ext = path.extname(filepath) - - for (const { item, cmd } of yield* Effect.promise(() => getFormatter(ext))) { - if (cmd === false) continue - log.info("running", { command: cmd }) - const replaced = cmd.map((x) => x.replace("$FILE", filepath)) - const dir = yield* InstanceState.directory - const code = yield* spawner - .spawn( - ChildProcess.make(replaced[0]!, replaced.slice(1), { - cwd: dir, - env: item.environment, - extendEnv: true, - }), - ) - .pipe( - Effect.flatMap((handle) => handle.exitCode), - Effect.scoped, - Effect.catch(() => - Effect.sync(() => { - log.error("failed to format file", { - error: "spawn failed", - command: cmd, - ...item.environment, - file: filepath, - }) - return ChildProcessSpawner.ExitCode(1) - }), - ), - ) - if (code !== 0) { - log.error("failed", { - command: cmd, - ...item.environment, - }) - } - } - }) - } - - log.info("init") - - return { - formatters, - isEnabled, - formatFile, - } - }), - ) - - const init = Effect.fn("Format.init")(function* () { - yield* InstanceState.get(state) - }) - - const status = Effect.fn("Format.status")(function* () { - const { formatters, isEnabled } = yield* InstanceState.get(state) - const result: Status[] = [] - for (const formatter of Object.values(formatters)) { - const isOn = yield* Effect.promise(() => isEnabled(formatter)) - result.push({ - name: formatter.name, - extensions: formatter.extensions, - enabled: isOn, - }) - } - return result - }) - - const file = Effect.fn("Format.file")(function* (filepath: string) { - const { formatFile } = yield* InstanceState.get(state) - yield* formatFile(filepath) - }) - - return Service.of({ init, status, file }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Config.defaultLayer), - Layer.provide(CrossSpawnSpawner.defaultLayer), -) diff --git a/packages/opencode/src/format/index.ts 
b/packages/opencode/src/format/index.ts index 435c517ac7..d0ae59d05e 100644 --- a/packages/opencode/src/format/index.ts +++ b/packages/opencode/src/format/index.ts @@ -1 +1,194 @@ -export * as Format from "./format" +import { Effect, Layer, Context } from "effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { InstanceState } from "@/effect" +import path from "path" +import { mergeDeep } from "remeda" +import z from "zod" +import { Config } from "../config" +import { Log } from "../util" +import * as Formatter from "./formatter" + +const log = Log.create({ service: "format" }) + +export const Status = z + .object({ + name: z.string(), + extensions: z.string().array(), + enabled: z.boolean(), + }) + .meta({ + ref: "FormatterStatus", + }) +export type Status = z.infer + +export interface Interface { + readonly init: () => Effect.Effect + readonly status: () => Effect.Effect + readonly file: (filepath: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Format") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const config = yield* Config.Service + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + + const state = yield* InstanceState.make( + Effect.fn("Format.state")(function* (_ctx) { + const commands: Record = {} + const formatters: Record = {} + + const cfg = yield* config.get() + + if (cfg.formatter !== false) { + for (const item of Object.values(Formatter)) { + formatters[item.name] = item + } + for (const [name, item] of Object.entries(cfg.formatter ?? {})) { + // Ruff and uv are both the same formatter, so disabling either should disable both. + if (["ruff", "uv"].includes(name) && (cfg.formatter?.ruff?.disabled || cfg.formatter?.uv?.disabled)) { + // TODO combine formatters so shared backends like Ruff/uv don't need linked disable handling here. 
+ delete formatters.ruff + delete formatters.uv + continue + } + if (item.disabled) { + delete formatters[name] + continue + } + const info = mergeDeep(formatters[name] ?? {}, { + extensions: [], + ...item, + }) + + formatters[name] = { + ...info, + name, + enabled: async () => info.command ?? false, + } + } + } else { + log.info("all formatters are disabled") + } + + async function getCommand(item: Formatter.Info) { + let cmd = commands[item.name] + if (cmd === false || cmd === undefined) { + cmd = await item.enabled() + commands[item.name] = cmd + } + return cmd + } + + async function isEnabled(item: Formatter.Info) { + const cmd = await getCommand(item) + return cmd !== false + } + + async function getFormatter(ext: string) { + const matching = Object.values(formatters).filter((item) => item.extensions.includes(ext)) + const checks = await Promise.all( + matching.map(async (item) => { + log.info("checking", { name: item.name, ext }) + const cmd = await getCommand(item) + if (cmd) { + log.info("enabled", { name: item.name, ext }) + } + return { + item, + cmd, + } + }), + ) + return checks.filter((x) => x.cmd).map((x) => ({ item: x.item, cmd: x.cmd! 
})) + } + + function formatFile(filepath: string) { + return Effect.gen(function* () { + log.info("formatting", { file: filepath }) + const ext = path.extname(filepath) + + for (const { item, cmd } of yield* Effect.promise(() => getFormatter(ext))) { + if (cmd === false) continue + log.info("running", { command: cmd }) + const replaced = cmd.map((x) => x.replace("$FILE", filepath)) + const dir = yield* InstanceState.directory + const code = yield* spawner + .spawn( + ChildProcess.make(replaced[0]!, replaced.slice(1), { + cwd: dir, + env: item.environment, + extendEnv: true, + }), + ) + .pipe( + Effect.flatMap((handle) => handle.exitCode), + Effect.scoped, + Effect.catch(() => + Effect.sync(() => { + log.error("failed to format file", { + error: "spawn failed", + command: cmd, + ...item.environment, + file: filepath, + }) + return ChildProcessSpawner.ExitCode(1) + }), + ), + ) + if (code !== 0) { + log.error("failed", { + command: cmd, + ...item.environment, + }) + } + } + }) + } + + log.info("init") + + return { + formatters, + isEnabled, + formatFile, + } + }), + ) + + const init = Effect.fn("Format.init")(function* () { + yield* InstanceState.get(state) + }) + + const status = Effect.fn("Format.status")(function* () { + const { formatters, isEnabled } = yield* InstanceState.get(state) + const result: Status[] = [] + for (const formatter of Object.values(formatters)) { + const isOn = yield* Effect.promise(() => isEnabled(formatter)) + result.push({ + name: formatter.name, + extensions: formatter.extensions, + enabled: isOn, + }) + } + return result + }) + + const file = Effect.fn("Format.file")(function* (filepath: string) { + const { formatFile } = yield* InstanceState.get(state) + yield* formatFile(filepath) + }) + + return Service.of({ init, status, file }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Config.defaultLayer), + Layer.provide(CrossSpawnSpawner.defaultLayer), +) + +export * as Format from "." 
From 9b77430d0d3bf84fa7784a093b783fad3d7ca824 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:29:54 -0400 Subject: [PATCH 037/120] refactor: collapse env barrel into env/index.ts (#22900) --- packages/opencode/src/env/env.ts | 35 --------------------------- packages/opencode/src/env/index.ts | 38 +++++++++++++++++++++++++++++- 2 files changed, 37 insertions(+), 36 deletions(-) delete mode 100644 packages/opencode/src/env/env.ts diff --git a/packages/opencode/src/env/env.ts b/packages/opencode/src/env/env.ts deleted file mode 100644 index 618ae32684..0000000000 --- a/packages/opencode/src/env/env.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Context, Effect, Layer } from "effect" -import { InstanceState } from "@/effect" - -type State = Record - -export interface Interface { - readonly get: (key: string) => Effect.Effect - readonly all: () => Effect.Effect - readonly set: (key: string, value: string) => Effect.Effect - readonly remove: (key: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Env") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const state = yield* InstanceState.make(Effect.fn("Env.state")(() => Effect.succeed({ ...process.env }))) - - const get = Effect.fn("Env.get")((key: string) => InstanceState.use(state, (env) => env[key])) - const all = Effect.fn("Env.all")(() => InstanceState.get(state)) - const set = Effect.fn("Env.set")(function* (key: string, value: string) { - const env = yield* InstanceState.get(state) - env[key] = value - }) - const remove = Effect.fn("Env.remove")(function* (key: string) { - const env = yield* InstanceState.get(state) - delete env[key] - }) - - return Service.of({ get, all, set, remove }) - }), -) - -export const defaultLayer = layer diff --git a/packages/opencode/src/env/index.ts b/packages/opencode/src/env/index.ts index c589edbfdd..a53d96def2 100644 --- a/packages/opencode/src/env/index.ts +++ 
b/packages/opencode/src/env/index.ts @@ -1 +1,37 @@ -export * as Env from "./env" +import { Context, Effect, Layer } from "effect" +import { InstanceState } from "@/effect" + +type State = Record + +export interface Interface { + readonly get: (key: string) => Effect.Effect + readonly all: () => Effect.Effect + readonly set: (key: string, value: string) => Effect.Effect + readonly remove: (key: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Env") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const state = yield* InstanceState.make(Effect.fn("Env.state")(() => Effect.succeed({ ...process.env }))) + + const get = Effect.fn("Env.get")((key: string) => InstanceState.use(state, (env) => env[key])) + const all = Effect.fn("Env.all")(() => InstanceState.get(state)) + const set = Effect.fn("Env.set")(function* (key: string, value: string) { + const env = yield* InstanceState.get(state) + env[key] = value + }) + const remove = Effect.fn("Env.remove")(function* (key: string) { + const env = yield* InstanceState.get(state) + delete env[key] + }) + + return Service.of({ get, all, set, remove }) + }), +) + +export const defaultLayer = layer + +export * as Env from "." 
From a8d8a35cd3033602befc6648d00ed6be37aed826 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 16:30:11 -0400 Subject: [PATCH 038/120] feat(core): pass auth data to workspace (#22897) --- packages/opencode/src/auth/auth.ts | 6 ++++++ packages/opencode/src/control-plane/types.ts | 2 +- packages/opencode/src/control-plane/workspace.ts | 8 +++++++- packages/opencode/src/server/proxy.ts | 6 ------ 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/packages/opencode/src/auth/auth.ts b/packages/opencode/src/auth/auth.ts index fb9d2b1495..598178fad1 100644 --- a/packages/opencode/src/auth/auth.ts +++ b/packages/opencode/src/auth/auth.ts @@ -56,6 +56,12 @@ export const layer = Layer.effect( const decode = Schema.decodeUnknownOption(Info) const all = Effect.fn("Auth.all")(function* () { + if (process.env.OPENCODE_AUTH_CONTENT) { + try { + return JSON.parse(process.env.OPENCODE_AUTH_CONTENT) + } catch (err) {} + } + const data = (yield* fsys.readJson(file).pipe(Effect.orElseSucceed(() => ({})))) as Record return Record.filterMap(data, (value) => Result.fromOption(decode(value), () => undefined)) }) diff --git a/packages/opencode/src/control-plane/types.ts b/packages/opencode/src/control-plane/types.ts index 4e499e45ea..3961cd0e2a 100644 --- a/packages/opencode/src/control-plane/types.ts +++ b/packages/opencode/src/control-plane/types.ts @@ -28,7 +28,7 @@ export type WorkspaceAdaptor = { name: string description: string configure(info: WorkspaceInfo): WorkspaceInfo | Promise - create(info: WorkspaceInfo, from?: WorkspaceInfo): Promise + create(info: WorkspaceInfo, env: Record, from?: WorkspaceInfo): Promise remove(info: WorkspaceInfo): Promise target(info: WorkspaceInfo): Target | Promise } diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index d870eb6360..08d675b253 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ 
-5,6 +5,7 @@ import { Database, asc, eq, inArray } from "@/storage" import { Project } from "@/project" import { BusEvent } from "@/bus/bus-event" import { GlobalBus } from "@/bus/global" +import { Auth } from "@/auth" import { SyncEvent } from "@/sync" import { EventTable } from "@/sync/event.sql" import { Flag } from "@/flag/flag" @@ -112,7 +113,12 @@ export namespace Workspace { .run() }) - await adaptor.create(config) + const env = { + OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), + OPENCODE_WORKSPACE_ID: config.id, + OPENCODE_EXPERIMENTAL_WORKSPACES: "true" + } + await adaptor.create(config, env) startSync(info) diff --git a/packages/opencode/src/server/proxy.ts b/packages/opencode/src/server/proxy.ts index 5e36f2cff9..07703fdc80 100644 --- a/packages/opencode/src/server/proxy.ts +++ b/packages/opencode/src/server/proxy.ts @@ -110,12 +110,6 @@ export namespace ServerProxy { req: Request, workspaceID: WorkspaceID, ) { - console.log("proxy http request", { - method: req.method, - request: req.url, - url: String(url), - }) - if (!Workspace.isSyncing(workspaceID)) { return new Response(`broken sync connection for workspace: ${workspaceID}`, { status: 503, From 3fe906f517eb60aa20fd47c95ec3c131452e8d91 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:30:52 -0400 Subject: [PATCH 039/120] refactor: collapse command barrel into command/index.ts (#22903) --- packages/opencode/src/command/command.ts | 186 ---------------------- packages/opencode/src/command/index.ts | 189 ++++++++++++++++++++++- 2 files changed, 188 insertions(+), 187 deletions(-) delete mode 100644 packages/opencode/src/command/command.ts diff --git a/packages/opencode/src/command/command.ts b/packages/opencode/src/command/command.ts deleted file mode 100644 index 4ea1325240..0000000000 --- a/packages/opencode/src/command/command.ts +++ /dev/null @@ -1,186 +0,0 @@ -import { BusEvent } from "@/bus/bus-event" -import { 
InstanceState } from "@/effect" -import { EffectBridge } from "@/effect" -import type { InstanceContext } from "@/project/instance" -import { SessionID, MessageID } from "@/session/schema" -import { Effect, Layer, Context } from "effect" -import z from "zod" -import { Config } from "../config" -import { MCP } from "../mcp" -import { Skill } from "../skill" -import PROMPT_INITIALIZE from "./template/initialize.txt" -import PROMPT_REVIEW from "./template/review.txt" - -type State = { - commands: Record -} - -export const Event = { - Executed: BusEvent.define( - "command.executed", - z.object({ - name: z.string(), - sessionID: SessionID.zod, - arguments: z.string(), - messageID: MessageID.zod, - }), - ), -} - -export const Info = z - .object({ - name: z.string(), - description: z.string().optional(), - agent: z.string().optional(), - model: z.string().optional(), - source: z.enum(["command", "mcp", "skill"]).optional(), - // workaround for zod not supporting async functions natively so we use getters - // https://zod.dev/v4/changelog?id=zfunction - template: z.promise(z.string()).or(z.string()), - subtask: z.boolean().optional(), - hints: z.array(z.string()), - }) - .meta({ - ref: "Command", - }) - -// for some reason zod is inferring `string` for z.promise(z.string()).or(z.string()) so we have to manually override it -export type Info = Omit, "template"> & { template: Promise | string } - -export function hints(template: string) { - const result: string[] = [] - const numbered = template.match(/\$\d+/g) - if (numbered) { - for (const match of [...new Set(numbered)].sort()) result.push(match) - } - if (template.includes("$ARGUMENTS")) result.push("$ARGUMENTS") - return result -} - -export const Default = { - INIT: "init", - REVIEW: "review", -} as const - -export interface Interface { - readonly get: (name: string) => Effect.Effect - readonly list: () => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Command") {} - -export const layer = 
Layer.effect( - Service, - Effect.gen(function* () { - const config = yield* Config.Service - const mcp = yield* MCP.Service - const skill = yield* Skill.Service - - const init = Effect.fn("Command.state")(function* (ctx: InstanceContext) { - const cfg = yield* config.get() - const bridge = yield* EffectBridge.make() - const commands: Record = {} - - commands[Default.INIT] = { - name: Default.INIT, - description: "guided AGENTS.md setup", - source: "command", - get template() { - return PROMPT_INITIALIZE.replace("${path}", ctx.worktree) - }, - hints: hints(PROMPT_INITIALIZE), - } - commands[Default.REVIEW] = { - name: Default.REVIEW, - description: "review changes [commit|branch|pr], defaults to uncommitted", - source: "command", - get template() { - return PROMPT_REVIEW.replace("${path}", ctx.worktree) - }, - subtask: true, - hints: hints(PROMPT_REVIEW), - } - - for (const [name, command] of Object.entries(cfg.command ?? {})) { - commands[name] = { - name, - agent: command.agent, - model: command.model, - description: command.description, - source: "command", - get template() { - return command.template - }, - subtask: command.subtask, - hints: hints(command.template), - } - } - - for (const [name, prompt] of Object.entries(yield* mcp.prompts())) { - commands[name] = { - name, - source: "mcp", - description: prompt.description, - get template() { - return bridge.promise( - mcp - .getPrompt( - prompt.client, - prompt.name, - prompt.arguments - ? Object.fromEntries(prompt.arguments.map((argument, i) => [argument.name, `$${i + 1}`])) - : {}, - ) - .pipe( - Effect.map( - (template) => - template?.messages - .map((message) => (message.content.type === "text" ? message.content.text : "")) - .join("\n") || "", - ), - ), - ) - }, - hints: prompt.arguments?.map((_, i) => `$${i + 1}`) ?? 
[], - } - } - - for (const item of yield* skill.all()) { - if (commands[item.name]) continue - commands[item.name] = { - name: item.name, - description: item.description, - source: "skill", - get template() { - return item.content - }, - hints: [], - } - } - - return { - commands, - } - }) - - const state = yield* InstanceState.make((ctx) => init(ctx)) - - const get = Effect.fn("Command.get")(function* (name: string) { - const s = yield* InstanceState.get(state) - return s.commands[name] - }) - - const list = Effect.fn("Command.list")(function* () { - const s = yield* InstanceState.get(state) - return Object.values(s.commands) - }) - - return Service.of({ get, list }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Config.defaultLayer), - Layer.provide(MCP.defaultLayer), - Layer.provide(Skill.defaultLayer), -) diff --git a/packages/opencode/src/command/index.ts b/packages/opencode/src/command/index.ts index 2e530360c5..27ba357ecc 100644 --- a/packages/opencode/src/command/index.ts +++ b/packages/opencode/src/command/index.ts @@ -1 +1,188 @@ -export * as Command from "./command" +import { BusEvent } from "@/bus/bus-event" +import { InstanceState } from "@/effect" +import { EffectBridge } from "@/effect" +import type { InstanceContext } from "@/project/instance" +import { SessionID, MessageID } from "@/session/schema" +import { Effect, Layer, Context } from "effect" +import z from "zod" +import { Config } from "../config" +import { MCP } from "../mcp" +import { Skill } from "../skill" +import PROMPT_INITIALIZE from "./template/initialize.txt" +import PROMPT_REVIEW from "./template/review.txt" + +type State = { + commands: Record +} + +export const Event = { + Executed: BusEvent.define( + "command.executed", + z.object({ + name: z.string(), + sessionID: SessionID.zod, + arguments: z.string(), + messageID: MessageID.zod, + }), + ), +} + +export const Info = z + .object({ + name: z.string(), + description: z.string().optional(), + agent: 
z.string().optional(), + model: z.string().optional(), + source: z.enum(["command", "mcp", "skill"]).optional(), + // workaround for zod not supporting async functions natively so we use getters + // https://zod.dev/v4/changelog?id=zfunction + template: z.promise(z.string()).or(z.string()), + subtask: z.boolean().optional(), + hints: z.array(z.string()), + }) + .meta({ + ref: "Command", + }) + +// for some reason zod is inferring `string` for z.promise(z.string()).or(z.string()) so we have to manually override it +export type Info = Omit, "template"> & { template: Promise | string } + +export function hints(template: string) { + const result: string[] = [] + const numbered = template.match(/\$\d+/g) + if (numbered) { + for (const match of [...new Set(numbered)].sort()) result.push(match) + } + if (template.includes("$ARGUMENTS")) result.push("$ARGUMENTS") + return result +} + +export const Default = { + INIT: "init", + REVIEW: "review", +} as const + +export interface Interface { + readonly get: (name: string) => Effect.Effect + readonly list: () => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Command") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const config = yield* Config.Service + const mcp = yield* MCP.Service + const skill = yield* Skill.Service + + const init = Effect.fn("Command.state")(function* (ctx: InstanceContext) { + const cfg = yield* config.get() + const bridge = yield* EffectBridge.make() + const commands: Record = {} + + commands[Default.INIT] = { + name: Default.INIT, + description: "guided AGENTS.md setup", + source: "command", + get template() { + return PROMPT_INITIALIZE.replace("${path}", ctx.worktree) + }, + hints: hints(PROMPT_INITIALIZE), + } + commands[Default.REVIEW] = { + name: Default.REVIEW, + description: "review changes [commit|branch|pr], defaults to uncommitted", + source: "command", + get template() { + return PROMPT_REVIEW.replace("${path}", ctx.worktree) + 
}, + subtask: true, + hints: hints(PROMPT_REVIEW), + } + + for (const [name, command] of Object.entries(cfg.command ?? {})) { + commands[name] = { + name, + agent: command.agent, + model: command.model, + description: command.description, + source: "command", + get template() { + return command.template + }, + subtask: command.subtask, + hints: hints(command.template), + } + } + + for (const [name, prompt] of Object.entries(yield* mcp.prompts())) { + commands[name] = { + name, + source: "mcp", + description: prompt.description, + get template() { + return bridge.promise( + mcp + .getPrompt( + prompt.client, + prompt.name, + prompt.arguments + ? Object.fromEntries(prompt.arguments.map((argument, i) => [argument.name, `$${i + 1}`])) + : {}, + ) + .pipe( + Effect.map( + (template) => + template?.messages + .map((message) => (message.content.type === "text" ? message.content.text : "")) + .join("\n") || "", + ), + ), + ) + }, + hints: prompt.arguments?.map((_, i) => `$${i + 1}`) ?? [], + } + } + + for (const item of yield* skill.all()) { + if (commands[item.name]) continue + commands[item.name] = { + name: item.name, + description: item.description, + source: "skill", + get template() { + return item.content + }, + hints: [], + } + } + + return { + commands, + } + }) + + const state = yield* InstanceState.make((ctx) => init(ctx)) + + const get = Effect.fn("Command.get")(function* (name: string) { + const s = yield* InstanceState.get(state) + return s.commands[name] + }) + + const list = Effect.fn("Command.list")(function* () { + const s = yield* InstanceState.get(state) + return Object.values(s.commands) + }) + + return Service.of({ get, list }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Config.defaultLayer), + Layer.provide(MCP.defaultLayer), + Layer.provide(Skill.defaultLayer), +) + +export * as Command from "." 
From 021ab50fb105153de174c664ce90f5c90e4ba840 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 20:31:50 +0000 Subject: [PATCH 040/120] chore: generate --- packages/opencode/src/control-plane/workspace.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index 08d675b253..9c1c4c8960 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -116,7 +116,7 @@ export namespace Workspace { const env = { OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), OPENCODE_WORKSPACE_ID: config.id, - OPENCODE_EXPERIMENTAL_WORKSPACES: "true" + OPENCODE_EXPERIMENTAL_WORKSPACES: "true", } await adaptor.create(config, env) From 23f97ac49d5e39f8b9cd1f269ad3f1c33404a557 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:33:52 -0400 Subject: [PATCH 041/120] refactor: collapse global barrel into global/index.ts (#22905) --- packages/opencode/src/global/global.ts | 56 ------------------------ packages/opencode/src/global/index.ts | 59 +++++++++++++++++++++++++- 2 files changed, 58 insertions(+), 57 deletions(-) delete mode 100644 packages/opencode/src/global/global.ts diff --git a/packages/opencode/src/global/global.ts b/packages/opencode/src/global/global.ts deleted file mode 100644 index 3633e0855a..0000000000 --- a/packages/opencode/src/global/global.ts +++ /dev/null @@ -1,56 +0,0 @@ -import fs from "fs/promises" -import { xdgData, xdgCache, xdgConfig, xdgState } from "xdg-basedir" -import path from "path" -import os from "os" -import { Filesystem } from "../util" -import { Flock } from "@opencode-ai/shared/util/flock" - -const app = "opencode" - -const data = path.join(xdgData!, app) -const cache = path.join(xdgCache!, app) -const config = path.join(xdgConfig!, app) -const state = path.join(xdgState!, app) - 
-export const Path = { - // Allow override via OPENCODE_TEST_HOME for test isolation - get home() { - return process.env.OPENCODE_TEST_HOME || os.homedir() - }, - data, - bin: path.join(cache, "bin"), - log: path.join(data, "log"), - cache, - config, - state, -} - -// Initialize Flock with global state path -Flock.setGlobal({ state }) - -await Promise.all([ - fs.mkdir(Path.data, { recursive: true }), - fs.mkdir(Path.config, { recursive: true }), - fs.mkdir(Path.state, { recursive: true }), - fs.mkdir(Path.log, { recursive: true }), - fs.mkdir(Path.bin, { recursive: true }), -]) - -const CACHE_VERSION = "21" - -const version = await Filesystem.readText(path.join(Path.cache, "version")).catch(() => "0") - -if (version !== CACHE_VERSION) { - try { - const contents = await fs.readdir(Path.cache) - await Promise.all( - contents.map((item) => - fs.rm(path.join(Path.cache, item), { - recursive: true, - force: true, - }), - ), - ) - } catch {} - await Filesystem.write(path.join(Path.cache, "version"), CACHE_VERSION) -} diff --git a/packages/opencode/src/global/index.ts b/packages/opencode/src/global/index.ts index 9262bf2a93..27bac598fb 100644 --- a/packages/opencode/src/global/index.ts +++ b/packages/opencode/src/global/index.ts @@ -1 +1,58 @@ -export * as Global from "./global" +import fs from "fs/promises" +import { xdgData, xdgCache, xdgConfig, xdgState } from "xdg-basedir" +import path from "path" +import os from "os" +import { Filesystem } from "../util" +import { Flock } from "@opencode-ai/shared/util/flock" + +const app = "opencode" + +const data = path.join(xdgData!, app) +const cache = path.join(xdgCache!, app) +const config = path.join(xdgConfig!, app) +const state = path.join(xdgState!, app) + +export const Path = { + // Allow override via OPENCODE_TEST_HOME for test isolation + get home() { + return process.env.OPENCODE_TEST_HOME || os.homedir() + }, + data, + bin: path.join(cache, "bin"), + log: path.join(data, "log"), + cache, + config, + state, +} + +// 
Initialize Flock with global state path +Flock.setGlobal({ state }) + +await Promise.all([ + fs.mkdir(Path.data, { recursive: true }), + fs.mkdir(Path.config, { recursive: true }), + fs.mkdir(Path.state, { recursive: true }), + fs.mkdir(Path.log, { recursive: true }), + fs.mkdir(Path.bin, { recursive: true }), +]) + +const CACHE_VERSION = "21" + +const version = await Filesystem.readText(path.join(Path.cache, "version")).catch(() => "0") + +if (version !== CACHE_VERSION) { + try { + const contents = await fs.readdir(Path.cache) + await Promise.all( + contents.map((item) => + fs.rm(path.join(Path.cache, item), { + recursive: true, + force: true, + }), + ), + ) + } catch {} + await Filesystem.write(path.join(Path.cache, "version"), CACHE_VERSION) +} + +export * as Global from "." From cb6a9253fe8c4439bcfeff6c152b22b470de2eda Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:34:33 -0400 Subject: [PATCH 042/120] refactor: collapse sync barrel into sync/index.ts (#22907) --- packages/opencode/src/sync/index.ts | 279 ++++++++++++++++++++++- packages/opencode/src/sync/sync-event.ts | 276 ---------------------- 2 files changed, 278 insertions(+), 277 deletions(-) delete mode 100644 packages/opencode/src/sync/sync-event.ts diff --git a/packages/opencode/src/sync/index.ts b/packages/opencode/src/sync/index.ts index a6dec180bd..125d8c9550 100644 --- a/packages/opencode/src/sync/index.ts +++ b/packages/opencode/src/sync/index.ts @@ -1 +1,278 @@ -export * as SyncEvent from "./sync-event" +import z from "zod" +import type { ZodObject } from "zod" +import { Database, eq } from "@/storage" +import { GlobalBus } from "@/bus/global" +import { Bus as ProjectBus } from "@/bus" +import { BusEvent } from "@/bus/bus-event" +import { Instance } from "@/project/instance" +import { EventSequenceTable, EventTable } from "./event.sql" +import { WorkspaceContext } from "@/control-plane/workspace-context" +import { EventID } from "./schema" +import { Flag } from "@/flag/flag" 
+ +export type Definition = { + type: string + version: number + aggregate: string + schema: z.ZodObject + + // This is temporary and only exists for compatibility with bus + // event definitions + properties: z.ZodObject +} + +export type Event = { + id: string + seq: number + aggregateID: string + data: z.infer +} + +export type SerializedEvent = Event & { type: string } + +type ProjectorFunc = (db: Database.TxOrDb, data: unknown) => void + +export const registry = new Map() +let projectors: Map | undefined +const versions = new Map() +let frozen = false +let convertEvent: (type: string, event: Event["data"]) => Promise> | Record + +export function reset() { + frozen = false + projectors = undefined + convertEvent = (_, data) => data +} + +export function init(input: { projectors: Array<[Definition, ProjectorFunc]>; convertEvent?: typeof convertEvent }) { + projectors = new Map(input.projectors) + + // Install all the latest event defs to the bus. We only ever emit + // latest versions from code, and keep around old versions for + // replaying. Replaying does not go through the bus, and it + // simplifies the bus to only use unversioned latest events + for (let [type, version] of versions.entries()) { + let def = registry.get(versionedType(type, version))! + + BusEvent.define(def.type, def.properties || def.schema) + } + + // Freeze the system so it clearly errors if events are defined + // after `init` which would cause bugs + frozen = true + convertEvent = input.convertEvent || ((_, data) => data) +} + +export function versionedType(type: A): A +export function versionedType(type: A, version: B): `${A}/${B}` +export function versionedType(type: string, version?: number) { + return version ? 
`${type}.${version}` : type +} + +export function define< + Type extends string, + Agg extends string, + Schema extends ZodObject>>, + BusSchema extends ZodObject = Schema, +>(input: { type: Type; version: number; aggregate: Agg; schema: Schema; busSchema?: BusSchema }) { + if (frozen) { + throw new Error("Error defining sync event: sync system has been frozen") + } + + const def = { + type: input.type, + version: input.version, + aggregate: input.aggregate, + schema: input.schema, + properties: input.busSchema ? input.busSchema : input.schema, + } + + versions.set(def.type, Math.max(def.version, versions.get(def.type) || 0)) + + registry.set(versionedType(def.type, def.version), def) + + return def +} + +export function project( + def: Def, + func: (db: Database.TxOrDb, data: Event["data"]) => void, +): [Definition, ProjectorFunc] { + return [def, func as ProjectorFunc] +} + +function process(def: Def, event: Event, options: { publish: boolean }) { + if (projectors == null) { + throw new Error("No projectors available. 
Call `SyncEvent.init` to install projectors") + } + + const projector = projectors.get(def) + if (!projector) { + throw new Error(`Projector not found for event: ${def.type}`) + } + + // idempotent: need to ignore any events already logged + + Database.transaction((tx) => { + projector(tx, event.data) + + if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { + tx.insert(EventSequenceTable) + .values({ + aggregate_id: event.aggregateID, + seq: event.seq, + }) + .onConflictDoUpdate({ + target: EventSequenceTable.aggregate_id, + set: { seq: event.seq }, + }) + .run() + tx.insert(EventTable) + .values({ + id: event.id, + seq: event.seq, + aggregate_id: event.aggregateID, + type: versionedType(def.type, def.version), + data: event.data as Record, + }) + .run() + } + + Database.effect(() => { + if (options?.publish) { + const result = convertEvent(def.type, event.data) + if (result instanceof Promise) { + void result.then((data) => { + void ProjectBus.publish({ type: def.type, properties: def.schema }, data) + }) + } else { + void ProjectBus.publish({ type: def.type, properties: def.schema }, result) + } + + GlobalBus.emit("event", { + directory: Instance.directory, + project: Instance.project.id, + workspace: WorkspaceContext.workspaceID, + payload: { + type: "sync", + syncEvent: { + type: versionedType(def.type, def.version), + ...event, + }, + }, + }) + } + }) + }) +} + +export function replay(event: SerializedEvent, options?: { publish: boolean }) { + const def = registry.get(event.type) + if (!def) { + throw new Error(`Unknown event type: ${event.type}`) + } + + const row = Database.use((db) => + db + .select({ seq: EventSequenceTable.seq }) + .from(EventSequenceTable) + .where(eq(EventSequenceTable.aggregate_id, event.aggregateID)) + .get(), + ) + + const latest = row?.seq ?? 
-1 + if (event.seq <= latest) { + return + } + + const expected = latest + 1 + if (event.seq !== expected) { + throw new Error(`Sequence mismatch for aggregate "${event.aggregateID}": expected ${expected}, got ${event.seq}`) + } + + process(def, event, { publish: !!options?.publish }) +} + +export function replayAll(events: SerializedEvent[], options?: { publish: boolean }) { + const source = events[0]?.aggregateID + if (!source) return + if (events.some((item) => item.aggregateID !== source)) { + throw new Error("Replay events must belong to the same session") + } + const start = events[0].seq + for (const [i, item] of events.entries()) { + const seq = start + i + if (item.seq !== seq) { + throw new Error(`Replay sequence mismatch at index ${i}: expected ${seq}, got ${item.seq}`) + } + } + for (const item of events) { + replay(item, options) + } + return source +} + +export function run(def: Def, data: Event["data"], options?: { publish?: boolean }) { + const agg = (data as Record)[def.aggregate] + // This should never happen: we've enforced it via typescript in + // the definition + if (agg == null) { + throw new Error(`SyncEvent.run: "${def.aggregate}" required but not found: ${JSON.stringify(data)}`) + } + + if (def.version !== versions.get(def.type)) { + throw new Error(`SyncEvent.run: running old versions of events is not allowed: ${def.type}`) + } + + const { publish = true } = options || {} + + // Note that this is an "immediate" transaction which is critical. + // We need to make sure we can safely read and write with nothing + // else changing the data from under us + Database.transaction( + (tx) => { + const id = EventID.ascending() + const row = tx + .select({ seq: EventSequenceTable.seq }) + .from(EventSequenceTable) + .where(eq(EventSequenceTable.aggregate_id, agg)) + .get() + const seq = row?.seq != null ? 
row.seq + 1 : 0 + + const event = { id, seq, aggregateID: agg, data } + process(def, event, { publish }) + }, + { + behavior: "immediate", + }, + ) +} + +export function remove(aggregateID: string) { + Database.transaction((tx) => { + tx.delete(EventSequenceTable).where(eq(EventSequenceTable.aggregate_id, aggregateID)).run() + tx.delete(EventTable).where(eq(EventTable.aggregate_id, aggregateID)).run() + }) +} + +export function payloads() { + return registry + .entries() + .map(([type, def]) => { + return z + .object({ + type: z.literal("sync"), + name: z.literal(type), + id: z.string(), + seq: z.number(), + aggregateID: z.literal(def.aggregate), + data: def.schema, + }) + .meta({ + ref: `SyncEvent.${def.type}`, + }) + }) + .toArray() +} + +export * as SyncEvent from "." diff --git a/packages/opencode/src/sync/sync-event.ts b/packages/opencode/src/sync/sync-event.ts deleted file mode 100644 index 94c889d917..0000000000 --- a/packages/opencode/src/sync/sync-event.ts +++ /dev/null @@ -1,276 +0,0 @@ -import z from "zod" -import type { ZodObject } from "zod" -import { Database, eq } from "@/storage" -import { GlobalBus } from "@/bus/global" -import { Bus as ProjectBus } from "@/bus" -import { BusEvent } from "@/bus/bus-event" -import { Instance } from "@/project/instance" -import { EventSequenceTable, EventTable } from "./event.sql" -import { WorkspaceContext } from "@/control-plane/workspace-context" -import { EventID } from "./schema" -import { Flag } from "@/flag/flag" - -export type Definition = { - type: string - version: number - aggregate: string - schema: z.ZodObject - - // This is temporary and only exists for compatibility with bus - // event definitions - properties: z.ZodObject -} - -export type Event = { - id: string - seq: number - aggregateID: string - data: z.infer -} - -export type SerializedEvent = Event & { type: string } - -type ProjectorFunc = (db: Database.TxOrDb, data: unknown) => void - -export const registry = new Map() -let projectors: Map | 
undefined -const versions = new Map() -let frozen = false -let convertEvent: (type: string, event: Event["data"]) => Promise> | Record - -export function reset() { - frozen = false - projectors = undefined - convertEvent = (_, data) => data -} - -export function init(input: { projectors: Array<[Definition, ProjectorFunc]>; convertEvent?: typeof convertEvent }) { - projectors = new Map(input.projectors) - - // Install all the latest event defs to the bus. We only ever emit - // latest versions from code, and keep around old versions for - // replaying. Replaying does not go through the bus, and it - // simplifies the bus to only use unversioned latest events - for (let [type, version] of versions.entries()) { - let def = registry.get(versionedType(type, version))! - - BusEvent.define(def.type, def.properties || def.schema) - } - - // Freeze the system so it clearly errors if events are defined - // after `init` which would cause bugs - frozen = true - convertEvent = input.convertEvent || ((_, data) => data) -} - -export function versionedType(type: A): A -export function versionedType(type: A, version: B): `${A}/${B}` -export function versionedType(type: string, version?: number) { - return version ? `${type}.${version}` : type -} - -export function define< - Type extends string, - Agg extends string, - Schema extends ZodObject>>, - BusSchema extends ZodObject = Schema, ->(input: { type: Type; version: number; aggregate: Agg; schema: Schema; busSchema?: BusSchema }) { - if (frozen) { - throw new Error("Error defining sync event: sync system has been frozen") - } - - const def = { - type: input.type, - version: input.version, - aggregate: input.aggregate, - schema: input.schema, - properties: input.busSchema ? 
input.busSchema : input.schema, - } - - versions.set(def.type, Math.max(def.version, versions.get(def.type) || 0)) - - registry.set(versionedType(def.type, def.version), def) - - return def -} - -export function project( - def: Def, - func: (db: Database.TxOrDb, data: Event["data"]) => void, -): [Definition, ProjectorFunc] { - return [def, func as ProjectorFunc] -} - -function process(def: Def, event: Event, options: { publish: boolean }) { - if (projectors == null) { - throw new Error("No projectors available. Call `SyncEvent.init` to install projectors") - } - - const projector = projectors.get(def) - if (!projector) { - throw new Error(`Projector not found for event: ${def.type}`) - } - - // idempotent: need to ignore any events already logged - - Database.transaction((tx) => { - projector(tx, event.data) - - if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { - tx.insert(EventSequenceTable) - .values({ - aggregate_id: event.aggregateID, - seq: event.seq, - }) - .onConflictDoUpdate({ - target: EventSequenceTable.aggregate_id, - set: { seq: event.seq }, - }) - .run() - tx.insert(EventTable) - .values({ - id: event.id, - seq: event.seq, - aggregate_id: event.aggregateID, - type: versionedType(def.type, def.version), - data: event.data as Record, - }) - .run() - } - - Database.effect(() => { - if (options?.publish) { - const result = convertEvent(def.type, event.data) - if (result instanceof Promise) { - void result.then((data) => { - void ProjectBus.publish({ type: def.type, properties: def.schema }, data) - }) - } else { - void ProjectBus.publish({ type: def.type, properties: def.schema }, result) - } - - GlobalBus.emit("event", { - directory: Instance.directory, - project: Instance.project.id, - workspace: WorkspaceContext.workspaceID, - payload: { - type: "sync", - syncEvent: { - type: versionedType(def.type, def.version), - ...event, - }, - }, - }) - } - }) - }) -} - -export function replay(event: SerializedEvent, options?: { publish: boolean }) { - const def = 
registry.get(event.type) - if (!def) { - throw new Error(`Unknown event type: ${event.type}`) - } - - const row = Database.use((db) => - db - .select({ seq: EventSequenceTable.seq }) - .from(EventSequenceTable) - .where(eq(EventSequenceTable.aggregate_id, event.aggregateID)) - .get(), - ) - - const latest = row?.seq ?? -1 - if (event.seq <= latest) { - return - } - - const expected = latest + 1 - if (event.seq !== expected) { - throw new Error(`Sequence mismatch for aggregate "${event.aggregateID}": expected ${expected}, got ${event.seq}`) - } - - process(def, event, { publish: !!options?.publish }) -} - -export function replayAll(events: SerializedEvent[], options?: { publish: boolean }) { - const source = events[0]?.aggregateID - if (!source) return - if (events.some((item) => item.aggregateID !== source)) { - throw new Error("Replay events must belong to the same session") - } - const start = events[0].seq - for (const [i, item] of events.entries()) { - const seq = start + i - if (item.seq !== seq) { - throw new Error(`Replay sequence mismatch at index ${i}: expected ${seq}, got ${item.seq}`) - } - } - for (const item of events) { - replay(item, options) - } - return source -} - -export function run(def: Def, data: Event["data"], options?: { publish?: boolean }) { - const agg = (data as Record)[def.aggregate] - // This should never happen: we've enforced it via typescript in - // the definition - if (agg == null) { - throw new Error(`SyncEvent.run: "${def.aggregate}" required but not found: ${JSON.stringify(data)}`) - } - - if (def.version !== versions.get(def.type)) { - throw new Error(`SyncEvent.run: running old versions of events is not allowed: ${def.type}`) - } - - const { publish = true } = options || {} - - // Note that this is an "immediate" transaction which is critical. 
- // We need to make sure we can safely read and write with nothing - // else changing the data from under us - Database.transaction( - (tx) => { - const id = EventID.ascending() - const row = tx - .select({ seq: EventSequenceTable.seq }) - .from(EventSequenceTable) - .where(eq(EventSequenceTable.aggregate_id, agg)) - .get() - const seq = row?.seq != null ? row.seq + 1 : 0 - - const event = { id, seq, aggregateID: agg, data } - process(def, event, { publish }) - }, - { - behavior: "immediate", - }, - ) -} - -export function remove(aggregateID: string) { - Database.transaction((tx) => { - tx.delete(EventSequenceTable).where(eq(EventSequenceTable.aggregate_id, aggregateID)).run() - tx.delete(EventTable).where(eq(EventTable.aggregate_id, aggregateID)).run() - }) -} - -export function payloads() { - return registry - .entries() - .map(([type, def]) => { - return z - .object({ - type: z.literal("sync"), - name: z.literal(type), - id: z.string(), - seq: z.number(), - aggregateID: z.literal(def.aggregate), - data: def.schema, - }) - .meta({ - ref: `SyncEvent.${def.type}`, - }) - }) - .toArray() -} From 8c191b10c2bdceee607b0b549f9632f5adb5b511 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:35:04 -0400 Subject: [PATCH 043/120] refactor: collapse ide barrel into ide/index.ts (#22904) --- packages/opencode/src/ide/ide.ts | 71 ---------------------------- packages/opencode/src/ide/index.ts | 74 +++++++++++++++++++++++++++++- 2 files changed, 73 insertions(+), 72 deletions(-) delete mode 100644 packages/opencode/src/ide/ide.ts diff --git a/packages/opencode/src/ide/ide.ts b/packages/opencode/src/ide/ide.ts deleted file mode 100644 index 65e80d7f28..0000000000 --- a/packages/opencode/src/ide/ide.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { BusEvent } from "@/bus/bus-event" -import z from "zod" -import { NamedError } from "@opencode-ai/shared/util/error" -import { Log } from "../util" -import { Process } from "@/util" - -const SUPPORTED_IDES = [ - { name: 
"Windsurf" as const, cmd: "windsurf" }, - { name: "Visual Studio Code - Insiders" as const, cmd: "code-insiders" }, - { name: "Visual Studio Code" as const, cmd: "code" }, - { name: "Cursor" as const, cmd: "cursor" }, - { name: "VSCodium" as const, cmd: "codium" }, -] - -const log = Log.create({ service: "ide" }) - -export const Event = { - Installed: BusEvent.define( - "ide.installed", - z.object({ - ide: z.string(), - }), - ), -} - -export const AlreadyInstalledError = NamedError.create("AlreadyInstalledError", z.object({})) - -export const InstallFailedError = NamedError.create( - "InstallFailedError", - z.object({ - stderr: z.string(), - }), -) - -export function ide() { - if (process.env["TERM_PROGRAM"] === "vscode") { - const v = process.env["GIT_ASKPASS"] - for (const ide of SUPPORTED_IDES) { - if (v?.includes(ide.name)) return ide.name - } - } - return "unknown" -} - -export function alreadyInstalled() { - return process.env["OPENCODE_CALLER"] === "vscode" || process.env["OPENCODE_CALLER"] === "vscode-insiders" -} - -export async function install(ide: (typeof SUPPORTED_IDES)[number]["name"]) { - const cmd = SUPPORTED_IDES.find((i) => i.name === ide)?.cmd - if (!cmd) throw new Error(`Unknown IDE: ${ide}`) - - const p = await Process.run([cmd, "--install-extension", "sst-dev.opencode"], { - nothrow: true, - }) - const stdout = p.stdout.toString() - const stderr = p.stderr.toString() - - log.info("installed", { - ide, - stdout, - stderr, - }) - - if (p.code !== 0) { - throw new InstallFailedError({ stderr }) - } - if (stdout.includes("already installed")) { - throw new AlreadyInstalledError({}) - } -} diff --git a/packages/opencode/src/ide/index.ts b/packages/opencode/src/ide/index.ts index 9716ecbc74..ee80c34741 100644 --- a/packages/opencode/src/ide/index.ts +++ b/packages/opencode/src/ide/index.ts @@ -1 +1,73 @@ -export * as Ide from "./ide" +import { BusEvent } from "@/bus/bus-event" +import z from "zod" +import { NamedError } from 
"@opencode-ai/shared/util/error" +import { Log } from "../util" +import { Process } from "@/util" + +const SUPPORTED_IDES = [ + { name: "Windsurf" as const, cmd: "windsurf" }, + { name: "Visual Studio Code - Insiders" as const, cmd: "code-insiders" }, + { name: "Visual Studio Code" as const, cmd: "code" }, + { name: "Cursor" as const, cmd: "cursor" }, + { name: "VSCodium" as const, cmd: "codium" }, +] + +const log = Log.create({ service: "ide" }) + +export const Event = { + Installed: BusEvent.define( + "ide.installed", + z.object({ + ide: z.string(), + }), + ), +} + +export const AlreadyInstalledError = NamedError.create("AlreadyInstalledError", z.object({})) + +export const InstallFailedError = NamedError.create( + "InstallFailedError", + z.object({ + stderr: z.string(), + }), +) + +export function ide() { + if (process.env["TERM_PROGRAM"] === "vscode") { + const v = process.env["GIT_ASKPASS"] + for (const ide of SUPPORTED_IDES) { + if (v?.includes(ide.name)) return ide.name + } + } + return "unknown" +} + +export function alreadyInstalled() { + return process.env["OPENCODE_CALLER"] === "vscode" || process.env["OPENCODE_CALLER"] === "vscode-insiders" +} + +export async function install(ide: (typeof SUPPORTED_IDES)[number]["name"]) { + const cmd = SUPPORTED_IDES.find((i) => i.name === ide)?.cmd + if (!cmd) throw new Error(`Unknown IDE: ${ide}`) + + const p = await Process.run([cmd, "--install-extension", "sst-dev.opencode"], { + nothrow: true, + }) + const stdout = p.stdout.toString() + const stderr = p.stderr.toString() + + log.info("installed", { + ide, + stdout, + stderr, + }) + + if (p.code !== 0) { + throw new InstallFailedError({ stderr }) + } + if (stdout.includes("already installed")) { + throw new AlreadyInstalledError({}) + } +} + +export * as Ide from "." 
From bd51a0d35bcdec47a980bb3c34c1d5d7ba144866 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:35:26 -0400 Subject: [PATCH 044/120] refactor: collapse worktree barrel into worktree/index.ts (#22906) --- packages/opencode/src/worktree/index.ts | 598 ++++++++++++++++++++- packages/opencode/src/worktree/worktree.ts | 595 -------------------- 2 files changed, 597 insertions(+), 596 deletions(-) delete mode 100644 packages/opencode/src/worktree/worktree.ts diff --git a/packages/opencode/src/worktree/index.ts b/packages/opencode/src/worktree/index.ts index 39bf94d69b..aa1dc2f8f1 100644 --- a/packages/opencode/src/worktree/index.ts +++ b/packages/opencode/src/worktree/index.ts @@ -1 +1,597 @@ -export * as Worktree from "./worktree" +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" +import { Global } from "../global" +import { Instance } from "../project/instance" +import { InstanceBootstrap } from "../project/bootstrap" +import { Project } from "../project" +import { Database, eq } from "../storage" +import { ProjectTable } from "../project/project.sql" +import type { ProjectID } from "../project/schema" +import { Log } from "../util" +import { Slug } from "@opencode-ai/shared/util/slug" +import { errorMessage } from "../util/error" +import { BusEvent } from "@/bus/bus-event" +import { GlobalBus } from "@/bus/global" +import { Git } from "@/git" +import { Effect, Layer, Path, Scope, Context, Stream } from "effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import { NodePath } from "@effect/platform-node" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { BootstrapRuntime } from "@/effect/bootstrap-runtime" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { InstanceState } from "@/effect" + +const log = Log.create({ service: "worktree" }) + +export const Event = { + Ready: BusEvent.define( + "worktree.ready", + z.object({ + name: 
z.string(), + branch: z.string(), + }), + ), + Failed: BusEvent.define( + "worktree.failed", + z.object({ + message: z.string(), + }), + ), +} + +export const Info = z + .object({ + name: z.string(), + branch: z.string(), + directory: z.string(), + }) + .meta({ + ref: "Worktree", + }) + +export type Info = z.infer + +export const CreateInput = z + .object({ + name: z.string().optional(), + startCommand: z.string().optional().describe("Additional startup script to run after the project's start command"), + }) + .meta({ + ref: "WorktreeCreateInput", + }) + +export type CreateInput = z.infer + +export const RemoveInput = z + .object({ + directory: z.string(), + }) + .meta({ + ref: "WorktreeRemoveInput", + }) + +export type RemoveInput = z.infer + +export const ResetInput = z + .object({ + directory: z.string(), + }) + .meta({ + ref: "WorktreeResetInput", + }) + +export type ResetInput = z.infer + +export const NotGitError = NamedError.create( + "WorktreeNotGitError", + z.object({ + message: z.string(), + }), +) + +export const NameGenerationFailedError = NamedError.create( + "WorktreeNameGenerationFailedError", + z.object({ + message: z.string(), + }), +) + +export const CreateFailedError = NamedError.create( + "WorktreeCreateFailedError", + z.object({ + message: z.string(), + }), +) + +export const StartCommandFailedError = NamedError.create( + "WorktreeStartCommandFailedError", + z.object({ + message: z.string(), + }), +) + +export const RemoveFailedError = NamedError.create( + "WorktreeRemoveFailedError", + z.object({ + message: z.string(), + }), +) + +export const ResetFailedError = NamedError.create( + "WorktreeResetFailedError", + z.object({ + message: z.string(), + }), +) + +function slugify(input: string) { + return input + .trim() + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-+/, "") + .replace(/-+$/, "") +} + +function failedRemoves(...chunks: string[]) { + return chunks.filter(Boolean).flatMap((chunk) => + chunk + .split("\n") + .map((line) 
=> line.trim()) + .flatMap((line) => { + const match = line.match(/^warning:\s+failed to remove\s+(.+):\s+/i) + if (!match) return [] + const value = match[1]?.trim().replace(/^['"]|['"]$/g, "") + if (!value) return [] + return [value] + }), + ) +} + +// --------------------------------------------------------------------------- +// Effect service +// --------------------------------------------------------------------------- + +export interface Interface { + readonly makeWorktreeInfo: (name?: string) => Effect.Effect + readonly createFromInfo: (info: Info, startCommand?: string) => Effect.Effect + readonly create: (input?: CreateInput) => Effect.Effect + readonly remove: (input: RemoveInput) => Effect.Effect + readonly reset: (input: ResetInput) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Worktree") {} + +type GitResult = { code: number; text: string; stderr: string } + +export const layer: Layer.Layer< + Service, + never, + AppFileSystem.Service | Path.Path | ChildProcessSpawner.ChildProcessSpawner | Git.Service | Project.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const scope = yield* Scope.Scope + const fs = yield* AppFileSystem.Service + const pathSvc = yield* Path.Path + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + const gitSvc = yield* Git.Service + const project = yield* Project.Service + + const git = Effect.fnUntraced( + function* (args: string[], opts?: { cwd?: string }) { + const handle = yield* spawner.spawn( + ChildProcess.make("git", args, { cwd: opts?.cwd, extendEnv: true, stdin: "ignore" }), + ) + const [text, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + return { code, text, stderr } satisfies GitResult + }, + Effect.scoped, + Effect.catch((e) => + Effect.succeed({ code: 1, text: "", stderr: e instanceof Error ? 
e.message : String(e) } satisfies GitResult), + ), + ) + + const MAX_NAME_ATTEMPTS = 26 + const candidate = Effect.fn("Worktree.candidate")(function* (root: string, base?: string) { + const ctx = yield* InstanceState.context + for (const attempt of Array.from({ length: MAX_NAME_ATTEMPTS }, (_, i) => i)) { + const name = base ? (attempt === 0 ? base : `${base}-${Slug.create()}`) : Slug.create() + const branch = `opencode/${name}` + const directory = pathSvc.join(root, name) + + if (yield* fs.exists(directory).pipe(Effect.orDie)) continue + + const ref = `refs/heads/${branch}` + const branchCheck = yield* git(["show-ref", "--verify", "--quiet", ref], { cwd: ctx.worktree }) + if (branchCheck.code === 0) continue + + return Info.parse({ name, branch, directory }) + } + throw new NameGenerationFailedError({ message: "Failed to generate a unique worktree name" }) + }) + + const makeWorktreeInfo = Effect.fn("Worktree.makeWorktreeInfo")(function* (name?: string) { + const ctx = yield* InstanceState.context + if (ctx.project.vcs !== "git") { + throw new NotGitError({ message: "Worktrees are only supported for git projects" }) + } + + const root = pathSvc.join(Global.Path.data, "worktree", ctx.project.id) + yield* fs.makeDirectory(root, { recursive: true }).pipe(Effect.orDie) + + const base = name ? 
slugify(name) : "" + return yield* candidate(root, base || undefined) + }) + + const setup = Effect.fnUntraced(function* (info: Info) { + const ctx = yield* InstanceState.context + const created = yield* git(["worktree", "add", "--no-checkout", "-b", info.branch, info.directory], { + cwd: ctx.worktree, + }) + if (created.code !== 0) { + throw new CreateFailedError({ message: created.stderr || created.text || "Failed to create git worktree" }) + } + + yield* project.addSandbox(ctx.project.id, info.directory).pipe(Effect.catch(() => Effect.void)) + }) + + const boot = Effect.fnUntraced(function* (info: Info, startCommand?: string) { + const ctx = yield* InstanceState.context + const workspaceID = yield* InstanceState.workspaceID + const projectID = ctx.project.id + const extra = startCommand?.trim() + + const populated = yield* git(["reset", "--hard"], { cwd: info.directory }) + if (populated.code !== 0) { + const message = populated.stderr || populated.text || "Failed to populate worktree" + log.error("worktree checkout failed", { directory: info.directory, message }) + GlobalBus.emit("event", { + directory: info.directory, + project: ctx.project.id, + workspace: workspaceID, + payload: { type: Event.Failed.type, properties: { message } }, + }) + return + } + + const booted = yield* Effect.promise(() => + Instance.provide({ + directory: info.directory, + init: () => BootstrapRuntime.runPromise(InstanceBootstrap), + fn: () => undefined, + }) + .then(() => true) + .catch((error) => { + const message = errorMessage(error) + log.error("worktree bootstrap failed", { directory: info.directory, message }) + GlobalBus.emit("event", { + directory: info.directory, + project: ctx.project.id, + workspace: workspaceID, + payload: { type: Event.Failed.type, properties: { message } }, + }) + return false + }), + ) + if (!booted) return + + GlobalBus.emit("event", { + directory: info.directory, + project: ctx.project.id, + workspace: workspaceID, + payload: { + type: 
Event.Ready.type, + properties: { name: info.name, branch: info.branch }, + }, + }) + + yield* runStartScripts(info.directory, { projectID, extra }) + }) + + const createFromInfo = Effect.fn("Worktree.createFromInfo")(function* (info: Info, startCommand?: string) { + yield* setup(info) + yield* boot(info, startCommand) + }) + + const create = Effect.fn("Worktree.create")(function* (input?: CreateInput) { + const info = yield* makeWorktreeInfo(input?.name) + yield* setup(info) + yield* boot(info, input?.startCommand).pipe( + Effect.catchCause((cause) => Effect.sync(() => log.error("worktree bootstrap failed", { cause }))), + Effect.forkIn(scope), + ) + return info + }) + + const canonical = Effect.fnUntraced(function* (input: string) { + const abs = pathSvc.resolve(input) + const real = yield* fs.realPath(abs).pipe(Effect.catch(() => Effect.succeed(abs))) + const normalized = pathSvc.normalize(real) + return process.platform === "win32" ? normalized.toLowerCase() : normalized + }) + + function parseWorktreeList(text: string) { + return text + .split("\n") + .map((line) => line.trim()) + .reduce<{ path?: string; branch?: string }[]>((acc, line) => { + if (!line) return acc + if (line.startsWith("worktree ")) { + acc.push({ path: line.slice("worktree ".length).trim() }) + return acc + } + const current = acc[acc.length - 1] + if (!current) return acc + if (line.startsWith("branch ")) { + current.branch = line.slice("branch ".length).trim() + } + return acc + }, []) + } + + const locateWorktree = Effect.fnUntraced(function* ( + entries: { path?: string; branch?: string }[], + directory: string, + ) { + for (const item of entries) { + if (!item.path) continue + const key = yield* canonical(item.path) + if (key === directory) return item + } + return undefined + }) + + function stopFsmonitor(target: string) { + return fs.exists(target).pipe( + Effect.orDie, + Effect.flatMap((exists) => (exists ? 
git(["fsmonitor--daemon", "stop"], { cwd: target }) : Effect.void)), + ) + } + + function cleanDirectory(target: string) { + return Effect.promise(() => + import("fs/promises") + .then((fsp) => fsp.rm(target, { recursive: true, force: true, maxRetries: 5, retryDelay: 100 })) + .catch((error) => { + const message = errorMessage(error) + throw new RemoveFailedError({ message: message || "Failed to remove git worktree directory" }) + }), + ) + } + + const remove = Effect.fn("Worktree.remove")(function* (input: RemoveInput) { + if (Instance.project.vcs !== "git") { + throw new NotGitError({ message: "Worktrees are only supported for git projects" }) + } + + const directory = yield* canonical(input.directory) + + const list = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) + if (list.code !== 0) { + throw new RemoveFailedError({ message: list.stderr || list.text || "Failed to read git worktrees" }) + } + + const entries = parseWorktreeList(list.text) + const entry = yield* locateWorktree(entries, directory) + + if (!entry?.path) { + const directoryExists = yield* fs.exists(directory).pipe(Effect.orDie) + if (directoryExists) { + yield* stopFsmonitor(directory) + yield* cleanDirectory(directory) + } + return true + } + + yield* stopFsmonitor(entry.path) + const removed = yield* git(["worktree", "remove", "--force", entry.path], { cwd: Instance.worktree }) + if (removed.code !== 0) { + const next = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) + if (next.code !== 0) { + throw new RemoveFailedError({ + message: removed.stderr || removed.text || next.stderr || next.text || "Failed to remove git worktree", + }) + } + + const stale = yield* locateWorktree(parseWorktreeList(next.text), directory) + if (stale?.path) { + throw new RemoveFailedError({ message: removed.stderr || removed.text || "Failed to remove git worktree" }) + } + } + + yield* cleanDirectory(entry.path) + + const branch = 
entry.branch?.replace(/^refs\/heads\//, "") + if (branch) { + const deleted = yield* git(["branch", "-D", branch], { cwd: Instance.worktree }) + if (deleted.code !== 0) { + throw new RemoveFailedError({ + message: deleted.stderr || deleted.text || "Failed to delete worktree branch", + }) + } + } + + return true + }) + + const gitExpect = Effect.fnUntraced(function* ( + args: string[], + opts: { cwd: string }, + error: (r: GitResult) => Error, + ) { + const result = yield* git(args, opts) + if (result.code !== 0) throw error(result) + return result + }) + + const runStartCommand = Effect.fnUntraced( + function* (directory: string, cmd: string) { + const [shell, args] = process.platform === "win32" ? ["cmd", ["/c", cmd]] : ["bash", ["-lc", cmd]] + const handle = yield* spawner.spawn( + ChildProcess.make(shell, args, { cwd: directory, extendEnv: true, stdin: "ignore" }), + ) + // Drain stdout, capture stderr for error reporting + const [, stderr] = yield* Effect.all( + [Stream.runDrain(handle.stdout), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ).pipe(Effect.orDie) + const code = yield* handle.exitCode + return { code, stderr } + }, + Effect.scoped, + Effect.catch(() => Effect.succeed({ code: 1, stderr: "" })), + ) + + const runStartScript = Effect.fnUntraced(function* (directory: string, cmd: string, kind: string) { + const text = cmd.trim() + if (!text) return true + const result = yield* runStartCommand(directory, text) + if (result.code === 0) return true + log.error("worktree start command failed", { kind, directory, message: result.stderr }) + return false + }) + + const runStartScripts = Effect.fnUntraced(function* ( + directory: string, + input: { projectID: ProjectID; extra?: string }, + ) { + const row = yield* Effect.sync(() => + Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get()), + ) + const project = row ? 
Project.fromRow(row) : undefined + const startup = project?.commands?.start?.trim() ?? "" + const ok = yield* runStartScript(directory, startup, "project") + if (!ok) return false + yield* runStartScript(directory, input.extra ?? "", "worktree") + return true + }) + + const prune = Effect.fnUntraced(function* (root: string, entries: string[]) { + const base = yield* canonical(root) + yield* Effect.forEach( + entries, + (entry) => + Effect.gen(function* () { + const target = yield* canonical(pathSvc.resolve(root, entry)) + if (target === base) return + if (!target.startsWith(`${base}${pathSvc.sep}`)) return + yield* fs.remove(target, { recursive: true }).pipe(Effect.ignore) + }), + { concurrency: "unbounded" }, + ) + }) + + const sweep = Effect.fnUntraced(function* (root: string) { + const first = yield* git(["clean", "-ffdx"], { cwd: root }) + if (first.code === 0) return first + + const entries = failedRemoves(first.stderr, first.text) + if (!entries.length) return first + + yield* prune(root, entries) + return yield* git(["clean", "-ffdx"], { cwd: root }) + }) + + const reset = Effect.fn("Worktree.reset")(function* (input: ResetInput) { + if (Instance.project.vcs !== "git") { + throw new NotGitError({ message: "Worktrees are only supported for git projects" }) + } + + const directory = yield* canonical(input.directory) + const primary = yield* canonical(Instance.worktree) + if (directory === primary) { + throw new ResetFailedError({ message: "Cannot reset the primary workspace" }) + } + + const list = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) + if (list.code !== 0) { + throw new ResetFailedError({ message: list.stderr || list.text || "Failed to read git worktrees" }) + } + + const entry = yield* locateWorktree(parseWorktreeList(list.text), directory) + if (!entry?.path) { + throw new ResetFailedError({ message: "Worktree not found" }) + } + + const worktreePath = entry.path + + const base = yield* 
gitSvc.defaultBranch(Instance.worktree) + if (!base) { + throw new ResetFailedError({ message: "Default branch not found" }) + } + + const sep = base.ref.indexOf("/") + if (base.ref !== base.name && sep > 0) { + const remote = base.ref.slice(0, sep) + const branch = base.ref.slice(sep + 1) + yield* gitExpect( + ["fetch", remote, branch], + { cwd: Instance.worktree }, + (r) => new ResetFailedError({ message: r.stderr || r.text || `Failed to fetch ${base.ref}` }), + ) + } + + yield* gitExpect( + ["reset", "--hard", base.ref], + { cwd: worktreePath }, + (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to reset worktree to target" }), + ) + + const cleanResult = yield* sweep(worktreePath) + if (cleanResult.code !== 0) { + throw new ResetFailedError({ message: cleanResult.stderr || cleanResult.text || "Failed to clean worktree" }) + } + + yield* gitExpect( + ["submodule", "update", "--init", "--recursive", "--force"], + { cwd: worktreePath }, + (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to update submodules" }), + ) + + yield* gitExpect( + ["submodule", "foreach", "--recursive", "git", "reset", "--hard"], + { cwd: worktreePath }, + (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to reset submodules" }), + ) + + yield* gitExpect( + ["submodule", "foreach", "--recursive", "git", "clean", "-fdx"], + { cwd: worktreePath }, + (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to clean submodules" }), + ) + + const status = yield* git(["-c", "core.fsmonitor=false", "status", "--porcelain=v1"], { cwd: worktreePath }) + if (status.code !== 0) { + throw new ResetFailedError({ message: status.stderr || status.text || "Failed to read git status" }) + } + + if (status.text.trim()) { + throw new ResetFailedError({ message: `Worktree reset left local changes:\n${status.text.trim()}` }) + } + + yield* runStartScripts(worktreePath, { projectID: Instance.project.id }).pipe( + Effect.catchCause((cause) 
=> Effect.sync(() => log.error("worktree start task failed", { cause }))), + Effect.forkIn(scope), + ) + + return true + }) + + return Service.of({ makeWorktreeInfo, createFromInfo, create, remove, reset }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Git.defaultLayer), + Layer.provide(CrossSpawnSpawner.defaultLayer), + Layer.provide(Project.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(NodePath.layer), +) + +export * as Worktree from "." diff --git a/packages/opencode/src/worktree/worktree.ts b/packages/opencode/src/worktree/worktree.ts deleted file mode 100644 index d4fab2030b..0000000000 --- a/packages/opencode/src/worktree/worktree.ts +++ /dev/null @@ -1,595 +0,0 @@ -import z from "zod" -import { NamedError } from "@opencode-ai/shared/util/error" -import { Global } from "../global" -import { Instance } from "../project/instance" -import { InstanceBootstrap } from "../project/bootstrap" -import { Project } from "../project" -import { Database, eq } from "../storage" -import { ProjectTable } from "../project/project.sql" -import type { ProjectID } from "../project/schema" -import { Log } from "../util" -import { Slug } from "@opencode-ai/shared/util/slug" -import { errorMessage } from "../util/error" -import { BusEvent } from "@/bus/bus-event" -import { GlobalBus } from "@/bus/global" -import { Git } from "@/git" -import { Effect, Layer, Path, Scope, Context, Stream } from "effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import { NodePath } from "@effect/platform-node" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { BootstrapRuntime } from "@/effect/bootstrap-runtime" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { InstanceState } from "@/effect" - -const log = Log.create({ service: "worktree" }) - -export const Event = { - Ready: BusEvent.define( - "worktree.ready", - z.object({ - name: z.string(), - branch: 
z.string(), - }), - ), - Failed: BusEvent.define( - "worktree.failed", - z.object({ - message: z.string(), - }), - ), -} - -export const Info = z - .object({ - name: z.string(), - branch: z.string(), - directory: z.string(), - }) - .meta({ - ref: "Worktree", - }) - -export type Info = z.infer - -export const CreateInput = z - .object({ - name: z.string().optional(), - startCommand: z.string().optional().describe("Additional startup script to run after the project's start command"), - }) - .meta({ - ref: "WorktreeCreateInput", - }) - -export type CreateInput = z.infer - -export const RemoveInput = z - .object({ - directory: z.string(), - }) - .meta({ - ref: "WorktreeRemoveInput", - }) - -export type RemoveInput = z.infer - -export const ResetInput = z - .object({ - directory: z.string(), - }) - .meta({ - ref: "WorktreeResetInput", - }) - -export type ResetInput = z.infer - -export const NotGitError = NamedError.create( - "WorktreeNotGitError", - z.object({ - message: z.string(), - }), -) - -export const NameGenerationFailedError = NamedError.create( - "WorktreeNameGenerationFailedError", - z.object({ - message: z.string(), - }), -) - -export const CreateFailedError = NamedError.create( - "WorktreeCreateFailedError", - z.object({ - message: z.string(), - }), -) - -export const StartCommandFailedError = NamedError.create( - "WorktreeStartCommandFailedError", - z.object({ - message: z.string(), - }), -) - -export const RemoveFailedError = NamedError.create( - "WorktreeRemoveFailedError", - z.object({ - message: z.string(), - }), -) - -export const ResetFailedError = NamedError.create( - "WorktreeResetFailedError", - z.object({ - message: z.string(), - }), -) - -function slugify(input: string) { - return input - .trim() - .toLowerCase() - .replace(/[^a-z0-9]+/g, "-") - .replace(/^-+/, "") - .replace(/-+$/, "") -} - -function failedRemoves(...chunks: string[]) { - return chunks.filter(Boolean).flatMap((chunk) => - chunk - .split("\n") - .map((line) => line.trim()) - 
.flatMap((line) => { - const match = line.match(/^warning:\s+failed to remove\s+(.+):\s+/i) - if (!match) return [] - const value = match[1]?.trim().replace(/^['"]|['"]$/g, "") - if (!value) return [] - return [value] - }), - ) -} - -// --------------------------------------------------------------------------- -// Effect service -// --------------------------------------------------------------------------- - -export interface Interface { - readonly makeWorktreeInfo: (name?: string) => Effect.Effect - readonly createFromInfo: (info: Info, startCommand?: string) => Effect.Effect - readonly create: (input?: CreateInput) => Effect.Effect - readonly remove: (input: RemoveInput) => Effect.Effect - readonly reset: (input: ResetInput) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Worktree") {} - -type GitResult = { code: number; text: string; stderr: string } - -export const layer: Layer.Layer< - Service, - never, - AppFileSystem.Service | Path.Path | ChildProcessSpawner.ChildProcessSpawner | Git.Service | Project.Service -> = Layer.effect( - Service, - Effect.gen(function* () { - const scope = yield* Scope.Scope - const fs = yield* AppFileSystem.Service - const pathSvc = yield* Path.Path - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - const gitSvc = yield* Git.Service - const project = yield* Project.Service - - const git = Effect.fnUntraced( - function* (args: string[], opts?: { cwd?: string }) { - const handle = yield* spawner.spawn( - ChildProcess.make("git", args, { cwd: opts?.cwd, extendEnv: true, stdin: "ignore" }), - ) - const [text, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - return { code, text, stderr } satisfies GitResult - }, - Effect.scoped, - Effect.catch((e) => - Effect.succeed({ code: 1, text: "", stderr: e instanceof Error ? 
e.message : String(e) } satisfies GitResult), - ), - ) - - const MAX_NAME_ATTEMPTS = 26 - const candidate = Effect.fn("Worktree.candidate")(function* (root: string, base?: string) { - const ctx = yield* InstanceState.context - for (const attempt of Array.from({ length: MAX_NAME_ATTEMPTS }, (_, i) => i)) { - const name = base ? (attempt === 0 ? base : `${base}-${Slug.create()}`) : Slug.create() - const branch = `opencode/${name}` - const directory = pathSvc.join(root, name) - - if (yield* fs.exists(directory).pipe(Effect.orDie)) continue - - const ref = `refs/heads/${branch}` - const branchCheck = yield* git(["show-ref", "--verify", "--quiet", ref], { cwd: ctx.worktree }) - if (branchCheck.code === 0) continue - - return Info.parse({ name, branch, directory }) - } - throw new NameGenerationFailedError({ message: "Failed to generate a unique worktree name" }) - }) - - const makeWorktreeInfo = Effect.fn("Worktree.makeWorktreeInfo")(function* (name?: string) { - const ctx = yield* InstanceState.context - if (ctx.project.vcs !== "git") { - throw new NotGitError({ message: "Worktrees are only supported for git projects" }) - } - - const root = pathSvc.join(Global.Path.data, "worktree", ctx.project.id) - yield* fs.makeDirectory(root, { recursive: true }).pipe(Effect.orDie) - - const base = name ? 
slugify(name) : "" - return yield* candidate(root, base || undefined) - }) - - const setup = Effect.fnUntraced(function* (info: Info) { - const ctx = yield* InstanceState.context - const created = yield* git(["worktree", "add", "--no-checkout", "-b", info.branch, info.directory], { - cwd: ctx.worktree, - }) - if (created.code !== 0) { - throw new CreateFailedError({ message: created.stderr || created.text || "Failed to create git worktree" }) - } - - yield* project.addSandbox(ctx.project.id, info.directory).pipe(Effect.catch(() => Effect.void)) - }) - - const boot = Effect.fnUntraced(function* (info: Info, startCommand?: string) { - const ctx = yield* InstanceState.context - const workspaceID = yield* InstanceState.workspaceID - const projectID = ctx.project.id - const extra = startCommand?.trim() - - const populated = yield* git(["reset", "--hard"], { cwd: info.directory }) - if (populated.code !== 0) { - const message = populated.stderr || populated.text || "Failed to populate worktree" - log.error("worktree checkout failed", { directory: info.directory, message }) - GlobalBus.emit("event", { - directory: info.directory, - project: ctx.project.id, - workspace: workspaceID, - payload: { type: Event.Failed.type, properties: { message } }, - }) - return - } - - const booted = yield* Effect.promise(() => - Instance.provide({ - directory: info.directory, - init: () => BootstrapRuntime.runPromise(InstanceBootstrap), - fn: () => undefined, - }) - .then(() => true) - .catch((error) => { - const message = errorMessage(error) - log.error("worktree bootstrap failed", { directory: info.directory, message }) - GlobalBus.emit("event", { - directory: info.directory, - project: ctx.project.id, - workspace: workspaceID, - payload: { type: Event.Failed.type, properties: { message } }, - }) - return false - }), - ) - if (!booted) return - - GlobalBus.emit("event", { - directory: info.directory, - project: ctx.project.id, - workspace: workspaceID, - payload: { - type: 
Event.Ready.type, - properties: { name: info.name, branch: info.branch }, - }, - }) - - yield* runStartScripts(info.directory, { projectID, extra }) - }) - - const createFromInfo = Effect.fn("Worktree.createFromInfo")(function* (info: Info, startCommand?: string) { - yield* setup(info) - yield* boot(info, startCommand) - }) - - const create = Effect.fn("Worktree.create")(function* (input?: CreateInput) { - const info = yield* makeWorktreeInfo(input?.name) - yield* setup(info) - yield* boot(info, input?.startCommand).pipe( - Effect.catchCause((cause) => Effect.sync(() => log.error("worktree bootstrap failed", { cause }))), - Effect.forkIn(scope), - ) - return info - }) - - const canonical = Effect.fnUntraced(function* (input: string) { - const abs = pathSvc.resolve(input) - const real = yield* fs.realPath(abs).pipe(Effect.catch(() => Effect.succeed(abs))) - const normalized = pathSvc.normalize(real) - return process.platform === "win32" ? normalized.toLowerCase() : normalized - }) - - function parseWorktreeList(text: string) { - return text - .split("\n") - .map((line) => line.trim()) - .reduce<{ path?: string; branch?: string }[]>((acc, line) => { - if (!line) return acc - if (line.startsWith("worktree ")) { - acc.push({ path: line.slice("worktree ".length).trim() }) - return acc - } - const current = acc[acc.length - 1] - if (!current) return acc - if (line.startsWith("branch ")) { - current.branch = line.slice("branch ".length).trim() - } - return acc - }, []) - } - - const locateWorktree = Effect.fnUntraced(function* ( - entries: { path?: string; branch?: string }[], - directory: string, - ) { - for (const item of entries) { - if (!item.path) continue - const key = yield* canonical(item.path) - if (key === directory) return item - } - return undefined - }) - - function stopFsmonitor(target: string) { - return fs.exists(target).pipe( - Effect.orDie, - Effect.flatMap((exists) => (exists ? 
git(["fsmonitor--daemon", "stop"], { cwd: target }) : Effect.void)), - ) - } - - function cleanDirectory(target: string) { - return Effect.promise(() => - import("fs/promises") - .then((fsp) => fsp.rm(target, { recursive: true, force: true, maxRetries: 5, retryDelay: 100 })) - .catch((error) => { - const message = errorMessage(error) - throw new RemoveFailedError({ message: message || "Failed to remove git worktree directory" }) - }), - ) - } - - const remove = Effect.fn("Worktree.remove")(function* (input: RemoveInput) { - if (Instance.project.vcs !== "git") { - throw new NotGitError({ message: "Worktrees are only supported for git projects" }) - } - - const directory = yield* canonical(input.directory) - - const list = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) - if (list.code !== 0) { - throw new RemoveFailedError({ message: list.stderr || list.text || "Failed to read git worktrees" }) - } - - const entries = parseWorktreeList(list.text) - const entry = yield* locateWorktree(entries, directory) - - if (!entry?.path) { - const directoryExists = yield* fs.exists(directory).pipe(Effect.orDie) - if (directoryExists) { - yield* stopFsmonitor(directory) - yield* cleanDirectory(directory) - } - return true - } - - yield* stopFsmonitor(entry.path) - const removed = yield* git(["worktree", "remove", "--force", entry.path], { cwd: Instance.worktree }) - if (removed.code !== 0) { - const next = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) - if (next.code !== 0) { - throw new RemoveFailedError({ - message: removed.stderr || removed.text || next.stderr || next.text || "Failed to remove git worktree", - }) - } - - const stale = yield* locateWorktree(parseWorktreeList(next.text), directory) - if (stale?.path) { - throw new RemoveFailedError({ message: removed.stderr || removed.text || "Failed to remove git worktree" }) - } - } - - yield* cleanDirectory(entry.path) - - const branch = 
entry.branch?.replace(/^refs\/heads\//, "") - if (branch) { - const deleted = yield* git(["branch", "-D", branch], { cwd: Instance.worktree }) - if (deleted.code !== 0) { - throw new RemoveFailedError({ - message: deleted.stderr || deleted.text || "Failed to delete worktree branch", - }) - } - } - - return true - }) - - const gitExpect = Effect.fnUntraced(function* ( - args: string[], - opts: { cwd: string }, - error: (r: GitResult) => Error, - ) { - const result = yield* git(args, opts) - if (result.code !== 0) throw error(result) - return result - }) - - const runStartCommand = Effect.fnUntraced( - function* (directory: string, cmd: string) { - const [shell, args] = process.platform === "win32" ? ["cmd", ["/c", cmd]] : ["bash", ["-lc", cmd]] - const handle = yield* spawner.spawn( - ChildProcess.make(shell, args, { cwd: directory, extendEnv: true, stdin: "ignore" }), - ) - // Drain stdout, capture stderr for error reporting - const [, stderr] = yield* Effect.all( - [Stream.runDrain(handle.stdout), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ).pipe(Effect.orDie) - const code = yield* handle.exitCode - return { code, stderr } - }, - Effect.scoped, - Effect.catch(() => Effect.succeed({ code: 1, stderr: "" })), - ) - - const runStartScript = Effect.fnUntraced(function* (directory: string, cmd: string, kind: string) { - const text = cmd.trim() - if (!text) return true - const result = yield* runStartCommand(directory, text) - if (result.code === 0) return true - log.error("worktree start command failed", { kind, directory, message: result.stderr }) - return false - }) - - const runStartScripts = Effect.fnUntraced(function* ( - directory: string, - input: { projectID: ProjectID; extra?: string }, - ) { - const row = yield* Effect.sync(() => - Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get()), - ) - const project = row ? 
Project.fromRow(row) : undefined - const startup = project?.commands?.start?.trim() ?? "" - const ok = yield* runStartScript(directory, startup, "project") - if (!ok) return false - yield* runStartScript(directory, input.extra ?? "", "worktree") - return true - }) - - const prune = Effect.fnUntraced(function* (root: string, entries: string[]) { - const base = yield* canonical(root) - yield* Effect.forEach( - entries, - (entry) => - Effect.gen(function* () { - const target = yield* canonical(pathSvc.resolve(root, entry)) - if (target === base) return - if (!target.startsWith(`${base}${pathSvc.sep}`)) return - yield* fs.remove(target, { recursive: true }).pipe(Effect.ignore) - }), - { concurrency: "unbounded" }, - ) - }) - - const sweep = Effect.fnUntraced(function* (root: string) { - const first = yield* git(["clean", "-ffdx"], { cwd: root }) - if (first.code === 0) return first - - const entries = failedRemoves(first.stderr, first.text) - if (!entries.length) return first - - yield* prune(root, entries) - return yield* git(["clean", "-ffdx"], { cwd: root }) - }) - - const reset = Effect.fn("Worktree.reset")(function* (input: ResetInput) { - if (Instance.project.vcs !== "git") { - throw new NotGitError({ message: "Worktrees are only supported for git projects" }) - } - - const directory = yield* canonical(input.directory) - const primary = yield* canonical(Instance.worktree) - if (directory === primary) { - throw new ResetFailedError({ message: "Cannot reset the primary workspace" }) - } - - const list = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) - if (list.code !== 0) { - throw new ResetFailedError({ message: list.stderr || list.text || "Failed to read git worktrees" }) - } - - const entry = yield* locateWorktree(parseWorktreeList(list.text), directory) - if (!entry?.path) { - throw new ResetFailedError({ message: "Worktree not found" }) - } - - const worktreePath = entry.path - - const base = yield* 
gitSvc.defaultBranch(Instance.worktree) - if (!base) { - throw new ResetFailedError({ message: "Default branch not found" }) - } - - const sep = base.ref.indexOf("/") - if (base.ref !== base.name && sep > 0) { - const remote = base.ref.slice(0, sep) - const branch = base.ref.slice(sep + 1) - yield* gitExpect( - ["fetch", remote, branch], - { cwd: Instance.worktree }, - (r) => new ResetFailedError({ message: r.stderr || r.text || `Failed to fetch ${base.ref}` }), - ) - } - - yield* gitExpect( - ["reset", "--hard", base.ref], - { cwd: worktreePath }, - (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to reset worktree to target" }), - ) - - const cleanResult = yield* sweep(worktreePath) - if (cleanResult.code !== 0) { - throw new ResetFailedError({ message: cleanResult.stderr || cleanResult.text || "Failed to clean worktree" }) - } - - yield* gitExpect( - ["submodule", "update", "--init", "--recursive", "--force"], - { cwd: worktreePath }, - (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to update submodules" }), - ) - - yield* gitExpect( - ["submodule", "foreach", "--recursive", "git", "reset", "--hard"], - { cwd: worktreePath }, - (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to reset submodules" }), - ) - - yield* gitExpect( - ["submodule", "foreach", "--recursive", "git", "clean", "-fdx"], - { cwd: worktreePath }, - (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to clean submodules" }), - ) - - const status = yield* git(["-c", "core.fsmonitor=false", "status", "--porcelain=v1"], { cwd: worktreePath }) - if (status.code !== 0) { - throw new ResetFailedError({ message: status.stderr || status.text || "Failed to read git status" }) - } - - if (status.text.trim()) { - throw new ResetFailedError({ message: `Worktree reset left local changes:\n${status.text.trim()}` }) - } - - yield* runStartScripts(worktreePath, { projectID: Instance.project.id }).pipe( - Effect.catchCause((cause) 
=> Effect.sync(() => log.error("worktree start task failed", { cause }))), - Effect.forkIn(scope), - ) - - return true - }) - - return Service.of({ makeWorktreeInfo, createFromInfo, create, remove, reset }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Git.defaultLayer), - Layer.provide(CrossSpawnSpawner.defaultLayer), - Layer.provide(Project.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(NodePath.layer), -) From ae9a6966075042863cd061f798c0ce09fea3ff64 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:35:28 -0400 Subject: [PATCH 045/120] refactor: collapse installation barrel into installation/index.ts (#22910) --- packages/opencode/src/installation/index.ts | 339 +++++++++++++++++- .../opencode/src/installation/installation.ts | 336 ----------------- 2 files changed, 338 insertions(+), 337 deletions(-) delete mode 100644 packages/opencode/src/installation/installation.ts diff --git a/packages/opencode/src/installation/index.ts b/packages/opencode/src/installation/index.ts index 4e48fcd6a0..babde9dc47 100644 --- a/packages/opencode/src/installation/index.ts +++ b/packages/opencode/src/installation/index.ts @@ -1 +1,338 @@ -export * as Installation from "./installation" +import { Effect, Layer, Schema, Context, Stream } from "effect" +import { FetchHttpClient, HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { withTransientReadRetry } from "@/util/effect-http-client" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import path from "path" +import z from "zod" +import { BusEvent } from "@/bus/bus-event" +import { Flag } from "../flag/flag" +import { Log } from "../util" + +import semver from "semver" +import { InstallationChannel, InstallationVersion } from "./version" + +const log = Log.create({ service: "installation" }) + +export type Method = "curl" | "npm" | 
"yarn" | "pnpm" | "bun" | "brew" | "scoop" | "choco" | "unknown" + +export type ReleaseType = "patch" | "minor" | "major" + +export const Event = { + Updated: BusEvent.define( + "installation.updated", + z.object({ + version: z.string(), + }), + ), + UpdateAvailable: BusEvent.define( + "installation.update-available", + z.object({ + version: z.string(), + }), + ), +} + +export function getReleaseType(current: string, latest: string): ReleaseType { + const currMajor = semver.major(current) + const currMinor = semver.minor(current) + const newMajor = semver.major(latest) + const newMinor = semver.minor(latest) + + if (newMajor > currMajor) return "major" + if (newMinor > currMinor) return "minor" + return "patch" +} + +export const Info = z + .object({ + version: z.string(), + latest: z.string(), + }) + .meta({ + ref: "InstallationInfo", + }) +export type Info = z.infer + +export const USER_AGENT = `opencode/${InstallationChannel}/${InstallationVersion}/${Flag.OPENCODE_CLIENT}` + +export function isPreview() { + return InstallationChannel !== "latest" +} + +export function isLocal() { + return InstallationChannel === "local" +} + +export class UpgradeFailedError extends Schema.TaggedErrorClass()("UpgradeFailedError", { + stderr: Schema.String, +}) {} + +// Response schemas for external version APIs +const GitHubRelease = Schema.Struct({ tag_name: Schema.String }) +const NpmPackage = Schema.Struct({ version: Schema.String }) +const BrewFormula = Schema.Struct({ versions: Schema.Struct({ stable: Schema.String }) }) +const BrewInfoV2 = Schema.Struct({ + formulae: Schema.Array(Schema.Struct({ versions: Schema.Struct({ stable: Schema.String }) })), +}) +const ChocoPackage = Schema.Struct({ + d: Schema.Struct({ results: Schema.Array(Schema.Struct({ Version: Schema.String })) }), +}) +const ScoopManifest = NpmPackage + +export interface Interface { + readonly info: () => Effect.Effect + readonly method: () => Effect.Effect + readonly latest: (method?: Method) => 
Effect.Effect + readonly upgrade: (method: Method, target: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Installation") {} + +export const layer: Layer.Layer = + Layer.effect( + Service, + Effect.gen(function* () { + const http = yield* HttpClient.HttpClient + const httpOk = HttpClient.filterStatusOk(withTransientReadRetry(http)) + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + + const text = Effect.fnUntraced( + function* (cmd: string[], opts?: { cwd?: string; env?: Record }) { + const proc = ChildProcess.make(cmd[0], cmd.slice(1), { + cwd: opts?.cwd, + env: opts?.env, + extendEnv: true, + }) + const handle = yield* spawner.spawn(proc) + const out = yield* Stream.mkString(Stream.decodeText(handle.stdout)) + yield* handle.exitCode + return out + }, + Effect.scoped, + Effect.catch(() => Effect.succeed("")), + ) + + const run = Effect.fnUntraced( + function* (cmd: string[], opts?: { cwd?: string; env?: Record }) { + const proc = ChildProcess.make(cmd[0], cmd.slice(1), { + cwd: opts?.cwd, + env: opts?.env, + extendEnv: true, + }) + const handle = yield* spawner.spawn(proc) + const [stdout, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + return { code, stdout, stderr } + }, + Effect.scoped, + Effect.catch(() => Effect.succeed({ code: ChildProcessSpawner.ExitCode(1), stdout: "", stderr: "" })), + ) + + const getBrewFormula = Effect.fnUntraced(function* () { + const tapFormula = yield* text(["brew", "list", "--formula", "anomalyco/tap/opencode"]) + if (tapFormula.includes("opencode")) return "anomalyco/tap/opencode" + const coreFormula = yield* text(["brew", "list", "--formula", "opencode"]) + if (coreFormula.includes("opencode")) return "opencode" + return "opencode" + }) + + const upgradeCurl = Effect.fnUntraced( + function* (target: string) { + const 
response = yield* httpOk.execute(HttpClientRequest.get("https://opencode.ai/install")) + const body = yield* response.text + const bodyBytes = new TextEncoder().encode(body) + const proc = ChildProcess.make("bash", [], { + stdin: Stream.make(bodyBytes), + env: { VERSION: target }, + extendEnv: true, + }) + const handle = yield* spawner.spawn(proc) + const [stdout, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + return { code, stdout, stderr } + }, + Effect.scoped, + Effect.orDie, + ) + + const methodImpl = Effect.fn("Installation.method")(function* () { + if (process.execPath.includes(path.join(".opencode", "bin"))) return "curl" as Method + if (process.execPath.includes(path.join(".local", "bin"))) return "curl" as Method + const exec = process.execPath.toLowerCase() + + const checks: Array<{ name: Method; command: () => Effect.Effect }> = [ + { name: "npm", command: () => text(["npm", "list", "-g", "--depth=0"]) }, + { name: "yarn", command: () => text(["yarn", "global", "list"]) }, + { name: "pnpm", command: () => text(["pnpm", "list", "-g", "--depth=0"]) }, + { name: "bun", command: () => text(["bun", "pm", "ls", "-g"]) }, + { name: "brew", command: () => text(["brew", "list", "--formula", "opencode"]) }, + { name: "scoop", command: () => text(["scoop", "list", "opencode"]) }, + { name: "choco", command: () => text(["choco", "list", "--limit-output", "opencode"]) }, + ] + + checks.sort((a, b) => { + const aMatches = exec.includes(a.name) + const bMatches = exec.includes(b.name) + if (aMatches && !bMatches) return -1 + if (!aMatches && bMatches) return 1 + return 0 + }) + + for (const check of checks) { + const output = yield* check.command() + const installedName = + check.name === "brew" || check.name === "choco" || check.name === "scoop" ? 
"opencode" : "opencode-ai" + if (output.includes(installedName)) { + return check.name + } + } + + return "unknown" as Method + }) + + const latestImpl = Effect.fn("Installation.latest")(function* (installMethod?: Method) { + const detectedMethod = installMethod || (yield* methodImpl()) + + if (detectedMethod === "brew") { + const formula = yield* getBrewFormula() + if (formula.includes("/")) { + const infoJson = yield* text(["brew", "info", "--json=v2", formula]) + const info = yield* Schema.decodeUnknownEffect(Schema.fromJsonString(BrewInfoV2))(infoJson) + return info.formulae[0].versions.stable + } + const response = yield* httpOk.execute( + HttpClientRequest.get("https://formulae.brew.sh/api/formula/opencode.json").pipe( + HttpClientRequest.acceptJson, + ), + ) + const data = yield* HttpClientResponse.schemaBodyJson(BrewFormula)(response) + return data.versions.stable + } + + if (detectedMethod === "npm" || detectedMethod === "bun" || detectedMethod === "pnpm") { + const r = (yield* text(["npm", "config", "get", "registry"])).trim() + const reg = r || "https://registry.npmjs.org" + const registry = reg.endsWith("/") ? 
reg.slice(0, -1) : reg + const channel = InstallationChannel + const response = yield* httpOk.execute( + HttpClientRequest.get(`${registry}/opencode-ai/${channel}`).pipe(HttpClientRequest.acceptJson), + ) + const data = yield* HttpClientResponse.schemaBodyJson(NpmPackage)(response) + return data.version + } + + if (detectedMethod === "choco") { + const response = yield* httpOk.execute( + HttpClientRequest.get( + "https://community.chocolatey.org/api/v2/Packages?$filter=Id%20eq%20%27opencode%27%20and%20IsLatestVersion&$select=Version", + ).pipe(HttpClientRequest.setHeaders({ Accept: "application/json;odata=verbose" })), + ) + const data = yield* HttpClientResponse.schemaBodyJson(ChocoPackage)(response) + return data.d.results[0].Version + } + + if (detectedMethod === "scoop") { + const response = yield* httpOk.execute( + HttpClientRequest.get( + "https://raw.githubusercontent.com/ScoopInstaller/Main/master/bucket/opencode.json", + ).pipe(HttpClientRequest.setHeaders({ Accept: "application/json" })), + ) + const data = yield* HttpClientResponse.schemaBodyJson(ScoopManifest)(response) + return data.version + } + + const response = yield* httpOk.execute( + HttpClientRequest.get("https://api.github.com/repos/anomalyco/opencode/releases/latest").pipe( + HttpClientRequest.acceptJson, + ), + ) + const data = yield* HttpClientResponse.schemaBodyJson(GitHubRelease)(response) + return data.tag_name.replace(/^v/, "") + }, Effect.orDie) + + const upgradeImpl = Effect.fn("Installation.upgrade")(function* (m: Method, target: string) { + let result: { code: ChildProcessSpawner.ExitCode; stdout: string; stderr: string } | undefined + switch (m) { + case "curl": + result = yield* upgradeCurl(target) + break + case "npm": + result = yield* run(["npm", "install", "-g", `opencode-ai@${target}`]) + break + case "pnpm": + result = yield* run(["pnpm", "install", "-g", `opencode-ai@${target}`]) + break + case "bun": + result = yield* run(["bun", "install", "-g", `opencode-ai@${target}`]) + 
break + case "brew": { + const formula = yield* getBrewFormula() + const env = { HOMEBREW_NO_AUTO_UPDATE: "1" } + if (formula.includes("/")) { + const tap = yield* run(["brew", "tap", "anomalyco/tap"], { env }) + if (tap.code !== 0) { + result = tap + break + } + const repo = yield* text(["brew", "--repo", "anomalyco/tap"]) + const dir = repo.trim() + if (dir) { + const pull = yield* run(["git", "pull", "--ff-only"], { cwd: dir, env }) + if (pull.code !== 0) { + result = pull + break + } + } + } + result = yield* run(["brew", "upgrade", formula], { env }) + break + } + case "choco": + result = yield* run(["choco", "upgrade", "opencode", `--version=${target}`, "-y"]) + break + case "scoop": + result = yield* run(["scoop", "install", `opencode@${target}`]) + break + default: + return yield* new UpgradeFailedError({ stderr: `Unknown method: ${m}` }) + } + if (!result || result.code !== 0) { + const stderr = m === "choco" ? "not running from an elevated command shell" : result?.stderr || "" + return yield* new UpgradeFailedError({ stderr }) + } + log.info("upgraded", { + method: m, + target, + stdout: result.stdout, + stderr: result.stderr, + }) + yield* text([process.execPath, "--version"]) + }) + + return Service.of({ + info: Effect.fn("Installation.info")(function* () { + return { + version: InstallationVersion, + latest: yield* latestImpl(), + } + }), + method: methodImpl, + latest: latestImpl, + upgrade: upgradeImpl, + }) + }), + ) + +export const defaultLayer = layer.pipe( + Layer.provide(FetchHttpClient.layer), + Layer.provide(CrossSpawnSpawner.defaultLayer), +) + +export * as Installation from "." 
diff --git a/packages/opencode/src/installation/installation.ts b/packages/opencode/src/installation/installation.ts deleted file mode 100644 index 96a99b77a3..0000000000 --- a/packages/opencode/src/installation/installation.ts +++ /dev/null @@ -1,336 +0,0 @@ -import { Effect, Layer, Schema, Context, Stream } from "effect" -import { FetchHttpClient, HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { withTransientReadRetry } from "@/util/effect-http-client" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import path from "path" -import z from "zod" -import { BusEvent } from "@/bus/bus-event" -import { Flag } from "../flag/flag" -import { Log } from "../util" - -import semver from "semver" -import { InstallationChannel, InstallationVersion } from "./version" - -const log = Log.create({ service: "installation" }) - -export type Method = "curl" | "npm" | "yarn" | "pnpm" | "bun" | "brew" | "scoop" | "choco" | "unknown" - -export type ReleaseType = "patch" | "minor" | "major" - -export const Event = { - Updated: BusEvent.define( - "installation.updated", - z.object({ - version: z.string(), - }), - ), - UpdateAvailable: BusEvent.define( - "installation.update-available", - z.object({ - version: z.string(), - }), - ), -} - -export function getReleaseType(current: string, latest: string): ReleaseType { - const currMajor = semver.major(current) - const currMinor = semver.minor(current) - const newMajor = semver.major(latest) - const newMinor = semver.minor(latest) - - if (newMajor > currMajor) return "major" - if (newMinor > currMinor) return "minor" - return "patch" -} - -export const Info = z - .object({ - version: z.string(), - latest: z.string(), - }) - .meta({ - ref: "InstallationInfo", - }) -export type Info = z.infer - -export const USER_AGENT = `opencode/${InstallationChannel}/${InstallationVersion}/${Flag.OPENCODE_CLIENT}` - -export 
function isPreview() { - return InstallationChannel !== "latest" -} - -export function isLocal() { - return InstallationChannel === "local" -} - -export class UpgradeFailedError extends Schema.TaggedErrorClass()("UpgradeFailedError", { - stderr: Schema.String, -}) {} - -// Response schemas for external version APIs -const GitHubRelease = Schema.Struct({ tag_name: Schema.String }) -const NpmPackage = Schema.Struct({ version: Schema.String }) -const BrewFormula = Schema.Struct({ versions: Schema.Struct({ stable: Schema.String }) }) -const BrewInfoV2 = Schema.Struct({ - formulae: Schema.Array(Schema.Struct({ versions: Schema.Struct({ stable: Schema.String }) })), -}) -const ChocoPackage = Schema.Struct({ - d: Schema.Struct({ results: Schema.Array(Schema.Struct({ Version: Schema.String })) }), -}) -const ScoopManifest = NpmPackage - -export interface Interface { - readonly info: () => Effect.Effect - readonly method: () => Effect.Effect - readonly latest: (method?: Method) => Effect.Effect - readonly upgrade: (method: Method, target: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Installation") {} - -export const layer: Layer.Layer = - Layer.effect( - Service, - Effect.gen(function* () { - const http = yield* HttpClient.HttpClient - const httpOk = HttpClient.filterStatusOk(withTransientReadRetry(http)) - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - - const text = Effect.fnUntraced( - function* (cmd: string[], opts?: { cwd?: string; env?: Record }) { - const proc = ChildProcess.make(cmd[0], cmd.slice(1), { - cwd: opts?.cwd, - env: opts?.env, - extendEnv: true, - }) - const handle = yield* spawner.spawn(proc) - const out = yield* Stream.mkString(Stream.decodeText(handle.stdout)) - yield* handle.exitCode - return out - }, - Effect.scoped, - Effect.catch(() => Effect.succeed("")), - ) - - const run = Effect.fnUntraced( - function* (cmd: string[], opts?: { cwd?: string; env?: Record }) { - const proc = 
ChildProcess.make(cmd[0], cmd.slice(1), { - cwd: opts?.cwd, - env: opts?.env, - extendEnv: true, - }) - const handle = yield* spawner.spawn(proc) - const [stdout, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - return { code, stdout, stderr } - }, - Effect.scoped, - Effect.catch(() => Effect.succeed({ code: ChildProcessSpawner.ExitCode(1), stdout: "", stderr: "" })), - ) - - const getBrewFormula = Effect.fnUntraced(function* () { - const tapFormula = yield* text(["brew", "list", "--formula", "anomalyco/tap/opencode"]) - if (tapFormula.includes("opencode")) return "anomalyco/tap/opencode" - const coreFormula = yield* text(["brew", "list", "--formula", "opencode"]) - if (coreFormula.includes("opencode")) return "opencode" - return "opencode" - }) - - const upgradeCurl = Effect.fnUntraced( - function* (target: string) { - const response = yield* httpOk.execute(HttpClientRequest.get("https://opencode.ai/install")) - const body = yield* response.text - const bodyBytes = new TextEncoder().encode(body) - const proc = ChildProcess.make("bash", [], { - stdin: Stream.make(bodyBytes), - env: { VERSION: target }, - extendEnv: true, - }) - const handle = yield* spawner.spawn(proc) - const [stdout, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - return { code, stdout, stderr } - }, - Effect.scoped, - Effect.orDie, - ) - - const methodImpl = Effect.fn("Installation.method")(function* () { - if (process.execPath.includes(path.join(".opencode", "bin"))) return "curl" as Method - if (process.execPath.includes(path.join(".local", "bin"))) return "curl" as Method - const exec = process.execPath.toLowerCase() - - const checks: Array<{ name: Method; command: () => Effect.Effect }> = [ - 
{ name: "npm", command: () => text(["npm", "list", "-g", "--depth=0"]) }, - { name: "yarn", command: () => text(["yarn", "global", "list"]) }, - { name: "pnpm", command: () => text(["pnpm", "list", "-g", "--depth=0"]) }, - { name: "bun", command: () => text(["bun", "pm", "ls", "-g"]) }, - { name: "brew", command: () => text(["brew", "list", "--formula", "opencode"]) }, - { name: "scoop", command: () => text(["scoop", "list", "opencode"]) }, - { name: "choco", command: () => text(["choco", "list", "--limit-output", "opencode"]) }, - ] - - checks.sort((a, b) => { - const aMatches = exec.includes(a.name) - const bMatches = exec.includes(b.name) - if (aMatches && !bMatches) return -1 - if (!aMatches && bMatches) return 1 - return 0 - }) - - for (const check of checks) { - const output = yield* check.command() - const installedName = - check.name === "brew" || check.name === "choco" || check.name === "scoop" ? "opencode" : "opencode-ai" - if (output.includes(installedName)) { - return check.name - } - } - - return "unknown" as Method - }) - - const latestImpl = Effect.fn("Installation.latest")(function* (installMethod?: Method) { - const detectedMethod = installMethod || (yield* methodImpl()) - - if (detectedMethod === "brew") { - const formula = yield* getBrewFormula() - if (formula.includes("/")) { - const infoJson = yield* text(["brew", "info", "--json=v2", formula]) - const info = yield* Schema.decodeUnknownEffect(Schema.fromJsonString(BrewInfoV2))(infoJson) - return info.formulae[0].versions.stable - } - const response = yield* httpOk.execute( - HttpClientRequest.get("https://formulae.brew.sh/api/formula/opencode.json").pipe( - HttpClientRequest.acceptJson, - ), - ) - const data = yield* HttpClientResponse.schemaBodyJson(BrewFormula)(response) - return data.versions.stable - } - - if (detectedMethod === "npm" || detectedMethod === "bun" || detectedMethod === "pnpm") { - const r = (yield* text(["npm", "config", "get", "registry"])).trim() - const reg = r || 
"https://registry.npmjs.org" - const registry = reg.endsWith("/") ? reg.slice(0, -1) : reg - const channel = InstallationChannel - const response = yield* httpOk.execute( - HttpClientRequest.get(`${registry}/opencode-ai/${channel}`).pipe(HttpClientRequest.acceptJson), - ) - const data = yield* HttpClientResponse.schemaBodyJson(NpmPackage)(response) - return data.version - } - - if (detectedMethod === "choco") { - const response = yield* httpOk.execute( - HttpClientRequest.get( - "https://community.chocolatey.org/api/v2/Packages?$filter=Id%20eq%20%27opencode%27%20and%20IsLatestVersion&$select=Version", - ).pipe(HttpClientRequest.setHeaders({ Accept: "application/json;odata=verbose" })), - ) - const data = yield* HttpClientResponse.schemaBodyJson(ChocoPackage)(response) - return data.d.results[0].Version - } - - if (detectedMethod === "scoop") { - const response = yield* httpOk.execute( - HttpClientRequest.get( - "https://raw.githubusercontent.com/ScoopInstaller/Main/master/bucket/opencode.json", - ).pipe(HttpClientRequest.setHeaders({ Accept: "application/json" })), - ) - const data = yield* HttpClientResponse.schemaBodyJson(ScoopManifest)(response) - return data.version - } - - const response = yield* httpOk.execute( - HttpClientRequest.get("https://api.github.com/repos/anomalyco/opencode/releases/latest").pipe( - HttpClientRequest.acceptJson, - ), - ) - const data = yield* HttpClientResponse.schemaBodyJson(GitHubRelease)(response) - return data.tag_name.replace(/^v/, "") - }, Effect.orDie) - - const upgradeImpl = Effect.fn("Installation.upgrade")(function* (m: Method, target: string) { - let result: { code: ChildProcessSpawner.ExitCode; stdout: string; stderr: string } | undefined - switch (m) { - case "curl": - result = yield* upgradeCurl(target) - break - case "npm": - result = yield* run(["npm", "install", "-g", `opencode-ai@${target}`]) - break - case "pnpm": - result = yield* run(["pnpm", "install", "-g", `opencode-ai@${target}`]) - break - case "bun": - 
result = yield* run(["bun", "install", "-g", `opencode-ai@${target}`]) - break - case "brew": { - const formula = yield* getBrewFormula() - const env = { HOMEBREW_NO_AUTO_UPDATE: "1" } - if (formula.includes("/")) { - const tap = yield* run(["brew", "tap", "anomalyco/tap"], { env }) - if (tap.code !== 0) { - result = tap - break - } - const repo = yield* text(["brew", "--repo", "anomalyco/tap"]) - const dir = repo.trim() - if (dir) { - const pull = yield* run(["git", "pull", "--ff-only"], { cwd: dir, env }) - if (pull.code !== 0) { - result = pull - break - } - } - } - result = yield* run(["brew", "upgrade", formula], { env }) - break - } - case "choco": - result = yield* run(["choco", "upgrade", "opencode", `--version=${target}`, "-y"]) - break - case "scoop": - result = yield* run(["scoop", "install", `opencode@${target}`]) - break - default: - return yield* new UpgradeFailedError({ stderr: `Unknown method: ${m}` }) - } - if (!result || result.code !== 0) { - const stderr = m === "choco" ? 
"not running from an elevated command shell" : result?.stderr || "" - return yield* new UpgradeFailedError({ stderr }) - } - log.info("upgraded", { - method: m, - target, - stdout: result.stdout, - stderr: result.stderr, - }) - yield* text([process.execPath, "--version"]) - }) - - return Service.of({ - info: Effect.fn("Installation.info")(function* () { - return { - version: InstallationVersion, - latest: yield* latestImpl(), - } - }), - method: methodImpl, - latest: latestImpl, - upgrade: upgradeImpl, - }) - }), - ) - -export const defaultLayer = layer.pipe( - Layer.provide(FetchHttpClient.layer), - Layer.provide(CrossSpawnSpawner.defaultLayer), -) From 99d392a4fbff2f4203240709221f986c3ffe60a7 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:35:43 -0400 Subject: [PATCH 046/120] refactor: collapse skill barrel into skill/index.ts (#22912) --- packages/opencode/src/skill/index.ts | 265 ++++++++++++++++++++++++++- packages/opencode/src/skill/skill.ts | 262 -------------------------- 2 files changed, 264 insertions(+), 263 deletions(-) delete mode 100644 packages/opencode/src/skill/skill.ts diff --git a/packages/opencode/src/skill/index.ts b/packages/opencode/src/skill/index.ts index 6d7b428dfb..b139b39e6e 100644 --- a/packages/opencode/src/skill/index.ts +++ b/packages/opencode/src/skill/index.ts @@ -1 +1,264 @@ -export * as Skill from "./skill" +import os from "os" +import path from "path" +import { pathToFileURL } from "url" +import z from "zod" +import { Effect, Layer, Context } from "effect" +import { NamedError } from "@opencode-ai/shared/util/error" +import type { Agent } from "@/agent/agent" +import { Bus } from "@/bus" +import { InstanceState } from "@/effect" +import { Flag } from "@/flag/flag" +import { Global } from "@/global" +import { Permission } from "@/permission" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { Config } from "../config" +import { ConfigMarkdown } from "../config" +import { Glob } from 
"@opencode-ai/shared/util/glob" +import { Log } from "../util" +import { Discovery } from "./discovery" + +const log = Log.create({ service: "skill" }) +const EXTERNAL_DIRS = [".claude", ".agents"] +const EXTERNAL_SKILL_PATTERN = "skills/**/SKILL.md" +const OPENCODE_SKILL_PATTERN = "{skill,skills}/**/SKILL.md" +const SKILL_PATTERN = "**/SKILL.md" + +export const Info = z.object({ + name: z.string(), + description: z.string(), + location: z.string(), + content: z.string(), +}) +export type Info = z.infer + +export const InvalidError = NamedError.create( + "SkillInvalidError", + z.object({ + path: z.string(), + message: z.string().optional(), + issues: z.custom().optional(), + }), +) + +export const NameMismatchError = NamedError.create( + "SkillNameMismatchError", + z.object({ + path: z.string(), + expected: z.string(), + actual: z.string(), + }), +) + +type State = { + skills: Record + dirs: Set +} + +export interface Interface { + readonly get: (name: string) => Effect.Effect + readonly all: () => Effect.Effect + readonly dirs: () => Effect.Effect + readonly available: (agent?: Agent.Info) => Effect.Effect +} + +const add = Effect.fnUntraced(function* (state: State, match: string, bus: Bus.Interface) { + const md = yield* Effect.tryPromise({ + try: () => ConfigMarkdown.parse(match), + catch: (err) => err, + }).pipe( + Effect.catch( + Effect.fnUntraced(function* (err) { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? 
err.data.message + : `Failed to parse skill ${match}` + const { Session } = yield* Effect.promise(() => import("@/session")) + yield* bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load skill", { skill: match, err }) + return undefined + }), + ), + ) + + if (!md) return + + const parsed = Info.pick({ name: true, description: true }).safeParse(md.data) + if (!parsed.success) return + + if (state.skills[parsed.data.name]) { + log.warn("duplicate skill name", { + name: parsed.data.name, + existing: state.skills[parsed.data.name].location, + duplicate: match, + }) + } + + state.dirs.add(path.dirname(match)) + state.skills[parsed.data.name] = { + name: parsed.data.name, + description: parsed.data.description, + location: match, + content: md.content, + } +}) + +const scan = Effect.fnUntraced(function* ( + state: State, + bus: Bus.Interface, + root: string, + pattern: string, + opts?: { dot?: boolean; scope?: string }, +) { + const matches = yield* Effect.tryPromise({ + try: () => + Glob.scan(pattern, { + cwd: root, + absolute: true, + include: "file", + symlink: true, + dot: opts?.dot, + }), + catch: (error) => error, + }).pipe( + Effect.catch((error) => { + if (!opts?.scope) return Effect.die(error) + log.error(`failed to scan ${opts.scope} skills`, { dir: root, error }) + return Effect.succeed([] as string[]) + }), + ) + + yield* Effect.forEach(matches, (match) => add(state, match, bus), { + concurrency: "unbounded", + discard: true, + }) +}) + +const loadSkills = Effect.fnUntraced(function* ( + state: State, + config: Config.Interface, + discovery: Discovery.Interface, + bus: Bus.Interface, + fsys: AppFileSystem.Interface, + directory: string, + worktree: string, +) { + if (!Flag.OPENCODE_DISABLE_EXTERNAL_SKILLS) { + for (const dir of EXTERNAL_DIRS) { + const root = path.join(Global.Path.home, dir) + if (!(yield* fsys.isDir(root))) continue + yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { 
dot: true, scope: "global" }) + } + + const upDirs = yield* fsys + .up({ targets: EXTERNAL_DIRS, start: directory, stop: worktree }) + .pipe(Effect.catch(() => Effect.succeed([] as string[]))) + + for (const root of upDirs) { + yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "project" }) + } + } + + const configDirs = yield* config.directories() + for (const dir of configDirs) { + yield* scan(state, bus, dir, OPENCODE_SKILL_PATTERN) + } + + const cfg = yield* config.get() + for (const item of cfg.skills?.paths ?? []) { + const expanded = item.startsWith("~/") ? path.join(os.homedir(), item.slice(2)) : item + const dir = path.isAbsolute(expanded) ? expanded : path.join(directory, expanded) + if (!(yield* fsys.isDir(dir))) { + log.warn("skill path not found", { path: dir }) + continue + } + + yield* scan(state, bus, dir, SKILL_PATTERN) + } + + for (const url of cfg.skills?.urls ?? []) { + const pulledDirs = yield* discovery.pull(url) + for (const dir of pulledDirs) { + state.dirs.add(dir) + yield* scan(state, bus, dir, SKILL_PATTERN) + } + } + + log.info("init", { count: Object.keys(state.skills).length }) +}) + +export class Service extends Context.Service()("@opencode/Skill") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const discovery = yield* Discovery.Service + const config = yield* Config.Service + const bus = yield* Bus.Service + const fsys = yield* AppFileSystem.Service + const state = yield* InstanceState.make( + Effect.fn("Skill.state")(function* (ctx) { + const s: State = { skills: {}, dirs: new Set() } + yield* loadSkills(s, config, discovery, bus, fsys, ctx.directory, ctx.worktree) + return s + }), + ) + + const get = Effect.fn("Skill.get")(function* (name: string) { + const s = yield* InstanceState.get(state) + return s.skills[name] + }) + + const all = Effect.fn("Skill.all")(function* () { + const s = yield* InstanceState.get(state) + return Object.values(s.skills) + }) + + const dirs = 
Effect.fn("Skill.dirs")(function* () { + const s = yield* InstanceState.get(state) + return Array.from(s.dirs) + }) + + const available = Effect.fn("Skill.available")(function* (agent?: Agent.Info) { + const s = yield* InstanceState.get(state) + const list = Object.values(s.skills).toSorted((a, b) => a.name.localeCompare(b.name)) + if (!agent) return list + return list.filter((skill) => Permission.evaluate("skill", skill.name, agent.permission).action !== "deny") + }) + + return Service.of({ get, all, dirs, available }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Discovery.defaultLayer), + Layer.provide(Config.defaultLayer), + Layer.provide(Bus.layer), + Layer.provide(AppFileSystem.defaultLayer), +) + +export function fmt(list: Info[], opts: { verbose: boolean }) { + if (list.length === 0) return "No skills are currently available." + if (opts.verbose) { + return [ + "", + ...list + .sort((a, b) => a.name.localeCompare(b.name)) + .flatMap((skill) => [ + " ", + ` ${skill.name}`, + ` ${skill.description}`, + ` ${pathToFileURL(skill.location).href}`, + " ", + ]), + "", + ].join("\n") + } + + return [ + "## Available Skills", + ...list + .toSorted((a, b) => a.name.localeCompare(b.name)) + .map((skill) => `- **${skill.name}**: ${skill.description}`), + ].join("\n") +} + +export * as Skill from "." 
diff --git a/packages/opencode/src/skill/skill.ts b/packages/opencode/src/skill/skill.ts deleted file mode 100644 index f8ff7b8f5f..0000000000 --- a/packages/opencode/src/skill/skill.ts +++ /dev/null @@ -1,262 +0,0 @@ -import os from "os" -import path from "path" -import { pathToFileURL } from "url" -import z from "zod" -import { Effect, Layer, Context } from "effect" -import { NamedError } from "@opencode-ai/shared/util/error" -import type { Agent } from "@/agent/agent" -import { Bus } from "@/bus" -import { InstanceState } from "@/effect" -import { Flag } from "@/flag/flag" -import { Global } from "@/global" -import { Permission } from "@/permission" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Config } from "../config" -import { ConfigMarkdown } from "../config" -import { Glob } from "@opencode-ai/shared/util/glob" -import { Log } from "../util" -import { Discovery } from "./discovery" - -const log = Log.create({ service: "skill" }) -const EXTERNAL_DIRS = [".claude", ".agents"] -const EXTERNAL_SKILL_PATTERN = "skills/**/SKILL.md" -const OPENCODE_SKILL_PATTERN = "{skill,skills}/**/SKILL.md" -const SKILL_PATTERN = "**/SKILL.md" - -export const Info = z.object({ - name: z.string(), - description: z.string(), - location: z.string(), - content: z.string(), -}) -export type Info = z.infer - -export const InvalidError = NamedError.create( - "SkillInvalidError", - z.object({ - path: z.string(), - message: z.string().optional(), - issues: z.custom().optional(), - }), -) - -export const NameMismatchError = NamedError.create( - "SkillNameMismatchError", - z.object({ - path: z.string(), - expected: z.string(), - actual: z.string(), - }), -) - -type State = { - skills: Record - dirs: Set -} - -export interface Interface { - readonly get: (name: string) => Effect.Effect - readonly all: () => Effect.Effect - readonly dirs: () => Effect.Effect - readonly available: (agent?: Agent.Info) => Effect.Effect -} - -const add = Effect.fnUntraced(function* 
(state: State, match: string, bus: Bus.Interface) { - const md = yield* Effect.tryPromise({ - try: () => ConfigMarkdown.parse(match), - catch: (err) => err, - }).pipe( - Effect.catch( - Effect.fnUntraced(function* (err) { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse skill ${match}` - const { Session } = yield* Effect.promise(() => import("@/session")) - yield* bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load skill", { skill: match, err }) - return undefined - }), - ), - ) - - if (!md) return - - const parsed = Info.pick({ name: true, description: true }).safeParse(md.data) - if (!parsed.success) return - - if (state.skills[parsed.data.name]) { - log.warn("duplicate skill name", { - name: parsed.data.name, - existing: state.skills[parsed.data.name].location, - duplicate: match, - }) - } - - state.dirs.add(path.dirname(match)) - state.skills[parsed.data.name] = { - name: parsed.data.name, - description: parsed.data.description, - location: match, - content: md.content, - } -}) - -const scan = Effect.fnUntraced(function* ( - state: State, - bus: Bus.Interface, - root: string, - pattern: string, - opts?: { dot?: boolean; scope?: string }, -) { - const matches = yield* Effect.tryPromise({ - try: () => - Glob.scan(pattern, { - cwd: root, - absolute: true, - include: "file", - symlink: true, - dot: opts?.dot, - }), - catch: (error) => error, - }).pipe( - Effect.catch((error) => { - if (!opts?.scope) return Effect.die(error) - log.error(`failed to scan ${opts.scope} skills`, { dir: root, error }) - return Effect.succeed([] as string[]) - }), - ) - - yield* Effect.forEach(matches, (match) => add(state, match, bus), { - concurrency: "unbounded", - discard: true, - }) -}) - -const loadSkills = Effect.fnUntraced(function* ( - state: State, - config: Config.Interface, - discovery: Discovery.Interface, - bus: Bus.Interface, - fsys: 
AppFileSystem.Interface, - directory: string, - worktree: string, -) { - if (!Flag.OPENCODE_DISABLE_EXTERNAL_SKILLS) { - for (const dir of EXTERNAL_DIRS) { - const root = path.join(Global.Path.home, dir) - if (!(yield* fsys.isDir(root))) continue - yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "global" }) - } - - const upDirs = yield* fsys - .up({ targets: EXTERNAL_DIRS, start: directory, stop: worktree }) - .pipe(Effect.catch(() => Effect.succeed([] as string[]))) - - for (const root of upDirs) { - yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "project" }) - } - } - - const configDirs = yield* config.directories() - for (const dir of configDirs) { - yield* scan(state, bus, dir, OPENCODE_SKILL_PATTERN) - } - - const cfg = yield* config.get() - for (const item of cfg.skills?.paths ?? []) { - const expanded = item.startsWith("~/") ? path.join(os.homedir(), item.slice(2)) : item - const dir = path.isAbsolute(expanded) ? expanded : path.join(directory, expanded) - if (!(yield* fsys.isDir(dir))) { - log.warn("skill path not found", { path: dir }) - continue - } - - yield* scan(state, bus, dir, SKILL_PATTERN) - } - - for (const url of cfg.skills?.urls ?? 
[]) { - const pulledDirs = yield* discovery.pull(url) - for (const dir of pulledDirs) { - state.dirs.add(dir) - yield* scan(state, bus, dir, SKILL_PATTERN) - } - } - - log.info("init", { count: Object.keys(state.skills).length }) -}) - -export class Service extends Context.Service()("@opencode/Skill") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const discovery = yield* Discovery.Service - const config = yield* Config.Service - const bus = yield* Bus.Service - const fsys = yield* AppFileSystem.Service - const state = yield* InstanceState.make( - Effect.fn("Skill.state")(function* (ctx) { - const s: State = { skills: {}, dirs: new Set() } - yield* loadSkills(s, config, discovery, bus, fsys, ctx.directory, ctx.worktree) - return s - }), - ) - - const get = Effect.fn("Skill.get")(function* (name: string) { - const s = yield* InstanceState.get(state) - return s.skills[name] - }) - - const all = Effect.fn("Skill.all")(function* () { - const s = yield* InstanceState.get(state) - return Object.values(s.skills) - }) - - const dirs = Effect.fn("Skill.dirs")(function* () { - const s = yield* InstanceState.get(state) - return Array.from(s.dirs) - }) - - const available = Effect.fn("Skill.available")(function* (agent?: Agent.Info) { - const s = yield* InstanceState.get(state) - const list = Object.values(s.skills).toSorted((a, b) => a.name.localeCompare(b.name)) - if (!agent) return list - return list.filter((skill) => Permission.evaluate("skill", skill.name, agent.permission).action !== "deny") - }) - - return Service.of({ get, all, dirs, available }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Discovery.defaultLayer), - Layer.provide(Config.defaultLayer), - Layer.provide(Bus.layer), - Layer.provide(AppFileSystem.defaultLayer), -) - -export function fmt(list: Info[], opts: { verbose: boolean }) { - if (list.length === 0) return "No skills are currently available." 
- if (opts.verbose) { - return [ - "", - ...list - .sort((a, b) => a.name.localeCompare(b.name)) - .flatMap((skill) => [ - " ", - ` ${skill.name}`, - ` ${skill.description}`, - ` ${pathToFileURL(skill.location).href}`, - " ", - ]), - "", - ].join("\n") - } - - return [ - "## Available Skills", - ...list - .toSorted((a, b) => a.name.localeCompare(b.name)) - .map((skill) => `- **${skill.name}**: ${skill.description}`), - ].join("\n") -} From ab15fc1575fbdc9d9233bf7a130b4ae7b9cf8754 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:02 -0400 Subject: [PATCH 047/120] refactor: collapse npm barrel into npm/index.ts (#22911) --- packages/opencode/src/npm/index.ts | 190 ++++++++++++++++++++++++++++- packages/opencode/src/npm/npm.ts | 187 ---------------------------- 2 files changed, 189 insertions(+), 188 deletions(-) delete mode 100644 packages/opencode/src/npm/npm.ts diff --git a/packages/opencode/src/npm/index.ts b/packages/opencode/src/npm/index.ts index 856ed2a2c6..174df12974 100644 --- a/packages/opencode/src/npm/index.ts +++ b/packages/opencode/src/npm/index.ts @@ -1 +1,189 @@ -export * as Npm from "./npm" +import semver from "semver" +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" +import { Global } from "../global" +import { Log } from "../util" +import path from "path" +import { readdir, rm } from "fs/promises" +import { Filesystem } from "@/util" +import { Flock } from "@opencode-ai/shared/util/flock" + +const log = Log.create({ service: "npm" }) +const illegal = process.platform === "win32" ? new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined + +export const InstallFailedError = NamedError.create( + "NpmInstallFailedError", + z.object({ + pkg: z.string(), + }), +) + +export function sanitize(pkg: string) { + if (!illegal) return pkg + return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? 
"_" : char)).join("") +} + +function directory(pkg: string) { + return path.join(Global.Path.cache, "packages", sanitize(pkg)) +} + +function resolveEntryPoint(name: string, dir: string) { + let entrypoint: string | undefined + try { + entrypoint = typeof Bun !== "undefined" ? import.meta.resolve(name, dir) : import.meta.resolve(dir) + } catch {} + const result = { + directory: dir, + entrypoint, + } + return result +} + +export async function outdated(pkg: string, cachedVersion: string): Promise { + const response = await fetch(`https://registry.npmjs.org/${pkg}`) + if (!response.ok) { + log.warn("Failed to resolve latest version, using cached", { pkg, cachedVersion }) + return false + } + + const data = (await response.json()) as { "dist-tags"?: { latest?: string } } + const latestVersion = data?.["dist-tags"]?.latest + if (!latestVersion) { + log.warn("No latest version found, using cached", { pkg, cachedVersion }) + return false + } + + const range = /[\s^~*xX<>|=]/.test(cachedVersion) + if (range) return !semver.satisfies(latestVersion, cachedVersion) + + return semver.lt(cachedVersion, latestVersion) +} + +export async function add(pkg: string) { + const { Arborist } = await import("@npmcli/arborist") + const dir = directory(pkg) + await using _ = await Flock.acquire(`npm-install:${Filesystem.resolve(dir)}`) + log.info("installing package", { + pkg, + }) + + const arborist = new Arborist({ + path: dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + const tree = await arborist.loadVirtual().catch(() => {}) + if (tree) { + const first = tree.edgesOut.values().next().value?.to + if (first) { + return resolveEntryPoint(first.name, first.path) + } + } + + const result = await arborist + .reify({ + add: [pkg], + save: true, + saveType: "prod", + }) + .catch((cause) => { + throw new InstallFailedError( + { pkg }, + { + cause, + }, + ) + }) + + const first = result.edgesOut.values().next().value?.to + if (!first) throw new 
InstallFailedError({ pkg }) + return resolveEntryPoint(first.name, first.path) +} + +export async function install(dir: string) { + await using _ = await Flock.acquire(`npm-install:${dir}`) + log.info("checking dependencies", { dir }) + + const reify = async () => { + const { Arborist } = await import("@npmcli/arborist") + const arb = new Arborist({ + path: dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + await arb.reify().catch(() => {}) + } + + if (!(await Filesystem.exists(path.join(dir, "node_modules")))) { + log.info("node_modules missing, reifying") + await reify() + return + } + + const pkg = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) + const lock = await Filesystem.readJson(path.join(dir, "package-lock.json")).catch(() => ({})) + + const declared = new Set([ + ...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.devDependencies || {}), + ...Object.keys(pkg.peerDependencies || {}), + ...Object.keys(pkg.optionalDependencies || {}), + ]) + + const root = lock.packages?.[""] || {} + const locked = new Set([ + ...Object.keys(root.dependencies || {}), + ...Object.keys(root.devDependencies || {}), + ...Object.keys(root.peerDependencies || {}), + ...Object.keys(root.optionalDependencies || {}), + ]) + + for (const name of declared) { + if (!locked.has(name)) { + log.info("dependency not in lock file, reifying", { name }) + await reify() + return + } + } + + log.info("dependencies in sync") +} + +export async function which(pkg: string) { + const dir = directory(pkg) + const binDir = path.join(dir, "node_modules", ".bin") + + const pick = async () => { + const files = await readdir(binDir).catch(() => []) + if (files.length === 0) return undefined + if (files.length === 1) return files[0] + // Multiple binaries — resolve from package.json bin field like npx does + const pkgJson = await Filesystem.readJson<{ bin?: string | Record }>( + path.join(dir, "node_modules", pkg, 
"package.json"), + ).catch(() => undefined) + if (pkgJson?.bin) { + const unscoped = pkg.startsWith("@") ? pkg.split("/")[1] : pkg + const bin = pkgJson.bin + if (typeof bin === "string") return unscoped + const keys = Object.keys(bin) + if (keys.length === 1) return keys[0] + return bin[unscoped] ? unscoped : keys[0] + } + return files[0] + } + + const bin = await pick() + if (bin) return path.join(binDir, bin) + + await rm(path.join(dir, "package-lock.json"), { force: true }) + await add(pkg) + const resolved = await pick() + if (!resolved) return + return path.join(binDir, resolved) +} + +export * as Npm from "." diff --git a/packages/opencode/src/npm/npm.ts b/packages/opencode/src/npm/npm.ts deleted file mode 100644 index d74c10d555..0000000000 --- a/packages/opencode/src/npm/npm.ts +++ /dev/null @@ -1,187 +0,0 @@ -import semver from "semver" -import z from "zod" -import { NamedError } from "@opencode-ai/shared/util/error" -import { Global } from "../global" -import { Log } from "../util" -import path from "path" -import { readdir, rm } from "fs/promises" -import { Filesystem } from "@/util" -import { Flock } from "@opencode-ai/shared/util/flock" - -const log = Log.create({ service: "npm" }) -const illegal = process.platform === "win32" ? new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined - -export const InstallFailedError = NamedError.create( - "NpmInstallFailedError", - z.object({ - pkg: z.string(), - }), -) - -export function sanitize(pkg: string) { - if (!illegal) return pkg - return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? "_" : char)).join("") -} - -function directory(pkg: string) { - return path.join(Global.Path.cache, "packages", sanitize(pkg)) -} - -function resolveEntryPoint(name: string, dir: string) { - let entrypoint: string | undefined - try { - entrypoint = typeof Bun !== "undefined" ? 
import.meta.resolve(name, dir) : import.meta.resolve(dir) - } catch {} - const result = { - directory: dir, - entrypoint, - } - return result -} - -export async function outdated(pkg: string, cachedVersion: string): Promise { - const response = await fetch(`https://registry.npmjs.org/${pkg}`) - if (!response.ok) { - log.warn("Failed to resolve latest version, using cached", { pkg, cachedVersion }) - return false - } - - const data = (await response.json()) as { "dist-tags"?: { latest?: string } } - const latestVersion = data?.["dist-tags"]?.latest - if (!latestVersion) { - log.warn("No latest version found, using cached", { pkg, cachedVersion }) - return false - } - - const range = /[\s^~*xX<>|=]/.test(cachedVersion) - if (range) return !semver.satisfies(latestVersion, cachedVersion) - - return semver.lt(cachedVersion, latestVersion) -} - -export async function add(pkg: string) { - const { Arborist } = await import("@npmcli/arborist") - const dir = directory(pkg) - await using _ = await Flock.acquire(`npm-install:${Filesystem.resolve(dir)}`) - log.info("installing package", { - pkg, - }) - - const arborist = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - const tree = await arborist.loadVirtual().catch(() => {}) - if (tree) { - const first = tree.edgesOut.values().next().value?.to - if (first) { - return resolveEntryPoint(first.name, first.path) - } - } - - const result = await arborist - .reify({ - add: [pkg], - save: true, - saveType: "prod", - }) - .catch((cause) => { - throw new InstallFailedError( - { pkg }, - { - cause, - }, - ) - }) - - const first = result.edgesOut.values().next().value?.to - if (!first) throw new InstallFailedError({ pkg }) - return resolveEntryPoint(first.name, first.path) -} - -export async function install(dir: string) { - await using _ = await Flock.acquire(`npm-install:${dir}`) - log.info("checking dependencies", { dir }) - - const reify = async () => { - const { 
Arborist } = await import("@npmcli/arborist") - const arb = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - await arb.reify().catch(() => {}) - } - - if (!(await Filesystem.exists(path.join(dir, "node_modules")))) { - log.info("node_modules missing, reifying") - await reify() - return - } - - const pkg = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) - const lock = await Filesystem.readJson(path.join(dir, "package-lock.json")).catch(() => ({})) - - const declared = new Set([ - ...Object.keys(pkg.dependencies || {}), - ...Object.keys(pkg.devDependencies || {}), - ...Object.keys(pkg.peerDependencies || {}), - ...Object.keys(pkg.optionalDependencies || {}), - ]) - - const root = lock.packages?.[""] || {} - const locked = new Set([ - ...Object.keys(root.dependencies || {}), - ...Object.keys(root.devDependencies || {}), - ...Object.keys(root.peerDependencies || {}), - ...Object.keys(root.optionalDependencies || {}), - ]) - - for (const name of declared) { - if (!locked.has(name)) { - log.info("dependency not in lock file, reifying", { name }) - await reify() - return - } - } - - log.info("dependencies in sync") -} - -export async function which(pkg: string) { - const dir = directory(pkg) - const binDir = path.join(dir, "node_modules", ".bin") - - const pick = async () => { - const files = await readdir(binDir).catch(() => []) - if (files.length === 0) return undefined - if (files.length === 1) return files[0] - // Multiple binaries — resolve from package.json bin field like npx does - const pkgJson = await Filesystem.readJson<{ bin?: string | Record }>( - path.join(dir, "node_modules", pkg, "package.json"), - ).catch(() => undefined) - if (pkgJson?.bin) { - const unscoped = pkg.startsWith("@") ? 
pkg.split("/")[1] : pkg - const bin = pkgJson.bin - if (typeof bin === "string") return unscoped - const keys = Object.keys(bin) - if (keys.length === 1) return keys[0] - return bin[unscoped] ? unscoped : keys[0] - } - return files[0] - } - - const bin = await pick() - if (bin) return path.join(binDir, bin) - - await rm(path.join(dir, "package-lock.json"), { force: true }) - await add(pkg) - const resolved = await pick() - if (!resolved) return - return path.join(binDir, resolved) -} From 964474a1b1cb2d864ef982e6ab7280b5b509c26f Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:04 -0400 Subject: [PATCH 048/120] refactor: collapse permission barrel into permission/index.ts (#22915) --- packages/opencode/src/permission/index.ts | 326 +++++++++++++++++- .../opencode/src/permission/permission.ts | 323 ----------------- 2 files changed, 325 insertions(+), 324 deletions(-) delete mode 100644 packages/opencode/src/permission/permission.ts diff --git a/packages/opencode/src/permission/index.ts b/packages/opencode/src/permission/index.ts index 7d8a2fff82..b9a221155c 100644 --- a/packages/opencode/src/permission/index.ts +++ b/packages/opencode/src/permission/index.ts @@ -1 +1,325 @@ -export * as Permission from "./permission" +import { Bus } from "@/bus" +import { BusEvent } from "@/bus/bus-event" +import { ConfigPermission } from "@/config/permission" +import { InstanceState } from "@/effect" +import { ProjectID } from "@/project/schema" +import { MessageID, SessionID } from "@/session/schema" +import { PermissionTable } from "@/session/session.sql" +import { Database, eq } from "@/storage" +import { zod } from "@/util/effect-zod" +import { Log } from "@/util" +import { withStatics } from "@/util/schema" +import { Wildcard } from "@/util" +import { Deferred, Effect, Layer, Schema, Context } from "effect" +import os from "os" +import { evaluate as evalRule } from "./evaluate" +import { PermissionID } from "./schema" + +const log = Log.create({ service: 
"permission" }) + +export const Action = Schema.Literals(["allow", "deny", "ask"]) + .annotate({ identifier: "PermissionAction" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Action = Schema.Schema.Type + +export class Rule extends Schema.Class("PermissionRule")({ + permission: Schema.String, + pattern: Schema.String, + action: Action, +}) { + static readonly zod = zod(this) +} + +export const Ruleset = Schema.mutable(Schema.Array(Rule)) + .annotate({ identifier: "PermissionRuleset" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Ruleset = Schema.Schema.Type + +export class Request extends Schema.Class("PermissionRequest")({ + id: PermissionID, + sessionID: SessionID, + permission: Schema.String, + patterns: Schema.Array(Schema.String), + metadata: Schema.Record(Schema.String, Schema.Unknown), + always: Schema.Array(Schema.String), + tool: Schema.optional( + Schema.Struct({ + messageID: MessageID, + callID: Schema.String, + }), + ), +}) { + static readonly zod = zod(this) +} + +export const Reply = Schema.Literals(["once", "always", "reject"]).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Reply = Schema.Schema.Type + +const reply = { + reply: Reply, + message: Schema.optional(Schema.String), +} + +export const ReplyBody = Schema.Struct(reply) + .annotate({ identifier: "PermissionReplyBody" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type ReplyBody = Schema.Schema.Type + +export class Approval extends Schema.Class("PermissionApproval")({ + projectID: ProjectID, + patterns: Schema.Array(Schema.String), +}) { + static readonly zod = zod(this) +} + +export const Event = { + Asked: BusEvent.define("permission.asked", Request.zod), + Replied: BusEvent.define( + "permission.replied", + zod( + Schema.Struct({ + sessionID: SessionID, + requestID: PermissionID, + reply: Reply, + }), + ), + ), +} + +export class RejectedError extends Schema.TaggedErrorClass()("PermissionRejectedError", {}) { + override get message() { + 
return "The user rejected permission to use this specific tool call." + } +} + +export class CorrectedError extends Schema.TaggedErrorClass()("PermissionCorrectedError", { + feedback: Schema.String, +}) { + override get message() { + return `The user rejected permission to use this specific tool call with the following feedback: ${this.feedback}` + } +} + +export class DeniedError extends Schema.TaggedErrorClass()("PermissionDeniedError", { + ruleset: Schema.Any, +}) { + override get message() { + return `The user has specified a rule which prevents you from using this specific tool call. Here are some of the relevant rules ${JSON.stringify(this.ruleset)}` + } +} + +export type Error = DeniedError | RejectedError | CorrectedError + +export const AskInput = Schema.Struct({ + ...Request.fields, + id: Schema.optional(PermissionID), + ruleset: Ruleset, +}) + .annotate({ identifier: "PermissionAskInput" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type AskInput = Schema.Schema.Type + +export const ReplyInput = Schema.Struct({ + requestID: PermissionID, + ...reply, +}) + .annotate({ identifier: "PermissionReplyInput" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type ReplyInput = Schema.Schema.Type + +export interface Interface { + readonly ask: (input: AskInput) => Effect.Effect + readonly reply: (input: ReplyInput) => Effect.Effect + readonly list: () => Effect.Effect> +} + +interface PendingEntry { + info: Request + deferred: Deferred.Deferred +} + +interface State { + pending: Map + approved: Ruleset +} + +export function evaluate(permission: string, pattern: string, ...rulesets: Ruleset[]): Rule { + log.info("evaluate", { permission, pattern, ruleset: rulesets.flat() }) + return evalRule(permission, pattern, ...rulesets) +} + +export class Service extends Context.Service()("@opencode/Permission") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + const state = yield* 
InstanceState.make( + Effect.fn("Permission.state")(function* (ctx) { + const row = Database.use((db) => + db.select().from(PermissionTable).where(eq(PermissionTable.project_id, ctx.project.id)).get(), + ) + const state = { + pending: new Map(), + approved: row?.data ?? [], + } + + yield* Effect.addFinalizer(() => + Effect.gen(function* () { + for (const item of state.pending.values()) { + yield* Deferred.fail(item.deferred, new RejectedError()) + } + state.pending.clear() + }), + ) + + return state + }), + ) + + const ask = Effect.fn("Permission.ask")(function* (input: AskInput) { + const { approved, pending } = yield* InstanceState.get(state) + const { ruleset, ...request } = input + let needsAsk = false + + for (const pattern of request.patterns) { + const rule = evaluate(request.permission, pattern, ruleset, approved) + log.info("evaluated", { permission: request.permission, pattern, action: rule }) + if (rule.action === "deny") { + return yield* new DeniedError({ + ruleset: ruleset.filter((rule) => Wildcard.match(request.permission, rule.permission)), + }) + } + if (rule.action === "allow") continue + needsAsk = true + } + + if (!needsAsk) return + + const id = request.id ?? 
PermissionID.ascending() + const info = Schema.decodeUnknownSync(Request)({ + id, + ...request, + }) + log.info("asking", { id, permission: info.permission, patterns: info.patterns }) + + const deferred = yield* Deferred.make() + pending.set(id, { info, deferred }) + yield* bus.publish(Event.Asked, info) + return yield* Effect.ensuring( + Deferred.await(deferred), + Effect.sync(() => { + pending.delete(id) + }), + ) + }) + + const reply = Effect.fn("Permission.reply")(function* (input: ReplyInput) { + const { approved, pending } = yield* InstanceState.get(state) + const existing = pending.get(input.requestID) + if (!existing) return + + pending.delete(input.requestID) + yield* bus.publish(Event.Replied, { + sessionID: existing.info.sessionID, + requestID: existing.info.id, + reply: input.reply, + }) + + if (input.reply === "reject") { + yield* Deferred.fail( + existing.deferred, + input.message ? new CorrectedError({ feedback: input.message }) : new RejectedError(), + ) + + for (const [id, item] of pending.entries()) { + if (item.info.sessionID !== existing.info.sessionID) continue + pending.delete(id) + yield* bus.publish(Event.Replied, { + sessionID: item.info.sessionID, + requestID: item.info.id, + reply: "reject", + }) + yield* Deferred.fail(item.deferred, new RejectedError()) + } + return + } + + yield* Deferred.succeed(existing.deferred, undefined) + if (input.reply === "once") return + + for (const pattern of existing.info.always) { + approved.push({ + permission: existing.info.permission, + pattern, + action: "allow", + }) + } + + for (const [id, item] of pending.entries()) { + if (item.info.sessionID !== existing.info.sessionID) continue + const ok = item.info.patterns.every( + (pattern) => evaluate(item.info.permission, pattern, approved).action === "allow", + ) + if (!ok) continue + pending.delete(id) + yield* bus.publish(Event.Replied, { + sessionID: item.info.sessionID, + requestID: item.info.id, + reply: "always", + }) + yield* 
Deferred.succeed(item.deferred, undefined) + } + }) + + const list = Effect.fn("Permission.list")(function* () { + const pending = (yield* InstanceState.get(state)).pending + return Array.from(pending.values(), (item) => item.info) + }) + + return Service.of({ ask, reply, list }) + }), +) + +function expand(pattern: string): string { + if (pattern.startsWith("~/")) return os.homedir() + pattern.slice(1) + if (pattern === "~") return os.homedir() + if (pattern.startsWith("$HOME/")) return os.homedir() + pattern.slice(5) + if (pattern.startsWith("$HOME")) return os.homedir() + pattern.slice(5) + return pattern +} + +export function fromConfig(permission: ConfigPermission.Info) { + const ruleset: Ruleset = [] + for (const [key, value] of Object.entries(permission)) { + if (typeof value === "string") { + ruleset.push({ permission: key, action: value, pattern: "*" }) + continue + } + ruleset.push( + ...Object.entries(value).map(([pattern, action]) => ({ permission: key, pattern: expand(pattern), action })), + ) + } + return ruleset +} + +export function merge(...rulesets: Ruleset[]): Ruleset { + return rulesets.flat() +} + +const EDIT_TOOLS = ["edit", "write", "apply_patch", "multiedit"] + +export function disabled(tools: string[], ruleset: Ruleset): Set { + const result = new Set() + for (const tool of tools) { + const permission = EDIT_TOOLS.includes(tool) ? "edit" : tool + const rule = ruleset.findLast((rule) => Wildcard.match(permission, rule.permission)) + if (!rule) continue + if (rule.pattern === "*" && rule.action === "deny") result.add(tool) + } + return result +} + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) + +export * as Permission from "." 
diff --git a/packages/opencode/src/permission/permission.ts b/packages/opencode/src/permission/permission.ts deleted file mode 100644 index 44dac3b1db..0000000000 --- a/packages/opencode/src/permission/permission.ts +++ /dev/null @@ -1,323 +0,0 @@ -import { Bus } from "@/bus" -import { BusEvent } from "@/bus/bus-event" -import { ConfigPermission } from "@/config/permission" -import { InstanceState } from "@/effect" -import { ProjectID } from "@/project/schema" -import { MessageID, SessionID } from "@/session/schema" -import { PermissionTable } from "@/session/session.sql" -import { Database, eq } from "@/storage" -import { zod } from "@/util/effect-zod" -import { Log } from "@/util" -import { withStatics } from "@/util/schema" -import { Wildcard } from "@/util" -import { Deferred, Effect, Layer, Schema, Context } from "effect" -import os from "os" -import { evaluate as evalRule } from "./evaluate" -import { PermissionID } from "./schema" - -const log = Log.create({ service: "permission" }) - -export const Action = Schema.Literals(["allow", "deny", "ask"]) - .annotate({ identifier: "PermissionAction" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Action = Schema.Schema.Type - -export class Rule extends Schema.Class("PermissionRule")({ - permission: Schema.String, - pattern: Schema.String, - action: Action, -}) { - static readonly zod = zod(this) -} - -export const Ruleset = Schema.mutable(Schema.Array(Rule)) - .annotate({ identifier: "PermissionRuleset" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Ruleset = Schema.Schema.Type - -export class Request extends Schema.Class("PermissionRequest")({ - id: PermissionID, - sessionID: SessionID, - permission: Schema.String, - patterns: Schema.Array(Schema.String), - metadata: Schema.Record(Schema.String, Schema.Unknown), - always: Schema.Array(Schema.String), - tool: Schema.optional( - Schema.Struct({ - messageID: MessageID, - callID: Schema.String, - }), - ), -}) { - static readonly zod = 
zod(this) -} - -export const Reply = Schema.Literals(["once", "always", "reject"]).pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Reply = Schema.Schema.Type - -const reply = { - reply: Reply, - message: Schema.optional(Schema.String), -} - -export const ReplyBody = Schema.Struct(reply) - .annotate({ identifier: "PermissionReplyBody" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type ReplyBody = Schema.Schema.Type - -export class Approval extends Schema.Class("PermissionApproval")({ - projectID: ProjectID, - patterns: Schema.Array(Schema.String), -}) { - static readonly zod = zod(this) -} - -export const Event = { - Asked: BusEvent.define("permission.asked", Request.zod), - Replied: BusEvent.define( - "permission.replied", - zod( - Schema.Struct({ - sessionID: SessionID, - requestID: PermissionID, - reply: Reply, - }), - ), - ), -} - -export class RejectedError extends Schema.TaggedErrorClass()("PermissionRejectedError", {}) { - override get message() { - return "The user rejected permission to use this specific tool call." - } -} - -export class CorrectedError extends Schema.TaggedErrorClass()("PermissionCorrectedError", { - feedback: Schema.String, -}) { - override get message() { - return `The user rejected permission to use this specific tool call with the following feedback: ${this.feedback}` - } -} - -export class DeniedError extends Schema.TaggedErrorClass()("PermissionDeniedError", { - ruleset: Schema.Any, -}) { - override get message() { - return `The user has specified a rule which prevents you from using this specific tool call. 
Here are some of the relevant rules ${JSON.stringify(this.ruleset)}` - } -} - -export type Error = DeniedError | RejectedError | CorrectedError - -export const AskInput = Schema.Struct({ - ...Request.fields, - id: Schema.optional(PermissionID), - ruleset: Ruleset, -}) - .annotate({ identifier: "PermissionAskInput" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type AskInput = Schema.Schema.Type - -export const ReplyInput = Schema.Struct({ - requestID: PermissionID, - ...reply, -}) - .annotate({ identifier: "PermissionReplyInput" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type ReplyInput = Schema.Schema.Type - -export interface Interface { - readonly ask: (input: AskInput) => Effect.Effect - readonly reply: (input: ReplyInput) => Effect.Effect - readonly list: () => Effect.Effect> -} - -interface PendingEntry { - info: Request - deferred: Deferred.Deferred -} - -interface State { - pending: Map - approved: Ruleset -} - -export function evaluate(permission: string, pattern: string, ...rulesets: Ruleset[]): Rule { - log.info("evaluate", { permission, pattern, ruleset: rulesets.flat() }) - return evalRule(permission, pattern, ...rulesets) -} - -export class Service extends Context.Service()("@opencode/Permission") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const state = yield* InstanceState.make( - Effect.fn("Permission.state")(function* (ctx) { - const row = Database.use((db) => - db.select().from(PermissionTable).where(eq(PermissionTable.project_id, ctx.project.id)).get(), - ) - const state = { - pending: new Map(), - approved: row?.data ?? 
[], - } - - yield* Effect.addFinalizer(() => - Effect.gen(function* () { - for (const item of state.pending.values()) { - yield* Deferred.fail(item.deferred, new RejectedError()) - } - state.pending.clear() - }), - ) - - return state - }), - ) - - const ask = Effect.fn("Permission.ask")(function* (input: AskInput) { - const { approved, pending } = yield* InstanceState.get(state) - const { ruleset, ...request } = input - let needsAsk = false - - for (const pattern of request.patterns) { - const rule = evaluate(request.permission, pattern, ruleset, approved) - log.info("evaluated", { permission: request.permission, pattern, action: rule }) - if (rule.action === "deny") { - return yield* new DeniedError({ - ruleset: ruleset.filter((rule) => Wildcard.match(request.permission, rule.permission)), - }) - } - if (rule.action === "allow") continue - needsAsk = true - } - - if (!needsAsk) return - - const id = request.id ?? PermissionID.ascending() - const info = Schema.decodeUnknownSync(Request)({ - id, - ...request, - }) - log.info("asking", { id, permission: info.permission, patterns: info.patterns }) - - const deferred = yield* Deferred.make() - pending.set(id, { info, deferred }) - yield* bus.publish(Event.Asked, info) - return yield* Effect.ensuring( - Deferred.await(deferred), - Effect.sync(() => { - pending.delete(id) - }), - ) - }) - - const reply = Effect.fn("Permission.reply")(function* (input: ReplyInput) { - const { approved, pending } = yield* InstanceState.get(state) - const existing = pending.get(input.requestID) - if (!existing) return - - pending.delete(input.requestID) - yield* bus.publish(Event.Replied, { - sessionID: existing.info.sessionID, - requestID: existing.info.id, - reply: input.reply, - }) - - if (input.reply === "reject") { - yield* Deferred.fail( - existing.deferred, - input.message ? 
new CorrectedError({ feedback: input.message }) : new RejectedError(), - ) - - for (const [id, item] of pending.entries()) { - if (item.info.sessionID !== existing.info.sessionID) continue - pending.delete(id) - yield* bus.publish(Event.Replied, { - sessionID: item.info.sessionID, - requestID: item.info.id, - reply: "reject", - }) - yield* Deferred.fail(item.deferred, new RejectedError()) - } - return - } - - yield* Deferred.succeed(existing.deferred, undefined) - if (input.reply === "once") return - - for (const pattern of existing.info.always) { - approved.push({ - permission: existing.info.permission, - pattern, - action: "allow", - }) - } - - for (const [id, item] of pending.entries()) { - if (item.info.sessionID !== existing.info.sessionID) continue - const ok = item.info.patterns.every( - (pattern) => evaluate(item.info.permission, pattern, approved).action === "allow", - ) - if (!ok) continue - pending.delete(id) - yield* bus.publish(Event.Replied, { - sessionID: item.info.sessionID, - requestID: item.info.id, - reply: "always", - }) - yield* Deferred.succeed(item.deferred, undefined) - } - }) - - const list = Effect.fn("Permission.list")(function* () { - const pending = (yield* InstanceState.get(state)).pending - return Array.from(pending.values(), (item) => item.info) - }) - - return Service.of({ ask, reply, list }) - }), -) - -function expand(pattern: string): string { - if (pattern.startsWith("~/")) return os.homedir() + pattern.slice(1) - if (pattern === "~") return os.homedir() - if (pattern.startsWith("$HOME/")) return os.homedir() + pattern.slice(5) - if (pattern.startsWith("$HOME")) return os.homedir() + pattern.slice(5) - return pattern -} - -export function fromConfig(permission: ConfigPermission.Info) { - const ruleset: Ruleset = [] - for (const [key, value] of Object.entries(permission)) { - if (typeof value === "string") { - ruleset.push({ permission: key, action: value, pattern: "*" }) - continue - } - ruleset.push( - 
...Object.entries(value).map(([pattern, action]) => ({ permission: key, pattern: expand(pattern), action })), - ) - } - return ruleset -} - -export function merge(...rulesets: Ruleset[]): Ruleset { - return rulesets.flat() -} - -const EDIT_TOOLS = ["edit", "write", "apply_patch", "multiedit"] - -export function disabled(tools: string[], ruleset: Ruleset): Set { - const result = new Set() - for (const tool of tools) { - const permission = EDIT_TOOLS.includes(tool) ? "edit" : tool - const rule = ruleset.findLast((rule) => Wildcard.match(permission, rule.permission)) - if (!rule) continue - if (rule.pattern === "*" && rule.action === "deny") result.add(tool) - } - return result -} - -export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) From 664b2c36e8200c70d4b6a70702aceaf0604cd00a Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:07 -0400 Subject: [PATCH 049/120] refactor: collapse git barrel into git/index.ts (#22909) --- packages/opencode/src/git/git.ts | 258 ---------------------------- packages/opencode/src/git/index.ts | 261 ++++++++++++++++++++++++++++- 2 files changed, 260 insertions(+), 259 deletions(-) delete mode 100644 packages/opencode/src/git/git.ts diff --git a/packages/opencode/src/git/git.ts b/packages/opencode/src/git/git.ts deleted file mode 100644 index 908c718521..0000000000 --- a/packages/opencode/src/git/git.ts +++ /dev/null @@ -1,258 +0,0 @@ -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { Effect, Layer, Context, Stream } from "effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" - -const cfg = [ - "--no-optional-locks", - "-c", - "core.autocrlf=false", - "-c", - "core.fsmonitor=false", - "-c", - "core.longpaths=true", - "-c", - "core.symlinks=true", - "-c", - "core.quotepath=false", -] as const - -const out = (result: { text(): string }) => result.text().trim() -const nuls = (text: string) => text.split("\0").filter(Boolean) -const fail = (err: 
unknown) => - ({ - exitCode: 1, - text: () => "", - stdout: Buffer.alloc(0), - stderr: Buffer.from(err instanceof Error ? err.message : String(err)), - }) satisfies Result - -export type Kind = "added" | "deleted" | "modified" - -export type Base = { - readonly name: string - readonly ref: string -} - -export type Item = { - readonly file: string - readonly code: string - readonly status: Kind -} - -export type Stat = { - readonly file: string - readonly additions: number - readonly deletions: number -} - -export interface Result { - readonly exitCode: number - readonly text: () => string - readonly stdout: Buffer - readonly stderr: Buffer -} - -export interface Options { - readonly cwd: string - readonly env?: Record -} - -export interface Interface { - readonly run: (args: string[], opts: Options) => Effect.Effect - readonly branch: (cwd: string) => Effect.Effect - readonly prefix: (cwd: string) => Effect.Effect - readonly defaultBranch: (cwd: string) => Effect.Effect - readonly hasHead: (cwd: string) => Effect.Effect - readonly mergeBase: (cwd: string, base: string, head?: string) => Effect.Effect - readonly show: (cwd: string, ref: string, file: string, prefix?: string) => Effect.Effect - readonly status: (cwd: string) => Effect.Effect - readonly diff: (cwd: string, ref: string) => Effect.Effect - readonly stats: (cwd: string, ref: string) => Effect.Effect -} - -const kind = (code: string): Kind => { - if (code === "??") return "added" - if (code.includes("U")) return "modified" - if (code.includes("A") && !code.includes("D")) return "added" - if (code.includes("D") && !code.includes("A")) return "deleted" - return "modified" -} - -export class Service extends Context.Service()("@opencode/Git") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - - const run = Effect.fn("Git.run")( - function* (args: string[], opts: Options) { - const proc = ChildProcess.make("git", 
[...cfg, ...args], { - cwd: opts.cwd, - env: opts.env, - extendEnv: true, - stdin: "ignore", - stdout: "pipe", - stderr: "pipe", - }) - const handle = yield* spawner.spawn(proc) - const [stdout, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - return { - exitCode: yield* handle.exitCode, - text: () => stdout, - stdout: Buffer.from(stdout), - stderr: Buffer.from(stderr), - } satisfies Result - }, - Effect.scoped, - Effect.catch((err) => Effect.succeed(fail(err))), - ) - - const text = Effect.fn("Git.text")(function* (args: string[], opts: Options) { - return (yield* run(args, opts)).text() - }) - - const lines = Effect.fn("Git.lines")(function* (args: string[], opts: Options) { - return (yield* text(args, opts)) - .split(/\r?\n/) - .map((item) => item.trim()) - .filter(Boolean) - }) - - const refs = Effect.fnUntraced(function* (cwd: string) { - return yield* lines(["for-each-ref", "--format=%(refname:short)", "refs/heads"], { cwd }) - }) - - const configured = Effect.fnUntraced(function* (cwd: string, list: string[]) { - const result = yield* run(["config", "init.defaultBranch"], { cwd }) - const name = out(result) - if (!name || !list.includes(name)) return - return { name, ref: name } satisfies Base - }) - - const primary = Effect.fnUntraced(function* (cwd: string) { - const list = yield* lines(["remote"], { cwd }) - if (list.includes("origin")) return "origin" - if (list.length === 1) return list[0] - if (list.includes("upstream")) return "upstream" - return list[0] - }) - - const branch = Effect.fn("Git.branch")(function* (cwd: string) { - const result = yield* run(["symbolic-ref", "--quiet", "--short", "HEAD"], { cwd }) - if (result.exitCode !== 0) return - const text = out(result) - return text || undefined - }) - - const prefix = Effect.fn("Git.prefix")(function* (cwd: string) { - const result = yield* run(["rev-parse", "--show-prefix"], { cwd }) 
- if (result.exitCode !== 0) return "" - return out(result) - }) - - const defaultBranch = Effect.fn("Git.defaultBranch")(function* (cwd: string) { - const remote = yield* primary(cwd) - if (remote) { - const head = yield* run(["symbolic-ref", `refs/remotes/${remote}/HEAD`], { cwd }) - if (head.exitCode === 0) { - const ref = out(head).replace(/^refs\/remotes\//, "") - const name = ref.startsWith(`${remote}/`) ? ref.slice(`${remote}/`.length) : "" - if (name) return { name, ref } satisfies Base - } - } - - const list = yield* refs(cwd) - const next = yield* configured(cwd, list) - if (next) return next - if (list.includes("main")) return { name: "main", ref: "main" } satisfies Base - if (list.includes("master")) return { name: "master", ref: "master" } satisfies Base - }) - - const hasHead = Effect.fn("Git.hasHead")(function* (cwd: string) { - const result = yield* run(["rev-parse", "--verify", "HEAD"], { cwd }) - return result.exitCode === 0 - }) - - const mergeBase = Effect.fn("Git.mergeBase")(function* (cwd: string, base: string, head = "HEAD") { - const result = yield* run(["merge-base", base, head], { cwd }) - if (result.exitCode !== 0) return - const text = out(result) - return text || undefined - }) - - const show = Effect.fn("Git.show")(function* (cwd: string, ref: string, file: string, prefix = "") { - const target = prefix ? 
`${prefix}${file}` : file - const result = yield* run(["show", `${ref}:${target}`], { cwd }) - if (result.exitCode !== 0) return "" - if (result.stdout.includes(0)) return "" - return result.text() - }) - - const status = Effect.fn("Git.status")(function* (cwd: string) { - return nuls( - yield* text(["status", "--porcelain=v1", "--untracked-files=all", "--no-renames", "-z", "--", "."], { - cwd, - }), - ).flatMap((item) => { - const file = item.slice(3) - if (!file) return [] - const code = item.slice(0, 2) - return [{ file, code, status: kind(code) } satisfies Item] - }) - }) - - const diff = Effect.fn("Git.diff")(function* (cwd: string, ref: string) { - const list = nuls( - yield* text(["diff", "--no-ext-diff", "--no-renames", "--name-status", "-z", ref, "--", "."], { cwd }), - ) - return list.flatMap((code, idx) => { - if (idx % 2 !== 0) return [] - const file = list[idx + 1] - if (!code || !file) return [] - return [{ file, code, status: kind(code) } satisfies Item] - }) - }) - - const stats = Effect.fn("Git.stats")(function* (cwd: string, ref: string) { - return nuls( - yield* text(["diff", "--no-ext-diff", "--no-renames", "--numstat", "-z", ref, "--", "."], { cwd }), - ).flatMap((item) => { - const a = item.indexOf("\t") - const b = item.indexOf("\t", a + 1) - if (a === -1 || b === -1) return [] - const file = item.slice(b + 1) - if (!file) return [] - const adds = item.slice(0, a) - const dels = item.slice(a + 1, b) - const additions = adds === "-" ? 0 : Number.parseInt(adds || "0", 10) - const deletions = dels === "-" ? 0 : Number.parseInt(dels || "0", 10) - return [ - { - file, - additions: Number.isFinite(additions) ? additions : 0, - deletions: Number.isFinite(deletions) ? 
deletions : 0, - } satisfies Stat, - ] - }) - }) - - return Service.of({ - run, - branch, - prefix, - defaultBranch, - hasHead, - mergeBase, - show, - status, - diff, - stats, - }) - }), -) - -export const defaultLayer = layer.pipe(Layer.provide(CrossSpawnSpawner.defaultLayer)) diff --git a/packages/opencode/src/git/index.ts b/packages/opencode/src/git/index.ts index 019819d6e3..719b5607fb 100644 --- a/packages/opencode/src/git/index.ts +++ b/packages/opencode/src/git/index.ts @@ -1 +1,260 @@ -export * as Git from "./git" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { Effect, Layer, Context, Stream } from "effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" + +const cfg = [ + "--no-optional-locks", + "-c", + "core.autocrlf=false", + "-c", + "core.fsmonitor=false", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + "-c", + "core.quotepath=false", +] as const + +const out = (result: { text(): string }) => result.text().trim() +const nuls = (text: string) => text.split("\0").filter(Boolean) +const fail = (err: unknown) => + ({ + exitCode: 1, + text: () => "", + stdout: Buffer.alloc(0), + stderr: Buffer.from(err instanceof Error ? 
err.message : String(err)), + }) satisfies Result + +export type Kind = "added" | "deleted" | "modified" + +export type Base = { + readonly name: string + readonly ref: string +} + +export type Item = { + readonly file: string + readonly code: string + readonly status: Kind +} + +export type Stat = { + readonly file: string + readonly additions: number + readonly deletions: number +} + +export interface Result { + readonly exitCode: number + readonly text: () => string + readonly stdout: Buffer + readonly stderr: Buffer +} + +export interface Options { + readonly cwd: string + readonly env?: Record +} + +export interface Interface { + readonly run: (args: string[], opts: Options) => Effect.Effect + readonly branch: (cwd: string) => Effect.Effect + readonly prefix: (cwd: string) => Effect.Effect + readonly defaultBranch: (cwd: string) => Effect.Effect + readonly hasHead: (cwd: string) => Effect.Effect + readonly mergeBase: (cwd: string, base: string, head?: string) => Effect.Effect + readonly show: (cwd: string, ref: string, file: string, prefix?: string) => Effect.Effect + readonly status: (cwd: string) => Effect.Effect + readonly diff: (cwd: string, ref: string) => Effect.Effect + readonly stats: (cwd: string, ref: string) => Effect.Effect +} + +const kind = (code: string): Kind => { + if (code === "??") return "added" + if (code.includes("U")) return "modified" + if (code.includes("A") && !code.includes("D")) return "added" + if (code.includes("D") && !code.includes("A")) return "deleted" + return "modified" +} + +export class Service extends Context.Service()("@opencode/Git") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + + const run = Effect.fn("Git.run")( + function* (args: string[], opts: Options) { + const proc = ChildProcess.make("git", [...cfg, ...args], { + cwd: opts.cwd, + env: opts.env, + extendEnv: true, + stdin: "ignore", + stdout: "pipe", + stderr: 
"pipe", + }) + const handle = yield* spawner.spawn(proc) + const [stdout, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + return { + exitCode: yield* handle.exitCode, + text: () => stdout, + stdout: Buffer.from(stdout), + stderr: Buffer.from(stderr), + } satisfies Result + }, + Effect.scoped, + Effect.catch((err) => Effect.succeed(fail(err))), + ) + + const text = Effect.fn("Git.text")(function* (args: string[], opts: Options) { + return (yield* run(args, opts)).text() + }) + + const lines = Effect.fn("Git.lines")(function* (args: string[], opts: Options) { + return (yield* text(args, opts)) + .split(/\r?\n/) + .map((item) => item.trim()) + .filter(Boolean) + }) + + const refs = Effect.fnUntraced(function* (cwd: string) { + return yield* lines(["for-each-ref", "--format=%(refname:short)", "refs/heads"], { cwd }) + }) + + const configured = Effect.fnUntraced(function* (cwd: string, list: string[]) { + const result = yield* run(["config", "init.defaultBranch"], { cwd }) + const name = out(result) + if (!name || !list.includes(name)) return + return { name, ref: name } satisfies Base + }) + + const primary = Effect.fnUntraced(function* (cwd: string) { + const list = yield* lines(["remote"], { cwd }) + if (list.includes("origin")) return "origin" + if (list.length === 1) return list[0] + if (list.includes("upstream")) return "upstream" + return list[0] + }) + + const branch = Effect.fn("Git.branch")(function* (cwd: string) { + const result = yield* run(["symbolic-ref", "--quiet", "--short", "HEAD"], { cwd }) + if (result.exitCode !== 0) return + const text = out(result) + return text || undefined + }) + + const prefix = Effect.fn("Git.prefix")(function* (cwd: string) { + const result = yield* run(["rev-parse", "--show-prefix"], { cwd }) + if (result.exitCode !== 0) return "" + return out(result) + }) + + const defaultBranch = 
Effect.fn("Git.defaultBranch")(function* (cwd: string) { + const remote = yield* primary(cwd) + if (remote) { + const head = yield* run(["symbolic-ref", `refs/remotes/${remote}/HEAD`], { cwd }) + if (head.exitCode === 0) { + const ref = out(head).replace(/^refs\/remotes\//, "") + const name = ref.startsWith(`${remote}/`) ? ref.slice(`${remote}/`.length) : "" + if (name) return { name, ref } satisfies Base + } + } + + const list = yield* refs(cwd) + const next = yield* configured(cwd, list) + if (next) return next + if (list.includes("main")) return { name: "main", ref: "main" } satisfies Base + if (list.includes("master")) return { name: "master", ref: "master" } satisfies Base + }) + + const hasHead = Effect.fn("Git.hasHead")(function* (cwd: string) { + const result = yield* run(["rev-parse", "--verify", "HEAD"], { cwd }) + return result.exitCode === 0 + }) + + const mergeBase = Effect.fn("Git.mergeBase")(function* (cwd: string, base: string, head = "HEAD") { + const result = yield* run(["merge-base", base, head], { cwd }) + if (result.exitCode !== 0) return + const text = out(result) + return text || undefined + }) + + const show = Effect.fn("Git.show")(function* (cwd: string, ref: string, file: string, prefix = "") { + const target = prefix ? 
`${prefix}${file}` : file + const result = yield* run(["show", `${ref}:${target}`], { cwd }) + if (result.exitCode !== 0) return "" + if (result.stdout.includes(0)) return "" + return result.text() + }) + + const status = Effect.fn("Git.status")(function* (cwd: string) { + return nuls( + yield* text(["status", "--porcelain=v1", "--untracked-files=all", "--no-renames", "-z", "--", "."], { + cwd, + }), + ).flatMap((item) => { + const file = item.slice(3) + if (!file) return [] + const code = item.slice(0, 2) + return [{ file, code, status: kind(code) } satisfies Item] + }) + }) + + const diff = Effect.fn("Git.diff")(function* (cwd: string, ref: string) { + const list = nuls( + yield* text(["diff", "--no-ext-diff", "--no-renames", "--name-status", "-z", ref, "--", "."], { cwd }), + ) + return list.flatMap((code, idx) => { + if (idx % 2 !== 0) return [] + const file = list[idx + 1] + if (!code || !file) return [] + return [{ file, code, status: kind(code) } satisfies Item] + }) + }) + + const stats = Effect.fn("Git.stats")(function* (cwd: string, ref: string) { + return nuls( + yield* text(["diff", "--no-ext-diff", "--no-renames", "--numstat", "-z", ref, "--", "."], { cwd }), + ).flatMap((item) => { + const a = item.indexOf("\t") + const b = item.indexOf("\t", a + 1) + if (a === -1 || b === -1) return [] + const file = item.slice(b + 1) + if (!file) return [] + const adds = item.slice(0, a) + const dels = item.slice(a + 1, b) + const additions = adds === "-" ? 0 : Number.parseInt(adds || "0", 10) + const deletions = dels === "-" ? 0 : Number.parseInt(dels || "0", 10) + return [ + { + file, + additions: Number.isFinite(additions) ? additions : 0, + deletions: Number.isFinite(deletions) ? 
deletions : 0, + } satisfies Stat, + ] + }) + }) + + return Service.of({ + run, + branch, + prefix, + defaultBranch, + hasHead, + mergeBase, + show, + status, + diff, + stats, + }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(CrossSpawnSpawner.defaultLayer)) + +export * as Git from "." From 5fccdc9fc7979be5f5b04ae9701d550fbec21535 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:23 -0400 Subject: [PATCH 050/120] refactor: collapse mcp barrel into mcp/index.ts (#22913) --- packages/opencode/src/mcp/index.ts | 934 ++++++++++++++++++++++++++++- packages/opencode/src/mcp/mcp.ts | 931 ---------------------------- 2 files changed, 933 insertions(+), 932 deletions(-) delete mode 100644 packages/opencode/src/mcp/mcp.ts diff --git a/packages/opencode/src/mcp/index.ts b/packages/opencode/src/mcp/index.ts index c42b9eb5c1..ba53e7c0b5 100644 --- a/packages/opencode/src/mcp/index.ts +++ b/packages/opencode/src/mcp/index.ts @@ -1 +1,933 @@ -export * as MCP from "./mcp" +import { dynamicTool, type Tool, jsonSchema, type JSONSchema7 } from "ai" +import { Client } from "@modelcontextprotocol/sdk/client/index.js" +import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js" +import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js" +import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js" +import { UnauthorizedError } from "@modelcontextprotocol/sdk/client/auth.js" +import { + CallToolResultSchema, + type Tool as MCPToolDef, + ToolListChangedNotificationSchema, +} from "@modelcontextprotocol/sdk/types.js" +import { Config } from "../config" +import { ConfigMCP } from "../config/mcp" +import { Log } from "../util" +import { NamedError } from "@opencode-ai/shared/util/error" +import z from "zod/v4" +import { Instance } from "../project/instance" +import { Installation } from "../installation" +import { InstallationVersion } from "../installation/version" +import 
{ withTimeout } from "@/util/timeout" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { McpOAuthProvider } from "./oauth-provider" +import { McpOAuthCallback } from "./oauth-callback" +import { McpAuth } from "./auth" +import { BusEvent } from "../bus/bus-event" +import { Bus } from "@/bus" +import { TuiEvent } from "@/cli/cmd/tui/event" +import open from "open" +import { Effect, Exit, Layer, Option, Context, Stream } from "effect" +import { EffectBridge } from "@/effect" +import { InstanceState } from "@/effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" + +const log = Log.create({ service: "mcp" }) +const DEFAULT_TIMEOUT = 30_000 + +export const Resource = z + .object({ + name: z.string(), + uri: z.string(), + description: z.string().optional(), + mimeType: z.string().optional(), + client: z.string(), + }) + .meta({ ref: "McpResource" }) +export type Resource = z.infer + +export const ToolsChanged = BusEvent.define( + "mcp.tools.changed", + z.object({ + server: z.string(), + }), +) + +export const BrowserOpenFailed = BusEvent.define( + "mcp.browser.open.failed", + z.object({ + mcpName: z.string(), + url: z.string(), + }), +) + +export const Failed = NamedError.create( + "MCPFailed", + z.object({ + name: z.string(), + }), +) + +type MCPClient = Client + +export const Status = z + .discriminatedUnion("status", [ + z + .object({ + status: z.literal("connected"), + }) + .meta({ + ref: "MCPStatusConnected", + }), + z + .object({ + status: z.literal("disabled"), + }) + .meta({ + ref: "MCPStatusDisabled", + }), + z + .object({ + status: z.literal("failed"), + error: z.string(), + }) + .meta({ + ref: "MCPStatusFailed", + }), + z + .object({ + status: z.literal("needs_auth"), + }) + .meta({ + ref: "MCPStatusNeedsAuth", + }), + z + .object({ + status: z.literal("needs_client_registration"), + error: z.string(), + }) + .meta({ + ref: 
"MCPStatusNeedsClientRegistration", + }), + ]) + .meta({ + ref: "MCPStatus", + }) +export type Status = z.infer + +// Store transports for OAuth servers to allow finishing auth +type TransportWithAuth = StreamableHTTPClientTransport | SSEClientTransport +const pendingOAuthTransports = new Map() + +// Prompt cache types +type PromptInfo = Awaited>["prompts"][number] +type ResourceInfo = Awaited>["resources"][number] +type McpEntry = NonNullable[string] + +function isMcpConfigured(entry: McpEntry): entry is ConfigMCP.Info { + return typeof entry === "object" && entry !== null && "type" in entry +} + +const sanitize = (s: string) => s.replace(/[^a-zA-Z0-9_-]/g, "_") + +// Convert MCP tool definition to AI SDK Tool type +function convertMcpTool(mcpTool: MCPToolDef, client: MCPClient, timeout?: number): Tool { + const inputSchema = mcpTool.inputSchema + + // Spread first, then override type to ensure it's always "object" + const schema: JSONSchema7 = { + ...(inputSchema as JSONSchema7), + type: "object", + properties: (inputSchema.properties ?? {}) as JSONSchema7["properties"], + additionalProperties: false, + } + + return dynamicTool({ + description: mcpTool.description ?? "", + inputSchema: jsonSchema(schema), + execute: async (args: unknown) => { + return client.callTool( + { + name: mcpTool.name, + arguments: (args || {}) as Record, + }, + CallToolResultSchema, + { + resetTimeoutOnProgress: true, + timeout, + }, + ) + }, + }) +} + +function defs(key: string, client: MCPClient, timeout?: number) { + return Effect.tryPromise({ + try: () => withTimeout(client.listTools(), timeout ?? DEFAULT_TIMEOUT), + catch: (err) => (err instanceof Error ? 
err : new Error(String(err))), + }).pipe( + Effect.map((result) => result.tools), + Effect.catch((err) => { + log.error("failed to get tools from client", { key, error: err }) + return Effect.succeed(undefined) + }), + ) +} + +function fetchFromClient( + clientName: string, + client: Client, + listFn: (c: Client) => Promise, + label: string, +) { + return Effect.tryPromise({ + try: () => listFn(client), + catch: (e: any) => { + log.error(`failed to get ${label}`, { clientName, error: e.message }) + return e + }, + }).pipe( + Effect.map((items) => { + const out: Record = {} + const sanitizedClient = sanitize(clientName) + for (const item of items) { + out[sanitizedClient + ":" + sanitize(item.name)] = { ...item, client: clientName } + } + return out + }), + Effect.orElseSucceed(() => undefined), + ) +} + +interface CreateResult { + mcpClient?: MCPClient + status: Status + defs?: MCPToolDef[] +} + +interface AuthResult { + authorizationUrl: string + oauthState: string + client?: MCPClient +} + +// --- Effect Service --- + +interface State { + status: Record + clients: Record + defs: Record +} + +export interface Interface { + readonly status: () => Effect.Effect> + readonly clients: () => Effect.Effect> + readonly tools: () => Effect.Effect> + readonly prompts: () => Effect.Effect> + readonly resources: () => Effect.Effect> + readonly add: (name: string, mcp: ConfigMCP.Info) => Effect.Effect<{ status: Record | Status }> + readonly connect: (name: string) => Effect.Effect + readonly disconnect: (name: string) => Effect.Effect + readonly getPrompt: ( + clientName: string, + name: string, + args?: Record, + ) => Effect.Effect> | undefined> + readonly readResource: ( + clientName: string, + resourceUri: string, + ) => Effect.Effect> | undefined> + readonly startAuth: (mcpName: string) => Effect.Effect<{ authorizationUrl: string; oauthState: string }> + readonly authenticate: (mcpName: string) => Effect.Effect + readonly finishAuth: (mcpName: string, authorizationCode: 
string) => Effect.Effect + readonly removeAuth: (mcpName: string) => Effect.Effect + readonly supportsOAuth: (mcpName: string) => Effect.Effect + readonly hasStoredTokens: (mcpName: string) => Effect.Effect + readonly getAuthStatus: (mcpName: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/MCP") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + const auth = yield* McpAuth.Service + const bus = yield* Bus.Service + + type Transport = StdioClientTransport | StreamableHTTPClientTransport | SSEClientTransport + + /** + * Connect a client via the given transport with resource safety: + * on failure the transport is closed; on success the caller owns it. + */ + const connectTransport = (transport: Transport, timeout: number) => + Effect.acquireUseRelease( + Effect.succeed(transport), + (t) => + Effect.tryPromise({ + try: () => { + const client = new Client({ name: "opencode", version: InstallationVersion }) + return withTimeout(client.connect(t), timeout).then(() => client) + }, + catch: (e) => (e instanceof Error ? e : new Error(String(e))), + }), + (t, exit) => (Exit.isFailure(exit) ? Effect.tryPromise(() => t.close()).pipe(Effect.ignore) : Effect.void), + ) + + const DISABLED_RESULT: CreateResult = { status: { status: "disabled" } } + + const connectRemote = Effect.fn("MCP.connectRemote")(function* ( + key: string, + mcp: ConfigMCP.Info & { type: "remote" }, + ) { + const oauthDisabled = mcp.oauth === false + const oauthConfig = typeof mcp.oauth === "object" ? 
mcp.oauth : undefined + let authProvider: McpOAuthProvider | undefined + + if (!oauthDisabled) { + authProvider = new McpOAuthProvider( + key, + mcp.url, + { + clientId: oauthConfig?.clientId, + clientSecret: oauthConfig?.clientSecret, + scope: oauthConfig?.scope, + redirectUri: oauthConfig?.redirectUri, + }, + { + onRedirect: async (url) => { + log.info("oauth redirect requested", { key, url: url.toString() }) + }, + }, + auth, + ) + } + + const transports: Array<{ name: string; transport: TransportWithAuth }> = [ + { + name: "StreamableHTTP", + transport: new StreamableHTTPClientTransport(new URL(mcp.url), { + authProvider, + requestInit: mcp.headers ? { headers: mcp.headers } : undefined, + }), + }, + { + name: "SSE", + transport: new SSEClientTransport(new URL(mcp.url), { + authProvider, + requestInit: mcp.headers ? { headers: mcp.headers } : undefined, + }), + }, + ] + + const connectTimeout = mcp.timeout ?? DEFAULT_TIMEOUT + let lastStatus: Status | undefined + + for (const { name, transport } of transports) { + const result = yield* connectTransport(transport, connectTimeout).pipe( + Effect.map((client) => ({ client, transportName: name })), + Effect.catch((error) => { + const lastError = error instanceof Error ? error : new Error(String(error)) + const isAuthError = + error instanceof UnauthorizedError || (authProvider && lastError.message.includes("OAuth")) + + if (isAuthError) { + log.info("mcp server requires authentication", { key, transport: name }) + + if (lastError.message.includes("registration") || lastError.message.includes("client_id")) { + lastStatus = { + status: "needs_client_registration" as const, + error: "Server does not support dynamic client registration. Please provide clientId in config.", + } + return bus + .publish(TuiEvent.ToastShow, { + title: "MCP Authentication Required", + message: `Server "${key}" requires a pre-registered client ID. 
Add clientId to your config.`, + variant: "warning", + duration: 8000, + }) + .pipe(Effect.ignore, Effect.as(undefined)) + } else { + pendingOAuthTransports.set(key, transport) + lastStatus = { status: "needs_auth" as const } + return bus + .publish(TuiEvent.ToastShow, { + title: "MCP Authentication Required", + message: `Server "${key}" requires authentication. Run: opencode mcp auth ${key}`, + variant: "warning", + duration: 8000, + }) + .pipe(Effect.ignore, Effect.as(undefined)) + } + } + + log.debug("transport connection failed", { + key, + transport: name, + url: mcp.url, + error: lastError.message, + }) + lastStatus = { status: "failed" as const, error: lastError.message } + return Effect.succeed(undefined) + }), + ) + if (result) { + log.info("connected", { key, transport: result.transportName }) + return { client: result.client as MCPClient | undefined, status: { status: "connected" } as Status } + } + // If this was an auth error, stop trying other transports + if (lastStatus?.status === "needs_auth" || lastStatus?.status === "needs_client_registration") break + } + + return { + client: undefined as MCPClient | undefined, + status: (lastStatus ?? { status: "failed", error: "Unknown error" }) as Status, + } + }) + + const connectLocal = Effect.fn("MCP.connectLocal")(function* ( + key: string, + mcp: ConfigMCP.Info & { type: "local" }, + ) { + const [cmd, ...args] = mcp.command + const cwd = Instance.directory + const transport = new StdioClientTransport({ + stderr: "pipe", + command: cmd, + args, + cwd, + env: { + ...process.env, + ...(cmd === "opencode" ? { BUN_BE_BUN: "1" } : {}), + ...mcp.environment, + }, + }) + transport.stderr?.on("data", (chunk: Buffer) => { + log.info(`mcp stderr: ${chunk.toString()}`, { key }) + }) + + const connectTimeout = mcp.timeout ?? 
DEFAULT_TIMEOUT + return yield* connectTransport(transport, connectTimeout).pipe( + Effect.map((client): { client: MCPClient | undefined; status: Status } => ({ + client, + status: { status: "connected" }, + })), + Effect.catch((error): Effect.Effect<{ client: MCPClient | undefined; status: Status }> => { + const msg = error instanceof Error ? error.message : String(error) + log.error("local mcp startup failed", { key, command: mcp.command, cwd, error: msg }) + return Effect.succeed({ client: undefined, status: { status: "failed", error: msg } }) + }), + ) + }) + + const create = Effect.fn("MCP.create")(function* (key: string, mcp: ConfigMCP.Info) { + if (mcp.enabled === false) { + log.info("mcp server disabled", { key }) + return DISABLED_RESULT + } + + log.info("found", { key, type: mcp.type }) + + const { client: mcpClient, status } = + mcp.type === "remote" + ? yield* connectRemote(key, mcp as ConfigMCP.Info & { type: "remote" }) + : yield* connectLocal(key, mcp as ConfigMCP.Info & { type: "local" }) + + if (!mcpClient) { + return { status } satisfies CreateResult + } + + const listed = yield* defs(key, mcpClient, mcp.timeout) + if (!listed) { + yield* Effect.tryPromise(() => mcpClient.close()).pipe(Effect.ignore) + return { status: { status: "failed", error: "Failed to get tools" } } satisfies CreateResult + } + + log.info("create() successfully created client", { key, toolCount: listed.length }) + return { mcpClient, status, defs: listed } satisfies CreateResult + }) + const cfgSvc = yield* Config.Service + + const descendants = Effect.fnUntraced( + function* (pid: number) { + if (process.platform === "win32") return [] as number[] + const pids: number[] = [] + const queue = [pid] + while (queue.length > 0) { + const current = queue.shift()! 
+ const handle = yield* spawner.spawn(ChildProcess.make("pgrep", ["-P", String(current)], { stdin: "ignore" })) + const text = yield* Stream.mkString(Stream.decodeText(handle.stdout)) + yield* handle.exitCode + for (const tok of text.split("\n")) { + const cpid = parseInt(tok, 10) + if (!isNaN(cpid) && !pids.includes(cpid)) { + pids.push(cpid) + queue.push(cpid) + } + } + } + return pids + }, + Effect.scoped, + Effect.catch(() => Effect.succeed([] as number[])), + ) + + function watch(s: State, name: string, client: MCPClient, bridge: EffectBridge.Shape, timeout?: number) { + client.setNotificationHandler(ToolListChangedNotificationSchema, async () => { + log.info("tools list changed notification received", { server: name }) + if (s.clients[name] !== client || s.status[name]?.status !== "connected") return + + const listed = await bridge.promise(defs(name, client, timeout)) + if (!listed) return + if (s.clients[name] !== client || s.status[name]?.status !== "connected") return + + s.defs[name] = listed + await bridge.promise(bus.publish(ToolsChanged, { server: name }).pipe(Effect.ignore)) + }) + } + + const state = yield* InstanceState.make( + Effect.fn("MCP.state")(function* () { + const cfg = yield* cfgSvc.get() + const bridge = yield* EffectBridge.make() + const config = cfg.mcp ?? {} + const s: State = { + status: {}, + clients: {}, + defs: {}, + } + + yield* Effect.forEach( + Object.entries(config), + ([key, mcp]) => + Effect.gen(function* () { + if (!isMcpConfigured(mcp)) { + log.error("Ignoring MCP config entry without type", { key }) + return + } + + if (mcp.enabled === false) { + s.status[key] = { status: "disabled" } + return + } + + const result = yield* create(key, mcp).pipe(Effect.catch(() => Effect.void)) + if (!result) return + + s.status[key] = result.status + if (result.mcpClient) { + s.clients[key] = result.mcpClient + s.defs[key] = result.defs! 
+ watch(s, key, result.mcpClient, bridge, mcp.timeout) + } + }), + { concurrency: "unbounded" }, + ) + + yield* Effect.addFinalizer(() => + Effect.gen(function* () { + yield* Effect.forEach( + Object.values(s.clients), + (client) => + Effect.gen(function* () { + const pid = client.transport instanceof StdioClientTransport ? client.transport.pid : null + if (typeof pid === "number") { + const pids = yield* descendants(pid) + for (const dpid of pids) { + try { + process.kill(dpid, "SIGTERM") + } catch {} + } + } + yield* Effect.tryPromise(() => client.close()).pipe(Effect.ignore) + }), + { concurrency: "unbounded" }, + ) + pendingOAuthTransports.clear() + }), + ) + + return s + }), + ) + + function closeClient(s: State, name: string) { + const client = s.clients[name] + delete s.defs[name] + if (!client) return Effect.void + return Effect.tryPromise(() => client.close()).pipe(Effect.ignore) + } + + const storeClient = Effect.fnUntraced(function* ( + s: State, + name: string, + client: MCPClient, + listed: MCPToolDef[], + timeout?: number, + ) { + const bridge = yield* EffectBridge.make() + yield* closeClient(s, name) + s.status[name] = { status: "connected" } + s.clients[name] = client + s.defs[name] = listed + watch(s, name, client, bridge, timeout) + return s.status[name] + }) + + const status = Effect.fn("MCP.status")(function* () { + const s = yield* InstanceState.get(state) + + const cfg = yield* cfgSvc.get() + const config = cfg.mcp ?? {} + const result: Record = {} + + for (const [key, mcp] of Object.entries(config)) { + if (!isMcpConfigured(mcp)) continue + result[key] = s.status[key] ?? 
{ status: "disabled" } + } + + return result + }) + + const clients = Effect.fn("MCP.clients")(function* () { + const s = yield* InstanceState.get(state) + return s.clients + }) + + const createAndStore = Effect.fn("MCP.createAndStore")(function* (name: string, mcp: ConfigMCP.Info) { + const s = yield* InstanceState.get(state) + const result = yield* create(name, mcp) + + s.status[name] = result.status + if (!result.mcpClient) { + yield* closeClient(s, name) + delete s.clients[name] + return result.status + } + + return yield* storeClient(s, name, result.mcpClient, result.defs!, mcp.timeout) + }) + + const add = Effect.fn("MCP.add")(function* (name: string, mcp: ConfigMCP.Info) { + yield* createAndStore(name, mcp) + const s = yield* InstanceState.get(state) + return { status: s.status } + }) + + const connect = Effect.fn("MCP.connect")(function* (name: string) { + const mcp = yield* getMcpConfig(name) + if (!mcp) { + log.error("MCP config not found or invalid", { name }) + return + } + yield* createAndStore(name, { ...mcp, enabled: true }) + }) + + const disconnect = Effect.fn("MCP.disconnect")(function* (name: string) { + const s = yield* InstanceState.get(state) + yield* closeClient(s, name) + delete s.clients[name] + s.status[name] = { status: "disabled" } + }) + + const tools = Effect.fn("MCP.tools")(function* () { + const result: Record = {} + const s = yield* InstanceState.get(state) + + const cfg = yield* cfgSvc.get() + const config = cfg.mcp ?? {} + const defaultTimeout = cfg.experimental?.mcp_timeout + + const connectedClients = Object.entries(s.clients).filter( + ([clientName]) => s.status[clientName]?.status === "connected", + ) + + yield* Effect.forEach( + connectedClients, + ([clientName, client]) => + Effect.gen(function* () { + const mcpConfig = config[clientName] + const entry = mcpConfig && isMcpConfigured(mcpConfig) ? 
mcpConfig : undefined + + const listed = s.defs[clientName] + if (!listed) { + log.warn("missing cached tools for connected server", { clientName }) + return + } + + const timeout = entry?.timeout ?? defaultTimeout + for (const mcpTool of listed) { + result[sanitize(clientName) + "_" + sanitize(mcpTool.name)] = convertMcpTool(mcpTool, client, timeout) + } + }), + { concurrency: "unbounded" }, + ) + return result + }) + + function collectFromConnected( + s: State, + listFn: (c: Client) => Promise, + label: string, + ) { + return Effect.forEach( + Object.entries(s.clients).filter(([name]) => s.status[name]?.status === "connected"), + ([clientName, client]) => + fetchFromClient(clientName, client, listFn, label).pipe(Effect.map((items) => Object.entries(items ?? {}))), + { concurrency: "unbounded" }, + ).pipe(Effect.map((results) => Object.fromEntries(results.flat()))) + } + + const prompts = Effect.fn("MCP.prompts")(function* () { + const s = yield* InstanceState.get(state) + return yield* collectFromConnected(s, (c) => c.listPrompts().then((r) => r.prompts), "prompts") + }) + + const resources = Effect.fn("MCP.resources")(function* () { + const s = yield* InstanceState.get(state) + return yield* collectFromConnected(s, (c) => c.listResources().then((r) => r.resources), "resources") + }) + + const withClient = Effect.fnUntraced(function* ( + clientName: string, + fn: (client: MCPClient) => Promise, + label: string, + meta?: Record, + ) { + const s = yield* InstanceState.get(state) + const client = s.clients[clientName] + if (!client) { + log.warn(`client not found for ${label}`, { clientName }) + return undefined + } + return yield* Effect.tryPromise({ + try: () => fn(client), + catch: (e: any) => { + log.error(`failed to ${label}`, { clientName, ...meta, error: e?.message }) + return e + }, + }).pipe(Effect.orElseSucceed(() => undefined)) + }) + + const getPrompt = Effect.fn("MCP.getPrompt")(function* ( + clientName: string, + name: string, + args?: Record, + ) { + 
return yield* withClient(clientName, (client) => client.getPrompt({ name, arguments: args }), "getPrompt", { + promptName: name, + }) + }) + + const readResource = Effect.fn("MCP.readResource")(function* (clientName: string, resourceUri: string) { + return yield* withClient(clientName, (client) => client.readResource({ uri: resourceUri }), "readResource", { + resourceUri, + }) + }) + + const getMcpConfig = Effect.fnUntraced(function* (mcpName: string) { + const cfg = yield* cfgSvc.get() + const mcpConfig = cfg.mcp?.[mcpName] + if (!mcpConfig || !isMcpConfigured(mcpConfig)) return undefined + return mcpConfig + }) + + const startAuth = Effect.fn("MCP.startAuth")(function* (mcpName: string) { + const mcpConfig = yield* getMcpConfig(mcpName) + if (!mcpConfig) throw new Error(`MCP server ${mcpName} not found or disabled`) + if (mcpConfig.type !== "remote") throw new Error(`MCP server ${mcpName} is not a remote server`) + if (mcpConfig.oauth === false) throw new Error(`MCP server ${mcpName} has OAuth explicitly disabled`) + + // OAuth config is optional - if not provided, we'll use auto-discovery + const oauthConfig = typeof mcpConfig.oauth === "object" ? 
mcpConfig.oauth : undefined + + // Start the callback server with custom redirectUri if configured + yield* Effect.promise(() => McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)) + + const oauthState = Array.from(crypto.getRandomValues(new Uint8Array(32))) + .map((b) => b.toString(16).padStart(2, "0")) + .join("") + yield* auth.updateOAuthState(mcpName, oauthState) + let capturedUrl: URL | undefined + const authProvider = new McpOAuthProvider( + mcpName, + mcpConfig.url, + { + clientId: oauthConfig?.clientId, + clientSecret: oauthConfig?.clientSecret, + scope: oauthConfig?.scope, + redirectUri: oauthConfig?.redirectUri, + }, + { + onRedirect: async (url) => { + capturedUrl = url + }, + }, + auth, + ) + + const transport = new StreamableHTTPClientTransport(new URL(mcpConfig.url), { authProvider }) + + return yield* Effect.tryPromise({ + try: () => { + const client = new Client({ name: "opencode", version: InstallationVersion }) + return client + .connect(transport) + .then(() => ({ authorizationUrl: "", oauthState, client }) satisfies AuthResult) + }, + catch: (error) => error, + }).pipe( + Effect.catch((error) => { + if (error instanceof UnauthorizedError && capturedUrl) { + pendingOAuthTransports.set(mcpName, transport) + return Effect.succeed({ authorizationUrl: capturedUrl.toString(), oauthState } satisfies AuthResult) + } + return Effect.die(error) + }), + ) + }) + + const authenticate = Effect.fn("MCP.authenticate")(function* (mcpName: string) { + const result = yield* startAuth(mcpName) + if (!result.authorizationUrl) { + const client = "client" in result ? result.client : undefined + const mcpConfig = yield* getMcpConfig(mcpName) + if (!mcpConfig) { + yield* Effect.tryPromise(() => client?.close() ?? Promise.resolve()).pipe(Effect.ignore) + return { status: "failed", error: "MCP config not found after auth" } as Status + } + + const listed = client ? 
yield* defs(mcpName, client, mcpConfig.timeout) : undefined + if (!client || !listed) { + yield* Effect.tryPromise(() => client?.close() ?? Promise.resolve()).pipe(Effect.ignore) + return { status: "failed", error: "Failed to get tools" } as Status + } + + const s = yield* InstanceState.get(state) + yield* auth.clearOAuthState(mcpName) + return yield* storeClient(s, mcpName, client, listed, mcpConfig.timeout) + } + + log.info("opening browser for oauth", { mcpName, url: result.authorizationUrl, state: result.oauthState }) + + const callbackPromise = McpOAuthCallback.waitForCallback(result.oauthState, mcpName) + + yield* Effect.tryPromise(() => open(result.authorizationUrl)).pipe( + Effect.flatMap((subprocess) => + Effect.callback((resume) => { + const timer = setTimeout(() => resume(Effect.void), 500) + subprocess.on("error", (err) => { + clearTimeout(timer) + resume(Effect.fail(err)) + }) + subprocess.on("exit", (code) => { + if (code !== null && code !== 0) { + clearTimeout(timer) + resume(Effect.fail(new Error(`Browser open failed with exit code ${code}`))) + } + }) + }), + ), + Effect.catch(() => { + log.warn("failed to open browser, user must open URL manually", { mcpName }) + return bus.publish(BrowserOpenFailed, { mcpName, url: result.authorizationUrl }).pipe(Effect.ignore) + }), + ) + + const code = yield* Effect.promise(() => callbackPromise) + + const storedState = yield* auth.getOAuthState(mcpName) + if (storedState !== result.oauthState) { + yield* auth.clearOAuthState(mcpName) + throw new Error("OAuth state mismatch - potential CSRF attack") + } + yield* auth.clearOAuthState(mcpName) + return yield* finishAuth(mcpName, code) + }) + + const finishAuth = Effect.fn("MCP.finishAuth")(function* (mcpName: string, authorizationCode: string) { + const transport = pendingOAuthTransports.get(mcpName) + if (!transport) throw new Error(`No pending OAuth flow for MCP server: ${mcpName}`) + + const result = yield* Effect.tryPromise({ + try: () => 
transport.finishAuth(authorizationCode).then(() => true as const), + catch: (error) => { + log.error("failed to finish oauth", { mcpName, error }) + return error + }, + }).pipe(Effect.option) + + if (Option.isNone(result)) { + return { status: "failed", error: "OAuth completion failed" } as Status + } + + yield* auth.clearCodeVerifier(mcpName) + pendingOAuthTransports.delete(mcpName) + + const mcpConfig = yield* getMcpConfig(mcpName) + if (!mcpConfig) return { status: "failed", error: "MCP config not found after auth" } as Status + + return yield* createAndStore(mcpName, mcpConfig) + }) + + const removeAuth = Effect.fn("MCP.removeAuth")(function* (mcpName: string) { + yield* auth.remove(mcpName) + McpOAuthCallback.cancelPending(mcpName) + pendingOAuthTransports.delete(mcpName) + log.info("removed oauth credentials", { mcpName }) + }) + + const supportsOAuth = Effect.fn("MCP.supportsOAuth")(function* (mcpName: string) { + const mcpConfig = yield* getMcpConfig(mcpName) + if (!mcpConfig) return false + return mcpConfig.type === "remote" && mcpConfig.oauth !== false + }) + + const hasStoredTokens = Effect.fn("MCP.hasStoredTokens")(function* (mcpName: string) { + const entry = yield* auth.get(mcpName) + return !!entry?.tokens + }) + + const getAuthStatus = Effect.fn("MCP.getAuthStatus")(function* (mcpName: string) { + const entry = yield* auth.get(mcpName) + if (!entry?.tokens) return "not_authenticated" as AuthStatus + const expired = yield* auth.isTokenExpired(mcpName) + return (expired ? 
"expired" : "authenticated") as AuthStatus + }) + + return Service.of({ + status, + clients, + tools, + prompts, + resources, + add, + connect, + disconnect, + getPrompt, + readResource, + startAuth, + authenticate, + finishAuth, + removeAuth, + supportsOAuth, + hasStoredTokens, + getAuthStatus, + }) + }), +) + +export type AuthStatus = "authenticated" | "expired" | "not_authenticated" + +// --- Per-service runtime --- + +export const defaultLayer = layer.pipe( + Layer.provide(McpAuth.layer), + Layer.provide(Bus.layer), + Layer.provide(Config.defaultLayer), + Layer.provide(CrossSpawnSpawner.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), +) + +export * as MCP from "." diff --git a/packages/opencode/src/mcp/mcp.ts b/packages/opencode/src/mcp/mcp.ts deleted file mode 100644 index 61201ce76d..0000000000 --- a/packages/opencode/src/mcp/mcp.ts +++ /dev/null @@ -1,931 +0,0 @@ -import { dynamicTool, type Tool, jsonSchema, type JSONSchema7 } from "ai" -import { Client } from "@modelcontextprotocol/sdk/client/index.js" -import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js" -import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js" -import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js" -import { UnauthorizedError } from "@modelcontextprotocol/sdk/client/auth.js" -import { - CallToolResultSchema, - type Tool as MCPToolDef, - ToolListChangedNotificationSchema, -} from "@modelcontextprotocol/sdk/types.js" -import { Config } from "../config" -import { ConfigMCP } from "../config/mcp" -import { Log } from "../util" -import { NamedError } from "@opencode-ai/shared/util/error" -import z from "zod/v4" -import { Instance } from "../project/instance" -import { Installation } from "../installation" -import { InstallationVersion } from "../installation/version" -import { withTimeout } from "@/util/timeout" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { 
McpOAuthProvider } from "./oauth-provider" -import { McpOAuthCallback } from "./oauth-callback" -import { McpAuth } from "./auth" -import { BusEvent } from "../bus/bus-event" -import { Bus } from "@/bus" -import { TuiEvent } from "@/cli/cmd/tui/event" -import open from "open" -import { Effect, Exit, Layer, Option, Context, Stream } from "effect" -import { EffectBridge } from "@/effect" -import { InstanceState } from "@/effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" - -const log = Log.create({ service: "mcp" }) -const DEFAULT_TIMEOUT = 30_000 - -export const Resource = z - .object({ - name: z.string(), - uri: z.string(), - description: z.string().optional(), - mimeType: z.string().optional(), - client: z.string(), - }) - .meta({ ref: "McpResource" }) -export type Resource = z.infer - -export const ToolsChanged = BusEvent.define( - "mcp.tools.changed", - z.object({ - server: z.string(), - }), -) - -export const BrowserOpenFailed = BusEvent.define( - "mcp.browser.open.failed", - z.object({ - mcpName: z.string(), - url: z.string(), - }), -) - -export const Failed = NamedError.create( - "MCPFailed", - z.object({ - name: z.string(), - }), -) - -type MCPClient = Client - -export const Status = z - .discriminatedUnion("status", [ - z - .object({ - status: z.literal("connected"), - }) - .meta({ - ref: "MCPStatusConnected", - }), - z - .object({ - status: z.literal("disabled"), - }) - .meta({ - ref: "MCPStatusDisabled", - }), - z - .object({ - status: z.literal("failed"), - error: z.string(), - }) - .meta({ - ref: "MCPStatusFailed", - }), - z - .object({ - status: z.literal("needs_auth"), - }) - .meta({ - ref: "MCPStatusNeedsAuth", - }), - z - .object({ - status: z.literal("needs_client_registration"), - error: z.string(), - }) - .meta({ - ref: "MCPStatusNeedsClientRegistration", - }), - ]) - .meta({ - ref: "MCPStatus", - }) -export type Status = z.infer - -// Store 
transports for OAuth servers to allow finishing auth -type TransportWithAuth = StreamableHTTPClientTransport | SSEClientTransport -const pendingOAuthTransports = new Map() - -// Prompt cache types -type PromptInfo = Awaited>["prompts"][number] -type ResourceInfo = Awaited>["resources"][number] -type McpEntry = NonNullable[string] - -function isMcpConfigured(entry: McpEntry): entry is ConfigMCP.Info { - return typeof entry === "object" && entry !== null && "type" in entry -} - -const sanitize = (s: string) => s.replace(/[^a-zA-Z0-9_-]/g, "_") - -// Convert MCP tool definition to AI SDK Tool type -function convertMcpTool(mcpTool: MCPToolDef, client: MCPClient, timeout?: number): Tool { - const inputSchema = mcpTool.inputSchema - - // Spread first, then override type to ensure it's always "object" - const schema: JSONSchema7 = { - ...(inputSchema as JSONSchema7), - type: "object", - properties: (inputSchema.properties ?? {}) as JSONSchema7["properties"], - additionalProperties: false, - } - - return dynamicTool({ - description: mcpTool.description ?? "", - inputSchema: jsonSchema(schema), - execute: async (args: unknown) => { - return client.callTool( - { - name: mcpTool.name, - arguments: (args || {}) as Record, - }, - CallToolResultSchema, - { - resetTimeoutOnProgress: true, - timeout, - }, - ) - }, - }) -} - -function defs(key: string, client: MCPClient, timeout?: number) { - return Effect.tryPromise({ - try: () => withTimeout(client.listTools(), timeout ?? DEFAULT_TIMEOUT), - catch: (err) => (err instanceof Error ? 
err : new Error(String(err))), - }).pipe( - Effect.map((result) => result.tools), - Effect.catch((err) => { - log.error("failed to get tools from client", { key, error: err }) - return Effect.succeed(undefined) - }), - ) -} - -function fetchFromClient( - clientName: string, - client: Client, - listFn: (c: Client) => Promise, - label: string, -) { - return Effect.tryPromise({ - try: () => listFn(client), - catch: (e: any) => { - log.error(`failed to get ${label}`, { clientName, error: e.message }) - return e - }, - }).pipe( - Effect.map((items) => { - const out: Record = {} - const sanitizedClient = sanitize(clientName) - for (const item of items) { - out[sanitizedClient + ":" + sanitize(item.name)] = { ...item, client: clientName } - } - return out - }), - Effect.orElseSucceed(() => undefined), - ) -} - -interface CreateResult { - mcpClient?: MCPClient - status: Status - defs?: MCPToolDef[] -} - -interface AuthResult { - authorizationUrl: string - oauthState: string - client?: MCPClient -} - -// --- Effect Service --- - -interface State { - status: Record - clients: Record - defs: Record -} - -export interface Interface { - readonly status: () => Effect.Effect> - readonly clients: () => Effect.Effect> - readonly tools: () => Effect.Effect> - readonly prompts: () => Effect.Effect> - readonly resources: () => Effect.Effect> - readonly add: (name: string, mcp: ConfigMCP.Info) => Effect.Effect<{ status: Record | Status }> - readonly connect: (name: string) => Effect.Effect - readonly disconnect: (name: string) => Effect.Effect - readonly getPrompt: ( - clientName: string, - name: string, - args?: Record, - ) => Effect.Effect> | undefined> - readonly readResource: ( - clientName: string, - resourceUri: string, - ) => Effect.Effect> | undefined> - readonly startAuth: (mcpName: string) => Effect.Effect<{ authorizationUrl: string; oauthState: string }> - readonly authenticate: (mcpName: string) => Effect.Effect - readonly finishAuth: (mcpName: string, authorizationCode: 
string) => Effect.Effect - readonly removeAuth: (mcpName: string) => Effect.Effect - readonly supportsOAuth: (mcpName: string) => Effect.Effect - readonly hasStoredTokens: (mcpName: string) => Effect.Effect - readonly getAuthStatus: (mcpName: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/MCP") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - const auth = yield* McpAuth.Service - const bus = yield* Bus.Service - - type Transport = StdioClientTransport | StreamableHTTPClientTransport | SSEClientTransport - - /** - * Connect a client via the given transport with resource safety: - * on failure the transport is closed; on success the caller owns it. - */ - const connectTransport = (transport: Transport, timeout: number) => - Effect.acquireUseRelease( - Effect.succeed(transport), - (t) => - Effect.tryPromise({ - try: () => { - const client = new Client({ name: "opencode", version: InstallationVersion }) - return withTimeout(client.connect(t), timeout).then(() => client) - }, - catch: (e) => (e instanceof Error ? e : new Error(String(e))), - }), - (t, exit) => (Exit.isFailure(exit) ? Effect.tryPromise(() => t.close()).pipe(Effect.ignore) : Effect.void), - ) - - const DISABLED_RESULT: CreateResult = { status: { status: "disabled" } } - - const connectRemote = Effect.fn("MCP.connectRemote")(function* ( - key: string, - mcp: ConfigMCP.Info & { type: "remote" }, - ) { - const oauthDisabled = mcp.oauth === false - const oauthConfig = typeof mcp.oauth === "object" ? 
mcp.oauth : undefined - let authProvider: McpOAuthProvider | undefined - - if (!oauthDisabled) { - authProvider = new McpOAuthProvider( - key, - mcp.url, - { - clientId: oauthConfig?.clientId, - clientSecret: oauthConfig?.clientSecret, - scope: oauthConfig?.scope, - redirectUri: oauthConfig?.redirectUri, - }, - { - onRedirect: async (url) => { - log.info("oauth redirect requested", { key, url: url.toString() }) - }, - }, - auth, - ) - } - - const transports: Array<{ name: string; transport: TransportWithAuth }> = [ - { - name: "StreamableHTTP", - transport: new StreamableHTTPClientTransport(new URL(mcp.url), { - authProvider, - requestInit: mcp.headers ? { headers: mcp.headers } : undefined, - }), - }, - { - name: "SSE", - transport: new SSEClientTransport(new URL(mcp.url), { - authProvider, - requestInit: mcp.headers ? { headers: mcp.headers } : undefined, - }), - }, - ] - - const connectTimeout = mcp.timeout ?? DEFAULT_TIMEOUT - let lastStatus: Status | undefined - - for (const { name, transport } of transports) { - const result = yield* connectTransport(transport, connectTimeout).pipe( - Effect.map((client) => ({ client, transportName: name })), - Effect.catch((error) => { - const lastError = error instanceof Error ? error : new Error(String(error)) - const isAuthError = - error instanceof UnauthorizedError || (authProvider && lastError.message.includes("OAuth")) - - if (isAuthError) { - log.info("mcp server requires authentication", { key, transport: name }) - - if (lastError.message.includes("registration") || lastError.message.includes("client_id")) { - lastStatus = { - status: "needs_client_registration" as const, - error: "Server does not support dynamic client registration. Please provide clientId in config.", - } - return bus - .publish(TuiEvent.ToastShow, { - title: "MCP Authentication Required", - message: `Server "${key}" requires a pre-registered client ID. 
Add clientId to your config.`, - variant: "warning", - duration: 8000, - }) - .pipe(Effect.ignore, Effect.as(undefined)) - } else { - pendingOAuthTransports.set(key, transport) - lastStatus = { status: "needs_auth" as const } - return bus - .publish(TuiEvent.ToastShow, { - title: "MCP Authentication Required", - message: `Server "${key}" requires authentication. Run: opencode mcp auth ${key}`, - variant: "warning", - duration: 8000, - }) - .pipe(Effect.ignore, Effect.as(undefined)) - } - } - - log.debug("transport connection failed", { - key, - transport: name, - url: mcp.url, - error: lastError.message, - }) - lastStatus = { status: "failed" as const, error: lastError.message } - return Effect.succeed(undefined) - }), - ) - if (result) { - log.info("connected", { key, transport: result.transportName }) - return { client: result.client as MCPClient | undefined, status: { status: "connected" } as Status } - } - // If this was an auth error, stop trying other transports - if (lastStatus?.status === "needs_auth" || lastStatus?.status === "needs_client_registration") break - } - - return { - client: undefined as MCPClient | undefined, - status: (lastStatus ?? { status: "failed", error: "Unknown error" }) as Status, - } - }) - - const connectLocal = Effect.fn("MCP.connectLocal")(function* ( - key: string, - mcp: ConfigMCP.Info & { type: "local" }, - ) { - const [cmd, ...args] = mcp.command - const cwd = Instance.directory - const transport = new StdioClientTransport({ - stderr: "pipe", - command: cmd, - args, - cwd, - env: { - ...process.env, - ...(cmd === "opencode" ? { BUN_BE_BUN: "1" } : {}), - ...mcp.environment, - }, - }) - transport.stderr?.on("data", (chunk: Buffer) => { - log.info(`mcp stderr: ${chunk.toString()}`, { key }) - }) - - const connectTimeout = mcp.timeout ?? 
DEFAULT_TIMEOUT - return yield* connectTransport(transport, connectTimeout).pipe( - Effect.map((client): { client: MCPClient | undefined; status: Status } => ({ - client, - status: { status: "connected" }, - })), - Effect.catch((error): Effect.Effect<{ client: MCPClient | undefined; status: Status }> => { - const msg = error instanceof Error ? error.message : String(error) - log.error("local mcp startup failed", { key, command: mcp.command, cwd, error: msg }) - return Effect.succeed({ client: undefined, status: { status: "failed", error: msg } }) - }), - ) - }) - - const create = Effect.fn("MCP.create")(function* (key: string, mcp: ConfigMCP.Info) { - if (mcp.enabled === false) { - log.info("mcp server disabled", { key }) - return DISABLED_RESULT - } - - log.info("found", { key, type: mcp.type }) - - const { client: mcpClient, status } = - mcp.type === "remote" - ? yield* connectRemote(key, mcp as ConfigMCP.Info & { type: "remote" }) - : yield* connectLocal(key, mcp as ConfigMCP.Info & { type: "local" }) - - if (!mcpClient) { - return { status } satisfies CreateResult - } - - const listed = yield* defs(key, mcpClient, mcp.timeout) - if (!listed) { - yield* Effect.tryPromise(() => mcpClient.close()).pipe(Effect.ignore) - return { status: { status: "failed", error: "Failed to get tools" } } satisfies CreateResult - } - - log.info("create() successfully created client", { key, toolCount: listed.length }) - return { mcpClient, status, defs: listed } satisfies CreateResult - }) - const cfgSvc = yield* Config.Service - - const descendants = Effect.fnUntraced( - function* (pid: number) { - if (process.platform === "win32") return [] as number[] - const pids: number[] = [] - const queue = [pid] - while (queue.length > 0) { - const current = queue.shift()! 
- const handle = yield* spawner.spawn(ChildProcess.make("pgrep", ["-P", String(current)], { stdin: "ignore" })) - const text = yield* Stream.mkString(Stream.decodeText(handle.stdout)) - yield* handle.exitCode - for (const tok of text.split("\n")) { - const cpid = parseInt(tok, 10) - if (!isNaN(cpid) && !pids.includes(cpid)) { - pids.push(cpid) - queue.push(cpid) - } - } - } - return pids - }, - Effect.scoped, - Effect.catch(() => Effect.succeed([] as number[])), - ) - - function watch(s: State, name: string, client: MCPClient, bridge: EffectBridge.Shape, timeout?: number) { - client.setNotificationHandler(ToolListChangedNotificationSchema, async () => { - log.info("tools list changed notification received", { server: name }) - if (s.clients[name] !== client || s.status[name]?.status !== "connected") return - - const listed = await bridge.promise(defs(name, client, timeout)) - if (!listed) return - if (s.clients[name] !== client || s.status[name]?.status !== "connected") return - - s.defs[name] = listed - await bridge.promise(bus.publish(ToolsChanged, { server: name }).pipe(Effect.ignore)) - }) - } - - const state = yield* InstanceState.make( - Effect.fn("MCP.state")(function* () { - const cfg = yield* cfgSvc.get() - const bridge = yield* EffectBridge.make() - const config = cfg.mcp ?? {} - const s: State = { - status: {}, - clients: {}, - defs: {}, - } - - yield* Effect.forEach( - Object.entries(config), - ([key, mcp]) => - Effect.gen(function* () { - if (!isMcpConfigured(mcp)) { - log.error("Ignoring MCP config entry without type", { key }) - return - } - - if (mcp.enabled === false) { - s.status[key] = { status: "disabled" } - return - } - - const result = yield* create(key, mcp).pipe(Effect.catch(() => Effect.void)) - if (!result) return - - s.status[key] = result.status - if (result.mcpClient) { - s.clients[key] = result.mcpClient - s.defs[key] = result.defs! 
- watch(s, key, result.mcpClient, bridge, mcp.timeout) - } - }), - { concurrency: "unbounded" }, - ) - - yield* Effect.addFinalizer(() => - Effect.gen(function* () { - yield* Effect.forEach( - Object.values(s.clients), - (client) => - Effect.gen(function* () { - const pid = client.transport instanceof StdioClientTransport ? client.transport.pid : null - if (typeof pid === "number") { - const pids = yield* descendants(pid) - for (const dpid of pids) { - try { - process.kill(dpid, "SIGTERM") - } catch {} - } - } - yield* Effect.tryPromise(() => client.close()).pipe(Effect.ignore) - }), - { concurrency: "unbounded" }, - ) - pendingOAuthTransports.clear() - }), - ) - - return s - }), - ) - - function closeClient(s: State, name: string) { - const client = s.clients[name] - delete s.defs[name] - if (!client) return Effect.void - return Effect.tryPromise(() => client.close()).pipe(Effect.ignore) - } - - const storeClient = Effect.fnUntraced(function* ( - s: State, - name: string, - client: MCPClient, - listed: MCPToolDef[], - timeout?: number, - ) { - const bridge = yield* EffectBridge.make() - yield* closeClient(s, name) - s.status[name] = { status: "connected" } - s.clients[name] = client - s.defs[name] = listed - watch(s, name, client, bridge, timeout) - return s.status[name] - }) - - const status = Effect.fn("MCP.status")(function* () { - const s = yield* InstanceState.get(state) - - const cfg = yield* cfgSvc.get() - const config = cfg.mcp ?? {} - const result: Record = {} - - for (const [key, mcp] of Object.entries(config)) { - if (!isMcpConfigured(mcp)) continue - result[key] = s.status[key] ?? 
{ status: "disabled" } - } - - return result - }) - - const clients = Effect.fn("MCP.clients")(function* () { - const s = yield* InstanceState.get(state) - return s.clients - }) - - const createAndStore = Effect.fn("MCP.createAndStore")(function* (name: string, mcp: ConfigMCP.Info) { - const s = yield* InstanceState.get(state) - const result = yield* create(name, mcp) - - s.status[name] = result.status - if (!result.mcpClient) { - yield* closeClient(s, name) - delete s.clients[name] - return result.status - } - - return yield* storeClient(s, name, result.mcpClient, result.defs!, mcp.timeout) - }) - - const add = Effect.fn("MCP.add")(function* (name: string, mcp: ConfigMCP.Info) { - yield* createAndStore(name, mcp) - const s = yield* InstanceState.get(state) - return { status: s.status } - }) - - const connect = Effect.fn("MCP.connect")(function* (name: string) { - const mcp = yield* getMcpConfig(name) - if (!mcp) { - log.error("MCP config not found or invalid", { name }) - return - } - yield* createAndStore(name, { ...mcp, enabled: true }) - }) - - const disconnect = Effect.fn("MCP.disconnect")(function* (name: string) { - const s = yield* InstanceState.get(state) - yield* closeClient(s, name) - delete s.clients[name] - s.status[name] = { status: "disabled" } - }) - - const tools = Effect.fn("MCP.tools")(function* () { - const result: Record = {} - const s = yield* InstanceState.get(state) - - const cfg = yield* cfgSvc.get() - const config = cfg.mcp ?? {} - const defaultTimeout = cfg.experimental?.mcp_timeout - - const connectedClients = Object.entries(s.clients).filter( - ([clientName]) => s.status[clientName]?.status === "connected", - ) - - yield* Effect.forEach( - connectedClients, - ([clientName, client]) => - Effect.gen(function* () { - const mcpConfig = config[clientName] - const entry = mcpConfig && isMcpConfigured(mcpConfig) ? 
mcpConfig : undefined - - const listed = s.defs[clientName] - if (!listed) { - log.warn("missing cached tools for connected server", { clientName }) - return - } - - const timeout = entry?.timeout ?? defaultTimeout - for (const mcpTool of listed) { - result[sanitize(clientName) + "_" + sanitize(mcpTool.name)] = convertMcpTool(mcpTool, client, timeout) - } - }), - { concurrency: "unbounded" }, - ) - return result - }) - - function collectFromConnected( - s: State, - listFn: (c: Client) => Promise, - label: string, - ) { - return Effect.forEach( - Object.entries(s.clients).filter(([name]) => s.status[name]?.status === "connected"), - ([clientName, client]) => - fetchFromClient(clientName, client, listFn, label).pipe(Effect.map((items) => Object.entries(items ?? {}))), - { concurrency: "unbounded" }, - ).pipe(Effect.map((results) => Object.fromEntries(results.flat()))) - } - - const prompts = Effect.fn("MCP.prompts")(function* () { - const s = yield* InstanceState.get(state) - return yield* collectFromConnected(s, (c) => c.listPrompts().then((r) => r.prompts), "prompts") - }) - - const resources = Effect.fn("MCP.resources")(function* () { - const s = yield* InstanceState.get(state) - return yield* collectFromConnected(s, (c) => c.listResources().then((r) => r.resources), "resources") - }) - - const withClient = Effect.fnUntraced(function* ( - clientName: string, - fn: (client: MCPClient) => Promise, - label: string, - meta?: Record, - ) { - const s = yield* InstanceState.get(state) - const client = s.clients[clientName] - if (!client) { - log.warn(`client not found for ${label}`, { clientName }) - return undefined - } - return yield* Effect.tryPromise({ - try: () => fn(client), - catch: (e: any) => { - log.error(`failed to ${label}`, { clientName, ...meta, error: e?.message }) - return e - }, - }).pipe(Effect.orElseSucceed(() => undefined)) - }) - - const getPrompt = Effect.fn("MCP.getPrompt")(function* ( - clientName: string, - name: string, - args?: Record, - ) { - 
return yield* withClient(clientName, (client) => client.getPrompt({ name, arguments: args }), "getPrompt", { - promptName: name, - }) - }) - - const readResource = Effect.fn("MCP.readResource")(function* (clientName: string, resourceUri: string) { - return yield* withClient(clientName, (client) => client.readResource({ uri: resourceUri }), "readResource", { - resourceUri, - }) - }) - - const getMcpConfig = Effect.fnUntraced(function* (mcpName: string) { - const cfg = yield* cfgSvc.get() - const mcpConfig = cfg.mcp?.[mcpName] - if (!mcpConfig || !isMcpConfigured(mcpConfig)) return undefined - return mcpConfig - }) - - const startAuth = Effect.fn("MCP.startAuth")(function* (mcpName: string) { - const mcpConfig = yield* getMcpConfig(mcpName) - if (!mcpConfig) throw new Error(`MCP server ${mcpName} not found or disabled`) - if (mcpConfig.type !== "remote") throw new Error(`MCP server ${mcpName} is not a remote server`) - if (mcpConfig.oauth === false) throw new Error(`MCP server ${mcpName} has OAuth explicitly disabled`) - - // OAuth config is optional - if not provided, we'll use auto-discovery - const oauthConfig = typeof mcpConfig.oauth === "object" ? 
mcpConfig.oauth : undefined - - // Start the callback server with custom redirectUri if configured - yield* Effect.promise(() => McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)) - - const oauthState = Array.from(crypto.getRandomValues(new Uint8Array(32))) - .map((b) => b.toString(16).padStart(2, "0")) - .join("") - yield* auth.updateOAuthState(mcpName, oauthState) - let capturedUrl: URL | undefined - const authProvider = new McpOAuthProvider( - mcpName, - mcpConfig.url, - { - clientId: oauthConfig?.clientId, - clientSecret: oauthConfig?.clientSecret, - scope: oauthConfig?.scope, - redirectUri: oauthConfig?.redirectUri, - }, - { - onRedirect: async (url) => { - capturedUrl = url - }, - }, - auth, - ) - - const transport = new StreamableHTTPClientTransport(new URL(mcpConfig.url), { authProvider }) - - return yield* Effect.tryPromise({ - try: () => { - const client = new Client({ name: "opencode", version: InstallationVersion }) - return client - .connect(transport) - .then(() => ({ authorizationUrl: "", oauthState, client }) satisfies AuthResult) - }, - catch: (error) => error, - }).pipe( - Effect.catch((error) => { - if (error instanceof UnauthorizedError && capturedUrl) { - pendingOAuthTransports.set(mcpName, transport) - return Effect.succeed({ authorizationUrl: capturedUrl.toString(), oauthState } satisfies AuthResult) - } - return Effect.die(error) - }), - ) - }) - - const authenticate = Effect.fn("MCP.authenticate")(function* (mcpName: string) { - const result = yield* startAuth(mcpName) - if (!result.authorizationUrl) { - const client = "client" in result ? result.client : undefined - const mcpConfig = yield* getMcpConfig(mcpName) - if (!mcpConfig) { - yield* Effect.tryPromise(() => client?.close() ?? Promise.resolve()).pipe(Effect.ignore) - return { status: "failed", error: "MCP config not found after auth" } as Status - } - - const listed = client ? 
yield* defs(mcpName, client, mcpConfig.timeout) : undefined - if (!client || !listed) { - yield* Effect.tryPromise(() => client?.close() ?? Promise.resolve()).pipe(Effect.ignore) - return { status: "failed", error: "Failed to get tools" } as Status - } - - const s = yield* InstanceState.get(state) - yield* auth.clearOAuthState(mcpName) - return yield* storeClient(s, mcpName, client, listed, mcpConfig.timeout) - } - - log.info("opening browser for oauth", { mcpName, url: result.authorizationUrl, state: result.oauthState }) - - const callbackPromise = McpOAuthCallback.waitForCallback(result.oauthState, mcpName) - - yield* Effect.tryPromise(() => open(result.authorizationUrl)).pipe( - Effect.flatMap((subprocess) => - Effect.callback((resume) => { - const timer = setTimeout(() => resume(Effect.void), 500) - subprocess.on("error", (err) => { - clearTimeout(timer) - resume(Effect.fail(err)) - }) - subprocess.on("exit", (code) => { - if (code !== null && code !== 0) { - clearTimeout(timer) - resume(Effect.fail(new Error(`Browser open failed with exit code ${code}`))) - } - }) - }), - ), - Effect.catch(() => { - log.warn("failed to open browser, user must open URL manually", { mcpName }) - return bus.publish(BrowserOpenFailed, { mcpName, url: result.authorizationUrl }).pipe(Effect.ignore) - }), - ) - - const code = yield* Effect.promise(() => callbackPromise) - - const storedState = yield* auth.getOAuthState(mcpName) - if (storedState !== result.oauthState) { - yield* auth.clearOAuthState(mcpName) - throw new Error("OAuth state mismatch - potential CSRF attack") - } - yield* auth.clearOAuthState(mcpName) - return yield* finishAuth(mcpName, code) - }) - - const finishAuth = Effect.fn("MCP.finishAuth")(function* (mcpName: string, authorizationCode: string) { - const transport = pendingOAuthTransports.get(mcpName) - if (!transport) throw new Error(`No pending OAuth flow for MCP server: ${mcpName}`) - - const result = yield* Effect.tryPromise({ - try: () => 
transport.finishAuth(authorizationCode).then(() => true as const), - catch: (error) => { - log.error("failed to finish oauth", { mcpName, error }) - return error - }, - }).pipe(Effect.option) - - if (Option.isNone(result)) { - return { status: "failed", error: "OAuth completion failed" } as Status - } - - yield* auth.clearCodeVerifier(mcpName) - pendingOAuthTransports.delete(mcpName) - - const mcpConfig = yield* getMcpConfig(mcpName) - if (!mcpConfig) return { status: "failed", error: "MCP config not found after auth" } as Status - - return yield* createAndStore(mcpName, mcpConfig) - }) - - const removeAuth = Effect.fn("MCP.removeAuth")(function* (mcpName: string) { - yield* auth.remove(mcpName) - McpOAuthCallback.cancelPending(mcpName) - pendingOAuthTransports.delete(mcpName) - log.info("removed oauth credentials", { mcpName }) - }) - - const supportsOAuth = Effect.fn("MCP.supportsOAuth")(function* (mcpName: string) { - const mcpConfig = yield* getMcpConfig(mcpName) - if (!mcpConfig) return false - return mcpConfig.type === "remote" && mcpConfig.oauth !== false - }) - - const hasStoredTokens = Effect.fn("MCP.hasStoredTokens")(function* (mcpName: string) { - const entry = yield* auth.get(mcpName) - return !!entry?.tokens - }) - - const getAuthStatus = Effect.fn("MCP.getAuthStatus")(function* (mcpName: string) { - const entry = yield* auth.get(mcpName) - if (!entry?.tokens) return "not_authenticated" as AuthStatus - const expired = yield* auth.isTokenExpired(mcpName) - return (expired ? 
"expired" : "authenticated") as AuthStatus - }) - - return Service.of({ - status, - clients, - tools, - prompts, - resources, - add, - connect, - disconnect, - getPrompt, - readResource, - startAuth, - authenticate, - finishAuth, - removeAuth, - supportsOAuth, - hasStoredTokens, - getAuthStatus, - }) - }), -) - -export type AuthStatus = "authenticated" | "expired" | "not_authenticated" - -// --- Per-service runtime --- - -export const defaultLayer = layer.pipe( - Layer.provide(McpAuth.layer), - Layer.provide(Bus.layer), - Layer.provide(Config.defaultLayer), - Layer.provide(CrossSpawnSpawner.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), -) From 49bbea5aed2c4662c9740745b760817c1a88cd56 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:45 -0400 Subject: [PATCH 051/120] refactor: collapse snapshot barrel into snapshot/index.ts (#22916) --- packages/opencode/src/snapshot/index.ts | 778 ++++++++++++++++++++- packages/opencode/src/snapshot/snapshot.ts | 775 -------------------- 2 files changed, 777 insertions(+), 776 deletions(-) delete mode 100644 packages/opencode/src/snapshot/snapshot.ts diff --git a/packages/opencode/src/snapshot/index.ts b/packages/opencode/src/snapshot/index.ts index 49eafe4450..d38034e998 100644 --- a/packages/opencode/src/snapshot/index.ts +++ b/packages/opencode/src/snapshot/index.ts @@ -1 +1,777 @@ -export * as Snapshot from "./snapshot" +import { Cause, Duration, Effect, Layer, Schedule, Semaphore, Context, Stream } from "effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import { formatPatch, structuredPatch } from "diff" +import path from "path" +import z from "zod" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { InstanceState } from "@/effect" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { Hash } from "@opencode-ai/shared/util/hash" +import { Config } from "../config" +import { Global } from "../global" +import { Log 
} from "../util" + +export const Patch = z.object({ + hash: z.string(), + files: z.string().array(), +}) +export type Patch = z.infer + +export const FileDiff = z + .object({ + file: z.string(), + patch: z.string(), + additions: z.number(), + deletions: z.number(), + status: z.enum(["added", "deleted", "modified"]).optional(), + }) + .meta({ + ref: "SnapshotFileDiff", + }) +export type FileDiff = z.infer + +const log = Log.create({ service: "snapshot" }) +const prune = "7.days" +const limit = 2 * 1024 * 1024 +const core = ["-c", "core.longpaths=true", "-c", "core.symlinks=true"] +const cfg = ["-c", "core.autocrlf=false", ...core] +const quote = [...cfg, "-c", "core.quotepath=false"] +interface GitResult { + readonly code: ChildProcessSpawner.ExitCode + readonly text: string + readonly stderr: string +} + +type State = Omit + +export interface Interface { + readonly init: () => Effect.Effect + readonly cleanup: () => Effect.Effect + readonly track: () => Effect.Effect + readonly patch: (hash: string) => Effect.Effect + readonly restore: (snapshot: string) => Effect.Effect + readonly revert: (patches: Patch[]) => Effect.Effect + readonly diff: (hash: string) => Effect.Effect + readonly diffFull: (from: string, to: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Snapshot") {} + +export const layer: Layer.Layer< + Service, + never, + AppFileSystem.Service | ChildProcessSpawner.ChildProcessSpawner | Config.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const fs = yield* AppFileSystem.Service + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + const config = yield* Config.Service + const locks = new Map() + + const lock = (key: string) => { + const hit = locks.get(key) + if (hit) return hit + + const next = Semaphore.makeUnsafe(1) + locks.set(key, next) + return next + } + + const state = yield* InstanceState.make( + Effect.fn("Snapshot.state")(function* (ctx) { + const state = { + directory: 
ctx.directory, + worktree: ctx.worktree, + gitdir: path.join(Global.Path.data, "snapshot", ctx.project.id, Hash.fast(ctx.worktree)), + vcs: ctx.project.vcs, + } + + const args = (cmd: string[]) => ["--git-dir", state.gitdir, "--work-tree", state.worktree, ...cmd] + + const enc = new TextEncoder() + const feed = (list: string[]) => Stream.make(enc.encode(list.join("\0") + "\0")) + + const git = Effect.fnUntraced( + function* ( + cmd: string[], + opts?: { cwd?: string; env?: Record; stdin?: ChildProcess.CommandInput }, + ) { + const proc = ChildProcess.make("git", cmd, { + cwd: opts?.cwd, + env: opts?.env, + extendEnv: true, + stdin: opts?.stdin, + }) + const handle = yield* spawner.spawn(proc) + const [text, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + return { code, text, stderr } satisfies GitResult + }, + Effect.scoped, + Effect.catch((err) => + Effect.succeed({ + code: ChildProcessSpawner.ExitCode(1), + text: "", + stderr: err instanceof Error ? 
err.message : String(err), + }), + ), + ) + + const ignore = Effect.fnUntraced(function* (files: string[]) { + if (!files.length) return new Set() + const check = yield* git( + [ + ...quote, + "--git-dir", + path.join(state.worktree, ".git"), + "--work-tree", + state.worktree, + "check-ignore", + "--no-index", + "--stdin", + "-z", + ], + { + cwd: state.directory, + stdin: feed(files), + }, + ) + if (check.code !== 0 && check.code !== 1) return new Set() + return new Set(check.text.split("\0").filter(Boolean)) + }) + + const drop = Effect.fnUntraced(function* (files: string[]) { + if (!files.length) return + yield* git( + [ + ...cfg, + ...args(["rm", "--cached", "-f", "--ignore-unmatch", "--pathspec-from-file=-", "--pathspec-file-nul"]), + ], + { + cwd: state.directory, + stdin: feed(files), + }, + ) + }) + + const stage = Effect.fnUntraced(function* (files: string[]) { + if (!files.length) return + const result = yield* git( + [...cfg, ...args(["add", "--all", "--sparse", "--pathspec-from-file=-", "--pathspec-file-nul"])], + { + cwd: state.directory, + stdin: feed(files), + }, + ) + if (result.code === 0) return + log.warn("failed to add snapshot files", { + exitCode: result.code, + stderr: result.stderr, + }) + }) + + const exists = (file: string) => fs.exists(file).pipe(Effect.orDie) + const read = (file: string) => fs.readFileString(file).pipe(Effect.catch(() => Effect.succeed(""))) + const remove = (file: string) => fs.remove(file).pipe(Effect.catch(() => Effect.void)) + const locked = (fx: Effect.Effect) => lock(state.gitdir).withPermits(1)(fx) + + const enabled = Effect.fnUntraced(function* () { + if (state.vcs !== "git") return false + return (yield* config.get()).snapshot !== false + }) + + const excludes = Effect.fnUntraced(function* () { + const result = yield* git(["rev-parse", "--path-format=absolute", "--git-path", "info/exclude"], { + cwd: state.worktree, + }) + const file = result.text.trim() + if (!file) return + if (!(yield* exists(file))) return + 
return file + }) + + const sync = Effect.fnUntraced(function* (list: string[] = []) { + const file = yield* excludes() + const target = path.join(state.gitdir, "info", "exclude") + const text = [ + file ? (yield* read(file)).trimEnd() : "", + ...list.map((item) => `/${item.replaceAll("\\", "/")}`), + ] + .filter(Boolean) + .join("\n") + yield* fs.ensureDir(path.join(state.gitdir, "info")).pipe(Effect.orDie) + yield* fs.writeFileString(target, text ? `${text}\n` : "").pipe(Effect.orDie) + }) + + const add = Effect.fnUntraced(function* () { + yield* sync() + const [diff, other] = yield* Effect.all( + [ + git([...quote, ...args(["diff-files", "--name-only", "-z", "--", "."])], { + cwd: state.directory, + }), + git([...quote, ...args(["ls-files", "--others", "--exclude-standard", "-z", "--", "."])], { + cwd: state.directory, + }), + ], + { concurrency: 2 }, + ) + if (diff.code !== 0 || other.code !== 0) { + log.warn("failed to list snapshot files", { + diffCode: diff.code, + diffStderr: diff.stderr, + otherCode: other.code, + otherStderr: other.stderr, + }) + return + } + + const tracked = diff.text.split("\0").filter(Boolean) + const untracked = other.text.split("\0").filter(Boolean) + const all = Array.from(new Set([...tracked, ...untracked])) + if (!all.length) return + + // Resolve source-repo ignore rules against the exact candidate set. + // --no-index keeps this pattern-based even when a path is already tracked. 
+ const ignored = yield* ignore(all) + + // Remove newly-ignored files from snapshot index to prevent re-adding + if (ignored.size > 0) { + const ignoredFiles = Array.from(ignored) + log.info("removing gitignored files from snapshot", { count: ignoredFiles.length }) + yield* drop(ignoredFiles) + } + + const allow = all.filter((item) => !ignored.has(item)) + if (!allow.length) return + + const large = new Set( + (yield* Effect.all( + allow.map((item) => + fs + .stat(path.join(state.directory, item)) + .pipe(Effect.catch(() => Effect.void)) + .pipe( + Effect.map((stat) => { + if (!stat || stat.type !== "File") return + const size = typeof stat.size === "bigint" ? Number(stat.size) : stat.size + return size > limit ? item : undefined + }), + ), + ), + { concurrency: 8 }, + )).filter((item): item is string => Boolean(item)), + ) + const block = new Set(untracked.filter((item) => large.has(item))) + yield* sync(Array.from(block)) + // Stage only the allowed candidate paths so snapshot updates stay scoped. 
+ yield* stage(allow.filter((item) => !block.has(item))) + }) + + const cleanup = Effect.fnUntraced(function* () { + return yield* locked( + Effect.gen(function* () { + if (!(yield* enabled())) return + if (!(yield* exists(state.gitdir))) return + const result = yield* git(args(["gc", `--prune=${prune}`]), { cwd: state.directory }) + if (result.code !== 0) { + log.warn("cleanup failed", { + exitCode: result.code, + stderr: result.stderr, + }) + return + } + log.info("cleanup", { prune }) + }), + ) + }) + + const track = Effect.fnUntraced(function* () { + return yield* locked( + Effect.gen(function* () { + if (!(yield* enabled())) return + const existed = yield* exists(state.gitdir) + yield* fs.ensureDir(state.gitdir).pipe(Effect.orDie) + if (!existed) { + yield* git(["init"], { + env: { GIT_DIR: state.gitdir, GIT_WORK_TREE: state.worktree }, + }) + yield* git(["--git-dir", state.gitdir, "config", "core.autocrlf", "false"]) + yield* git(["--git-dir", state.gitdir, "config", "core.longpaths", "true"]) + yield* git(["--git-dir", state.gitdir, "config", "core.symlinks", "true"]) + yield* git(["--git-dir", state.gitdir, "config", "core.fsmonitor", "false"]) + log.info("initialized") + } + yield* add() + const result = yield* git(args(["write-tree"]), { cwd: state.directory }) + const hash = result.text.trim() + log.info("tracking", { hash, cwd: state.directory, git: state.gitdir }) + return hash + }), + ) + }) + + const patch = Effect.fnUntraced(function* (hash: string) { + return yield* locked( + Effect.gen(function* () { + yield* add() + const result = yield* git( + [...quote, ...args(["diff", "--cached", "--no-ext-diff", "--name-only", hash, "--", "."])], + { + cwd: state.directory, + }, + ) + if (result.code !== 0) { + log.warn("failed to get diff", { hash, exitCode: result.code }) + return { hash, files: [] } + } + const files = result.text + .trim() + .split("\n") + .map((x) => x.trim()) + .filter(Boolean) + + // Hide ignored-file removals from the user-facing 
patch output. + const ignored = yield* ignore(files) + + return { + hash, + files: files + .filter((item) => !ignored.has(item)) + .map((x) => path.join(state.worktree, x).replaceAll("\\", "/")), + } + }), + ) + }) + + const restore = Effect.fnUntraced(function* (snapshot: string) { + return yield* locked( + Effect.gen(function* () { + log.info("restore", { commit: snapshot }) + const result = yield* git([...core, ...args(["read-tree", snapshot])], { cwd: state.worktree }) + if (result.code === 0) { + const checkout = yield* git([...core, ...args(["checkout-index", "-a", "-f"])], { + cwd: state.worktree, + }) + if (checkout.code === 0) return + log.error("failed to restore snapshot", { + snapshot, + exitCode: checkout.code, + stderr: checkout.stderr, + }) + return + } + log.error("failed to restore snapshot", { + snapshot, + exitCode: result.code, + stderr: result.stderr, + }) + }), + ) + }) + + const revert = Effect.fnUntraced(function* (patches: Patch[]) { + return yield* locked( + Effect.gen(function* () { + const ops: { hash: string; file: string; rel: string }[] = [] + const seen = new Set() + for (const item of patches) { + for (const file of item.files) { + if (seen.has(file)) continue + seen.add(file) + ops.push({ + hash: item.hash, + file, + rel: path.relative(state.worktree, file).replaceAll("\\", "/"), + }) + } + } + + const single = Effect.fnUntraced(function* (op: (typeof ops)[number]) { + log.info("reverting", { file: op.file, hash: op.hash }) + const result = yield* git([...core, ...args(["checkout", op.hash, "--", op.file])], { + cwd: state.worktree, + }) + if (result.code === 0) return + const tree = yield* git([...core, ...args(["ls-tree", op.hash, "--", op.rel])], { + cwd: state.worktree, + }) + if (tree.code === 0 && tree.text.trim()) { + log.info("file existed in snapshot but checkout failed, keeping", { file: op.file, hash: op.hash }) + return + } + log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash }) + yield* 
remove(op.file) + }) + + const clash = (a: string, b: string) => a === b || a.startsWith(`${b}/`) || b.startsWith(`${a}/`) + + for (let i = 0; i < ops.length; ) { + const first = ops[i]! + const run = [first] + let j = i + 1 + // Only batch adjacent files when their paths cannot affect each other. + while (j < ops.length && run.length < 100) { + const next = ops[j]! + if (next.hash !== first.hash) break + if (run.some((item) => clash(item.rel, next.rel))) break + run.push(next) + j += 1 + } + + if (run.length === 1) { + yield* single(first) + i = j + continue + } + + const tree = yield* git( + [...core, ...args(["ls-tree", "--name-only", first.hash, "--", ...run.map((item) => item.rel)])], + { + cwd: state.worktree, + }, + ) + + if (tree.code !== 0) { + log.info("batched ls-tree failed, falling back to single-file revert", { + hash: first.hash, + files: run.length, + }) + for (const op of run) { + yield* single(op) + } + i = j + continue + } + + const have = new Set( + tree.text + .trim() + .split("\n") + .map((item) => item.trim()) + .filter(Boolean), + ) + const list = run.filter((item) => have.has(item.rel)) + if (list.length) { + log.info("reverting", { hash: first.hash, files: list.length }) + const result = yield* git( + [...core, ...args(["checkout", first.hash, "--", ...list.map((item) => item.file)])], + { + cwd: state.worktree, + }, + ) + if (result.code !== 0) { + log.info("batched checkout failed, falling back to single-file revert", { + hash: first.hash, + files: list.length, + }) + for (const op of run) { + yield* single(op) + } + i = j + continue + } + } + + for (const op of run) { + if (have.has(op.rel)) continue + log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash }) + yield* remove(op.file) + } + + i = j + } + }), + ) + }) + + const diff = Effect.fnUntraced(function* (hash: string) { + return yield* locked( + Effect.gen(function* () { + yield* add() + const result = yield* git([...quote, ...args(["diff", 
"--cached", "--no-ext-diff", hash, "--", "."])], { + cwd: state.worktree, + }) + if (result.code !== 0) { + log.warn("failed to get diff", { + hash, + exitCode: result.code, + stderr: result.stderr, + }) + return "" + } + return result.text.trim() + }), + ) + }) + + const diffFull = Effect.fnUntraced(function* (from: string, to: string) { + return yield* locked( + Effect.gen(function* () { + type Row = { + file: string + status: "added" | "deleted" | "modified" + binary: boolean + additions: number + deletions: number + } + + type Ref = { + file: string + side: "before" | "after" + ref: string + } + + const show = Effect.fnUntraced(function* (row: Row) { + if (row.binary) return ["", ""] + if (row.status === "added") { + return [ + "", + yield* git([...cfg, ...args(["show", `${to}:${row.file}`])]).pipe(Effect.map((item) => item.text)), + ] + } + if (row.status === "deleted") { + return [ + yield* git([...cfg, ...args(["show", `${from}:${row.file}`])]).pipe( + Effect.map((item) => item.text), + ), + "", + ] + } + return yield* Effect.all( + [ + git([...cfg, ...args(["show", `${from}:${row.file}`])]).pipe(Effect.map((item) => item.text)), + git([...cfg, ...args(["show", `${to}:${row.file}`])]).pipe(Effect.map((item) => item.text)), + ], + { concurrency: 2 }, + ) + }) + + const load = Effect.fnUntraced( + function* (rows: Row[]) { + const refs = rows.flatMap((row) => { + if (row.binary) return [] + if (row.status === "added") + return [{ file: row.file, side: "after", ref: `${to}:${row.file}` } satisfies Ref] + if (row.status === "deleted") { + return [{ file: row.file, side: "before", ref: `${from}:${row.file}` } satisfies Ref] + } + return [ + { file: row.file, side: "before", ref: `${from}:${row.file}` } satisfies Ref, + { file: row.file, side: "after", ref: `${to}:${row.file}` } satisfies Ref, + ] + }) + if (!refs.length) return new Map() + + const proc = ChildProcess.make("git", [...cfg, ...args(["cat-file", "--batch"])], { + cwd: state.directory, + extendEnv: 
true, + stdin: Stream.make(new TextEncoder().encode(refs.map((item) => item.ref).join("\n") + "\n")), + }) + const handle = yield* spawner.spawn(proc) + const [out, err] = yield* Effect.all( + [Stream.mkUint8Array(handle.stdout), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + if (code !== 0) { + log.info("git cat-file --batch failed during snapshot diff, falling back to per-file git show", { + stderr: err, + refs: refs.length, + }) + return + } + + const fail = (msg: string, extra?: Record) => { + log.info(msg, { ...extra, refs: refs.length }) + return undefined + } + + const map = new Map() + const dec = new TextDecoder() + let i = 0 + for (const ref of refs) { + let end = i + while (end < out.length && out[end] !== 10) end += 1 + if (end >= out.length) { + return fail( + "git cat-file --batch returned a truncated header during snapshot diff, falling back to per-file git show", + ) + } + + const head = dec.decode(out.slice(i, end)) + i = end + 1 + const hit = map.get(ref.file) ?? 
{ before: "", after: "" } + if (head.endsWith(" missing")) { + map.set(ref.file, hit) + continue + } + + const match = head.match(/^[0-9a-f]+ blob (\d+)$/) + if (!match) { + return fail( + "git cat-file --batch returned an unexpected header during snapshot diff, falling back to per-file git show", + { head }, + ) + } + + const size = Number(match[1]) + if (!Number.isInteger(size) || size < 0 || i + size >= out.length || out[i + size] !== 10) { + return fail( + "git cat-file --batch returned truncated content during snapshot diff, falling back to per-file git show", + { head }, + ) + } + + const text = dec.decode(out.slice(i, i + size)) + if (ref.side === "before") hit.before = text + if (ref.side === "after") hit.after = text + map.set(ref.file, hit) + i += size + 1 + } + + if (i !== out.length) { + return fail( + "git cat-file --batch returned trailing data during snapshot diff, falling back to per-file git show", + ) + } + + return map + }, + Effect.scoped, + Effect.catch(() => + Effect.succeed | undefined>(undefined), + ), + ) + + const result: FileDiff[] = [] + const status = new Map() + + const statuses = yield* git( + [...quote, ...args(["diff", "--no-ext-diff", "--name-status", "--no-renames", from, to, "--", "."])], + { cwd: state.directory }, + ) + + for (const line of statuses.text.trim().split("\n")) { + if (!line) continue + const [code, file] = line.split("\t") + if (!code || !file) continue + status.set(file, code.startsWith("A") ? "added" : code.startsWith("D") ? "deleted" : "modified") + } + + const numstat = yield* git( + [...quote, ...args(["diff", "--no-ext-diff", "--no-renames", "--numstat", from, to, "--", "."])], + { + cwd: state.directory, + }, + ) + + const rows = numstat.text + .trim() + .split("\n") + .filter(Boolean) + .flatMap((line) => { + const [adds, dels, file] = line.split("\t") + if (!file) return [] + const binary = adds === "-" && dels === "-" + const additions = binary ? 0 : parseInt(adds) + const deletions = binary ? 
0 : parseInt(dels) + return [ + { + file, + status: status.get(file) ?? "modified", + binary, + additions: Number.isFinite(additions) ? additions : 0, + deletions: Number.isFinite(deletions) ? deletions : 0, + } satisfies Row, + ] + }) + + // Hide ignored-file removals from the user-facing diff output. + const ignored = yield* ignore(rows.map((r) => r.file)) + if (ignored.size > 0) { + const filtered = rows.filter((r) => !ignored.has(r.file)) + rows.length = 0 + rows.push(...filtered) + } + + const step = 100 + const patch = (file: string, before: string, after: string) => + formatPatch(structuredPatch(file, file, before, after, "", "", { context: Number.MAX_SAFE_INTEGER })) + + for (let i = 0; i < rows.length; i += step) { + const run = rows.slice(i, i + step) + const text = yield* load(run) + + for (const row of run) { + const hit = text?.get(row.file) ?? { before: "", after: "" } + const [before, after] = row.binary ? ["", ""] : text ? [hit.before, hit.after] : yield* show(row) + result.push({ + file: row.file, + patch: row.binary ? 
"" : patch(row.file, before, after), + additions: row.additions, + deletions: row.deletions, + status: row.status, + }) + } + } + + return result + }), + ) + }) + + yield* cleanup().pipe( + Effect.catchCause((cause) => { + log.error("cleanup loop failed", { cause: Cause.pretty(cause) }) + return Effect.void + }), + Effect.repeat(Schedule.spaced(Duration.hours(1))), + Effect.delay(Duration.minutes(1)), + Effect.forkScoped, + ) + + return { cleanup, track, patch, restore, revert, diff, diffFull } + }), + ) + + return Service.of({ + init: Effect.fn("Snapshot.init")(function* () { + yield* InstanceState.get(state) + }), + cleanup: Effect.fn("Snapshot.cleanup")(function* () { + return yield* InstanceState.useEffect(state, (s) => s.cleanup()) + }), + track: Effect.fn("Snapshot.track")(function* () { + return yield* InstanceState.useEffect(state, (s) => s.track()) + }), + patch: Effect.fn("Snapshot.patch")(function* (hash: string) { + return yield* InstanceState.useEffect(state, (s) => s.patch(hash)) + }), + restore: Effect.fn("Snapshot.restore")(function* (snapshot: string) { + return yield* InstanceState.useEffect(state, (s) => s.restore(snapshot)) + }), + revert: Effect.fn("Snapshot.revert")(function* (patches: Patch[]) { + return yield* InstanceState.useEffect(state, (s) => s.revert(patches)) + }), + diff: Effect.fn("Snapshot.diff")(function* (hash: string) { + return yield* InstanceState.useEffect(state, (s) => s.diff(hash)) + }), + diffFull: Effect.fn("Snapshot.diffFull")(function* (from: string, to: string) { + return yield* InstanceState.useEffect(state, (s) => s.diffFull(from, to)) + }), + }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(CrossSpawnSpawner.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(Config.defaultLayer), +) + +export * as Snapshot from "." 
diff --git a/packages/opencode/src/snapshot/snapshot.ts b/packages/opencode/src/snapshot/snapshot.ts deleted file mode 100644 index 7a5c0a4dca..0000000000 --- a/packages/opencode/src/snapshot/snapshot.ts +++ /dev/null @@ -1,775 +0,0 @@ -import { Cause, Duration, Effect, Layer, Schedule, Semaphore, Context, Stream } from "effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import { formatPatch, structuredPatch } from "diff" -import path from "path" -import z from "zod" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { InstanceState } from "@/effect" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Hash } from "@opencode-ai/shared/util/hash" -import { Config } from "../config" -import { Global } from "../global" -import { Log } from "../util" - -export const Patch = z.object({ - hash: z.string(), - files: z.string().array(), -}) -export type Patch = z.infer - -export const FileDiff = z - .object({ - file: z.string(), - patch: z.string(), - additions: z.number(), - deletions: z.number(), - status: z.enum(["added", "deleted", "modified"]).optional(), - }) - .meta({ - ref: "SnapshotFileDiff", - }) -export type FileDiff = z.infer - -const log = Log.create({ service: "snapshot" }) -const prune = "7.days" -const limit = 2 * 1024 * 1024 -const core = ["-c", "core.longpaths=true", "-c", "core.symlinks=true"] -const cfg = ["-c", "core.autocrlf=false", ...core] -const quote = [...cfg, "-c", "core.quotepath=false"] -interface GitResult { - readonly code: ChildProcessSpawner.ExitCode - readonly text: string - readonly stderr: string -} - -type State = Omit - -export interface Interface { - readonly init: () => Effect.Effect - readonly cleanup: () => Effect.Effect - readonly track: () => Effect.Effect - readonly patch: (hash: string) => Effect.Effect - readonly restore: (snapshot: string) => Effect.Effect - readonly revert: (patches: Patch[]) => Effect.Effect - readonly diff: (hash: string) => 
Effect.Effect - readonly diffFull: (from: string, to: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Snapshot") {} - -export const layer: Layer.Layer< - Service, - never, - AppFileSystem.Service | ChildProcessSpawner.ChildProcessSpawner | Config.Service -> = Layer.effect( - Service, - Effect.gen(function* () { - const fs = yield* AppFileSystem.Service - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - const config = yield* Config.Service - const locks = new Map() - - const lock = (key: string) => { - const hit = locks.get(key) - if (hit) return hit - - const next = Semaphore.makeUnsafe(1) - locks.set(key, next) - return next - } - - const state = yield* InstanceState.make( - Effect.fn("Snapshot.state")(function* (ctx) { - const state = { - directory: ctx.directory, - worktree: ctx.worktree, - gitdir: path.join(Global.Path.data, "snapshot", ctx.project.id, Hash.fast(ctx.worktree)), - vcs: ctx.project.vcs, - } - - const args = (cmd: string[]) => ["--git-dir", state.gitdir, "--work-tree", state.worktree, ...cmd] - - const enc = new TextEncoder() - const feed = (list: string[]) => Stream.make(enc.encode(list.join("\0") + "\0")) - - const git = Effect.fnUntraced( - function* ( - cmd: string[], - opts?: { cwd?: string; env?: Record; stdin?: ChildProcess.CommandInput }, - ) { - const proc = ChildProcess.make("git", cmd, { - cwd: opts?.cwd, - env: opts?.env, - extendEnv: true, - stdin: opts?.stdin, - }) - const handle = yield* spawner.spawn(proc) - const [text, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - return { code, text, stderr } satisfies GitResult - }, - Effect.scoped, - Effect.catch((err) => - Effect.succeed({ - code: ChildProcessSpawner.ExitCode(1), - text: "", - stderr: err instanceof Error ? 
err.message : String(err), - }), - ), - ) - - const ignore = Effect.fnUntraced(function* (files: string[]) { - if (!files.length) return new Set() - const check = yield* git( - [ - ...quote, - "--git-dir", - path.join(state.worktree, ".git"), - "--work-tree", - state.worktree, - "check-ignore", - "--no-index", - "--stdin", - "-z", - ], - { - cwd: state.directory, - stdin: feed(files), - }, - ) - if (check.code !== 0 && check.code !== 1) return new Set() - return new Set(check.text.split("\0").filter(Boolean)) - }) - - const drop = Effect.fnUntraced(function* (files: string[]) { - if (!files.length) return - yield* git( - [ - ...cfg, - ...args(["rm", "--cached", "-f", "--ignore-unmatch", "--pathspec-from-file=-", "--pathspec-file-nul"]), - ], - { - cwd: state.directory, - stdin: feed(files), - }, - ) - }) - - const stage = Effect.fnUntraced(function* (files: string[]) { - if (!files.length) return - const result = yield* git( - [...cfg, ...args(["add", "--all", "--sparse", "--pathspec-from-file=-", "--pathspec-file-nul"])], - { - cwd: state.directory, - stdin: feed(files), - }, - ) - if (result.code === 0) return - log.warn("failed to add snapshot files", { - exitCode: result.code, - stderr: result.stderr, - }) - }) - - const exists = (file: string) => fs.exists(file).pipe(Effect.orDie) - const read = (file: string) => fs.readFileString(file).pipe(Effect.catch(() => Effect.succeed(""))) - const remove = (file: string) => fs.remove(file).pipe(Effect.catch(() => Effect.void)) - const locked = (fx: Effect.Effect) => lock(state.gitdir).withPermits(1)(fx) - - const enabled = Effect.fnUntraced(function* () { - if (state.vcs !== "git") return false - return (yield* config.get()).snapshot !== false - }) - - const excludes = Effect.fnUntraced(function* () { - const result = yield* git(["rev-parse", "--path-format=absolute", "--git-path", "info/exclude"], { - cwd: state.worktree, - }) - const file = result.text.trim() - if (!file) return - if (!(yield* exists(file))) return - 
return file - }) - - const sync = Effect.fnUntraced(function* (list: string[] = []) { - const file = yield* excludes() - const target = path.join(state.gitdir, "info", "exclude") - const text = [ - file ? (yield* read(file)).trimEnd() : "", - ...list.map((item) => `/${item.replaceAll("\\", "/")}`), - ] - .filter(Boolean) - .join("\n") - yield* fs.ensureDir(path.join(state.gitdir, "info")).pipe(Effect.orDie) - yield* fs.writeFileString(target, text ? `${text}\n` : "").pipe(Effect.orDie) - }) - - const add = Effect.fnUntraced(function* () { - yield* sync() - const [diff, other] = yield* Effect.all( - [ - git([...quote, ...args(["diff-files", "--name-only", "-z", "--", "."])], { - cwd: state.directory, - }), - git([...quote, ...args(["ls-files", "--others", "--exclude-standard", "-z", "--", "."])], { - cwd: state.directory, - }), - ], - { concurrency: 2 }, - ) - if (diff.code !== 0 || other.code !== 0) { - log.warn("failed to list snapshot files", { - diffCode: diff.code, - diffStderr: diff.stderr, - otherCode: other.code, - otherStderr: other.stderr, - }) - return - } - - const tracked = diff.text.split("\0").filter(Boolean) - const untracked = other.text.split("\0").filter(Boolean) - const all = Array.from(new Set([...tracked, ...untracked])) - if (!all.length) return - - // Resolve source-repo ignore rules against the exact candidate set. - // --no-index keeps this pattern-based even when a path is already tracked. 
- const ignored = yield* ignore(all) - - // Remove newly-ignored files from snapshot index to prevent re-adding - if (ignored.size > 0) { - const ignoredFiles = Array.from(ignored) - log.info("removing gitignored files from snapshot", { count: ignoredFiles.length }) - yield* drop(ignoredFiles) - } - - const allow = all.filter((item) => !ignored.has(item)) - if (!allow.length) return - - const large = new Set( - (yield* Effect.all( - allow.map((item) => - fs - .stat(path.join(state.directory, item)) - .pipe(Effect.catch(() => Effect.void)) - .pipe( - Effect.map((stat) => { - if (!stat || stat.type !== "File") return - const size = typeof stat.size === "bigint" ? Number(stat.size) : stat.size - return size > limit ? item : undefined - }), - ), - ), - { concurrency: 8 }, - )).filter((item): item is string => Boolean(item)), - ) - const block = new Set(untracked.filter((item) => large.has(item))) - yield* sync(Array.from(block)) - // Stage only the allowed candidate paths so snapshot updates stay scoped. 
- yield* stage(allow.filter((item) => !block.has(item))) - }) - - const cleanup = Effect.fnUntraced(function* () { - return yield* locked( - Effect.gen(function* () { - if (!(yield* enabled())) return - if (!(yield* exists(state.gitdir))) return - const result = yield* git(args(["gc", `--prune=${prune}`]), { cwd: state.directory }) - if (result.code !== 0) { - log.warn("cleanup failed", { - exitCode: result.code, - stderr: result.stderr, - }) - return - } - log.info("cleanup", { prune }) - }), - ) - }) - - const track = Effect.fnUntraced(function* () { - return yield* locked( - Effect.gen(function* () { - if (!(yield* enabled())) return - const existed = yield* exists(state.gitdir) - yield* fs.ensureDir(state.gitdir).pipe(Effect.orDie) - if (!existed) { - yield* git(["init"], { - env: { GIT_DIR: state.gitdir, GIT_WORK_TREE: state.worktree }, - }) - yield* git(["--git-dir", state.gitdir, "config", "core.autocrlf", "false"]) - yield* git(["--git-dir", state.gitdir, "config", "core.longpaths", "true"]) - yield* git(["--git-dir", state.gitdir, "config", "core.symlinks", "true"]) - yield* git(["--git-dir", state.gitdir, "config", "core.fsmonitor", "false"]) - log.info("initialized") - } - yield* add() - const result = yield* git(args(["write-tree"]), { cwd: state.directory }) - const hash = result.text.trim() - log.info("tracking", { hash, cwd: state.directory, git: state.gitdir }) - return hash - }), - ) - }) - - const patch = Effect.fnUntraced(function* (hash: string) { - return yield* locked( - Effect.gen(function* () { - yield* add() - const result = yield* git( - [...quote, ...args(["diff", "--cached", "--no-ext-diff", "--name-only", hash, "--", "."])], - { - cwd: state.directory, - }, - ) - if (result.code !== 0) { - log.warn("failed to get diff", { hash, exitCode: result.code }) - return { hash, files: [] } - } - const files = result.text - .trim() - .split("\n") - .map((x) => x.trim()) - .filter(Boolean) - - // Hide ignored-file removals from the user-facing 
patch output. - const ignored = yield* ignore(files) - - return { - hash, - files: files - .filter((item) => !ignored.has(item)) - .map((x) => path.join(state.worktree, x).replaceAll("\\", "/")), - } - }), - ) - }) - - const restore = Effect.fnUntraced(function* (snapshot: string) { - return yield* locked( - Effect.gen(function* () { - log.info("restore", { commit: snapshot }) - const result = yield* git([...core, ...args(["read-tree", snapshot])], { cwd: state.worktree }) - if (result.code === 0) { - const checkout = yield* git([...core, ...args(["checkout-index", "-a", "-f"])], { - cwd: state.worktree, - }) - if (checkout.code === 0) return - log.error("failed to restore snapshot", { - snapshot, - exitCode: checkout.code, - stderr: checkout.stderr, - }) - return - } - log.error("failed to restore snapshot", { - snapshot, - exitCode: result.code, - stderr: result.stderr, - }) - }), - ) - }) - - const revert = Effect.fnUntraced(function* (patches: Patch[]) { - return yield* locked( - Effect.gen(function* () { - const ops: { hash: string; file: string; rel: string }[] = [] - const seen = new Set() - for (const item of patches) { - for (const file of item.files) { - if (seen.has(file)) continue - seen.add(file) - ops.push({ - hash: item.hash, - file, - rel: path.relative(state.worktree, file).replaceAll("\\", "/"), - }) - } - } - - const single = Effect.fnUntraced(function* (op: (typeof ops)[number]) { - log.info("reverting", { file: op.file, hash: op.hash }) - const result = yield* git([...core, ...args(["checkout", op.hash, "--", op.file])], { - cwd: state.worktree, - }) - if (result.code === 0) return - const tree = yield* git([...core, ...args(["ls-tree", op.hash, "--", op.rel])], { - cwd: state.worktree, - }) - if (tree.code === 0 && tree.text.trim()) { - log.info("file existed in snapshot but checkout failed, keeping", { file: op.file, hash: op.hash }) - return - } - log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash }) - yield* 
remove(op.file) - }) - - const clash = (a: string, b: string) => a === b || a.startsWith(`${b}/`) || b.startsWith(`${a}/`) - - for (let i = 0; i < ops.length; ) { - const first = ops[i]! - const run = [first] - let j = i + 1 - // Only batch adjacent files when their paths cannot affect each other. - while (j < ops.length && run.length < 100) { - const next = ops[j]! - if (next.hash !== first.hash) break - if (run.some((item) => clash(item.rel, next.rel))) break - run.push(next) - j += 1 - } - - if (run.length === 1) { - yield* single(first) - i = j - continue - } - - const tree = yield* git( - [...core, ...args(["ls-tree", "--name-only", first.hash, "--", ...run.map((item) => item.rel)])], - { - cwd: state.worktree, - }, - ) - - if (tree.code !== 0) { - log.info("batched ls-tree failed, falling back to single-file revert", { - hash: first.hash, - files: run.length, - }) - for (const op of run) { - yield* single(op) - } - i = j - continue - } - - const have = new Set( - tree.text - .trim() - .split("\n") - .map((item) => item.trim()) - .filter(Boolean), - ) - const list = run.filter((item) => have.has(item.rel)) - if (list.length) { - log.info("reverting", { hash: first.hash, files: list.length }) - const result = yield* git( - [...core, ...args(["checkout", first.hash, "--", ...list.map((item) => item.file)])], - { - cwd: state.worktree, - }, - ) - if (result.code !== 0) { - log.info("batched checkout failed, falling back to single-file revert", { - hash: first.hash, - files: list.length, - }) - for (const op of run) { - yield* single(op) - } - i = j - continue - } - } - - for (const op of run) { - if (have.has(op.rel)) continue - log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash }) - yield* remove(op.file) - } - - i = j - } - }), - ) - }) - - const diff = Effect.fnUntraced(function* (hash: string) { - return yield* locked( - Effect.gen(function* () { - yield* add() - const result = yield* git([...quote, ...args(["diff", 
"--cached", "--no-ext-diff", hash, "--", "."])], { - cwd: state.worktree, - }) - if (result.code !== 0) { - log.warn("failed to get diff", { - hash, - exitCode: result.code, - stderr: result.stderr, - }) - return "" - } - return result.text.trim() - }), - ) - }) - - const diffFull = Effect.fnUntraced(function* (from: string, to: string) { - return yield* locked( - Effect.gen(function* () { - type Row = { - file: string - status: "added" | "deleted" | "modified" - binary: boolean - additions: number - deletions: number - } - - type Ref = { - file: string - side: "before" | "after" - ref: string - } - - const show = Effect.fnUntraced(function* (row: Row) { - if (row.binary) return ["", ""] - if (row.status === "added") { - return [ - "", - yield* git([...cfg, ...args(["show", `${to}:${row.file}`])]).pipe(Effect.map((item) => item.text)), - ] - } - if (row.status === "deleted") { - return [ - yield* git([...cfg, ...args(["show", `${from}:${row.file}`])]).pipe( - Effect.map((item) => item.text), - ), - "", - ] - } - return yield* Effect.all( - [ - git([...cfg, ...args(["show", `${from}:${row.file}`])]).pipe(Effect.map((item) => item.text)), - git([...cfg, ...args(["show", `${to}:${row.file}`])]).pipe(Effect.map((item) => item.text)), - ], - { concurrency: 2 }, - ) - }) - - const load = Effect.fnUntraced( - function* (rows: Row[]) { - const refs = rows.flatMap((row) => { - if (row.binary) return [] - if (row.status === "added") - return [{ file: row.file, side: "after", ref: `${to}:${row.file}` } satisfies Ref] - if (row.status === "deleted") { - return [{ file: row.file, side: "before", ref: `${from}:${row.file}` } satisfies Ref] - } - return [ - { file: row.file, side: "before", ref: `${from}:${row.file}` } satisfies Ref, - { file: row.file, side: "after", ref: `${to}:${row.file}` } satisfies Ref, - ] - }) - if (!refs.length) return new Map() - - const proc = ChildProcess.make("git", [...cfg, ...args(["cat-file", "--batch"])], { - cwd: state.directory, - extendEnv: 
true, - stdin: Stream.make(new TextEncoder().encode(refs.map((item) => item.ref).join("\n") + "\n")), - }) - const handle = yield* spawner.spawn(proc) - const [out, err] = yield* Effect.all( - [Stream.mkUint8Array(handle.stdout), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - if (code !== 0) { - log.info("git cat-file --batch failed during snapshot diff, falling back to per-file git show", { - stderr: err, - refs: refs.length, - }) - return - } - - const fail = (msg: string, extra?: Record) => { - log.info(msg, { ...extra, refs: refs.length }) - return undefined - } - - const map = new Map() - const dec = new TextDecoder() - let i = 0 - for (const ref of refs) { - let end = i - while (end < out.length && out[end] !== 10) end += 1 - if (end >= out.length) { - return fail( - "git cat-file --batch returned a truncated header during snapshot diff, falling back to per-file git show", - ) - } - - const head = dec.decode(out.slice(i, end)) - i = end + 1 - const hit = map.get(ref.file) ?? 
{ before: "", after: "" } - if (head.endsWith(" missing")) { - map.set(ref.file, hit) - continue - } - - const match = head.match(/^[0-9a-f]+ blob (\d+)$/) - if (!match) { - return fail( - "git cat-file --batch returned an unexpected header during snapshot diff, falling back to per-file git show", - { head }, - ) - } - - const size = Number(match[1]) - if (!Number.isInteger(size) || size < 0 || i + size >= out.length || out[i + size] !== 10) { - return fail( - "git cat-file --batch returned truncated content during snapshot diff, falling back to per-file git show", - { head }, - ) - } - - const text = dec.decode(out.slice(i, i + size)) - if (ref.side === "before") hit.before = text - if (ref.side === "after") hit.after = text - map.set(ref.file, hit) - i += size + 1 - } - - if (i !== out.length) { - return fail( - "git cat-file --batch returned trailing data during snapshot diff, falling back to per-file git show", - ) - } - - return map - }, - Effect.scoped, - Effect.catch(() => - Effect.succeed | undefined>(undefined), - ), - ) - - const result: FileDiff[] = [] - const status = new Map() - - const statuses = yield* git( - [...quote, ...args(["diff", "--no-ext-diff", "--name-status", "--no-renames", from, to, "--", "."])], - { cwd: state.directory }, - ) - - for (const line of statuses.text.trim().split("\n")) { - if (!line) continue - const [code, file] = line.split("\t") - if (!code || !file) continue - status.set(file, code.startsWith("A") ? "added" : code.startsWith("D") ? "deleted" : "modified") - } - - const numstat = yield* git( - [...quote, ...args(["diff", "--no-ext-diff", "--no-renames", "--numstat", from, to, "--", "."])], - { - cwd: state.directory, - }, - ) - - const rows = numstat.text - .trim() - .split("\n") - .filter(Boolean) - .flatMap((line) => { - const [adds, dels, file] = line.split("\t") - if (!file) return [] - const binary = adds === "-" && dels === "-" - const additions = binary ? 0 : parseInt(adds) - const deletions = binary ? 
0 : parseInt(dels) - return [ - { - file, - status: status.get(file) ?? "modified", - binary, - additions: Number.isFinite(additions) ? additions : 0, - deletions: Number.isFinite(deletions) ? deletions : 0, - } satisfies Row, - ] - }) - - // Hide ignored-file removals from the user-facing diff output. - const ignored = yield* ignore(rows.map((r) => r.file)) - if (ignored.size > 0) { - const filtered = rows.filter((r) => !ignored.has(r.file)) - rows.length = 0 - rows.push(...filtered) - } - - const step = 100 - const patch = (file: string, before: string, after: string) => - formatPatch(structuredPatch(file, file, before, after, "", "", { context: Number.MAX_SAFE_INTEGER })) - - for (let i = 0; i < rows.length; i += step) { - const run = rows.slice(i, i + step) - const text = yield* load(run) - - for (const row of run) { - const hit = text?.get(row.file) ?? { before: "", after: "" } - const [before, after] = row.binary ? ["", ""] : text ? [hit.before, hit.after] : yield* show(row) - result.push({ - file: row.file, - patch: row.binary ? 
"" : patch(row.file, before, after), - additions: row.additions, - deletions: row.deletions, - status: row.status, - }) - } - } - - return result - }), - ) - }) - - yield* cleanup().pipe( - Effect.catchCause((cause) => { - log.error("cleanup loop failed", { cause: Cause.pretty(cause) }) - return Effect.void - }), - Effect.repeat(Schedule.spaced(Duration.hours(1))), - Effect.delay(Duration.minutes(1)), - Effect.forkScoped, - ) - - return { cleanup, track, patch, restore, revert, diff, diffFull } - }), - ) - - return Service.of({ - init: Effect.fn("Snapshot.init")(function* () { - yield* InstanceState.get(state) - }), - cleanup: Effect.fn("Snapshot.cleanup")(function* () { - return yield* InstanceState.useEffect(state, (s) => s.cleanup()) - }), - track: Effect.fn("Snapshot.track")(function* () { - return yield* InstanceState.useEffect(state, (s) => s.track()) - }), - patch: Effect.fn("Snapshot.patch")(function* (hash: string) { - return yield* InstanceState.useEffect(state, (s) => s.patch(hash)) - }), - restore: Effect.fn("Snapshot.restore")(function* (snapshot: string) { - return yield* InstanceState.useEffect(state, (s) => s.restore(snapshot)) - }), - revert: Effect.fn("Snapshot.revert")(function* (patches: Patch[]) { - return yield* InstanceState.useEffect(state, (s) => s.revert(patches)) - }), - diff: Effect.fn("Snapshot.diff")(function* (hash: string) { - return yield* InstanceState.useEffect(state, (s) => s.diff(hash)) - }), - diffFull: Effect.fn("Snapshot.diffFull")(function* (from: string, to: string) { - return yield* InstanceState.useEffect(state, (s) => s.diffFull(from, to)) - }), - }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(CrossSpawnSpawner.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(Config.defaultLayer), -) From 2638e2acfa4dc5198dc3454986515022fac6559b Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:37:13 -0400 Subject: [PATCH 052/120] refactor: collapse plugin barrel into 
plugin/index.ts (#22914) --- packages/opencode/src/plugin/index.ts | 290 +++++++++++++++++- packages/opencode/src/plugin/plugin.ts | 287 ----------------- .../test/plugin/auth-override.test.ts | 2 +- 3 files changed, 290 insertions(+), 289 deletions(-) delete mode 100644 packages/opencode/src/plugin/plugin.ts diff --git a/packages/opencode/src/plugin/index.ts b/packages/opencode/src/plugin/index.ts index 20f38c41c2..dd2a784694 100644 --- a/packages/opencode/src/plugin/index.ts +++ b/packages/opencode/src/plugin/index.ts @@ -1 +1,289 @@ -export * as Plugin from "./plugin" +import type { + Hooks, + PluginInput, + Plugin as PluginInstance, + PluginModule, + WorkspaceAdaptor as PluginWorkspaceAdaptor, +} from "@opencode-ai/plugin" +import { Config } from "../config" +import { Bus } from "../bus" +import { Log } from "../util" +import { createOpencodeClient } from "@opencode-ai/sdk" +import { Flag } from "../flag/flag" +import { CodexAuthPlugin } from "./codex" +import { Session } from "../session" +import { NamedError } from "@opencode-ai/shared/util/error" +import { CopilotAuthPlugin } from "./github-copilot/copilot" +import { gitlabAuthPlugin as GitlabAuthPlugin } from "opencode-gitlab-auth" +import { PoeAuthPlugin } from "opencode-poe-auth" +import { CloudflareAIGatewayAuthPlugin, CloudflareWorkersAuthPlugin } from "./cloudflare" +import { Effect, Layer, Context, Stream } from "effect" +import { EffectBridge } from "@/effect" +import { InstanceState } from "@/effect" +import { errorMessage } from "@/util/error" +import { PluginLoader } from "./loader" +import { parsePluginSpecifier, readPluginId, readV1Plugin, resolvePluginId } from "./shared" +import { registerAdaptor } from "@/control-plane/adaptors" +import type { WorkspaceAdaptor } from "@/control-plane/types" + +const log = Log.create({ service: "plugin" }) + +type State = { + hooks: Hooks[] +} + +// Hook names that follow the (input, output) => Promise trigger pattern +type TriggerName = { + [K in keyof 
Hooks]-?: NonNullable extends (input: any, output: any) => Promise ? K : never +}[keyof Hooks] + +export interface Interface { + readonly trigger: < + Name extends TriggerName, + Input = Parameters[Name]>[0], + Output = Parameters[Name]>[1], + >( + name: Name, + input: Input, + output: Output, + ) => Effect.Effect + readonly list: () => Effect.Effect + readonly init: () => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Plugin") {} + +// Built-in plugins that are directly imported (not installed from npm) +const INTERNAL_PLUGINS: PluginInstance[] = [ + CodexAuthPlugin, + CopilotAuthPlugin, + GitlabAuthPlugin, + PoeAuthPlugin, + CloudflareWorkersAuthPlugin, + CloudflareAIGatewayAuthPlugin, +] + +function isServerPlugin(value: unknown): value is PluginInstance { + return typeof value === "function" +} + +function getServerPlugin(value: unknown) { + if (isServerPlugin(value)) return value + if (!value || typeof value !== "object" || !("server" in value)) return + if (!isServerPlugin(value.server)) return + return value.server +} + +function getLegacyPlugins(mod: Record) { + const seen = new Set() + const result: PluginInstance[] = [] + + for (const entry of Object.values(mod)) { + if (seen.has(entry)) continue + seen.add(entry) + const plugin = getServerPlugin(entry) + if (!plugin) throw new TypeError("Plugin export is not a function") + result.push(plugin) + } + + return result +} + +async function applyPlugin(load: PluginLoader.Loaded, input: PluginInput, hooks: Hooks[]) { + const plugin = readV1Plugin(load.mod, load.spec, "server", "detect") + if (plugin) { + await resolvePluginId(load.source, load.spec, load.target, readPluginId(plugin.id, load.spec), load.pkg) + hooks.push(await (plugin as PluginModule).server(input, load.options)) + return + } + + for (const server of getLegacyPlugins(load.mod)) { + hooks.push(await server(input, load.options)) + } +} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + 
const bus = yield* Bus.Service + const config = yield* Config.Service + + const state = yield* InstanceState.make( + Effect.fn("Plugin.state")(function* (ctx) { + const hooks: Hooks[] = [] + const bridge = yield* EffectBridge.make() + + function publishPluginError(message: string) { + bridge.fork(bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })) + } + + const { Server } = yield* Effect.promise(() => import("../server/server")) + + const client = createOpencodeClient({ + baseUrl: "http://localhost:4096", + directory: ctx.directory, + headers: Flag.OPENCODE_SERVER_PASSWORD + ? { + Authorization: `Basic ${Buffer.from(`${Flag.OPENCODE_SERVER_USERNAME ?? "opencode"}:${Flag.OPENCODE_SERVER_PASSWORD}`).toString("base64")}`, + } + : undefined, + fetch: async (...args) => (await Server.Default()).app.fetch(...args), + }) + const cfg = yield* config.get() + const input: PluginInput = { + client, + project: ctx.project, + worktree: ctx.worktree, + directory: ctx.directory, + experimental_workspace: { + register(type: string, adaptor: PluginWorkspaceAdaptor) { + registerAdaptor(ctx.project.id, type, adaptor as WorkspaceAdaptor) + }, + }, + get serverUrl(): URL { + return Server.url ?? new URL("http://localhost:4096") + }, + // @ts-expect-error + $: typeof Bun === "undefined" ? undefined : Bun.$, + } + + for (const plugin of INTERNAL_PLUGINS) { + log.info("loading internal plugin", { name: plugin.name }) + const init = yield* Effect.tryPromise({ + try: () => plugin(input), + catch: (err) => { + log.error("failed to load internal plugin", { name: plugin.name, error: err }) + }, + }).pipe(Effect.option) + if (init._tag === "Some") hooks.push(init.value) + } + + const plugins = Flag.OPENCODE_PURE ? [] : (cfg.plugin_origins ?? 
[]) + if (Flag.OPENCODE_PURE && cfg.plugin_origins?.length) { + log.info("skipping external plugins in pure mode", { count: cfg.plugin_origins.length }) + } + if (plugins.length) yield* config.waitForDependencies() + + const loaded = yield* Effect.promise(() => + PluginLoader.loadExternal({ + items: plugins, + kind: "server", + report: { + start(candidate) { + log.info("loading plugin", { path: candidate.plan.spec }) + }, + missing(candidate, _retry, message) { + log.warn("plugin has no server entrypoint", { path: candidate.plan.spec, message }) + }, + error(candidate, _retry, stage, error, resolved) { + const spec = candidate.plan.spec + const cause = error instanceof Error ? (error.cause ?? error) : error + const message = stage === "load" ? errorMessage(error) : errorMessage(cause) + + if (stage === "install") { + const parsed = parsePluginSpecifier(spec) + log.error("failed to install plugin", { pkg: parsed.pkg, version: parsed.version, error: message }) + publishPluginError(`Failed to install plugin ${parsed.pkg}@${parsed.version}: ${message}`) + return + } + + if (stage === "compatibility") { + log.warn("plugin incompatible", { path: spec, error: message }) + publishPluginError(`Plugin ${spec} skipped: ${message}`) + return + } + + if (stage === "entry") { + log.error("failed to resolve plugin server entry", { path: spec, error: message }) + publishPluginError(`Failed to load plugin ${spec}: ${message}`) + return + } + + log.error("failed to load plugin", { path: spec, target: resolved?.entry, error: message }) + publishPluginError(`Failed to load plugin ${spec}: ${message}`) + }, + }, + }), + ) + for (const load of loaded) { + if (!load) continue + + // Keep plugin execution sequential so hook registration and execution + // order remains deterministic across plugin runs. 
+ yield* Effect.tryPromise({ + try: () => applyPlugin(load, input, hooks), + catch: (err) => { + const message = errorMessage(err) + log.error("failed to load plugin", { path: load.spec, error: message }) + return message + }, + }).pipe( + Effect.catch(() => { + // TODO: make proper events for this + // bus.publish(Session.Event.Error, { + // error: new NamedError.Unknown({ + // message: `Failed to load plugin ${load.spec}: ${message}`, + // }).toObject(), + // }) + return Effect.void + }), + ) + } + + // Notify plugins of current config + for (const hook of hooks) { + yield* Effect.tryPromise({ + try: () => Promise.resolve((hook as any).config?.(cfg)), + catch: (err) => { + log.error("plugin config hook failed", { error: err }) + }, + }).pipe(Effect.ignore) + } + + // Subscribe to bus events, fiber interrupted when scope closes + yield* bus.subscribeAll().pipe( + Stream.runForEach((input) => + Effect.sync(() => { + for (const hook of hooks) { + void hook["event"]?.({ event: input as any }) + } + }), + ), + Effect.forkScoped, + ) + + return { hooks } + }), + ) + + const trigger = Effect.fn("Plugin.trigger")(function* < + Name extends TriggerName, + Input = Parameters[Name]>[0], + Output = Parameters[Name]>[1], + >(name: Name, input: Input, output: Output) { + if (!name) return output + const s = yield* InstanceState.get(state) + for (const hook of s.hooks) { + const fn = hook[name] as any + if (!fn) continue + yield* Effect.promise(async () => fn(input, output)) + } + return output + }) + + const list = Effect.fn("Plugin.list")(function* () { + const s = yield* InstanceState.get(state) + return s.hooks + }) + + const init = Effect.fn("Plugin.init")(function* () { + yield* InstanceState.get(state) + }) + + return Service.of({ trigger, list, init }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Config.defaultLayer)) + +export * as Plugin from "." 
diff --git a/packages/opencode/src/plugin/plugin.ts b/packages/opencode/src/plugin/plugin.ts deleted file mode 100644 index d1fc60d993..0000000000 --- a/packages/opencode/src/plugin/plugin.ts +++ /dev/null @@ -1,287 +0,0 @@ -import type { - Hooks, - PluginInput, - Plugin as PluginInstance, - PluginModule, - WorkspaceAdaptor as PluginWorkspaceAdaptor, -} from "@opencode-ai/plugin" -import { Config } from "../config" -import { Bus } from "../bus" -import { Log } from "../util" -import { createOpencodeClient } from "@opencode-ai/sdk" -import { Flag } from "../flag/flag" -import { CodexAuthPlugin } from "./codex" -import { Session } from "../session" -import { NamedError } from "@opencode-ai/shared/util/error" -import { CopilotAuthPlugin } from "./github-copilot/copilot" -import { gitlabAuthPlugin as GitlabAuthPlugin } from "opencode-gitlab-auth" -import { PoeAuthPlugin } from "opencode-poe-auth" -import { CloudflareAIGatewayAuthPlugin, CloudflareWorkersAuthPlugin } from "./cloudflare" -import { Effect, Layer, Context, Stream } from "effect" -import { EffectBridge } from "@/effect" -import { InstanceState } from "@/effect" -import { errorMessage } from "@/util/error" -import { PluginLoader } from "./loader" -import { parsePluginSpecifier, readPluginId, readV1Plugin, resolvePluginId } from "./shared" -import { registerAdaptor } from "@/control-plane/adaptors" -import type { WorkspaceAdaptor } from "@/control-plane/types" - -const log = Log.create({ service: "plugin" }) - -type State = { - hooks: Hooks[] -} - -// Hook names that follow the (input, output) => Promise trigger pattern -type TriggerName = { - [K in keyof Hooks]-?: NonNullable extends (input: any, output: any) => Promise ? 
K : never -}[keyof Hooks] - -export interface Interface { - readonly trigger: < - Name extends TriggerName, - Input = Parameters[Name]>[0], - Output = Parameters[Name]>[1], - >( - name: Name, - input: Input, - output: Output, - ) => Effect.Effect - readonly list: () => Effect.Effect - readonly init: () => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Plugin") {} - -// Built-in plugins that are directly imported (not installed from npm) -const INTERNAL_PLUGINS: PluginInstance[] = [ - CodexAuthPlugin, - CopilotAuthPlugin, - GitlabAuthPlugin, - PoeAuthPlugin, - CloudflareWorkersAuthPlugin, - CloudflareAIGatewayAuthPlugin, -] - -function isServerPlugin(value: unknown): value is PluginInstance { - return typeof value === "function" -} - -function getServerPlugin(value: unknown) { - if (isServerPlugin(value)) return value - if (!value || typeof value !== "object" || !("server" in value)) return - if (!isServerPlugin(value.server)) return - return value.server -} - -function getLegacyPlugins(mod: Record) { - const seen = new Set() - const result: PluginInstance[] = [] - - for (const entry of Object.values(mod)) { - if (seen.has(entry)) continue - seen.add(entry) - const plugin = getServerPlugin(entry) - if (!plugin) throw new TypeError("Plugin export is not a function") - result.push(plugin) - } - - return result -} - -async function applyPlugin(load: PluginLoader.Loaded, input: PluginInput, hooks: Hooks[]) { - const plugin = readV1Plugin(load.mod, load.spec, "server", "detect") - if (plugin) { - await resolvePluginId(load.source, load.spec, load.target, readPluginId(plugin.id, load.spec), load.pkg) - hooks.push(await (plugin as PluginModule).server(input, load.options)) - return - } - - for (const server of getLegacyPlugins(load.mod)) { - hooks.push(await server(input, load.options)) - } -} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const config = yield* Config.Service - 
- const state = yield* InstanceState.make( - Effect.fn("Plugin.state")(function* (ctx) { - const hooks: Hooks[] = [] - const bridge = yield* EffectBridge.make() - - function publishPluginError(message: string) { - bridge.fork(bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })) - } - - const { Server } = yield* Effect.promise(() => import("../server/server")) - - const client = createOpencodeClient({ - baseUrl: "http://localhost:4096", - directory: ctx.directory, - headers: Flag.OPENCODE_SERVER_PASSWORD - ? { - Authorization: `Basic ${Buffer.from(`${Flag.OPENCODE_SERVER_USERNAME ?? "opencode"}:${Flag.OPENCODE_SERVER_PASSWORD}`).toString("base64")}`, - } - : undefined, - fetch: async (...args) => (await Server.Default()).app.fetch(...args), - }) - const cfg = yield* config.get() - const input: PluginInput = { - client, - project: ctx.project, - worktree: ctx.worktree, - directory: ctx.directory, - experimental_workspace: { - register(type: string, adaptor: PluginWorkspaceAdaptor) { - registerAdaptor(ctx.project.id, type, adaptor as WorkspaceAdaptor) - }, - }, - get serverUrl(): URL { - return Server.url ?? new URL("http://localhost:4096") - }, - // @ts-expect-error - $: typeof Bun === "undefined" ? undefined : Bun.$, - } - - for (const plugin of INTERNAL_PLUGINS) { - log.info("loading internal plugin", { name: plugin.name }) - const init = yield* Effect.tryPromise({ - try: () => plugin(input), - catch: (err) => { - log.error("failed to load internal plugin", { name: plugin.name, error: err }) - }, - }).pipe(Effect.option) - if (init._tag === "Some") hooks.push(init.value) - } - - const plugins = Flag.OPENCODE_PURE ? [] : (cfg.plugin_origins ?? 
[]) - if (Flag.OPENCODE_PURE && cfg.plugin_origins?.length) { - log.info("skipping external plugins in pure mode", { count: cfg.plugin_origins.length }) - } - if (plugins.length) yield* config.waitForDependencies() - - const loaded = yield* Effect.promise(() => - PluginLoader.loadExternal({ - items: plugins, - kind: "server", - report: { - start(candidate) { - log.info("loading plugin", { path: candidate.plan.spec }) - }, - missing(candidate, _retry, message) { - log.warn("plugin has no server entrypoint", { path: candidate.plan.spec, message }) - }, - error(candidate, _retry, stage, error, resolved) { - const spec = candidate.plan.spec - const cause = error instanceof Error ? (error.cause ?? error) : error - const message = stage === "load" ? errorMessage(error) : errorMessage(cause) - - if (stage === "install") { - const parsed = parsePluginSpecifier(spec) - log.error("failed to install plugin", { pkg: parsed.pkg, version: parsed.version, error: message }) - publishPluginError(`Failed to install plugin ${parsed.pkg}@${parsed.version}: ${message}`) - return - } - - if (stage === "compatibility") { - log.warn("plugin incompatible", { path: spec, error: message }) - publishPluginError(`Plugin ${spec} skipped: ${message}`) - return - } - - if (stage === "entry") { - log.error("failed to resolve plugin server entry", { path: spec, error: message }) - publishPluginError(`Failed to load plugin ${spec}: ${message}`) - return - } - - log.error("failed to load plugin", { path: spec, target: resolved?.entry, error: message }) - publishPluginError(`Failed to load plugin ${spec}: ${message}`) - }, - }, - }), - ) - for (const load of loaded) { - if (!load) continue - - // Keep plugin execution sequential so hook registration and execution - // order remains deterministic across plugin runs. 
- yield* Effect.tryPromise({ - try: () => applyPlugin(load, input, hooks), - catch: (err) => { - const message = errorMessage(err) - log.error("failed to load plugin", { path: load.spec, error: message }) - return message - }, - }).pipe( - Effect.catch(() => { - // TODO: make proper events for this - // bus.publish(Session.Event.Error, { - // error: new NamedError.Unknown({ - // message: `Failed to load plugin ${load.spec}: ${message}`, - // }).toObject(), - // }) - return Effect.void - }), - ) - } - - // Notify plugins of current config - for (const hook of hooks) { - yield* Effect.tryPromise({ - try: () => Promise.resolve((hook as any).config?.(cfg)), - catch: (err) => { - log.error("plugin config hook failed", { error: err }) - }, - }).pipe(Effect.ignore) - } - - // Subscribe to bus events, fiber interrupted when scope closes - yield* bus.subscribeAll().pipe( - Stream.runForEach((input) => - Effect.sync(() => { - for (const hook of hooks) { - void hook["event"]?.({ event: input as any }) - } - }), - ), - Effect.forkScoped, - ) - - return { hooks } - }), - ) - - const trigger = Effect.fn("Plugin.trigger")(function* < - Name extends TriggerName, - Input = Parameters[Name]>[0], - Output = Parameters[Name]>[1], - >(name: Name, input: Input, output: Output) { - if (!name) return output - const s = yield* InstanceState.get(state) - for (const hook of s.hooks) { - const fn = hook[name] as any - if (!fn) continue - yield* Effect.promise(async () => fn(input, output)) - } - return output - }) - - const list = Effect.fn("Plugin.list")(function* () { - const s = yield* InstanceState.get(state) - return s.hooks - }) - - const init = Effect.fn("Plugin.init")(function* () { - yield* InstanceState.get(state) - }) - - return Service.of({ trigger, list, init }) - }), -) - -export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Config.defaultLayer)) diff --git a/packages/opencode/test/plugin/auth-override.test.ts 
b/packages/opencode/test/plugin/auth-override.test.ts index b570d8b141..89d1641afd 100644 --- a/packages/opencode/test/plugin/auth-override.test.ts +++ b/packages/opencode/test/plugin/auth-override.test.ts @@ -63,7 +63,7 @@ describe("plugin.auth-override", () => { }, 30000) // Increased timeout for plugin installation }) -const file = path.join(import.meta.dir, "../../src/plugin/plugin.ts") +const file = path.join(import.meta.dir, "../../src/plugin/index.ts") describe("plugin.config-hook-error-isolation", () => { test("config hooks are individually error-isolated in the layer factory", async () => { From 610c036ef1e30d0209dbeb0a815c9b16e03ab1e3 Mon Sep 17 00:00:00 2001 From: thakrarsagar Date: Fri, 17 Apr 2026 02:14:58 +0530 Subject: [PATCH 053/120] fix(opencode): use low reasoning effort for GitHub Copilot gpt-5 models (#22824) Co-authored-by: opencode-agent[bot] Co-authored-by: rekram1-node --- packages/opencode/src/provider/transform.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index e527251b0f..492db40520 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -923,7 +923,7 @@ export function smallOptions(model: Provider.Model) { model.api.npm === "@ai-sdk/github-copilot" ) { if (model.api.id.includes("gpt-5")) { - if (model.api.id.includes("5.")) { + if (model.api.id.includes("5.") || model.api.id.includes("5-mini")) { return { store: false, reasoningEffort: "low" } } return { store: false, reasoningEffort: "minimal" } From cdfbb26c003a42d2fd1e2875dd6cce43e5d19678 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:55:57 -0400 Subject: [PATCH 054/120] refactor: collapse bus barrel into bus/index.ts (#22902) --- packages/opencode/src/bus/bus.ts | 191 ---------------------------- packages/opencode/src/bus/index.ts | 194 ++++++++++++++++++++++++++++- 2 files changed, 193 
insertions(+), 192 deletions(-) delete mode 100644 packages/opencode/src/bus/bus.ts diff --git a/packages/opencode/src/bus/bus.ts b/packages/opencode/src/bus/bus.ts deleted file mode 100644 index beac809925..0000000000 --- a/packages/opencode/src/bus/bus.ts +++ /dev/null @@ -1,191 +0,0 @@ -import z from "zod" -import { Effect, Exit, Layer, PubSub, Scope, Context, Stream } from "effect" -import { EffectBridge } from "@/effect" -import { Log } from "../util" -import { BusEvent } from "./bus-event" -import { GlobalBus } from "./global" -import { InstanceState } from "@/effect" -import { makeRuntime } from "@/effect/run-service" - -const log = Log.create({ service: "bus" }) - -export const InstanceDisposed = BusEvent.define( - "server.instance.disposed", - z.object({ - directory: z.string(), - }), -) - -type Payload = { - type: D["type"] - properties: z.infer -} - -type State = { - wildcard: PubSub.PubSub - typed: Map> -} - -export interface Interface { - readonly publish: ( - def: D, - properties: z.output, - ) => Effect.Effect - readonly subscribe: (def: D) => Stream.Stream> - readonly subscribeAll: () => Stream.Stream - readonly subscribeCallback: ( - def: D, - callback: (event: Payload) => unknown, - ) => Effect.Effect<() => void> - readonly subscribeAllCallback: (callback: (event: any) => unknown) => Effect.Effect<() => void> -} - -export class Service extends Context.Service()("@opencode/Bus") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const state = yield* InstanceState.make( - Effect.fn("Bus.state")(function* (ctx) { - const wildcard = yield* PubSub.unbounded() - const typed = new Map>() - - yield* Effect.addFinalizer(() => - Effect.gen(function* () { - // Publish InstanceDisposed before shutting down so subscribers see it - yield* PubSub.publish(wildcard, { - type: InstanceDisposed.type, - properties: { directory: ctx.directory }, - }) - yield* PubSub.shutdown(wildcard) - for (const ps of typed.values()) { - yield* 
PubSub.shutdown(ps) - } - }), - ) - - return { wildcard, typed } - }), - ) - - function getOrCreate(state: State, def: D) { - return Effect.gen(function* () { - let ps = state.typed.get(def.type) - if (!ps) { - ps = yield* PubSub.unbounded() - state.typed.set(def.type, ps) - } - return ps as unknown as PubSub.PubSub> - }) - } - - function publish(def: D, properties: z.output) { - return Effect.gen(function* () { - const s = yield* InstanceState.get(state) - const payload: Payload = { type: def.type, properties } - log.info("publishing", { type: def.type }) - - const ps = s.typed.get(def.type) - if (ps) yield* PubSub.publish(ps, payload) - yield* PubSub.publish(s.wildcard, payload) - - const dir = yield* InstanceState.directory - const context = yield* InstanceState.context - const workspace = yield* InstanceState.workspaceID - - GlobalBus.emit("event", { - directory: dir, - project: context.project.id, - workspace, - payload, - }) - }) - } - - function subscribe(def: D): Stream.Stream> { - log.info("subscribing", { type: def.type }) - return Stream.unwrap( - Effect.gen(function* () { - const s = yield* InstanceState.get(state) - const ps = yield* getOrCreate(s, def) - return Stream.fromPubSub(ps) - }), - ).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: def.type })))) - } - - function subscribeAll(): Stream.Stream { - log.info("subscribing", { type: "*" }) - return Stream.unwrap( - Effect.gen(function* () { - const s = yield* InstanceState.get(state) - return Stream.fromPubSub(s.wildcard) - }), - ).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: "*" })))) - } - - function on(pubsub: PubSub.PubSub, type: string, callback: (event: T) => unknown) { - return Effect.gen(function* () { - log.info("subscribing", { type }) - const bridge = yield* EffectBridge.make() - const scope = yield* Scope.make() - const subscription = yield* Scope.provide(scope)(PubSub.subscribe(pubsub)) - - yield* Scope.provide(scope)( - 
Stream.fromSubscription(subscription).pipe( - Stream.runForEach((msg) => - Effect.tryPromise({ - try: () => Promise.resolve().then(() => callback(msg)), - catch: (cause) => { - log.error("subscriber failed", { type, cause }) - }, - }).pipe(Effect.ignore), - ), - Effect.forkScoped, - ), - ) - - return () => { - log.info("unsubscribing", { type }) - bridge.fork(Scope.close(scope, Exit.void)) - } - }) - } - - const subscribeCallback = Effect.fn("Bus.subscribeCallback")(function* ( - def: D, - callback: (event: Payload) => unknown, - ) { - const s = yield* InstanceState.get(state) - const ps = yield* getOrCreate(s, def) - return yield* on(ps, def.type, callback) - }) - - const subscribeAllCallback = Effect.fn("Bus.subscribeAllCallback")(function* (callback: (event: any) => unknown) { - const s = yield* InstanceState.get(state) - return yield* on(s.wildcard, "*", callback) - }) - - return Service.of({ publish, subscribe, subscribeAll, subscribeCallback, subscribeAllCallback }) - }), -) - -export const defaultLayer = layer - -const { runPromise, runSync } = makeRuntime(Service, layer) - -// runSync is safe here because the subscribe chain (InstanceState.get, PubSub.subscribe, -// Scope.make, Effect.forkScoped) is entirely synchronous. If any step becomes async, this will throw. 
-export async function publish(def: D, properties: z.output) { - return runPromise((svc) => svc.publish(def, properties)) -} - -export function subscribe( - def: D, - callback: (event: { type: D["type"]; properties: z.infer }) => unknown, -) { - return runSync((svc) => svc.subscribeCallback(def, callback)) -} - -export function subscribeAll(callback: (event: any) => unknown) { - return runSync((svc) => svc.subscribeAllCallback(callback)) -} diff --git a/packages/opencode/src/bus/index.ts b/packages/opencode/src/bus/index.ts index 3c21d7c7d1..8a9579b599 100644 --- a/packages/opencode/src/bus/index.ts +++ b/packages/opencode/src/bus/index.ts @@ -1 +1,193 @@ -export * as Bus from "./bus" +import z from "zod" +import { Effect, Exit, Layer, PubSub, Scope, Context, Stream } from "effect" +import { EffectBridge } from "@/effect" +import { Log } from "../util" +import { BusEvent } from "./bus-event" +import { GlobalBus } from "./global" +import { InstanceState } from "@/effect" +import { makeRuntime } from "@/effect/run-service" + +const log = Log.create({ service: "bus" }) + +export const InstanceDisposed = BusEvent.define( + "server.instance.disposed", + z.object({ + directory: z.string(), + }), +) + +type Payload = { + type: D["type"] + properties: z.infer +} + +type State = { + wildcard: PubSub.PubSub + typed: Map> +} + +export interface Interface { + readonly publish: ( + def: D, + properties: z.output, + ) => Effect.Effect + readonly subscribe: (def: D) => Stream.Stream> + readonly subscribeAll: () => Stream.Stream + readonly subscribeCallback: ( + def: D, + callback: (event: Payload) => unknown, + ) => Effect.Effect<() => void> + readonly subscribeAllCallback: (callback: (event: any) => unknown) => Effect.Effect<() => void> +} + +export class Service extends Context.Service()("@opencode/Bus") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const state = yield* InstanceState.make( + Effect.fn("Bus.state")(function* (ctx) { + const 
wildcard = yield* PubSub.unbounded() + const typed = new Map>() + + yield* Effect.addFinalizer(() => + Effect.gen(function* () { + // Publish InstanceDisposed before shutting down so subscribers see it + yield* PubSub.publish(wildcard, { + type: InstanceDisposed.type, + properties: { directory: ctx.directory }, + }) + yield* PubSub.shutdown(wildcard) + for (const ps of typed.values()) { + yield* PubSub.shutdown(ps) + } + }), + ) + + return { wildcard, typed } + }), + ) + + function getOrCreate(state: State, def: D) { + return Effect.gen(function* () { + let ps = state.typed.get(def.type) + if (!ps) { + ps = yield* PubSub.unbounded() + state.typed.set(def.type, ps) + } + return ps as unknown as PubSub.PubSub> + }) + } + + function publish(def: D, properties: z.output) { + return Effect.gen(function* () { + const s = yield* InstanceState.get(state) + const payload: Payload = { type: def.type, properties } + log.info("publishing", { type: def.type }) + + const ps = s.typed.get(def.type) + if (ps) yield* PubSub.publish(ps, payload) + yield* PubSub.publish(s.wildcard, payload) + + const dir = yield* InstanceState.directory + const context = yield* InstanceState.context + const workspace = yield* InstanceState.workspaceID + + GlobalBus.emit("event", { + directory: dir, + project: context.project.id, + workspace, + payload, + }) + }) + } + + function subscribe(def: D): Stream.Stream> { + log.info("subscribing", { type: def.type }) + return Stream.unwrap( + Effect.gen(function* () { + const s = yield* InstanceState.get(state) + const ps = yield* getOrCreate(s, def) + return Stream.fromPubSub(ps) + }), + ).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: def.type })))) + } + + function subscribeAll(): Stream.Stream { + log.info("subscribing", { type: "*" }) + return Stream.unwrap( + Effect.gen(function* () { + const s = yield* InstanceState.get(state) + return Stream.fromPubSub(s.wildcard) + }), + ).pipe(Stream.ensuring(Effect.sync(() => 
log.info("unsubscribing", { type: "*" })))) + } + + function on(pubsub: PubSub.PubSub, type: string, callback: (event: T) => unknown) { + return Effect.gen(function* () { + log.info("subscribing", { type }) + const bridge = yield* EffectBridge.make() + const scope = yield* Scope.make() + const subscription = yield* Scope.provide(scope)(PubSub.subscribe(pubsub)) + + yield* Scope.provide(scope)( + Stream.fromSubscription(subscription).pipe( + Stream.runForEach((msg) => + Effect.tryPromise({ + try: () => Promise.resolve().then(() => callback(msg)), + catch: (cause) => { + log.error("subscriber failed", { type, cause }) + }, + }).pipe(Effect.ignore), + ), + Effect.forkScoped, + ), + ) + + return () => { + log.info("unsubscribing", { type }) + bridge.fork(Scope.close(scope, Exit.void)) + } + }) + } + + const subscribeCallback = Effect.fn("Bus.subscribeCallback")(function* ( + def: D, + callback: (event: Payload) => unknown, + ) { + const s = yield* InstanceState.get(state) + const ps = yield* getOrCreate(s, def) + return yield* on(ps, def.type, callback) + }) + + const subscribeAllCallback = Effect.fn("Bus.subscribeAllCallback")(function* (callback: (event: any) => unknown) { + const s = yield* InstanceState.get(state) + return yield* on(s.wildcard, "*", callback) + }) + + return Service.of({ publish, subscribe, subscribeAll, subscribeCallback, subscribeAllCallback }) + }), +) + +export const defaultLayer = layer + +const { runPromise, runSync } = makeRuntime(Service, layer) + +// runSync is safe here because the subscribe chain (InstanceState.get, PubSub.subscribe, +// Scope.make, Effect.forkScoped) is entirely synchronous. If any step becomes async, this will throw. 
+export async function publish(def: D, properties: z.output) { + return runPromise((svc) => svc.publish(def, properties)) +} + +export function subscribe( + def: D, + callback: (event: { type: D["type"]; properties: z.infer }) => unknown, +) { + return runSync((svc) => svc.subscribeCallback(def, callback)) +} + +export function subscribeAll(callback: (event: any) => unknown) { + return runSync((svc) => svc.subscribeAllCallback(callback)) +} + +export * as Bus from "." From 1694c5bfe1248c4997bbf76849f6e297e31d710d Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:56:09 -0400 Subject: [PATCH 055/120] refactor: collapse file barrel into file/index.ts (#22901) --- packages/opencode/src/file/file.ts | 654 --------------------------- packages/opencode/src/file/index.ts | 657 +++++++++++++++++++++++++++- 2 files changed, 656 insertions(+), 655 deletions(-) delete mode 100644 packages/opencode/src/file/file.ts diff --git a/packages/opencode/src/file/file.ts b/packages/opencode/src/file/file.ts deleted file mode 100644 index ee8df2b0b9..0000000000 --- a/packages/opencode/src/file/file.ts +++ /dev/null @@ -1,654 +0,0 @@ -import { BusEvent } from "@/bus/bus-event" -import { InstanceState } from "@/effect" - -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Git } from "@/git" -import { Effect, Layer, Context, Scope } from "effect" -import * as Stream from "effect/Stream" -import { formatPatch, structuredPatch } from "diff" -import fuzzysort from "fuzzysort" -import ignore from "ignore" -import path from "path" -import z from "zod" -import { Global } from "../global" -import { Instance } from "../project/instance" -import { Log } from "../util" -import { Protected } from "./protected" -import { Ripgrep } from "./ripgrep" - -export const Info = z - .object({ - path: z.string(), - added: z.number().int(), - removed: z.number().int(), - status: z.enum(["added", "deleted", "modified"]), - }) - .meta({ - ref: "File", - }) - -export type 
Info = z.infer - -export const Node = z - .object({ - name: z.string(), - path: z.string(), - absolute: z.string(), - type: z.enum(["file", "directory"]), - ignored: z.boolean(), - }) - .meta({ - ref: "FileNode", - }) -export type Node = z.infer - -export const Content = z - .object({ - type: z.enum(["text", "binary"]), - content: z.string(), - diff: z.string().optional(), - patch: z - .object({ - oldFileName: z.string(), - newFileName: z.string(), - oldHeader: z.string().optional(), - newHeader: z.string().optional(), - hunks: z.array( - z.object({ - oldStart: z.number(), - oldLines: z.number(), - newStart: z.number(), - newLines: z.number(), - lines: z.array(z.string()), - }), - ), - index: z.string().optional(), - }) - .optional(), - encoding: z.literal("base64").optional(), - mimeType: z.string().optional(), - }) - .meta({ - ref: "FileContent", - }) -export type Content = z.infer - -export const Event = { - Edited: BusEvent.define( - "file.edited", - z.object({ - file: z.string(), - }), - ), -} - -const log = Log.create({ service: "file" }) - -const binary = new Set([ - "exe", - "dll", - "pdb", - "bin", - "so", - "dylib", - "o", - "a", - "lib", - "wav", - "mp3", - "ogg", - "oga", - "ogv", - "ogx", - "flac", - "aac", - "wma", - "m4a", - "weba", - "mp4", - "avi", - "mov", - "wmv", - "flv", - "webm", - "mkv", - "zip", - "tar", - "gz", - "gzip", - "bz", - "bz2", - "bzip", - "bzip2", - "7z", - "rar", - "xz", - "lz", - "z", - "pdf", - "doc", - "docx", - "ppt", - "pptx", - "xls", - "xlsx", - "dmg", - "iso", - "img", - "vmdk", - "ttf", - "otf", - "woff", - "woff2", - "eot", - "sqlite", - "db", - "mdb", - "apk", - "ipa", - "aab", - "xapk", - "app", - "pkg", - "deb", - "rpm", - "snap", - "flatpak", - "appimage", - "msi", - "msp", - "jar", - "war", - "ear", - "class", - "kotlin_module", - "dex", - "vdex", - "odex", - "oat", - "art", - "wasm", - "wat", - "bc", - "ll", - "s", - "ko", - "sys", - "drv", - "efi", - "rom", - "com", -]) - -const image = new Set([ - "png", - 
"jpg", - "jpeg", - "gif", - "bmp", - "webp", - "ico", - "tif", - "tiff", - "svg", - "svgz", - "avif", - "apng", - "jxl", - "heic", - "heif", - "raw", - "cr2", - "nef", - "arw", - "dng", - "orf", - "raf", - "pef", - "x3f", -]) - -const text = new Set([ - "ts", - "tsx", - "mts", - "cts", - "mtsx", - "ctsx", - "js", - "jsx", - "mjs", - "cjs", - "sh", - "bash", - "zsh", - "fish", - "ps1", - "psm1", - "cmd", - "bat", - "json", - "jsonc", - "json5", - "yaml", - "yml", - "toml", - "md", - "mdx", - "txt", - "xml", - "html", - "htm", - "css", - "scss", - "sass", - "less", - "graphql", - "gql", - "sql", - "ini", - "cfg", - "conf", - "env", -]) - -const textName = new Set([ - "dockerfile", - "makefile", - ".gitignore", - ".gitattributes", - ".editorconfig", - ".npmrc", - ".nvmrc", - ".prettierrc", - ".eslintrc", -]) - -const mime: Record = { - png: "image/png", - jpg: "image/jpeg", - jpeg: "image/jpeg", - gif: "image/gif", - bmp: "image/bmp", - webp: "image/webp", - ico: "image/x-icon", - tif: "image/tiff", - tiff: "image/tiff", - svg: "image/svg+xml", - svgz: "image/svg+xml", - avif: "image/avif", - apng: "image/apng", - jxl: "image/jxl", - heic: "image/heic", - heif: "image/heif", -} - -type Entry = { files: string[]; dirs: string[] } - -const ext = (file: string) => path.extname(file).toLowerCase().slice(1) -const name = (file: string) => path.basename(file).toLowerCase() -const isImageByExtension = (file: string) => image.has(ext(file)) -const isTextByExtension = (file: string) => text.has(ext(file)) -const isTextByName = (file: string) => textName.has(name(file)) -const isBinaryByExtension = (file: string) => binary.has(ext(file)) -const isImage = (mimeType: string) => mimeType.startsWith("image/") -const getImageMimeType = (file: string) => mime[ext(file)] || "image/" + ext(file) - -function shouldEncode(mimeType: string) { - const type = mimeType.toLowerCase() - log.debug("shouldEncode", { type }) - if (!type) return false - if (type.startsWith("text/")) return false - 
if (type.includes("charset=")) return false - const top = type.split("/", 2)[0] - return ["image", "audio", "video", "font", "model", "multipart"].includes(top) -} - -const hidden = (item: string) => { - const normalized = item.replaceAll("\\", "/").replace(/\/+$/, "") - return normalized.split("/").some((part) => part.startsWith(".") && part.length > 1) -} - -const sortHiddenLast = (items: string[], prefer: boolean) => { - if (prefer) return items - const visible: string[] = [] - const hiddenItems: string[] = [] - for (const item of items) { - if (hidden(item)) hiddenItems.push(item) - else visible.push(item) - } - return [...visible, ...hiddenItems] -} - -interface State { - cache: Entry -} - -export interface Interface { - readonly init: () => Effect.Effect - readonly status: () => Effect.Effect - readonly read: (file: string) => Effect.Effect - readonly list: (dir?: string) => Effect.Effect - readonly search: (input: { - query: string - limit?: number - dirs?: boolean - type?: "file" | "directory" - }) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/File") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const appFs = yield* AppFileSystem.Service - const rg = yield* Ripgrep.Service - const git = yield* Git.Service - const scope = yield* Scope.Scope - - const state = yield* InstanceState.make( - Effect.fn("File.state")(() => - Effect.succeed({ - cache: { files: [], dirs: [] } as Entry, - }), - ), - ) - - const scan = Effect.fn("File.scan")(function* () { - if (Instance.directory === path.parse(Instance.directory).root) return - const isGlobalHome = Instance.directory === Global.Path.home && Instance.project.id === "global" - const next: Entry = { files: [], dirs: [] } - - if (isGlobalHome) { - const dirs = new Set() - const protectedNames = Protected.names() - const ignoreNested = new Set(["node_modules", "dist", "build", "target", "vendor"]) - const shouldIgnoreName = (name: string) => 
name.startsWith(".") || protectedNames.has(name) - const shouldIgnoreNested = (name: string) => name.startsWith(".") || ignoreNested.has(name) - const top = yield* appFs.readDirectoryEntries(Instance.directory).pipe(Effect.orElseSucceed(() => [])) - - for (const entry of top) { - if (entry.type !== "directory") continue - if (shouldIgnoreName(entry.name)) continue - dirs.add(entry.name + "/") - - const base = path.join(Instance.directory, entry.name) - const children = yield* appFs.readDirectoryEntries(base).pipe(Effect.orElseSucceed(() => [])) - for (const child of children) { - if (child.type !== "directory") continue - if (shouldIgnoreNested(child.name)) continue - dirs.add(entry.name + "/" + child.name + "/") - } - } - - next.dirs = Array.from(dirs).toSorted() - } else { - const files = yield* rg.files({ cwd: Instance.directory }).pipe( - Stream.runCollect, - Effect.map((chunk) => [...chunk]), - ) - const seen = new Set() - for (const file of files) { - next.files.push(file) - let current = file - while (true) { - const dir = path.dirname(current) - if (dir === ".") break - if (dir === current) break - current = dir - if (seen.has(dir)) continue - seen.add(dir) - next.dirs.push(dir + "/") - } - } - } - - const s = yield* InstanceState.get(state) - s.cache = next - }) - - let cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void))) - - const ensure = Effect.fn("File.ensure")(function* () { - yield* cachedScan - cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void))) - }) - - const gitText = Effect.fnUntraced(function* (args: string[]) { - return (yield* git.run(args, { cwd: Instance.directory })).text() - }) - - const init = Effect.fn("File.init")(function* () { - yield* ensure().pipe(Effect.forkIn(scope)) - }) - - const status = Effect.fn("File.status")(function* () { - if (Instance.project.vcs !== "git") return [] - - const diffOutput = yield* gitText([ - "-c", - "core.fsmonitor=false", - "-c", - 
"core.quotepath=false", - "diff", - "--numstat", - "HEAD", - ]) - - const changed: Info[] = [] - - if (diffOutput.trim()) { - for (const line of diffOutput.trim().split("\n")) { - const [added, removed, file] = line.split("\t") - changed.push({ - path: file, - added: added === "-" ? 0 : parseInt(added, 10), - removed: removed === "-" ? 0 : parseInt(removed, 10), - status: "modified", - }) - } - } - - const untrackedOutput = yield* gitText([ - "-c", - "core.fsmonitor=false", - "-c", - "core.quotepath=false", - "ls-files", - "--others", - "--exclude-standard", - ]) - - if (untrackedOutput.trim()) { - for (const file of untrackedOutput.trim().split("\n")) { - const content = yield* appFs - .readFileString(path.join(Instance.directory, file)) - .pipe(Effect.catch(() => Effect.succeed(undefined))) - if (content === undefined) continue - changed.push({ - path: file, - added: content.split("\n").length, - removed: 0, - status: "added", - }) - } - } - - const deletedOutput = yield* gitText([ - "-c", - "core.fsmonitor=false", - "-c", - "core.quotepath=false", - "diff", - "--name-only", - "--diff-filter=D", - "HEAD", - ]) - - if (deletedOutput.trim()) { - for (const file of deletedOutput.trim().split("\n")) { - changed.push({ - path: file, - added: 0, - removed: 0, - status: "deleted", - }) - } - } - - return changed.map((item) => { - const full = path.isAbsolute(item.path) ? 
item.path : path.join(Instance.directory, item.path) - return { - ...item, - path: path.relative(Instance.directory, full), - } - }) - }) - - const read: Interface["read"] = Effect.fn("File.read")(function* (file: string) { - using _ = log.time("read", { file }) - const full = path.join(Instance.directory, file) - - if (!Instance.containsPath(full)) throw new Error("Access denied: path escapes project directory") - - if (isImageByExtension(file)) { - const exists = yield* appFs.existsSafe(full) - if (exists) { - const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array()))) - return { - type: "text" as const, - content: Buffer.from(bytes).toString("base64"), - mimeType: getImageMimeType(file), - encoding: "base64" as const, - } - } - return { type: "text" as const, content: "" } - } - - const knownText = isTextByExtension(file) || isTextByName(file) - - if (isBinaryByExtension(file) && !knownText) return { type: "binary" as const, content: "" } - - const exists = yield* appFs.existsSafe(full) - if (!exists) return { type: "text" as const, content: "" } - - const mimeType = AppFileSystem.mimeType(full) - const encode = knownText ? 
false : shouldEncode(mimeType) - - if (encode && !isImage(mimeType)) return { type: "binary" as const, content: "", mimeType } - - if (encode) { - const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array()))) - return { - type: "text" as const, - content: Buffer.from(bytes).toString("base64"), - mimeType, - encoding: "base64" as const, - } - } - - const content = yield* appFs.readFileString(full).pipe( - Effect.map((s) => s.trim()), - Effect.catch(() => Effect.succeed("")), - ) - - if (Instance.project.vcs === "git") { - let diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--", file]) - if (!diff.trim()) { - diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--staged", "--", file]) - } - if (diff.trim()) { - const original = yield* git.show(Instance.directory, "HEAD", file) - const patch = structuredPatch(file, file, original, content, "old", "new", { - context: Infinity, - ignoreWhitespace: true, - }) - return { type: "text" as const, content, patch, diff: formatPatch(patch) } - } - return { type: "text" as const, content } - } - - return { type: "text" as const, content } - }) - - const list = Effect.fn("File.list")(function* (dir?: string) { - const exclude = [".git", ".DS_Store"] - let ignored = (_: string) => false - if (Instance.project.vcs === "git") { - const ig = ignore() - const gitignore = path.join(Instance.project.worktree, ".gitignore") - const gitignoreText = yield* appFs.readFileString(gitignore).pipe(Effect.catch(() => Effect.succeed(""))) - if (gitignoreText) ig.add(gitignoreText) - const ignoreFile = path.join(Instance.project.worktree, ".ignore") - const ignoreText = yield* appFs.readFileString(ignoreFile).pipe(Effect.catch(() => Effect.succeed(""))) - if (ignoreText) ig.add(ignoreText) - ignored = ig.ignores.bind(ig) - } - - const resolved = dir ? 
path.join(Instance.directory, dir) : Instance.directory - if (!Instance.containsPath(resolved)) throw new Error("Access denied: path escapes project directory") - - const entries = yield* appFs.readDirectoryEntries(resolved).pipe(Effect.orElseSucceed(() => [])) - - const nodes: Node[] = [] - for (const entry of entries) { - if (exclude.includes(entry.name)) continue - const absolute = path.join(resolved, entry.name) - const file = path.relative(Instance.directory, absolute) - const type = entry.type === "directory" ? "directory" : "file" - nodes.push({ - name: entry.name, - path: file, - absolute, - type, - ignored: ignored(type === "directory" ? file + "/" : file), - }) - } - return nodes.sort((a, b) => { - if (a.type !== b.type) return a.type === "directory" ? -1 : 1 - return a.name.localeCompare(b.name) - }) - }) - - const search = Effect.fn("File.search")(function* (input: { - query: string - limit?: number - dirs?: boolean - type?: "file" | "directory" - }) { - yield* ensure() - const { cache } = yield* InstanceState.get(state) - - const query = input.query.trim() - const limit = input.limit ?? 100 - const kind = input.type ?? (input.dirs === false ? "file" : "all") - log.info("search", { query, kind }) - - const preferHidden = query.startsWith(".") || query.includes("/.") - - if (!query) { - if (kind === "file") return cache.files.slice(0, limit) - return sortHiddenLast(cache.dirs.toSorted(), preferHidden).slice(0, limit) - } - - const items = kind === "file" ? cache.files : kind === "directory" ? cache.dirs : [...cache.files, ...cache.dirs] - - const searchLimit = kind === "directory" && !preferHidden ? limit * 20 : limit - const sorted = fuzzysort.go(query, items, { limit: searchLimit }).map((item) => item.target) - const output = kind === "directory" ? 
sortHiddenLast(sorted, preferHidden).slice(0, limit) : sorted - - log.info("search", { query, kind, results: output.length }) - return output - }) - - log.info("init") - return Service.of({ init, status, read, list, search }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Ripgrep.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(Git.defaultLayer), -) diff --git a/packages/opencode/src/file/index.ts b/packages/opencode/src/file/index.ts index b65ac9d686..2f30b5400d 100644 --- a/packages/opencode/src/file/index.ts +++ b/packages/opencode/src/file/index.ts @@ -1 +1,656 @@ -export * as File from "./file" +import { BusEvent } from "@/bus/bus-event" +import { InstanceState } from "@/effect" + +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { Git } from "@/git" +import { Effect, Layer, Context, Scope } from "effect" +import * as Stream from "effect/Stream" +import { formatPatch, structuredPatch } from "diff" +import fuzzysort from "fuzzysort" +import ignore from "ignore" +import path from "path" +import z from "zod" +import { Global } from "../global" +import { Instance } from "../project/instance" +import { Log } from "../util" +import { Protected } from "./protected" +import { Ripgrep } from "./ripgrep" + +export const Info = z + .object({ + path: z.string(), + added: z.number().int(), + removed: z.number().int(), + status: z.enum(["added", "deleted", "modified"]), + }) + .meta({ + ref: "File", + }) + +export type Info = z.infer + +export const Node = z + .object({ + name: z.string(), + path: z.string(), + absolute: z.string(), + type: z.enum(["file", "directory"]), + ignored: z.boolean(), + }) + .meta({ + ref: "FileNode", + }) +export type Node = z.infer + +export const Content = z + .object({ + type: z.enum(["text", "binary"]), + content: z.string(), + diff: z.string().optional(), + patch: z + .object({ + oldFileName: z.string(), + newFileName: z.string(), + oldHeader: z.string().optional(), + 
newHeader: z.string().optional(), + hunks: z.array( + z.object({ + oldStart: z.number(), + oldLines: z.number(), + newStart: z.number(), + newLines: z.number(), + lines: z.array(z.string()), + }), + ), + index: z.string().optional(), + }) + .optional(), + encoding: z.literal("base64").optional(), + mimeType: z.string().optional(), + }) + .meta({ + ref: "FileContent", + }) +export type Content = z.infer + +export const Event = { + Edited: BusEvent.define( + "file.edited", + z.object({ + file: z.string(), + }), + ), +} + +const log = Log.create({ service: "file" }) + +const binary = new Set([ + "exe", + "dll", + "pdb", + "bin", + "so", + "dylib", + "o", + "a", + "lib", + "wav", + "mp3", + "ogg", + "oga", + "ogv", + "ogx", + "flac", + "aac", + "wma", + "m4a", + "weba", + "mp4", + "avi", + "mov", + "wmv", + "flv", + "webm", + "mkv", + "zip", + "tar", + "gz", + "gzip", + "bz", + "bz2", + "bzip", + "bzip2", + "7z", + "rar", + "xz", + "lz", + "z", + "pdf", + "doc", + "docx", + "ppt", + "pptx", + "xls", + "xlsx", + "dmg", + "iso", + "img", + "vmdk", + "ttf", + "otf", + "woff", + "woff2", + "eot", + "sqlite", + "db", + "mdb", + "apk", + "ipa", + "aab", + "xapk", + "app", + "pkg", + "deb", + "rpm", + "snap", + "flatpak", + "appimage", + "msi", + "msp", + "jar", + "war", + "ear", + "class", + "kotlin_module", + "dex", + "vdex", + "odex", + "oat", + "art", + "wasm", + "wat", + "bc", + "ll", + "s", + "ko", + "sys", + "drv", + "efi", + "rom", + "com", +]) + +const image = new Set([ + "png", + "jpg", + "jpeg", + "gif", + "bmp", + "webp", + "ico", + "tif", + "tiff", + "svg", + "svgz", + "avif", + "apng", + "jxl", + "heic", + "heif", + "raw", + "cr2", + "nef", + "arw", + "dng", + "orf", + "raf", + "pef", + "x3f", +]) + +const text = new Set([ + "ts", + "tsx", + "mts", + "cts", + "mtsx", + "ctsx", + "js", + "jsx", + "mjs", + "cjs", + "sh", + "bash", + "zsh", + "fish", + "ps1", + "psm1", + "cmd", + "bat", + "json", + "jsonc", + "json5", + "yaml", + "yml", + "toml", + "md", + "mdx", + 
"txt", + "xml", + "html", + "htm", + "css", + "scss", + "sass", + "less", + "graphql", + "gql", + "sql", + "ini", + "cfg", + "conf", + "env", +]) + +const textName = new Set([ + "dockerfile", + "makefile", + ".gitignore", + ".gitattributes", + ".editorconfig", + ".npmrc", + ".nvmrc", + ".prettierrc", + ".eslintrc", +]) + +const mime: Record = { + png: "image/png", + jpg: "image/jpeg", + jpeg: "image/jpeg", + gif: "image/gif", + bmp: "image/bmp", + webp: "image/webp", + ico: "image/x-icon", + tif: "image/tiff", + tiff: "image/tiff", + svg: "image/svg+xml", + svgz: "image/svg+xml", + avif: "image/avif", + apng: "image/apng", + jxl: "image/jxl", + heic: "image/heic", + heif: "image/heif", +} + +type Entry = { files: string[]; dirs: string[] } + +const ext = (file: string) => path.extname(file).toLowerCase().slice(1) +const name = (file: string) => path.basename(file).toLowerCase() +const isImageByExtension = (file: string) => image.has(ext(file)) +const isTextByExtension = (file: string) => text.has(ext(file)) +const isTextByName = (file: string) => textName.has(name(file)) +const isBinaryByExtension = (file: string) => binary.has(ext(file)) +const isImage = (mimeType: string) => mimeType.startsWith("image/") +const getImageMimeType = (file: string) => mime[ext(file)] || "image/" + ext(file) + +function shouldEncode(mimeType: string) { + const type = mimeType.toLowerCase() + log.debug("shouldEncode", { type }) + if (!type) return false + if (type.startsWith("text/")) return false + if (type.includes("charset=")) return false + const top = type.split("/", 2)[0] + return ["image", "audio", "video", "font", "model", "multipart"].includes(top) +} + +const hidden = (item: string) => { + const normalized = item.replaceAll("\\", "/").replace(/\/+$/, "") + return normalized.split("/").some((part) => part.startsWith(".") && part.length > 1) +} + +const sortHiddenLast = (items: string[], prefer: boolean) => { + if (prefer) return items + const visible: string[] = [] + const 
hiddenItems: string[] = [] + for (const item of items) { + if (hidden(item)) hiddenItems.push(item) + else visible.push(item) + } + return [...visible, ...hiddenItems] +} + +interface State { + cache: Entry +} + +export interface Interface { + readonly init: () => Effect.Effect + readonly status: () => Effect.Effect + readonly read: (file: string) => Effect.Effect + readonly list: (dir?: string) => Effect.Effect + readonly search: (input: { + query: string + limit?: number + dirs?: boolean + type?: "file" | "directory" + }) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/File") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const appFs = yield* AppFileSystem.Service + const rg = yield* Ripgrep.Service + const git = yield* Git.Service + const scope = yield* Scope.Scope + + const state = yield* InstanceState.make( + Effect.fn("File.state")(() => + Effect.succeed({ + cache: { files: [], dirs: [] } as Entry, + }), + ), + ) + + const scan = Effect.fn("File.scan")(function* () { + if (Instance.directory === path.parse(Instance.directory).root) return + const isGlobalHome = Instance.directory === Global.Path.home && Instance.project.id === "global" + const next: Entry = { files: [], dirs: [] } + + if (isGlobalHome) { + const dirs = new Set() + const protectedNames = Protected.names() + const ignoreNested = new Set(["node_modules", "dist", "build", "target", "vendor"]) + const shouldIgnoreName = (name: string) => name.startsWith(".") || protectedNames.has(name) + const shouldIgnoreNested = (name: string) => name.startsWith(".") || ignoreNested.has(name) + const top = yield* appFs.readDirectoryEntries(Instance.directory).pipe(Effect.orElseSucceed(() => [])) + + for (const entry of top) { + if (entry.type !== "directory") continue + if (shouldIgnoreName(entry.name)) continue + dirs.add(entry.name + "/") + + const base = path.join(Instance.directory, entry.name) + const children = yield* 
appFs.readDirectoryEntries(base).pipe(Effect.orElseSucceed(() => [])) + for (const child of children) { + if (child.type !== "directory") continue + if (shouldIgnoreNested(child.name)) continue + dirs.add(entry.name + "/" + child.name + "/") + } + } + + next.dirs = Array.from(dirs).toSorted() + } else { + const files = yield* rg.files({ cwd: Instance.directory }).pipe( + Stream.runCollect, + Effect.map((chunk) => [...chunk]), + ) + const seen = new Set() + for (const file of files) { + next.files.push(file) + let current = file + while (true) { + const dir = path.dirname(current) + if (dir === ".") break + if (dir === current) break + current = dir + if (seen.has(dir)) continue + seen.add(dir) + next.dirs.push(dir + "/") + } + } + } + + const s = yield* InstanceState.get(state) + s.cache = next + }) + + let cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void))) + + const ensure = Effect.fn("File.ensure")(function* () { + yield* cachedScan + cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void))) + }) + + const gitText = Effect.fnUntraced(function* (args: string[]) { + return (yield* git.run(args, { cwd: Instance.directory })).text() + }) + + const init = Effect.fn("File.init")(function* () { + yield* ensure().pipe(Effect.forkIn(scope)) + }) + + const status = Effect.fn("File.status")(function* () { + if (Instance.project.vcs !== "git") return [] + + const diffOutput = yield* gitText([ + "-c", + "core.fsmonitor=false", + "-c", + "core.quotepath=false", + "diff", + "--numstat", + "HEAD", + ]) + + const changed: Info[] = [] + + if (diffOutput.trim()) { + for (const line of diffOutput.trim().split("\n")) { + const [added, removed, file] = line.split("\t") + changed.push({ + path: file, + added: added === "-" ? 0 : parseInt(added, 10), + removed: removed === "-" ? 
0 : parseInt(removed, 10), + status: "modified", + }) + } + } + + const untrackedOutput = yield* gitText([ + "-c", + "core.fsmonitor=false", + "-c", + "core.quotepath=false", + "ls-files", + "--others", + "--exclude-standard", + ]) + + if (untrackedOutput.trim()) { + for (const file of untrackedOutput.trim().split("\n")) { + const content = yield* appFs + .readFileString(path.join(Instance.directory, file)) + .pipe(Effect.catch(() => Effect.succeed(undefined))) + if (content === undefined) continue + changed.push({ + path: file, + added: content.split("\n").length, + removed: 0, + status: "added", + }) + } + } + + const deletedOutput = yield* gitText([ + "-c", + "core.fsmonitor=false", + "-c", + "core.quotepath=false", + "diff", + "--name-only", + "--diff-filter=D", + "HEAD", + ]) + + if (deletedOutput.trim()) { + for (const file of deletedOutput.trim().split("\n")) { + changed.push({ + path: file, + added: 0, + removed: 0, + status: "deleted", + }) + } + } + + return changed.map((item) => { + const full = path.isAbsolute(item.path) ? 
item.path : path.join(Instance.directory, item.path) + return { + ...item, + path: path.relative(Instance.directory, full), + } + }) + }) + + const read: Interface["read"] = Effect.fn("File.read")(function* (file: string) { + using _ = log.time("read", { file }) + const full = path.join(Instance.directory, file) + + if (!Instance.containsPath(full)) throw new Error("Access denied: path escapes project directory") + + if (isImageByExtension(file)) { + const exists = yield* appFs.existsSafe(full) + if (exists) { + const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array()))) + return { + type: "text" as const, + content: Buffer.from(bytes).toString("base64"), + mimeType: getImageMimeType(file), + encoding: "base64" as const, + } + } + return { type: "text" as const, content: "" } + } + + const knownText = isTextByExtension(file) || isTextByName(file) + + if (isBinaryByExtension(file) && !knownText) return { type: "binary" as const, content: "" } + + const exists = yield* appFs.existsSafe(full) + if (!exists) return { type: "text" as const, content: "" } + + const mimeType = AppFileSystem.mimeType(full) + const encode = knownText ? 
false : shouldEncode(mimeType) + + if (encode && !isImage(mimeType)) return { type: "binary" as const, content: "", mimeType } + + if (encode) { + const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array()))) + return { + type: "text" as const, + content: Buffer.from(bytes).toString("base64"), + mimeType, + encoding: "base64" as const, + } + } + + const content = yield* appFs.readFileString(full).pipe( + Effect.map((s) => s.trim()), + Effect.catch(() => Effect.succeed("")), + ) + + if (Instance.project.vcs === "git") { + let diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--", file]) + if (!diff.trim()) { + diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--staged", "--", file]) + } + if (diff.trim()) { + const original = yield* git.show(Instance.directory, "HEAD", file) + const patch = structuredPatch(file, file, original, content, "old", "new", { + context: Infinity, + ignoreWhitespace: true, + }) + return { type: "text" as const, content, patch, diff: formatPatch(patch) } + } + return { type: "text" as const, content } + } + + return { type: "text" as const, content } + }) + + const list = Effect.fn("File.list")(function* (dir?: string) { + const exclude = [".git", ".DS_Store"] + let ignored = (_: string) => false + if (Instance.project.vcs === "git") { + const ig = ignore() + const gitignore = path.join(Instance.project.worktree, ".gitignore") + const gitignoreText = yield* appFs.readFileString(gitignore).pipe(Effect.catch(() => Effect.succeed(""))) + if (gitignoreText) ig.add(gitignoreText) + const ignoreFile = path.join(Instance.project.worktree, ".ignore") + const ignoreText = yield* appFs.readFileString(ignoreFile).pipe(Effect.catch(() => Effect.succeed(""))) + if (ignoreText) ig.add(ignoreText) + ignored = ig.ignores.bind(ig) + } + + const resolved = dir ? 
path.join(Instance.directory, dir) : Instance.directory + if (!Instance.containsPath(resolved)) throw new Error("Access denied: path escapes project directory") + + const entries = yield* appFs.readDirectoryEntries(resolved).pipe(Effect.orElseSucceed(() => [])) + + const nodes: Node[] = [] + for (const entry of entries) { + if (exclude.includes(entry.name)) continue + const absolute = path.join(resolved, entry.name) + const file = path.relative(Instance.directory, absolute) + const type = entry.type === "directory" ? "directory" : "file" + nodes.push({ + name: entry.name, + path: file, + absolute, + type, + ignored: ignored(type === "directory" ? file + "/" : file), + }) + } + return nodes.sort((a, b) => { + if (a.type !== b.type) return a.type === "directory" ? -1 : 1 + return a.name.localeCompare(b.name) + }) + }) + + const search = Effect.fn("File.search")(function* (input: { + query: string + limit?: number + dirs?: boolean + type?: "file" | "directory" + }) { + yield* ensure() + const { cache } = yield* InstanceState.get(state) + + const query = input.query.trim() + const limit = input.limit ?? 100 + const kind = input.type ?? (input.dirs === false ? "file" : "all") + log.info("search", { query, kind }) + + const preferHidden = query.startsWith(".") || query.includes("/.") + + if (!query) { + if (kind === "file") return cache.files.slice(0, limit) + return sortHiddenLast(cache.dirs.toSorted(), preferHidden).slice(0, limit) + } + + const items = kind === "file" ? cache.files : kind === "directory" ? cache.dirs : [...cache.files, ...cache.dirs] + + const searchLimit = kind === "directory" && !preferHidden ? limit * 20 : limit + const sorted = fuzzysort.go(query, items, { limit: searchLimit }).map((item) => item.target) + const output = kind === "directory" ? 
sortHiddenLast(sorted, preferHidden).slice(0, limit) : sorted + + log.info("search", { query, kind, results: output.length }) + return output + }) + + log.info("init") + return Service.of({ init, status, read, list, search }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Ripgrep.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(Git.defaultLayer), +) + +export * as File from "." From ae584332b36668ddfe4faa1b65157f5d276d34ad Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 15:56:29 -0500 Subject: [PATCH 056/120] fix: uncomment import (#22923) --- packages/opencode/src/cli/cmd/tui/plugin/runtime.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index ac1c0fc3b8..e1b2eca1dd 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -1,4 +1,4 @@ -// import "@opentui/solid/runtime-plugin-support" +import "@opentui/solid/runtime-plugin-support" import { type TuiDispose, type TuiPlugin, From 86c54c5acc7b3bf4d527cbd9fdfbd5dfc925b4d6 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 16:58:17 -0400 Subject: [PATCH 057/120] fix(tui): minor logging cleanup (#22924) --- .../src/cli/cmd/tui/component/dialog-workspace-create.tsx | 2 -- packages/opencode/src/cli/cmd/tui/context/sync.tsx | 1 - 2 files changed, 3 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx index ca504d864d..ad5cd45782 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx @@ -41,9 +41,7 @@ export async function openWorkspaceSession(input: { workspaceID: 
input.workspaceID, }) - console.log("opening!") while (true) { - console.log("creating") const result = await client.session.create({ workspace: input.workspaceID }).catch((err) => { log.error("workspace session create request failed", { workspaceID: input.workspaceID, diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 10b70d50ac..29511b8ebf 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -492,7 +492,6 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return last.time.completed ? "idle" : "working" }, async sync(sessionID: string) { - console.log("YO", sessionID, fullSyncedSessions.has(sessionID)) if (fullSyncedSessions.has(sessionID)) return const [session, messages, todo, diff] = await Promise.all([ sdk.client.session.get({ sessionID }, { throwOnError: true }), From 32548bcb4af7db393d91f315ec2222954326e766 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:59:17 -0400 Subject: [PATCH 058/120] refactor: unwrap ConfigPlugin namespace to flat exports + self-reexport (#22876) --- packages/opencode/src/config/plugin.ts | 148 ++++++++++++------------- 1 file changed, 74 insertions(+), 74 deletions(-) diff --git a/packages/opencode/src/config/plugin.ts b/packages/opencode/src/config/plugin.ts index 3a10c0a715..7d335bcc53 100644 --- a/packages/opencode/src/config/plugin.ts +++ b/packages/opencode/src/config/plugin.ts @@ -4,81 +4,81 @@ import { pathToFileURL } from "url" import { isPathPluginSpec, parsePluginSpecifier, resolvePathPluginTarget } from "@/plugin/shared" import path from "path" -export namespace ConfigPlugin { - const Options = z.record(z.string(), z.unknown()) - export type Options = z.infer +const Options = z.record(z.string(), z.unknown()) +export type Options = z.infer - // Spec is the user-config value: either just a plugin identifier, or the identifier plus 
inline options. - // It answers "what should we load?" but says nothing about where that value came from. - export const Spec = z.union([z.string(), z.tuple([z.string(), Options])]) - export type Spec = z.infer +// Spec is the user-config value: either just a plugin identifier, or the identifier plus inline options. +// It answers "what should we load?" but says nothing about where that value came from. +export const Spec = z.union([z.string(), z.tuple([z.string(), Options])]) +export type Spec = z.infer - export type Scope = "global" | "local" +export type Scope = "global" | "local" - // Origin keeps the original config provenance attached to a spec. - // After multiple config files are merged, callers still need to know which file declared the plugin - // and whether it should behave like a global or project-local plugin. - export type Origin = { - spec: Spec - source: string - scope: Scope - } - - export async function load(dir: string) { - const plugins: ConfigPlugin.Spec[] = [] - - for (const item of await Glob.scan("{plugin,plugins}/*.{ts,js}", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - plugins.push(pathToFileURL(item).href) - } - return plugins - } - - export function pluginSpecifier(plugin: Spec): string { - return Array.isArray(plugin) ? plugin[0] : plugin - } - - export function pluginOptions(plugin: Spec): Options | undefined { - return Array.isArray(plugin) ? plugin[1] : undefined - } - - // Path-like specs are resolved relative to the config file that declared them so merges later on do not - // accidentally reinterpret `./plugin.ts` relative to some other directory. 
- export async function resolvePluginSpec(plugin: Spec, configFilepath: string): Promise { - const spec = pluginSpecifier(plugin) - if (!isPathPluginSpec(spec)) return plugin - - const base = path.dirname(configFilepath) - const file = (() => { - if (spec.startsWith("file://")) return spec - if (path.isAbsolute(spec) || /^[A-Za-z]:[\\/]/.test(spec)) return pathToFileURL(spec).href - return pathToFileURL(path.resolve(base, spec)).href - })() - - const resolved = await resolvePathPluginTarget(file).catch(() => file) - - if (Array.isArray(plugin)) return [resolved, plugin[1]] - return resolved - } - - // Dedupe on the load identity (package name for npm specs, exact file URL for local specs), but keep the - // full Origin so downstream code still knows which config file won and where follow-up writes should go. - export function deduplicatePluginOrigins(plugins: Origin[]): Origin[] { - const seen = new Set() - const list: Origin[] = [] - - for (const plugin of plugins.toReversed()) { - const spec = pluginSpecifier(plugin.spec) - const name = spec.startsWith("file://") ? spec : parsePluginSpecifier(spec).pkg - if (seen.has(name)) continue - seen.add(name) - list.push(plugin) - } - - return list.toReversed() - } +// Origin keeps the original config provenance attached to a spec. +// After multiple config files are merged, callers still need to know which file declared the plugin +// and whether it should behave like a global or project-local plugin. +export type Origin = { + spec: Spec + source: string + scope: Scope } + +export async function load(dir: string) { + const plugins: Spec[] = [] + + for (const item of await Glob.scan("{plugin,plugins}/*.{ts,js}", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + plugins.push(pathToFileURL(item).href) + } + return plugins +} + +export function pluginSpecifier(plugin: Spec): string { + return Array.isArray(plugin) ? 
plugin[0] : plugin +} + +export function pluginOptions(plugin: Spec): Options | undefined { + return Array.isArray(plugin) ? plugin[1] : undefined +} + +// Path-like specs are resolved relative to the config file that declared them so merges later on do not +// accidentally reinterpret `./plugin.ts` relative to some other directory. +export async function resolvePluginSpec(plugin: Spec, configFilepath: string): Promise { + const spec = pluginSpecifier(plugin) + if (!isPathPluginSpec(spec)) return plugin + + const base = path.dirname(configFilepath) + const file = (() => { + if (spec.startsWith("file://")) return spec + if (path.isAbsolute(spec) || /^[A-Za-z]:[\\/]/.test(spec)) return pathToFileURL(spec).href + return pathToFileURL(path.resolve(base, spec)).href + })() + + const resolved = await resolvePathPluginTarget(file).catch(() => file) + + if (Array.isArray(plugin)) return [resolved, plugin[1]] + return resolved +} + +// Dedupe on the load identity (package name for npm specs, exact file URL for local specs), but keep the +// full Origin so downstream code still knows which config file won and where follow-up writes should go. +export function deduplicatePluginOrigins(plugins: Origin[]): Origin[] { + const seen = new Set() + const list: Origin[] = [] + + for (const plugin of plugins.toReversed()) { + const spec = pluginSpecifier(plugin.spec) + const name = spec.startsWith("file://") ? 
spec : parsePluginSpecifier(spec).pkg + if (seen.has(name)) continue + seen.add(name) + list.push(plugin) + } + + return list.toReversed() +} + +export * as ConfigPlugin from "./plugin" From 0e86466f990edd046867820b9fac97766d11db3f Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:59:30 -0400 Subject: [PATCH 059/120] refactor: unwrap Discovery namespace to flat exports + self-reexport (#22878) --- packages/opencode/src/skill/discovery.ts | 208 +++++++++++------------ 1 file changed, 104 insertions(+), 104 deletions(-) diff --git a/packages/opencode/src/skill/discovery.ts b/packages/opencode/src/skill/discovery.ts index eff64ed2bb..debd68dd3d 100644 --- a/packages/opencode/src/skill/discovery.ts +++ b/packages/opencode/src/skill/discovery.ts @@ -6,111 +6,111 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { Global } from "../global" import { Log } from "../util" -export namespace Discovery { - const skillConcurrency = 4 - const fileConcurrency = 8 +const skillConcurrency = 4 +const fileConcurrency = 8 - class IndexSkill extends Schema.Class("IndexSkill")({ - name: Schema.String, - files: Schema.Array(Schema.String), - }) {} +class IndexSkill extends Schema.Class("IndexSkill")({ + name: Schema.String, + files: Schema.Array(Schema.String), +}) {} - class Index extends Schema.Class("Index")({ - skills: Schema.Array(IndexSkill), - }) {} +class Index extends Schema.Class("Index")({ + skills: Schema.Array(IndexSkill), +}) {} - export interface Interface { - readonly pull: (url: string) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SkillDiscovery") {} - - export const layer: Layer.Layer = - Layer.effect( - Service, - Effect.gen(function* () { - const log = Log.create({ service: "skill-discovery" }) - const fs = yield* AppFileSystem.Service - const path = yield* Path.Path - const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient)) - const cache = 
path.join(Global.Path.cache, "skills") - - const download = Effect.fn("Discovery.download")(function* (url: string, dest: string) { - if (yield* fs.exists(dest).pipe(Effect.orDie)) return true - - return yield* HttpClientRequest.get(url).pipe( - http.execute, - Effect.flatMap((res) => res.arrayBuffer), - Effect.flatMap((body) => fs.writeWithDirs(dest, new Uint8Array(body))), - Effect.as(true), - Effect.catch((err) => - Effect.sync(() => { - log.error("failed to download", { url, err }) - return false - }), - ), - ) - }) - - const pull = Effect.fn("Discovery.pull")(function* (url: string) { - const base = url.endsWith("/") ? url : `${url}/` - const index = new URL("index.json", base).href - const host = base.slice(0, -1) - - log.info("fetching index", { url: index }) - - const data = yield* HttpClientRequest.get(index).pipe( - HttpClientRequest.acceptJson, - http.execute, - Effect.flatMap(HttpClientResponse.schemaBodyJson(Index)), - Effect.catch((err) => - Effect.sync(() => { - log.error("failed to fetch index", { url: index, err }) - return null - }), - ), - ) - - if (!data) return [] - - const list = data.skills.filter((skill) => { - if (!skill.files.includes("SKILL.md")) { - log.warn("skill entry missing SKILL.md", { url: index, skill: skill.name }) - return false - } - return true - }) - - const dirs = yield* Effect.forEach( - list, - (skill) => - Effect.gen(function* () { - const root = path.join(cache, skill.name) - - yield* Effect.forEach( - skill.files, - (file) => download(new URL(file, `${host}/${skill.name}/`).href, path.join(root, file)), - { - concurrency: fileConcurrency, - }, - ) - - const md = path.join(root, "SKILL.md") - return (yield* fs.exists(md).pipe(Effect.orDie)) ? 
root : null - }), - { concurrency: skillConcurrency }, - ) - - return dirs.filter((dir): dir is string => dir !== null) - }) - - return Service.of({ pull }) - }), - ) - - export const defaultLayer: Layer.Layer = layer.pipe( - Layer.provide(FetchHttpClient.layer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(NodePath.layer), - ) +export interface Interface { + readonly pull: (url: string) => Effect.Effect } + +export class Service extends Context.Service()("@opencode/SkillDiscovery") {} + +export const layer: Layer.Layer = + Layer.effect( + Service, + Effect.gen(function* () { + const log = Log.create({ service: "skill-discovery" }) + const fs = yield* AppFileSystem.Service + const path = yield* Path.Path + const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient)) + const cache = path.join(Global.Path.cache, "skills") + + const download = Effect.fn("Discovery.download")(function* (url: string, dest: string) { + if (yield* fs.exists(dest).pipe(Effect.orDie)) return true + + return yield* HttpClientRequest.get(url).pipe( + http.execute, + Effect.flatMap((res) => res.arrayBuffer), + Effect.flatMap((body) => fs.writeWithDirs(dest, new Uint8Array(body))), + Effect.as(true), + Effect.catch((err) => + Effect.sync(() => { + log.error("failed to download", { url, err }) + return false + }), + ), + ) + }) + + const pull = Effect.fn("Discovery.pull")(function* (url: string) { + const base = url.endsWith("/") ? 
url : `${url}/` + const index = new URL("index.json", base).href + const host = base.slice(0, -1) + + log.info("fetching index", { url: index }) + + const data = yield* HttpClientRequest.get(index).pipe( + HttpClientRequest.acceptJson, + http.execute, + Effect.flatMap(HttpClientResponse.schemaBodyJson(Index)), + Effect.catch((err) => + Effect.sync(() => { + log.error("failed to fetch index", { url: index, err }) + return null + }), + ), + ) + + if (!data) return [] + + const list = data.skills.filter((skill) => { + if (!skill.files.includes("SKILL.md")) { + log.warn("skill entry missing SKILL.md", { url: index, skill: skill.name }) + return false + } + return true + }) + + const dirs = yield* Effect.forEach( + list, + (skill) => + Effect.gen(function* () { + const root = path.join(cache, skill.name) + + yield* Effect.forEach( + skill.files, + (file) => download(new URL(file, `${host}/${skill.name}/`).href, path.join(root, file)), + { + concurrency: fileConcurrency, + }, + ) + + const md = path.join(root, "SKILL.md") + return (yield* fs.exists(md).pipe(Effect.orDie)) ? 
root : null + }), + { concurrency: skillConcurrency }, + ) + + return dirs.filter((dir): dir is string => dir !== null) + }) + + return Service.of({ pull }) + }), + ) + +export const defaultLayer: Layer.Layer = layer.pipe( + Layer.provide(FetchHttpClient.layer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(NodePath.layer), +) + +export * as Discovery from "./discovery" From 9f201d637034ff0a4c1e518481c76ba1a4f0eeb6 Mon Sep 17 00:00:00 2001 From: opencode Date: Thu, 16 Apr 2026 21:54:54 +0000 Subject: [PATCH 060/120] release: v1.4.7 --- bun.lock | 32 +++++++++++++------------- packages/app/package.json | 2 +- packages/console/app/package.json | 2 +- packages/console/core/package.json | 2 +- packages/console/function/package.json | 2 +- packages/console/mail/package.json | 2 +- packages/desktop-electron/package.json | 2 +- packages/desktop/package.json | 2 +- packages/enterprise/package.json | 2 +- packages/extensions/zed/extension.toml | 12 +++++----- packages/function/package.json | 2 +- packages/opencode/package.json | 2 +- packages/plugin/package.json | 2 +- packages/sdk/js/package.json | 2 +- packages/shared/package.json | 2 +- packages/slack/package.json | 2 +- packages/ui/package.json | 2 +- packages/web/package.json | 2 +- sdks/vscode/package.json | 2 +- 19 files changed, 39 insertions(+), 39 deletions(-) diff --git a/bun.lock b/bun.lock index e236f3f491..63232cb29e 100644 --- a/bun.lock +++ b/bun.lock @@ -29,7 +29,7 @@ }, "packages/app": { "name": "@opencode-ai/app", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -83,7 +83,7 @@ }, "packages/console/app": { "name": "@opencode-ai/console-app", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@cloudflare/vite-plugin": "1.15.2", "@ibm/plex": "6.4.1", @@ -117,7 +117,7 @@ }, "packages/console/core": { "name": "@opencode-ai/console-core", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { 
"@aws-sdk/client-sts": "3.782.0", "@jsx-email/render": "1.1.1", @@ -144,7 +144,7 @@ }, "packages/console/function": { "name": "@opencode-ai/console-function", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@ai-sdk/anthropic": "3.0.64", "@ai-sdk/openai": "3.0.48", @@ -168,7 +168,7 @@ }, "packages/console/mail": { "name": "@opencode-ai/console-mail", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", @@ -192,7 +192,7 @@ }, "packages/desktop": { "name": "@opencode-ai/desktop", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@opencode-ai/app": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -225,7 +225,7 @@ }, "packages/desktop-electron": { "name": "@opencode-ai/desktop-electron", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "effect": "catalog:", "electron-context-menu": "4.1.2", @@ -268,7 +268,7 @@ }, "packages/enterprise": { "name": "@opencode-ai/enterprise", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@opencode-ai/shared": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -297,7 +297,7 @@ }, "packages/function": { "name": "@opencode-ai/function", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@octokit/auth-app": "8.0.1", "@octokit/rest": "catalog:", @@ -313,7 +313,7 @@ }, "packages/opencode": { "name": "opencode", - "version": "1.4.6", + "version": "1.4.7", "bin": { "opencode": "./bin/opencode", }, @@ -458,7 +458,7 @@ }, "packages/plugin": { "name": "@opencode-ai/plugin", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@opencode-ai/sdk": "workspace:*", "effect": "catalog:", @@ -493,7 +493,7 @@ }, "packages/sdk/js": { "name": "@opencode-ai/sdk", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "cross-spawn": "catalog:", }, @@ -508,7 +508,7 @@ }, "packages/shared": { "name": "@opencode-ai/shared", - "version": "1.4.6", + "version": "1.4.7", "bin": { "opencode": "./bin/opencode", }, @@ 
-532,7 +532,7 @@ }, "packages/slack": { "name": "@opencode-ai/slack", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@opencode-ai/sdk": "workspace:*", "@slack/bolt": "^3.17.1", @@ -567,7 +567,7 @@ }, "packages/ui": { "name": "@opencode-ai/ui", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -616,7 +616,7 @@ }, "packages/web": { "name": "@opencode-ai/web", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@astrojs/cloudflare": "12.6.3", "@astrojs/markdown-remark": "6.3.1", diff --git a/packages/app/package.json b/packages/app/package.json index 483c71dc50..2941637d08 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/app", - "version": "1.4.6", + "version": "1.4.7", "description": "", "type": "module", "exports": { diff --git a/packages/console/app/package.json b/packages/console/app/package.json index 062114eebd..8783f3fd05 100644 --- a/packages/console/app/package.json +++ b/packages/console/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-app", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/console/core/package.json b/packages/console/core/package.json index 760174cf04..cdefd0e609 100644 --- a/packages/console/core/package.json +++ b/packages/console/core/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/console-core", - "version": "1.4.6", + "version": "1.4.7", "private": true, "type": "module", "license": "MIT", diff --git a/packages/console/function/package.json b/packages/console/function/package.json index 840f32742d..898c540bac 100644 --- a/packages/console/function/package.json +++ b/packages/console/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-function", - "version": "1.4.6", + "version": "1.4.7", "$schema": 
"https://json.schemastore.org/package.json", "private": true, "type": "module", diff --git a/packages/console/mail/package.json b/packages/console/mail/package.json index 5aa94224e7..46ff28b7d1 100644 --- a/packages/console/mail/package.json +++ b/packages/console/mail/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-mail", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", diff --git a/packages/desktop-electron/package.json b/packages/desktop-electron/package.json index cc0bad7d74..e1f69b5b20 100644 --- a/packages/desktop-electron/package.json +++ b/packages/desktop-electron/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop-electron", "private": true, - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "homepage": "https://opencode.ai", diff --git a/packages/desktop/package.json b/packages/desktop/package.json index 3fd02ad881..d8eea4ea36 100644 --- a/packages/desktop/package.json +++ b/packages/desktop/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop", "private": true, - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/enterprise/package.json b/packages/enterprise/package.json index 3c4a835f35..12a72e647f 100644 --- a/packages/enterprise/package.json +++ b/packages/enterprise/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/enterprise", - "version": "1.4.6", + "version": "1.4.7", "private": true, "type": "module", "license": "MIT", diff --git a/packages/extensions/zed/extension.toml b/packages/extensions/zed/extension.toml index d6be9c6b61..d164534cf7 100644 --- a/packages/extensions/zed/extension.toml +++ b/packages/extensions/zed/extension.toml @@ -1,7 +1,7 @@ id = "opencode" name = "OpenCode" description = "The open source coding agent." 
-version = "1.4.6" +version = "1.4.7" schema_version = 1 authors = ["Anomaly"] repository = "https://github.com/anomalyco/opencode" @@ -11,26 +11,26 @@ name = "OpenCode" icon = "./icons/opencode.svg" [agent_servers.opencode.targets.darwin-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-darwin-arm64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-arm64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.darwin-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-darwin-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-x64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-linux-arm64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-arm64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-linux-x64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-x64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.windows-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-windows-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-windows-x64.zip" cmd = "./opencode.exe" args = ["acp"] diff --git a/packages/function/package.json b/packages/function/package.json index 941f093fcc..36a9ddc321 100644 --- a/packages/function/package.json +++ b/packages/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/function", - "version": "1.4.6", + "version": "1.4.7", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": 
"module", diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 3a98d0eb93..1dabd91b8d 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.6", + "version": "1.4.7", "name": "opencode", "type": "module", "license": "MIT", diff --git a/packages/plugin/package.json b/packages/plugin/package.json index 76fe2e862f..6f9a0ea1dc 100644 --- a/packages/plugin/package.json +++ b/packages/plugin/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/plugin", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/sdk/js/package.json b/packages/sdk/js/package.json index 49c441600c..53a5893143 100644 --- a/packages/sdk/js/package.json +++ b/packages/sdk/js/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/sdk", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/shared/package.json b/packages/shared/package.json index 4d10a30a36..9dec6bdb6c 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.6", + "version": "1.4.7", "name": "@opencode-ai/shared", "type": "module", "license": "MIT", diff --git a/packages/slack/package.json b/packages/slack/package.json index 9239ac5727..a23500241e 100644 --- a/packages/slack/package.json +++ b/packages/slack/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/slack", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/ui/package.json b/packages/ui/package.json index 21974e3ec7..cd559041cc 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": 
"@opencode-ai/ui", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "exports": { diff --git a/packages/web/package.json b/packages/web/package.json index 5ca11ea296..a53ef51932 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -2,7 +2,7 @@ "name": "@opencode-ai/web", "type": "module", "license": "MIT", - "version": "1.4.6", + "version": "1.4.7", "scripts": { "dev": "astro dev", "dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev", diff --git a/sdks/vscode/package.json b/sdks/vscode/package.json index 634583bd31..c499f679fe 100644 --- a/sdks/vscode/package.json +++ b/sdks/vscode/package.json @@ -2,7 +2,7 @@ "name": "opencode", "displayName": "opencode", "description": "opencode for VS Code", - "version": "1.4.6", + "version": "1.4.7", "publisher": "sst-dev", "repository": { "type": "git", From 9db40996cc5ce5877565d99e4199656345e8b80f Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 18:01:14 -0400 Subject: [PATCH 061/120] fix build script --- packages/opencode/src/cli/cmd/tui/context/sync.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 29511b8ebf..b5734e67d0 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -463,6 +463,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return store.status }, get ready() { + if (process.env.OPENCODE_FAST_BOOT) return true return store.status !== "loading" }, get path() { From dbe2ff52b25abf8cdeb878aa32614be22f28131f Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 18:40:22 -0400 Subject: [PATCH 062/120] fix tui otel profiling --- packages/opencode/src/cli/effect/runtime.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/effect/runtime.ts b/packages/opencode/src/cli/effect/runtime.ts 
index 4d85fa55b6..57b9f8ede9 100644 --- a/packages/opencode/src/cli/effect/runtime.ts +++ b/packages/opencode/src/cli/effect/runtime.ts @@ -6,7 +6,7 @@ export const memoMap = Layer.makeMemoMapUnsafe() export function makeRuntime(service: Context.Service, layer: Layer.Layer) { let rt: ManagedRuntime.ManagedRuntime | undefined const getRuntime = () => - (rt ??= ManagedRuntime.make(Layer.merge(layer, Observability.layer) as Layer.Layer, { memoMap })) + (rt ??= ManagedRuntime.make(Layer.provideMerge(layer, Observability.layer) as Layer.Layer, { memoMap })) return { runSync: (fn: (svc: S) => Effect.Effect) => getRuntime().runSync(service.use(fn)), From cb18f2ef407c49e7e91e03f0b7c4a72c2d4d05c1 Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 17:45:35 -0500 Subject: [PATCH 063/120] fix: ensure azure sets prompt cache key by default (#22957) --- packages/opencode/src/provider/transform.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 492db40520..0ebd8bbf59 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -798,6 +798,7 @@ export function options(input: { if (input.model.api.npm === "@ai-sdk/azure") { result["store"] = true + result["promptCacheKey"] = input.sessionID } if (input.model.api.npm === "@openrouter/ai-sdk-provider") { From 23d48a7cf1af47870ef39def684eb8d569c66f4b Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:46:49 -0400 Subject: [PATCH 064/120] refactor: unwrap BusEvent namespace + self-reexport (#22962) --- packages/opencode/src/bus/bus-event.ts | 52 +++++++++++++------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/opencode/src/bus/bus-event.ts b/packages/opencode/src/bus/bus-event.ts index 369a40ed88..efaed94406 100644 --- a/packages/opencode/src/bus/bus-event.ts +++ 
b/packages/opencode/src/bus/bus-event.ts @@ -1,33 +1,33 @@ import z from "zod" import type { ZodType } from "zod" -export namespace BusEvent { - export type Definition = ReturnType +export type Definition = ReturnType - const registry = new Map() +const registry = new Map() - export function define(type: Type, properties: Properties) { - const result = { - type, - properties, - } - registry.set(type, result) - return result - } - - export function payloads() { - return registry - .entries() - .map(([type, def]) => { - return z - .object({ - type: z.literal(type), - properties: def.properties, - }) - .meta({ - ref: `Event.${def.type}`, - }) - }) - .toArray() +export function define(type: Type, properties: Properties) { + const result = { + type, + properties, } + registry.set(type, result) + return result } + +export function payloads() { + return registry + .entries() + .map(([type, def]) => { + return z + .object({ + type: z.literal(type), + properties: def.properties, + }) + .meta({ + ref: `Event.${def.type}`, + }) + }) + .toArray() +} + +export * as BusEvent from "./bus-event" From e2d161dfdd54fdd30f8e36e8cf4f46e261dab96e Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:48:24 -0400 Subject: [PATCH 065/120] refactor: unwrap Identifier namespace + self-reexport (#22963) --- packages/opencode/src/id/id.ts | 162 ++++++++++++++++----------------- 1 file changed, 81 insertions(+), 81 deletions(-) diff --git a/packages/opencode/src/id/id.ts b/packages/opencode/src/id/id.ts index 3d4cddf530..46c210fa5d 100644 --- a/packages/opencode/src/id/id.ts +++ b/packages/opencode/src/id/id.ts @@ -1,86 +1,86 @@ import z from "zod" import { randomBytes } from "crypto" -export namespace Identifier { - const prefixes = { - event: "evt", - session: "ses", - message: "msg", - permission: "per", - question: "que", - user: "usr", - part: "prt", - pty: "pty", - tool: "tool", - workspace: "wrk", - entry: "ent", - } as const +const prefixes = { + event: "evt", + session: 
"ses", + message: "msg", + permission: "per", + question: "que", + user: "usr", + part: "prt", + pty: "pty", + tool: "tool", + workspace: "wrk", + entry: "ent", +} as const - export function schema(prefix: keyof typeof prefixes) { - return z.string().startsWith(prefixes[prefix]) - } - - const LENGTH = 26 - - // State for monotonic ID generation - let lastTimestamp = 0 - let counter = 0 - - export function ascending(prefix: keyof typeof prefixes, given?: string) { - return generateID(prefix, "ascending", given) - } - - export function descending(prefix: keyof typeof prefixes, given?: string) { - return generateID(prefix, "descending", given) - } - - function generateID(prefix: keyof typeof prefixes, direction: "descending" | "ascending", given?: string): string { - if (!given) { - return create(prefixes[prefix], direction) - } - - if (!given.startsWith(prefixes[prefix])) { - throw new Error(`ID ${given} does not start with ${prefixes[prefix]}`) - } - return given - } - - function randomBase62(length: number): string { - const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" - let result = "" - const bytes = randomBytes(length) - for (let i = 0; i < length; i++) { - result += chars[bytes[i] % 62] - } - return result - } - - export function create(prefix: string, direction: "descending" | "ascending", timestamp?: number): string { - const currentTimestamp = timestamp ?? Date.now() - - if (currentTimestamp !== lastTimestamp) { - lastTimestamp = currentTimestamp - counter = 0 - } - counter++ - - let now = BigInt(currentTimestamp) * BigInt(0x1000) + BigInt(counter) - - now = direction === "descending" ? ~now : now - - const timeBytes = Buffer.alloc(6) - for (let i = 0; i < 6; i++) { - timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff)) - } - - return prefix + "_" + timeBytes.toString("hex") + randomBase62(LENGTH - 12) - } - - /** Extract timestamp from an ascending ID. Does not work with descending IDs. 
*/ - export function timestamp(id: string): number { - const prefix = id.split("_")[0] - const hex = id.slice(prefix.length + 1, prefix.length + 13) - const encoded = BigInt("0x" + hex) - return Number(encoded / BigInt(0x1000)) - } +export function schema(prefix: keyof typeof prefixes) { + return z.string().startsWith(prefixes[prefix]) } + +const LENGTH = 26 + +// State for monotonic ID generation +let lastTimestamp = 0 +let counter = 0 + +export function ascending(prefix: keyof typeof prefixes, given?: string) { + return generateID(prefix, "ascending", given) +} + +export function descending(prefix: keyof typeof prefixes, given?: string) { + return generateID(prefix, "descending", given) +} + +function generateID(prefix: keyof typeof prefixes, direction: "descending" | "ascending", given?: string): string { + if (!given) { + return create(prefixes[prefix], direction) + } + + if (!given.startsWith(prefixes[prefix])) { + throw new Error(`ID ${given} does not start with ${prefixes[prefix]}`) + } + return given +} + +function randomBase62(length: number): string { + const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + let result = "" + const bytes = randomBytes(length) + for (let i = 0; i < length; i++) { + result += chars[bytes[i] % 62] + } + return result +} + +export function create(prefix: string, direction: "descending" | "ascending", timestamp?: number): string { + const currentTimestamp = timestamp ?? Date.now() + + if (currentTimestamp !== lastTimestamp) { + lastTimestamp = currentTimestamp + counter = 0 + } + counter++ + + let now = BigInt(currentTimestamp) * BigInt(0x1000) + BigInt(counter) + + now = direction === "descending" ? ~now : now + + const timeBytes = Buffer.alloc(6) + for (let i = 0; i < 6; i++) { + timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff)) + } + + return prefix + "_" + timeBytes.toString("hex") + randomBase62(LENGTH - 12) +} + +/** Extract timestamp from an ascending ID. 
Does not work with descending IDs. */ +export function timestamp(id: string): number { + const prefix = id.split("_")[0] + const hex = id.slice(prefix.length + 1, prefix.length + 13) + const encoded = BigInt("0x" + hex) + return Number(encoded / BigInt(0x1000)) +} + +export * as Identifier from "./id" From 30fc791480ebdabc9c62c70713e6cb52b44caff2 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:49:52 -0400 Subject: [PATCH 066/120] refactor: unwrap Ripgrep namespace + self-reexport (#22965) --- packages/opencode/src/file/ripgrep.ts | 1048 ++++++++++++------------- 1 file changed, 524 insertions(+), 524 deletions(-) diff --git a/packages/opencode/src/file/ripgrep.ts b/packages/opencode/src/file/ripgrep.ts index 9a78c5b7fb..ac450108e1 100644 --- a/packages/opencode/src/file/ripgrep.ts +++ b/packages/opencode/src/file/ripgrep.ts @@ -8,568 +8,568 @@ import { ripgrep } from "ripgrep" import { Filesystem } from "@/util" import { Log } from "@/util" -export namespace Ripgrep { - const log = Log.create({ service: "ripgrep" }) +const log = Log.create({ service: "ripgrep" }) - const Stats = z.object({ - elapsed: z.object({ - secs: z.number(), - nanos: z.number(), +const Stats = z.object({ + elapsed: z.object({ + secs: z.number(), + nanos: z.number(), + human: z.string(), + }), + searches: z.number(), + searches_with_match: z.number(), + bytes_searched: z.number(), + bytes_printed: z.number(), + matched_lines: z.number(), + matches: z.number(), +}) + +const Begin = z.object({ + type: z.literal("begin"), + data: z.object({ + path: z.object({ + text: z.string(), + }), + }), +}) + +export const Match = z.object({ + type: z.literal("match"), + data: z.object({ + path: z.object({ + text: z.string(), + }), + lines: z.object({ + text: z.string(), + }), + line_number: z.number(), + absolute_offset: z.number(), + submatches: z.array( + z.object({ + match: z.object({ + text: z.string(), + }), + start: z.number(), + end: z.number(), + }), + ), + }), +}) + +const End = 
z.object({ + type: z.literal("end"), + data: z.object({ + path: z.object({ + text: z.string(), + }), + binary_offset: z.number().nullable(), + stats: Stats, + }), +}) + +const Summary = z.object({ + type: z.literal("summary"), + data: z.object({ + elapsed_total: z.object({ human: z.string(), + nanos: z.number(), + secs: z.number(), }), - searches: z.number(), - searches_with_match: z.number(), - bytes_searched: z.number(), - bytes_printed: z.number(), - matched_lines: z.number(), - matches: z.number(), - }) + stats: Stats, + }), +}) - const Begin = z.object({ - type: z.literal("begin"), - data: z.object({ - path: z.object({ - text: z.string(), - }), - }), - }) +const Result = z.union([Begin, Match, End, Summary]) - export const Match = z.object({ - type: z.literal("match"), - data: z.object({ - path: z.object({ - text: z.string(), - }), - lines: z.object({ - text: z.string(), - }), - line_number: z.number(), - absolute_offset: z.number(), - submatches: z.array( - z.object({ - match: z.object({ - text: z.string(), - }), - start: z.number(), - end: z.number(), - }), - ), - }), - }) +export type Result = z.infer +export type Match = z.infer +export type Item = Match["data"] +export type Begin = z.infer +export type End = z.infer +export type Summary = z.infer +export type Row = Match["data"] - const End = z.object({ - type: z.literal("end"), - data: z.object({ - path: z.object({ - text: z.string(), - }), - binary_offset: z.number().nullable(), - stats: Stats, - }), - }) +export interface SearchResult { + items: Item[] + partial: boolean +} - const Summary = z.object({ - type: z.literal("summary"), - data: z.object({ - elapsed_total: z.object({ - human: z.string(), - nanos: z.number(), - secs: z.number(), - }), - stats: Stats, - }), - }) +export interface FilesInput { + cwd: string + glob?: string[] + hidden?: boolean + follow?: boolean + maxDepth?: number + signal?: AbortSignal +} - const Result = z.union([Begin, Match, End, Summary]) +export interface SearchInput { + 
cwd: string + pattern: string + glob?: string[] + limit?: number + follow?: boolean + file?: string[] + signal?: AbortSignal +} - export type Result = z.infer - export type Match = z.infer - export type Item = Match["data"] - export type Begin = z.infer - export type End = z.infer - export type Summary = z.infer - export type Row = Match["data"] +export interface TreeInput { + cwd: string + limit?: number + signal?: AbortSignal +} - export interface SearchResult { - items: Item[] - partial: boolean +export interface Interface { + readonly files: (input: FilesInput) => Stream.Stream + readonly tree: (input: TreeInput) => Effect.Effect + readonly search: (input: SearchInput) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Ripgrep") {} + +type Run = { kind: "files" | "search"; cwd: string; args: string[] } + +type WorkerResult = { + type: "result" + code: number + stdout: string + stderr: string +} + +type WorkerLine = { + type: "line" + line: string +} + +type WorkerDone = { + type: "done" + code: number + stderr: string +} + +type WorkerError = { + type: "error" + error: { + message: string + name?: string + stack?: string } +} - export interface FilesInput { - cwd: string - glob?: string[] - hidden?: boolean - follow?: boolean - maxDepth?: number - signal?: AbortSignal +function env() { + const env = Object.fromEntries( + Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined), + ) + delete env.RIPGREP_CONFIG_PATH + return env +} + +function text(input: unknown) { + if (typeof input === "string") return input + if (input instanceof ArrayBuffer) return Buffer.from(input).toString() + if (ArrayBuffer.isView(input)) return Buffer.from(input.buffer, input.byteOffset, input.byteLength).toString() + return String(input) +} + +function toError(input: unknown) { + if (input instanceof Error) return input + if (typeof input === "string") return new Error(input) + return new Error(String(input)) +} + 
+function abort(signal?: AbortSignal) { + const err = signal?.reason + if (err instanceof Error) return err + const out = new Error("Aborted") + out.name = "AbortError" + return out +} + +function error(stderr: string, code: number) { + const err = new Error(stderr.trim() || `ripgrep failed with code ${code}`) + err.name = "RipgrepError" + return err +} + +function clean(file: string) { + return path.normalize(file.replace(/^\.[\\/]/, "")) +} + +function row(data: Row): Row { + return { + ...data, + path: { + ...data.path, + text: clean(data.path.text), + }, } +} - export interface SearchInput { - cwd: string - pattern: string - glob?: string[] - limit?: number - follow?: boolean - file?: string[] - signal?: AbortSignal +function opts(cwd: string) { + return { + env: env(), + preopens: { ".": cwd }, } +} - export interface TreeInput { - cwd: string - limit?: number - signal?: AbortSignal - } - - export interface Interface { - readonly files: (input: FilesInput) => Stream.Stream - readonly tree: (input: TreeInput) => Effect.Effect - readonly search: (input: SearchInput) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/Ripgrep") {} - - type Run = { kind: "files" | "search"; cwd: string; args: string[] } - - type WorkerResult = { - type: "result" - code: number - stdout: string - stderr: string - } - - type WorkerLine = { - type: "line" - line: string - } - - type WorkerDone = { - type: "done" - code: number - stderr: string - } - - type WorkerError = { - type: "error" - error: { - message: string - name?: string - stack?: string - } - } - - function env() { - const env = Object.fromEntries( - Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined), - ) - delete env.RIPGREP_CONFIG_PATH - return env - } - - function text(input: unknown) { - if (typeof input === "string") return input - if (input instanceof ArrayBuffer) return Buffer.from(input).toString() - if (ArrayBuffer.isView(input)) return 
Buffer.from(input.buffer, input.byteOffset, input.byteLength).toString() - return String(input) - } - - function toError(input: unknown) { - if (input instanceof Error) return input - if (typeof input === "string") return new Error(input) - return new Error(String(input)) - } - - function abort(signal?: AbortSignal) { - const err = signal?.reason - if (err instanceof Error) return err - const out = new Error("Aborted") - out.name = "AbortError" - return out - } - - function error(stderr: string, code: number) { - const err = new Error(stderr.trim() || `ripgrep failed with code ${code}`) - err.name = "RipgrepError" - return err - } - - function clean(file: string) { - return path.normalize(file.replace(/^\.[\\/]/, "")) - } - - function row(data: Row): Row { - return { - ...data, - path: { - ...data.path, - text: clean(data.path.text), - }, - } - } - - function opts(cwd: string) { - return { - env: env(), - preopens: { ".": cwd }, - } - } - - function check(cwd: string) { - return Effect.tryPromise({ - try: () => fs.stat(cwd).catch(() => undefined), - catch: toError, - }).pipe( - Effect.flatMap((stat) => - stat?.isDirectory() - ? 
Effect.void - : Effect.fail( - Object.assign(new Error(`No such file or directory: '${cwd}'`), { - code: "ENOENT", - errno: -2, - path: cwd, - }), - ), - ), - ) - } - - function filesArgs(input: FilesInput) { - const args = ["--files", "--glob=!.git/*"] - if (input.follow) args.push("--follow") - if (input.hidden !== false) args.push("--hidden") - if (input.maxDepth !== undefined) args.push(`--max-depth=${input.maxDepth}`) - if (input.glob) { - for (const glob of input.glob) { - args.push(`--glob=${glob}`) - } - } - args.push(".") - return args - } - - function searchArgs(input: SearchInput) { - const args = ["--json", "--hidden", "--glob=!.git/*", "--no-messages"] - if (input.follow) args.push("--follow") - if (input.glob) { - for (const glob of input.glob) { - args.push(`--glob=${glob}`) - } - } - if (input.limit) args.push(`--max-count=${input.limit}`) - args.push("--", input.pattern, ...(input.file ?? ["."])) - return args - } - - function parse(stdout: string) { - return stdout - .trim() - .split(/\r?\n/) - .filter(Boolean) - .map((line) => Result.parse(JSON.parse(line))) - .flatMap((item) => (item.type === "match" ? [row(item.data)] : [])) - } - - declare const OPENCODE_RIPGREP_WORKER_PATH: string - - function target(): Effect.Effect { - if (typeof OPENCODE_RIPGREP_WORKER_PATH !== "undefined") { - return Effect.succeed(OPENCODE_RIPGREP_WORKER_PATH) - } - const js = new URL("./ripgrep.worker.js", import.meta.url) - return Effect.tryPromise({ - try: () => Filesystem.exists(fileURLToPath(js)), - catch: toError, - }).pipe(Effect.map((exists) => (exists ? 
js : new URL("./ripgrep.worker.ts", import.meta.url)))) - } - - function worker() { - return target().pipe(Effect.flatMap((file) => Effect.sync(() => new Worker(file, { env: env() })))) - } - - function drain(buf: string, chunk: unknown, push: (line: string) => void) { - const lines = (buf + text(chunk)).split(/\r?\n/) - buf = lines.pop() || "" - for (const line of lines) { - if (line) push(line) - } - return buf - } - - function fail(queue: Queue.Queue, err: Error) { - Queue.failCauseUnsafe(queue, Cause.fail(err)) - } - - function searchDirect(input: SearchInput) { - return Effect.tryPromise({ - try: () => - ripgrep(searchArgs(input), { - buffer: true, - ...opts(input.cwd), - }), - catch: toError, - }).pipe( - Effect.flatMap((ret) => { - const out = ret.stdout ?? "" - if (ret.code !== 0 && ret.code !== 1 && ret.code !== 2) { - return Effect.fail(error(ret.stderr ?? "", ret.code ?? 1)) - } - return Effect.sync(() => ({ - items: ret.code === 1 ? [] : parse(out), - partial: ret.code === 2, - })) - }), - ) - } - - function searchWorker(input: SearchInput) { - if (input.signal?.aborted) return Effect.fail(abort(input.signal)) - - return Effect.acquireUseRelease( - worker(), - (w) => - Effect.callback((resume, signal) => { - let open = true - const done = (effect: Effect.Effect) => { - if (!open) return - open = false - resume(effect) - } - const onabort = () => done(Effect.fail(abort(input.signal))) - - w.onerror = (evt) => { - done(Effect.fail(toError(evt.error ?? 
evt.message))) - } - w.onmessage = (evt: MessageEvent) => { - const msg = evt.data - if (msg.type === "error") { - done(Effect.fail(Object.assign(new Error(msg.error.message), msg.error))) - return - } - if (msg.code === 1) { - done(Effect.succeed({ items: [], partial: false })) - return - } - if (msg.code !== 0 && msg.code !== 1 && msg.code !== 2) { - done(Effect.fail(error(msg.stderr, msg.code))) - return - } - done( - Effect.sync(() => ({ - items: parse(msg.stdout), - partial: msg.code === 2, - })), - ) - } - - input.signal?.addEventListener("abort", onabort, { once: true }) - signal.addEventListener("abort", onabort, { once: true }) - w.postMessage({ - kind: "search", - cwd: input.cwd, - args: searchArgs(input), - } satisfies Run) - - return Effect.sync(() => { - input.signal?.removeEventListener("abort", onabort) - signal.removeEventListener("abort", onabort) - w.onerror = null - w.onmessage = null - }) - }), - (w) => Effect.sync(() => w.terminate()), - ) - } - - function filesDirect(input: FilesInput) { - return Stream.callback( - Effect.fnUntraced(function* (queue: Queue.Queue) { - let buf = "" - let err = "" - - const out = { - write(chunk: unknown) { - buf = drain(buf, chunk, (line) => { - Queue.offerUnsafe(queue, clean(line)) - }) - }, - } - - const stderr = { - write(chunk: unknown) { - err += text(chunk) - }, - } - - yield* Effect.forkScoped( - Effect.gen(function* () { - yield* check(input.cwd) - const ret = yield* Effect.tryPromise({ - try: () => - ripgrep(filesArgs(input), { - stdout: out, - stderr, - ...opts(input.cwd), - }), - catch: toError, - }) - if (buf) Queue.offerUnsafe(queue, clean(buf)) - if (ret.code === 0 || ret.code === 1) { - Queue.endUnsafe(queue) - return - } - fail(queue, error(err, ret.code ?? 
1)) - }).pipe( - Effect.catch((err) => - Effect.sync(() => { - fail(queue, err) - }), - ), +function check(cwd: string) { + return Effect.tryPromise({ + try: () => fs.stat(cwd).catch(() => undefined), + catch: toError, + }).pipe( + Effect.flatMap((stat) => + stat?.isDirectory() + ? Effect.void + : Effect.fail( + Object.assign(new Error(`No such file or directory: '${cwd}'`), { + code: "ENOENT", + errno: -2, + path: cwd, + }), ), - ) - }), - ) + ), + ) +} + +function filesArgs(input: FilesInput) { + const args = ["--files", "--glob=!.git/*"] + if (input.follow) args.push("--follow") + if (input.hidden !== false) args.push("--hidden") + if (input.maxDepth !== undefined) args.push(`--max-depth=${input.maxDepth}`) + if (input.glob) { + for (const glob of input.glob) { + args.push(`--glob=${glob}`) + } } + args.push(".") + return args +} - function filesWorker(input: FilesInput) { - return Stream.callback( - Effect.fnUntraced(function* (queue: Queue.Queue) { - if (input.signal?.aborted) { - fail(queue, abort(input.signal)) - return - } +function searchArgs(input: SearchInput) { + const args = ["--json", "--hidden", "--glob=!.git/*", "--no-messages"] + if (input.follow) args.push("--follow") + if (input.glob) { + for (const glob of input.glob) { + args.push(`--glob=${glob}`) + } + } + if (input.limit) args.push(`--max-count=${input.limit}`) + args.push("--", input.pattern, ...(input.file ?? ["."])) + return args +} - const w = yield* Effect.acquireRelease(worker(), (w) => Effect.sync(() => w.terminate())) +function parse(stdout: string) { + return stdout + .trim() + .split(/\r?\n/) + .filter(Boolean) + .map((line) => Result.parse(JSON.parse(line))) + .flatMap((item) => (item.type === "match" ? 
[row(item.data)] : [])) +} + +declare const OPENCODE_RIPGREP_WORKER_PATH: string + +function target(): Effect.Effect { + if (typeof OPENCODE_RIPGREP_WORKER_PATH !== "undefined") { + return Effect.succeed(OPENCODE_RIPGREP_WORKER_PATH) + } + const js = new URL("./ripgrep.worker.js", import.meta.url) + return Effect.tryPromise({ + try: () => Filesystem.exists(fileURLToPath(js)), + catch: toError, + }).pipe(Effect.map((exists) => (exists ? js : new URL("./ripgrep.worker.ts", import.meta.url)))) +} + +function worker() { + return target().pipe(Effect.flatMap((file) => Effect.sync(() => new Worker(file, { env: env() })))) +} + +function drain(buf: string, chunk: unknown, push: (line: string) => void) { + const lines = (buf + text(chunk)).split(/\r?\n/) + buf = lines.pop() || "" + for (const line of lines) { + if (line) push(line) + } + return buf +} + +function fail(queue: Queue.Queue, err: Error) { + Queue.failCauseUnsafe(queue, Cause.fail(err)) +} + +function searchDirect(input: SearchInput) { + return Effect.tryPromise({ + try: () => + ripgrep(searchArgs(input), { + buffer: true, + ...opts(input.cwd), + }), + catch: toError, + }).pipe( + Effect.flatMap((ret) => { + const out = ret.stdout ?? "" + if (ret.code !== 0 && ret.code !== 1 && ret.code !== 2) { + return Effect.fail(error(ret.stderr ?? "", ret.code ?? 1)) + } + return Effect.sync(() => ({ + items: ret.code === 1 ? 
[] : parse(out), + partial: ret.code === 2, + })) + }), + ) +} + +function searchWorker(input: SearchInput) { + if (input.signal?.aborted) return Effect.fail(abort(input.signal)) + + return Effect.acquireUseRelease( + worker(), + (w) => + Effect.callback((resume, signal) => { let open = true - const close = () => { - if (!open) return false + const done = (effect: Effect.Effect) => { + if (!open) return open = false - return true - } - const onabort = () => { - if (!close()) return - fail(queue, abort(input.signal)) + resume(effect) } + const onabort = () => done(Effect.fail(abort(input.signal))) w.onerror = (evt) => { - if (!close()) return - fail(queue, toError(evt.error ?? evt.message)) + done(Effect.fail(toError(evt.error ?? evt.message))) } - w.onmessage = (evt: MessageEvent) => { + w.onmessage = (evt: MessageEvent) => { const msg = evt.data - if (msg.type === "line") { - if (open) Queue.offerUnsafe(queue, msg.line) - return - } - if (!close()) return if (msg.type === "error") { - fail(queue, Object.assign(new Error(msg.error.message), msg.error)) + done(Effect.fail(Object.assign(new Error(msg.error.message), msg.error))) return } - if (msg.code === 0 || msg.code === 1) { - Queue.endUnsafe(queue) + if (msg.code === 1) { + done(Effect.succeed({ items: [], partial: false })) return } - fail(queue, error(msg.stderr, msg.code)) - } - - yield* Effect.acquireRelease( - Effect.sync(() => { - input.signal?.addEventListener("abort", onabort, { once: true }) - w.postMessage({ - kind: "files", - cwd: input.cwd, - args: filesArgs(input), - } satisfies Run) - }), - () => - Effect.sync(() => { - input.signal?.removeEventListener("abort", onabort) - w.onerror = null - w.onmessage = null - }), - ) - }), - ) - } - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const source = (input: FilesInput) => { - const useWorker = !!input.signal && typeof Worker !== "undefined" - if (!useWorker && input.signal) { - log.warn("worker unavailable, ripgrep abort 
disabled") - } - return useWorker ? filesWorker(input) : filesDirect(input) - } - - const files: Interface["files"] = (input) => source(input) - - const tree: Interface["tree"] = Effect.fn("Ripgrep.tree")(function* (input: TreeInput) { - log.info("tree", input) - const list = Array.from(yield* source({ cwd: input.cwd, signal: input.signal }).pipe(Stream.runCollect)) - - interface Node { - name: string - children: Map - } - - function child(node: Node, name: string) { - const item = node.children.get(name) - if (item) return item - const next = { name, children: new Map() } - node.children.set(name, next) - return next - } - - function count(node: Node): number { - return Array.from(node.children.values()).reduce((sum, child) => sum + 1 + count(child), 0) - } - - const root: Node = { name: "", children: new Map() } - for (const file of list) { - if (file.includes(".opencode")) continue - const parts = file.split(path.sep) - if (parts.length < 2) continue - let node = root - for (const part of parts.slice(0, -1)) { - node = child(node, part) + if (msg.code !== 0 && msg.code !== 1 && msg.code !== 2) { + done(Effect.fail(error(msg.stderr, msg.code))) + return } - } - - const total = count(root) - const limit = input.limit ?? 
total - const lines: string[] = [] - const queue: Array<{ node: Node; path: string }> = Array.from(root.children.values()) - .sort((a, b) => a.name.localeCompare(b.name)) - .map((node) => ({ node, path: node.name })) - - let used = 0 - for (let i = 0; i < queue.length && used < limit; i++) { - const item = queue[i] - lines.push(item.path) - used++ - queue.push( - ...Array.from(item.node.children.values()) - .sort((a, b) => a.name.localeCompare(b.name)) - .map((node) => ({ node, path: `${item.path}/${node.name}` })), + done( + Effect.sync(() => ({ + items: parse(msg.stdout), + partial: msg.code === 2, + })), ) } - if (total > used) lines.push(`[${total - used} truncated]`) - return lines.join("\n") - }) + input.signal?.addEventListener("abort", onabort, { once: true }) + signal.addEventListener("abort", onabort, { once: true }) + w.postMessage({ + kind: "search", + cwd: input.cwd, + args: searchArgs(input), + } satisfies Run) - const search: Interface["search"] = Effect.fn("Ripgrep.search")(function* (input: SearchInput) { - const useWorker = !!input.signal && typeof Worker !== "undefined" - if (!useWorker && input.signal) { - log.warn("worker unavailable, ripgrep abort disabled") - } - return yield* useWorker ? 
searchWorker(input) : searchDirect(input) - }) + return Effect.sync(() => { + input.signal?.removeEventListener("abort", onabort) + signal.removeEventListener("abort", onabort) + w.onerror = null + w.onmessage = null + }) + }), + (w) => Effect.sync(() => w.terminate()), + ) +} - return Service.of({ files, tree, search }) +function filesDirect(input: FilesInput) { + return Stream.callback( + Effect.fnUntraced(function* (queue: Queue.Queue) { + let buf = "" + let err = "" + + const out = { + write(chunk: unknown) { + buf = drain(buf, chunk, (line) => { + Queue.offerUnsafe(queue, clean(line)) + }) + }, + } + + const stderr = { + write(chunk: unknown) { + err += text(chunk) + }, + } + + yield* Effect.forkScoped( + Effect.gen(function* () { + yield* check(input.cwd) + const ret = yield* Effect.tryPromise({ + try: () => + ripgrep(filesArgs(input), { + stdout: out, + stderr, + ...opts(input.cwd), + }), + catch: toError, + }) + if (buf) Queue.offerUnsafe(queue, clean(buf)) + if (ret.code === 0 || ret.code === 1) { + Queue.endUnsafe(queue) + return + } + fail(queue, error(err, ret.code ?? 1)) + }).pipe( + Effect.catch((err) => + Effect.sync(() => { + fail(queue, err) + }), + ), + ), + ) }), ) - - export const defaultLayer = layer } + +function filesWorker(input: FilesInput) { + return Stream.callback( + Effect.fnUntraced(function* (queue: Queue.Queue) { + if (input.signal?.aborted) { + fail(queue, abort(input.signal)) + return + } + + const w = yield* Effect.acquireRelease(worker(), (w) => Effect.sync(() => w.terminate())) + let open = true + const close = () => { + if (!open) return false + open = false + return true + } + const onabort = () => { + if (!close()) return + fail(queue, abort(input.signal)) + } + + w.onerror = (evt) => { + if (!close()) return + fail(queue, toError(evt.error ?? 
evt.message)) + } + w.onmessage = (evt: MessageEvent) => { + const msg = evt.data + if (msg.type === "line") { + if (open) Queue.offerUnsafe(queue, msg.line) + return + } + if (!close()) return + if (msg.type === "error") { + fail(queue, Object.assign(new Error(msg.error.message), msg.error)) + return + } + if (msg.code === 0 || msg.code === 1) { + Queue.endUnsafe(queue) + return + } + fail(queue, error(msg.stderr, msg.code)) + } + + yield* Effect.acquireRelease( + Effect.sync(() => { + input.signal?.addEventListener("abort", onabort, { once: true }) + w.postMessage({ + kind: "files", + cwd: input.cwd, + args: filesArgs(input), + } satisfies Run) + }), + () => + Effect.sync(() => { + input.signal?.removeEventListener("abort", onabort) + w.onerror = null + w.onmessage = null + }), + ) + }), + ) +} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const source = (input: FilesInput) => { + const useWorker = !!input.signal && typeof Worker !== "undefined" + if (!useWorker && input.signal) { + log.warn("worker unavailable, ripgrep abort disabled") + } + return useWorker ? 
filesWorker(input) : filesDirect(input) + } + + const files: Interface["files"] = (input) => source(input) + + const tree: Interface["tree"] = Effect.fn("Ripgrep.tree")(function* (input: TreeInput) { + log.info("tree", input) + const list = Array.from(yield* source({ cwd: input.cwd, signal: input.signal }).pipe(Stream.runCollect)) + + interface Node { + name: string + children: Map + } + + function child(node: Node, name: string) { + const item = node.children.get(name) + if (item) return item + const next = { name, children: new Map() } + node.children.set(name, next) + return next + } + + function count(node: Node): number { + return Array.from(node.children.values()).reduce((sum, child) => sum + 1 + count(child), 0) + } + + const root: Node = { name: "", children: new Map() } + for (const file of list) { + if (file.includes(".opencode")) continue + const parts = file.split(path.sep) + if (parts.length < 2) continue + let node = root + for (const part of parts.slice(0, -1)) { + node = child(node, part) + } + } + + const total = count(root) + const limit = input.limit ?? 
total + const lines: string[] = [] + const queue: Array<{ node: Node; path: string }> = Array.from(root.children.values()) + .sort((a, b) => a.name.localeCompare(b.name)) + .map((node) => ({ node, path: node.name })) + + let used = 0 + for (let i = 0; i < queue.length && used < limit; i++) { + const item = queue[i] + lines.push(item.path) + used++ + queue.push( + ...Array.from(item.node.children.values()) + .sort((a, b) => a.name.localeCompare(b.name)) + .map((node) => ({ node, path: `${item.path}/${node.name}` })), + ) + } + + if (total > used) lines.push(`[${total - used} truncated]`) + return lines.join("\n") + }) + + const search: Interface["search"] = Effect.fn("Ripgrep.search")(function* (input: SearchInput) { + const useWorker = !!input.signal && typeof Worker !== "undefined" + if (!useWorker && input.signal) { + log.warn("worker unavailable, ripgrep abort disabled") + } + return yield* useWorker ? searchWorker(input) : searchDirect(input) + }) + + return Service.of({ files, tree, search }) + }), +) + +export const defaultLayer = layer + +export * as Ripgrep from "./ripgrep" From 218eca7c2bc95355f594c0fe50853326c86c469f Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:50:11 -0400 Subject: [PATCH 067/120] refactor: unwrap MDNS namespace + self-reexport (#22968) --- packages/opencode/src/server/mdns.ts | 88 ++++++++++++++-------------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/packages/opencode/src/server/mdns.ts b/packages/opencode/src/server/mdns.ts index 2011771a20..580456754d 100644 --- a/packages/opencode/src/server/mdns.ts +++ b/packages/opencode/src/server/mdns.ts @@ -3,58 +3,58 @@ import { Bonjour } from "bonjour-service" const log = Log.create({ service: "mdns" }) -export namespace MDNS { - let bonjour: Bonjour | undefined - let currentPort: number | undefined +let bonjour: Bonjour | undefined +let currentPort: number | undefined - export function publish(port: number, domain?: string) { - if (currentPort === 
port) return - if (bonjour) unpublish() +export function publish(port: number, domain?: string) { + if (currentPort === port) return + if (bonjour) unpublish() - try { - const host = domain ?? "opencode.local" - const name = `opencode-${port}` - bonjour = new Bonjour() - const service = bonjour.publish({ - name, - type: "http", - host, - port, - txt: { path: "/" }, - }) + try { + const host = domain ?? "opencode.local" + const name = `opencode-${port}` + bonjour = new Bonjour() + const service = bonjour.publish({ + name, + type: "http", + host, + port, + txt: { path: "/" }, + }) - service.on("up", () => { - log.info("mDNS service published", { name, port }) - }) + service.on("up", () => { + log.info("mDNS service published", { name, port }) + }) - service.on("error", (err) => { - log.error("mDNS service error", { error: err }) - }) + service.on("error", (err) => { + log.error("mDNS service error", { error: err }) + }) - currentPort = port - } catch (err) { - log.error("mDNS publish failed", { error: err }) - if (bonjour) { - try { - bonjour.destroy() - } catch {} - } - bonjour = undefined - currentPort = undefined - } - } - - export function unpublish() { + currentPort = port + } catch (err) { + log.error("mDNS publish failed", { error: err }) if (bonjour) { try { - bonjour.unpublishAll() bonjour.destroy() - } catch (err) { - log.error("mDNS unpublish failed", { error: err }) - } - bonjour = undefined - currentPort = undefined - log.info("mDNS service unpublished") + } catch {} } + bonjour = undefined + currentPort = undefined } } + +export function unpublish() { + if (bonjour) { + try { + bonjour.unpublishAll() + bonjour.destroy() + } catch (err) { + log.error("mDNS unpublish failed", { error: err }) + } + bonjour = undefined + currentPort = undefined + log.info("mDNS service unpublished") + } +} + +export * as MDNS from "./mdns" From 715786bbf96304f617c5f2a48ed49fe6101c90ef Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:50:15 -0400 Subject: 
[PATCH 068/120] refactor: unwrap FileTime namespace + self-reexport (#22966) --- packages/opencode/src/file/time.ts | 208 ++++++++++++++--------------- 1 file changed, 104 insertions(+), 104 deletions(-) diff --git a/packages/opencode/src/file/time.ts b/packages/opencode/src/file/time.ts index 327eadbef5..cc26682d57 100644 --- a/packages/opencode/src/file/time.ts +++ b/packages/opencode/src/file/time.ts @@ -5,109 +5,109 @@ import { Flag } from "@/flag/flag" import type { SessionID } from "@/session/schema" import { Log } from "../util" -export namespace FileTime { - const log = Log.create({ service: "file.time" }) +const log = Log.create({ service: "file.time" }) - export type Stamp = { - readonly read: Date - readonly mtime: number | undefined - readonly size: number | undefined - } - - const session = (reads: Map>, sessionID: SessionID) => { - const value = reads.get(sessionID) - if (value) return value - - const next = new Map() - reads.set(sessionID, next) - return next - } - - interface State { - reads: Map> - locks: Map - } - - export interface Interface { - readonly read: (sessionID: SessionID, file: string) => Effect.Effect - readonly get: (sessionID: SessionID, file: string) => Effect.Effect - readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect - readonly withLock: (filepath: string, fn: () => Effect.Effect) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/FileTime") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const fsys = yield* AppFileSystem.Service - const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK - - const stamp = Effect.fnUntraced(function* (file: string) { - const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void)) - return { - read: yield* DateTime.nowAsDate, - mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined, - size: info ? 
Number(info.size) : undefined, - } - }) - const state = yield* InstanceState.make( - Effect.fn("FileTime.state")(() => - Effect.succeed({ - reads: new Map>(), - locks: new Map(), - }), - ), - ) - - const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) { - filepath = AppFileSystem.normalizePath(filepath) - const locks = (yield* InstanceState.get(state)).locks - const lock = locks.get(filepath) - if (lock) return lock - - const next = Semaphore.makeUnsafe(1) - locks.set(filepath, next) - return next - }) - - const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) { - file = AppFileSystem.normalizePath(file) - const reads = (yield* InstanceState.get(state)).reads - log.info("read", { sessionID, file }) - session(reads, sessionID).set(file, yield* stamp(file)) - }) - - const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) { - file = AppFileSystem.normalizePath(file) - const reads = (yield* InstanceState.get(state)).reads - return reads.get(sessionID)?.get(file)?.read - }) - - const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) { - if (disableCheck) return - filepath = AppFileSystem.normalizePath(filepath) - - const reads = (yield* InstanceState.get(state)).reads - const time = reads.get(sessionID)?.get(filepath) - if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`) - - const next = yield* stamp(filepath) - const changed = next.mtime !== time.mtime || next.size !== time.size - if (!changed) return - - throw new Error( - `File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? 
next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`, - ) - }) - - const withLock = Effect.fn("FileTime.withLock")(function* (filepath: string, fn: () => Effect.Effect) { - return yield* fn().pipe((yield* getLock(filepath)).withPermits(1)) - }) - - return Service.of({ read, get, assert, withLock }) - }), - ).pipe(Layer.orDie) - - export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) +export type Stamp = { + readonly read: Date + readonly mtime: number | undefined + readonly size: number | undefined } + +const session = (reads: Map>, sessionID: SessionID) => { + const value = reads.get(sessionID) + if (value) return value + + const next = new Map() + reads.set(sessionID, next) + return next +} + +interface State { + reads: Map> + locks: Map +} + +export interface Interface { + readonly read: (sessionID: SessionID, file: string) => Effect.Effect + readonly get: (sessionID: SessionID, file: string) => Effect.Effect + readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect + readonly withLock: (filepath: string, fn: () => Effect.Effect) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/FileTime") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const fsys = yield* AppFileSystem.Service + const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK + + const stamp = Effect.fnUntraced(function* (file: string) { + const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void)) + return { + read: yield* DateTime.nowAsDate, + mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined, + size: info ? 
Number(info.size) : undefined, + } + }) + const state = yield* InstanceState.make( + Effect.fn("FileTime.state")(() => + Effect.succeed({ + reads: new Map>(), + locks: new Map(), + }), + ), + ) + + const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) { + filepath = AppFileSystem.normalizePath(filepath) + const locks = (yield* InstanceState.get(state)).locks + const lock = locks.get(filepath) + if (lock) return lock + + const next = Semaphore.makeUnsafe(1) + locks.set(filepath, next) + return next + }) + + const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) { + file = AppFileSystem.normalizePath(file) + const reads = (yield* InstanceState.get(state)).reads + log.info("read", { sessionID, file }) + session(reads, sessionID).set(file, yield* stamp(file)) + }) + + const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) { + file = AppFileSystem.normalizePath(file) + const reads = (yield* InstanceState.get(state)).reads + return reads.get(sessionID)?.get(file)?.read + }) + + const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) { + if (disableCheck) return + filepath = AppFileSystem.normalizePath(filepath) + + const reads = (yield* InstanceState.get(state)).reads + const time = reads.get(sessionID)?.get(filepath) + if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`) + + const next = yield* stamp(filepath) + const changed = next.mtime !== time.mtime || next.size !== time.size + if (!changed) return + + throw new Error( + `File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? 
next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`, + ) + }) + + const withLock = Effect.fn("FileTime.withLock")(function* (filepath: string, fn: () => Effect.Effect) { + return yield* fn().pipe((yield* getLock(filepath)).withPermits(1)) + }) + + return Service.of({ read, get, assert, withLock }) + }), +).pipe(Layer.orDie) + +export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) + +export * as FileTime from "./time" From 1089fa041561d76a58d72e464d95219af682eb8c Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:50:32 -0400 Subject: [PATCH 069/120] refactor: unwrap ServerProxy namespace + self-reexport (#22969) --- packages/opencode/src/server/proxy.ts | 146 +++++++++++++------------- 1 file changed, 73 insertions(+), 73 deletions(-) diff --git a/packages/opencode/src/server/proxy.ts b/packages/opencode/src/server/proxy.ts index 07703fdc80..22bc89baf2 100644 --- a/packages/opencode/src/server/proxy.ts +++ b/packages/opencode/src/server/proxy.ts @@ -101,83 +101,83 @@ const app = (upgrade: UpgradeWebSocket) => }), ) -export namespace ServerProxy { - const log = Log.Default.clone().tag("service", "server-proxy") +const log = Log.Default.clone().tag("service", "server-proxy") - export async function http( - url: string | URL, - extra: HeadersInit | undefined, - req: Request, - workspaceID: WorkspaceID, - ) { - if (!Workspace.isSyncing(workspaceID)) { - return new Response(`broken sync connection for workspace: ${workspaceID}`, { - status: 503, - headers: { - "content-type": "text/plain; charset=utf-8", - }, - }) - } - - return fetch( - new Request(url, { - method: req.method, - headers: headers(req, extra), - body: req.method === "GET" || req.method === "HEAD" ? 
undefined : req.body, - redirect: "manual", - signal: req.signal, - }), - ).then((res) => { - const sync = Fence.parse(res.headers) - const next = new Headers(res.headers) - next.delete("content-encoding") - next.delete("content-length") - - const done = sync ? Fence.wait(workspaceID, sync, req.signal) : Promise.resolve() - - return done.then(async () => { - console.log("proxy http response", { - method: req.method, - request: req.url, - url: String(url), - status: res.status, - statusText: res.statusText, - }) - return new Response(res.body, { - status: res.status, - statusText: res.statusText, - headers: next, - }) - }) +export async function http( + url: string | URL, + extra: HeadersInit | undefined, + req: Request, + workspaceID: WorkspaceID, +) { + if (!Workspace.isSyncing(workspaceID)) { + return new Response(`broken sync connection for workspace: ${workspaceID}`, { + status: 503, + headers: { + "content-type": "text/plain; charset=utf-8", + }, }) } - export function websocket( - upgrade: UpgradeWebSocket, - target: string | URL, - extra: HeadersInit | undefined, - req: Request, - env: unknown, - ) { - const proxy = new URL(req.url) - proxy.pathname = "/__workspace_ws" - proxy.search = "" - const next = new Headers(req.headers) - next.set("x-opencode-proxy-url", socket(target)) - for (const [key, value] of new Headers(extra).entries()) { - next.set(key, value) - } - log.info("proxy websocket", { - request: req.url, - target: String(target), - }) - return app(upgrade).fetch( - new Request(proxy, { + return fetch( + new Request(url, { + method: req.method, + headers: headers(req, extra), + body: req.method === "GET" || req.method === "HEAD" ? undefined : req.body, + redirect: "manual", + signal: req.signal, + }), + ).then((res) => { + const sync = Fence.parse(res.headers) + const next = new Headers(res.headers) + next.delete("content-encoding") + next.delete("content-length") + + const done = sync ? 
Fence.wait(workspaceID, sync, req.signal) : Promise.resolve() + + return done.then(async () => { + console.log("proxy http response", { method: req.method, + request: req.url, + url: String(url), + status: res.status, + statusText: res.statusText, + }) + return new Response(res.body, { + status: res.status, + statusText: res.statusText, headers: next, - signal: req.signal, - }), - env as never, - ) - } + }) + }) + }) } + +export function websocket( + upgrade: UpgradeWebSocket, + target: string | URL, + extra: HeadersInit | undefined, + req: Request, + env: unknown, +) { + const proxy = new URL(req.url) + proxy.pathname = "/__workspace_ws" + proxy.search = "" + const next = new Headers(req.headers) + next.set("x-opencode-proxy-url", socket(target)) + for (const [key, value] of new Headers(extra).entries()) { + next.set(key, value) + } + log.info("proxy websocket", { + request: req.url, + target: String(target), + }) + return app(upgrade).fetch( + new Request(proxy, { + method: req.method, + headers: next, + signal: req.signal, + }), + env as never, + ) +} + +export * as ServerProxy from "./proxy" From c03fa362572d8108d2d76c2a18bbf616a7345dac Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:51:01 -0400 Subject: [PATCH 070/120] refactor: unwrap Server namespace + self-reexport (#22970) --- packages/opencode/src/server/server.ts | 180 ++++++++++++------------- 1 file changed, 90 insertions(+), 90 deletions(-) diff --git a/packages/opencode/src/server/server.ts b/packages/opencode/src/server/server.ts index fc3b399f79..892a99a77c 100644 --- a/packages/opencode/src/server/server.ts +++ b/packages/opencode/src/server/server.ts @@ -17,37 +17,22 @@ globalThis.AI_SDK_LOG_WARNINGS = false initProjectors() -export namespace Server { - const log = Log.create({ service: "server" }) +const log = Log.create({ service: "server" }) - export type Listener = { - hostname: string - port: number - url: URL - stop: (close?: boolean) => Promise - } +export type 
Listener = { + hostname: string + port: number + url: URL + stop: (close?: boolean) => Promise +} - export const Default = lazy(() => create({})) +export const Default = lazy(() => create({})) - function create(opts: { cors?: string[] }) { - const app = new Hono() - const runtime = adapter.create(app) - - if (Flag.OPENCODE_WORKSPACE_ID) { - return { - app: app - .onError(ErrorMiddleware) - .use(AuthMiddleware) - .use(LoggerMiddleware) - .use(CompressionMiddleware) - .use(CorsMiddleware(opts)) - .use(FenceMiddleware) - .route("/", ControlPlaneRoutes()) - .route("/", InstanceRoutes(runtime.upgradeWebSocket)), - runtime, - } - } +function create(opts: { cors?: string[] }) { + const app = new Hono() + const runtime = adapter.create(app) + if (Flag.OPENCODE_WORKSPACE_ID) { return { app: app .onError(ErrorMiddleware) @@ -55,73 +40,88 @@ export namespace Server { .use(LoggerMiddleware) .use(CompressionMiddleware) .use(CorsMiddleware(opts)) + .use(FenceMiddleware) .route("/", ControlPlaneRoutes()) - .route("/", InstanceRoutes(runtime.upgradeWebSocket)) - .route("/", UIRoutes()), + .route("/", InstanceRoutes(runtime.upgradeWebSocket)), runtime, } } - export async function openapi() { - // Build a fresh app with all routes registered directly so - // hono-openapi can see describeRoute metadata (`.route()` wraps - // handlers when the sub-app has a custom errorHandler, which - // strips the metadata symbol). 
- const { app } = create({}) - const result = await generateSpecs(app, { - documentation: { - info: { - title: "opencode", - version: "1.0.0", - description: "opencode api", - }, - openapi: "3.1.1", - }, - }) - return result - } - - export let url: URL - - export async function listen(opts: { - port: number - hostname: string - mdns?: boolean - mdnsDomain?: string - cors?: string[] - }): Promise { - const built = create(opts) - const server = await built.runtime.listen(opts) - - const next = new URL("http://localhost") - next.hostname = opts.hostname - next.port = String(server.port) - url = next - - const mdns = - opts.mdns && - server.port && - opts.hostname !== "127.0.0.1" && - opts.hostname !== "localhost" && - opts.hostname !== "::1" - if (mdns) { - MDNS.publish(server.port, opts.mdnsDomain) - } else if (opts.mdns) { - log.warn("mDNS enabled but hostname is loopback; skipping mDNS publish") - } - - let closing: Promise | undefined - return { - hostname: opts.hostname, - port: server.port, - url: next, - stop(close?: boolean) { - closing ??= (async () => { - if (mdns) MDNS.unpublish() - await server.stop(close) - })() - return closing - }, - } + return { + app: app + .onError(ErrorMiddleware) + .use(AuthMiddleware) + .use(LoggerMiddleware) + .use(CompressionMiddleware) + .use(CorsMiddleware(opts)) + .route("/", ControlPlaneRoutes()) + .route("/", InstanceRoutes(runtime.upgradeWebSocket)) + .route("/", UIRoutes()), + runtime, } } + +export async function openapi() { + // Build a fresh app with all routes registered directly so + // hono-openapi can see describeRoute metadata (`.route()` wraps + // handlers when the sub-app has a custom errorHandler, which + // strips the metadata symbol). 
+ const { app } = create({}) + const result = await generateSpecs(app, { + documentation: { + info: { + title: "opencode", + version: "1.0.0", + description: "opencode api", + }, + openapi: "3.1.1", + }, + }) + return result +} + +export let url: URL + +export async function listen(opts: { + port: number + hostname: string + mdns?: boolean + mdnsDomain?: string + cors?: string[] +}): Promise { + const built = create(opts) + const server = await built.runtime.listen(opts) + + const next = new URL("http://localhost") + next.hostname = opts.hostname + next.port = String(server.port) + url = next + + const mdns = + opts.mdns && + server.port && + opts.hostname !== "127.0.0.1" && + opts.hostname !== "localhost" && + opts.hostname !== "::1" + if (mdns) { + MDNS.publish(server.port, opts.mdnsDomain) + } else if (opts.mdns) { + log.warn("mDNS enabled but hostname is loopback; skipping mDNS publish") + } + + let closing: Promise | undefined + return { + hostname: opts.hostname, + port: server.port, + url: next, + stop(close?: boolean) { + closing ??= (async () => { + if (mdns) MDNS.unpublish() + await server.stop(close) + })() + return closing + }, + } +} + +export * as Server from "./server" From 5d47ea091879b026b8efb9d09af06deb0643e46a Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:52:04 -0400 Subject: [PATCH 071/120] refactor: unwrap ConfigMCP namespace + self-reexport (#22948) --- .../opencode/src/cli/cmd/tui/context/kv.tsx | 2 +- packages/opencode/src/cli/error.ts | 8 +- packages/opencode/src/config/mcp.ts | 130 +++++++++--------- packages/opencode/src/lsp/lsp.ts | 9 +- packages/opencode/src/npm/index.ts | 13 +- packages/opencode/src/provider/provider.ts | 6 +- packages/opencode/src/session/session.ts | 22 +-- packages/opencode/src/tool/tool.ts | 2 +- packages/opencode/src/util/filesystem.ts | 2 +- packages/opencode/test/config/config.test.ts | 2 +- 10 files changed, 104 insertions(+), 92 deletions(-) diff --git 
a/packages/opencode/src/cli/cmd/tui/context/kv.tsx b/packages/opencode/src/cli/cmd/tui/context/kv.tsx index 39e976b0e5..803752e766 100644 --- a/packages/opencode/src/cli/cmd/tui/context/kv.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/kv.tsx @@ -12,7 +12,7 @@ export const { use: useKV, provider: KVProvider } = createSimpleContext({ const [store, setStore] = createStore>() const filePath = path.join(Global.Path.state, "kv.json") - Filesystem.readJson(filePath) + Filesystem.readJson>(filePath) .then((x) => { setStore(x) }) diff --git a/packages/opencode/src/cli/error.ts b/packages/opencode/src/cli/error.ts index 89b557e2d2..f286b5166f 100644 --- a/packages/opencode/src/cli/error.ts +++ b/packages/opencode/src/cli/error.ts @@ -28,10 +28,10 @@ export function FormatError(input: unknown) { // ProviderModelNotFoundError: { providerID: string, modelID: string, suggestions?: string[] } if (NamedError.hasName(input, "ProviderModelNotFoundError")) { const data = (input as ErrorLike).data - const suggestions = data?.suggestions as string[] | undefined + const suggestions: string[] = Array.isArray(data?.suggestions) ? data.suggestions : [] return [ `Model not found: ${data?.providerID}/${data?.modelID}`, - ...(Array.isArray(suggestions) && suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []), + ...(suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []), `Try: \`opencode models\` to list available models`, `Or check your config (opencode.json) provider/model names`, ].join("\n") @@ -64,10 +64,10 @@ export function FormatError(input: unknown) { const data = (input as ErrorLike).data const path = data?.path const message = data?.message - const issues = data?.issues as Array<{ message: string; path: string[] }> | undefined + const issues: Array<{ message: string; path: string[] }> = Array.isArray(data?.issues) ? data.issues : [] return [ `Configuration is invalid${path && path !== "config" ? ` at ${path}` : ""}` + (message ? 
`: ${message}` : ""), - ...(issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []), + ...issues.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")), ].join("\n") } diff --git a/packages/opencode/src/config/mcp.ts b/packages/opencode/src/config/mcp.ts index fb8f8caa41..fda933b421 100644 --- a/packages/opencode/src/config/mcp.ts +++ b/packages/opencode/src/config/mcp.ts @@ -1,70 +1,70 @@ import z from "zod" -export namespace ConfigMCP { - export const Local = z - .object({ - type: z.literal("local").describe("Type of MCP server connection"), - command: z.string().array().describe("Command and arguments to run the MCP server"), - environment: z - .record(z.string(), z.string()) - .optional() - .describe("Environment variables to set when running the MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpLocalConfig", - }) +export const Local = z + .object({ + type: z.literal("local").describe("Type of MCP server connection"), + command: z.string().array().describe("Command and arguments to run the MCP server"), + environment: z + .record(z.string(), z.string()) + .optional() + .describe("Environment variables to set when running the MCP server"), + enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), + timeout: z + .number() + .int() + .positive() + .optional() + .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), + }) + .strict() + .meta({ + ref: "McpLocalConfig", + }) - export const OAuth = z - .object({ - clientId: z - .string() - .optional() - .describe("OAuth client ID. 
If not provided, dynamic client registration (RFC 7591) will be attempted."), - clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), - scope: z.string().optional().describe("OAuth scopes to request during authorization"), - redirectUri: z - .string() - .optional() - .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), - }) - .strict() - .meta({ - ref: "McpOAuthConfig", - }) - export type OAuth = z.infer +export const OAuth = z + .object({ + clientId: z + .string() + .optional() + .describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."), + clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), + scope: z.string().optional().describe("OAuth scopes to request during authorization"), + redirectUri: z + .string() + .optional() + .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), + }) + .strict() + .meta({ + ref: "McpOAuthConfig", + }) +export type OAuth = z.infer - export const Remote = z - .object({ - type: z.literal("remote").describe("Type of MCP server connection"), - url: z.string().describe("URL of the remote MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), - oauth: z - .union([OAuth, z.literal(false)]) - .optional() - .describe( - "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", - ), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpRemoteConfig", - }) +export const Remote = z + .object({ + type: z.literal("remote").describe("Type of MCP server connection"), + url: z.string().describe("URL of the remote MCP server"), + enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), + headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), + oauth: z + .union([OAuth, z.literal(false)]) + .optional() + .describe( + "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", + ), + timeout: z + .number() + .int() + .positive() + .optional() + .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), + }) + .strict() + .meta({ + ref: "McpRemoteConfig", + }) - export const Info = z.discriminatedUnion("type", [Local, Remote]) - export type Info = z.infer -} +export const Info = z.discriminatedUnion("type", [Local, Remote]) +export type Info = z.infer + +export * as ConfigMCP from "./mcp" diff --git a/packages/opencode/src/lsp/lsp.ts b/packages/opencode/src/lsp/lsp.ts index d4d1e75634..d895e73256 100644 --- a/packages/opencode/src/lsp/lsp.ts +++ b/packages/opencode/src/lsp/lsp.ts @@ -440,12 +440,11 @@ export const layer = Layer.effect( const workspaceSymbol = Effect.fn("LSP.workspaceSymbol")(function* (query: string) { const results = yield* runAll((client) => client.connection - .sendRequest("workspace/symbol", { query }) - .then((result: any) => result.filter((x: Symbol) => kinds.includes(x.kind))) - .then((result: any) => result.slice(0, 10)) - .catch(() => []), + .sendRequest("workspace/symbol", { query }) + .then((result) => result.filter((x) => kinds.includes(x.kind)).slice(0, 10)) + .catch(() => [] as Symbol[]), ) - return results.flat() as Symbol[] + return results.flat() }) const prepareCallHierarchy = Effect.fn("LSP.prepareCallHierarchy")(function* 
(input: LocInput) { diff --git a/packages/opencode/src/npm/index.ts b/packages/opencode/src/npm/index.ts index 174df12974..425b27f420 100644 --- a/packages/opencode/src/npm/index.ts +++ b/packages/opencode/src/npm/index.ts @@ -124,8 +124,17 @@ export async function install(dir: string) { return } - const pkg = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) - const lock = await Filesystem.readJson(path.join(dir, "package-lock.json")).catch(() => ({})) + type PackageDeps = Record + type PackageJson = { + dependencies?: PackageDeps + devDependencies?: PackageDeps + peerDependencies?: PackageDeps + optionalDependencies?: PackageDeps + } + const pkg: PackageJson = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) + const lock: { packages?: Record } = await Filesystem.readJson<{ + packages?: Record + }>(path.join(dir, "package-lock.json")).catch(() => ({})) const declared = new Set([ ...Object.keys(pkg.dependencies || {}), diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 43ae9a5e9f..a7297634e7 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -547,12 +547,14 @@ function custom(dep: CustomDep): Record { }, async getModel(sdk: any, modelID: string, options?: Record) { if (modelID.startsWith("duo-workflow-")) { - const workflowRef = options?.workflowRef as string | undefined + const workflowRef = typeof options?.workflowRef === "string" ? options.workflowRef : undefined // Use the static mapping if it exists, otherwise use duo-workflow with selectedModelRef const sdkModelID = isWorkflowModel(modelID) ? modelID : "duo-workflow" + const workflowDefinition = + typeof options?.workflowDefinition === "string" ? 
options.workflowDefinition : undefined const model = sdk.workflowChat(sdkModelID, { featureFlags, - workflowDefinition: options?.workflowDefinition as string | undefined, + workflowDefinition, }) if (workflowRef) { model.selectedModelRef = workflowRef diff --git a/packages/opencode/src/session/session.ts b/packages/opencode/src/session/session.ts index 8c5fc29e4a..a453b19815 100644 --- a/packages/opencode/src/session/session.ts +++ b/packages/opencode/src/session/session.ts @@ -272,16 +272,18 @@ export const getUsage = (input: { model: Provider.Model; usage: LanguageModelUsa input.usage.inputTokenDetails?.cacheReadTokens ?? input.usage.cachedInputTokens ?? 0, ) const cacheWriteInputTokens = safe( - (input.usage.inputTokenDetails?.cacheWriteTokens ?? - input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ?? - // google-vertex-anthropic returns metadata under "vertex" key - // (AnthropicMessagesLanguageModel custom provider key from 'vertex.anthropic.messages') - input.metadata?.["vertex"]?.["cacheCreationInputTokens"] ?? - // @ts-expect-error - input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ?? - // @ts-expect-error - input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ?? - 0) as number, + Number( + input.usage.inputTokenDetails?.cacheWriteTokens ?? + input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ?? + // google-vertex-anthropic returns metadata under "vertex" key + // (AnthropicMessagesLanguageModel custom provider key from 'vertex.anthropic.messages') + input.metadata?.["vertex"]?.["cacheCreationInputTokens"] ?? + // @ts-expect-error + input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ?? + // @ts-expect-error + input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ?? 
+ 0, + ), ) // AI SDK v6 normalized inputTokens to include cached tokens across all providers diff --git a/packages/opencode/src/tool/tool.ts b/packages/opencode/src/tool/tool.ts index 0ea0435fb1..179149afd2 100644 --- a/packages/opencode/src/tool/tool.ts +++ b/packages/opencode/src/tool/tool.ts @@ -19,7 +19,7 @@ export type Context = { agent: string abort: AbortSignal callID?: string - extra?: { [key: string]: any } + extra?: { [key: string]: unknown } messages: MessageV2.WithParts[] metadata(input: { title?: string; metadata?: M }): Effect.Effect ask(input: Omit): Effect.Effect diff --git a/packages/opencode/src/util/filesystem.ts b/packages/opencode/src/util/filesystem.ts index 3ff2c6e3f4..6c4d455224 100644 --- a/packages/opencode/src/util/filesystem.ts +++ b/packages/opencode/src/util/filesystem.ts @@ -39,7 +39,7 @@ export async function readText(p: string): Promise { return readFile(p, "utf-8") } -export async function readJson(p: string): Promise { +export async function readJson(p: string): Promise { return JSON.parse(await readFile(p, "utf-8")) } diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index c41f395e51..3e90842e18 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -757,7 +757,7 @@ test("updates config and writes to file", async () => { const newConfig = { model: "updated/model" } await save(newConfig as any) - const writtenConfig = await Filesystem.readJson(path.join(tmp.path, "config.json")) + const writtenConfig = await Filesystem.readJson<{ model: string }>(path.join(tmp.path, "config.json")) expect(writtenConfig.model).toBe("updated/model") }, }) From f9aa3d77cd543ad3a46f86e36a2908f0cc2e652f Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 23:53:10 +0000 Subject: [PATCH 072/120] chore: generate --- packages/opencode/src/config/mcp.ts | 4 +--- packages/opencode/src/server/proxy.ts | 7 +------ 2 files 
changed, 2 insertions(+), 9 deletions(-) diff --git a/packages/opencode/src/config/mcp.ts b/packages/opencode/src/config/mcp.ts index fda933b421..5036cd6e4f 100644 --- a/packages/opencode/src/config/mcp.ts +++ b/packages/opencode/src/config/mcp.ts @@ -49,9 +49,7 @@ export const Remote = z oauth: z .union([OAuth, z.literal(false)]) .optional() - .describe( - "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", - ), + .describe("OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection."), timeout: z .number() .int() diff --git a/packages/opencode/src/server/proxy.ts b/packages/opencode/src/server/proxy.ts index 22bc89baf2..9c1fd1f288 100644 --- a/packages/opencode/src/server/proxy.ts +++ b/packages/opencode/src/server/proxy.ts @@ -103,12 +103,7 @@ const app = (upgrade: UpgradeWebSocket) => const log = Log.Default.clone().tag("service", "server-proxy") -export async function http( - url: string | URL, - extra: HeadersInit | undefined, - req: Request, - workspaceID: WorkspaceID, -) { +export async function http(url: string | URL, extra: HeadersInit | undefined, req: Request, workspaceID: WorkspaceID) { if (!Workspace.isSyncing(workspaceID)) { return new Response(`broken sync connection for workspace: ${workspaceID}`, { status: 503, From bae80af1b4620961664076bd257c22b88b57eeaf Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:00:15 -0400 Subject: [PATCH 073/120] refactor: unwrap Workspace namespace + self-reexport (#22934) --- .../opencode/src/control-plane/workspace.ts | 886 +++++++++--------- 1 file changed, 443 insertions(+), 443 deletions(-) diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index 9c1c4c8960..3af11707e8 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -26,173 +26,239 @@ import { AppRuntime } from 
"@/effect/app-runtime" import { EventSequenceTable } from "@/sync/event.sql" import { waitEvent } from "./util" -export namespace Workspace { - export const Info = WorkspaceInfo.meta({ - ref: "Workspace", - }) - export type Info = z.infer +export const Info = WorkspaceInfo.meta({ + ref: "Workspace", +}) +export type Info = z.infer - export const ConnectionStatus = z.object({ - workspaceID: WorkspaceID.zod, - status: z.enum(["connected", "connecting", "disconnected", "error"]), - error: z.string().optional(), - }) - export type ConnectionStatus = z.infer +export const ConnectionStatus = z.object({ + workspaceID: WorkspaceID.zod, + status: z.enum(["connected", "connecting", "disconnected", "error"]), + error: z.string().optional(), +}) +export type ConnectionStatus = z.infer - const Restore = z.object({ - workspaceID: WorkspaceID.zod, - sessionID: SessionID.zod, - total: z.number().int().min(0), - step: z.number().int().min(0), - }) +const Restore = z.object({ + workspaceID: WorkspaceID.zod, + sessionID: SessionID.zod, + total: z.number().int().min(0), + step: z.number().int().min(0), +}) - export const Event = { - Ready: BusEvent.define( - "workspace.ready", - z.object({ - name: z.string(), - }), - ), - Failed: BusEvent.define( - "workspace.failed", - z.object({ - message: z.string(), - }), - ), - Restore: BusEvent.define("workspace.restore", Restore), - Status: BusEvent.define("workspace.status", ConnectionStatus), +export const Event = { + Ready: BusEvent.define( + "workspace.ready", + z.object({ + name: z.string(), + }), + ), + Failed: BusEvent.define( + "workspace.failed", + z.object({ + message: z.string(), + }), + ), + Restore: BusEvent.define("workspace.restore", Restore), + Status: BusEvent.define("workspace.status", ConnectionStatus), +} + +function fromRow(row: typeof WorkspaceTable.$inferSelect): Info { + return { + id: row.id, + type: row.type, + branch: row.branch, + name: row.name, + directory: row.directory, + extra: row.extra, + projectID: 
row.project_id, + } +} + +const CreateInput = z.object({ + id: WorkspaceID.zod.optional(), + type: Info.shape.type, + branch: Info.shape.branch, + projectID: ProjectID.zod, + extra: Info.shape.extra, +}) + +export const create = fn(CreateInput, async (input) => { + const id = WorkspaceID.ascending(input.id) + const adaptor = await getAdaptor(input.projectID, input.type) + + const config = await adaptor.configure({ ...input, id, name: Slug.create(), directory: null }) + + const info: Info = { + id, + type: config.type, + branch: config.branch ?? null, + name: config.name ?? null, + directory: config.directory ?? null, + extra: config.extra ?? null, + projectID: input.projectID, } - function fromRow(row: typeof WorkspaceTable.$inferSelect): Info { - return { - id: row.id, - type: row.type, - branch: row.branch, - name: row.name, - directory: row.directory, - extra: row.extra, - projectID: row.project_id, - } - } - - const CreateInput = z.object({ - id: WorkspaceID.zod.optional(), - type: Info.shape.type, - branch: Info.shape.branch, - projectID: ProjectID.zod, - extra: Info.shape.extra, + Database.use((db) => { + db.insert(WorkspaceTable) + .values({ + id: info.id, + type: info.type, + branch: info.branch, + name: info.name, + directory: info.directory, + extra: info.extra, + project_id: info.projectID, + }) + .run() }) - export const create = fn(CreateInput, async (input) => { - const id = WorkspaceID.ascending(input.id) - const adaptor = await getAdaptor(input.projectID, input.type) + const env = { + OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), + OPENCODE_WORKSPACE_ID: config.id, + OPENCODE_EXPERIMENTAL_WORKSPACES: "true", + } + await adaptor.create(config, env) - const config = await adaptor.configure({ ...input, id, name: Slug.create(), directory: null }) + startSync(info) - const info: Info = { - id, - type: config.type, - branch: config.branch ?? null, - name: config.name ?? 
null, - directory: config.directory ?? null, - extra: config.extra ?? null, - projectID: input.projectID, - } + await waitEvent({ + timeout: TIMEOUT, + fn(event) { + if (event.workspace === info.id && event.payload.type === Event.Status.type) { + const { status } = event.payload.properties + return status === "error" || status === "connected" + } + return false + }, + }) - Database.use((db) => { - db.insert(WorkspaceTable) - .values({ - id: info.id, - type: info.type, - branch: info.branch, - name: info.name, - directory: info.directory, - extra: info.extra, - project_id: info.projectID, - }) - .run() - }) + return info +}) - const env = { - OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), - OPENCODE_WORKSPACE_ID: config.id, - OPENCODE_EXPERIMENTAL_WORKSPACES: "true", - } - await adaptor.create(config, env) +const SessionRestoreInput = z.object({ + workspaceID: WorkspaceID.zod, + sessionID: SessionID.zod, +}) - startSync(info) +export const sessionRestore = fn(SessionRestoreInput, async (input) => { + log.info("session restore requested", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + }) + try { + const space = await get(input.workspaceID) + if (!space) throw new Error(`Workspace not found: ${input.workspaceID}`) - await waitEvent({ - timeout: TIMEOUT, - fn(event) { - if (event.workspace === info.id && event.payload.type === Event.Status.type) { - const { status } = event.payload.properties - return status === "error" || status === "connected" - } - return false + const adaptor = await getAdaptor(space.projectID, space.type) + const target = await adaptor.target(space) + + // Need to switch the workspace of the session + SyncEvent.run(Session.Event.Updated, { + sessionID: input.sessionID, + info: { + workspaceID: input.workspaceID, }, }) - return info - }) + const rows = Database.use((db) => + db + .select({ + id: EventTable.id, + aggregateID: EventTable.aggregate_id, + seq: 
EventTable.seq, + type: EventTable.type, + data: EventTable.data, + }) + .from(EventTable) + .where(eq(EventTable.aggregate_id, input.sessionID)) + .orderBy(asc(EventTable.seq)) + .all(), + ) + if (rows.length === 0) throw new Error(`No events found for session: ${input.sessionID}`) - const SessionRestoreInput = z.object({ - workspaceID: WorkspaceID.zod, - sessionID: SessionID.zod, - }) + const all = rows - export const sessionRestore = fn(SessionRestoreInput, async (input) => { - log.info("session restore requested", { + const size = 10 + const sets = Array.from({ length: Math.ceil(all.length / size) }, (_, i) => all.slice(i * size, (i + 1) * size)) + const total = sets.length + log.info("session restore prepared", { workspaceID: input.workspaceID, sessionID: input.sessionID, + workspaceType: space.type, + directory: space.directory, + target: target.type === "remote" ? String(route(target.url, "/sync/replay")) : target.directory, + events: all.length, + batches: total, + first: all[0]?.seq, + last: all.at(-1)?.seq, }) - try { - const space = await get(input.workspaceID) - if (!space) throw new Error(`Workspace not found: ${input.workspaceID}`) - - const adaptor = await getAdaptor(space.projectID, space.type) - const target = await adaptor.target(space) - - // Need to switch the workspace of the session - SyncEvent.run(Session.Event.Updated, { - sessionID: input.sessionID, - info: { + GlobalBus.emit("event", { + directory: "global", + workspace: input.workspaceID, + payload: { + type: Event.Restore.type, + properties: { workspaceID: input.workspaceID, + sessionID: input.sessionID, + total, + step: 0, }, - }) - - const rows = Database.use((db) => - db - .select({ - id: EventTable.id, - aggregateID: EventTable.aggregate_id, - seq: EventTable.seq, - type: EventTable.type, - data: EventTable.data, - }) - .from(EventTable) - .where(eq(EventTable.aggregate_id, input.sessionID)) - .orderBy(asc(EventTable.seq)) - .all(), - ) - if (rows.length === 0) throw new Error(`No 
events found for session: ${input.sessionID}`) - - const all = rows - - const size = 10 - const sets = Array.from({ length: Math.ceil(all.length / size) }, (_, i) => all.slice(i * size, (i + 1) * size)) - const total = sets.length - log.info("session restore prepared", { + }, + }) + for (const [i, events] of sets.entries()) { + log.info("session restore batch starting", { workspaceID: input.workspaceID, sessionID: input.sessionID, - workspaceType: space.type, - directory: space.directory, + step: i + 1, + total, + events: events.length, + first: events[0]?.seq, + last: events.at(-1)?.seq, target: target.type === "remote" ? String(route(target.url, "/sync/replay")) : target.directory, - events: all.length, - batches: total, - first: all[0]?.seq, - last: all.at(-1)?.seq, }) + if (target.type === "local") { + SyncEvent.replayAll(events) + log.info("session restore batch replayed locally", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + step: i + 1, + total, + events: events.length, + }) + } else { + const url = route(target.url, "/sync/replay") + const headers = new Headers(target.headers) + headers.set("content-type", "application/json") + const res = await fetch(url, { + method: "POST", + headers, + body: JSON.stringify({ + directory: space.directory ?? 
"", + events, + }), + }) + if (!res.ok) { + const body = await res.text() + log.error("session restore batch failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + step: i + 1, + total, + status: res.status, + body, + }) + throw new Error( + `Failed to replay session ${input.sessionID} into workspace ${input.workspaceID}: HTTP ${res.status} ${body}`, + ) + } + log.info("session restore batch posted", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + step: i + 1, + total, + status: res.status, + }) + } GlobalBus.emit("event", { directory: "global", workspace: input.workspaceID, @@ -202,329 +268,263 @@ export namespace Workspace { workspaceID: input.workspaceID, sessionID: input.sessionID, total, - step: 0, + step: i + 1, }, }, }) - for (const [i, events] of sets.entries()) { - log.info("session restore batch starting", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - step: i + 1, - total, - events: events.length, - first: events[0]?.seq, - last: events.at(-1)?.seq, - target: target.type === "remote" ? 
String(route(target.url, "/sync/replay")) : target.directory, + } + + log.info("session restore complete", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + batches: total, + }) + + return { + total, + } + } catch (err) { + log.error("session restore failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + error: errorData(err), + }) + throw err + } +}) + +export function list(project: Project.Info) { + const rows = Database.use((db) => + db.select().from(WorkspaceTable).where(eq(WorkspaceTable.project_id, project.id)).all(), + ) + const spaces = rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id)) + + for (const space of spaces) startSync(space) + return spaces +} + +function lookup(id: WorkspaceID) { + const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) + if (!row) return + return fromRow(row) +} + +export const get = fn(WorkspaceID.zod, async (id) => { + const space = lookup(id) + if (!space) return + startSync(space) + return space +}) + +export const remove = fn(WorkspaceID.zod, async (id) => { + const sessions = Database.use((db) => + db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.workspace_id, id)).all(), + ) + for (const session of sessions) { + await AppRuntime.runPromise(Session.Service.use((svc) => svc.remove(session.id))) + } + + const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) + + if (row) { + stopSync(id) + + const info = fromRow(row) + try { + const adaptor = await getAdaptor(info.projectID, row.type) + await adaptor.remove(info) + } catch { + log.error("adaptor not available when removing workspace", { type: row.type }) + } + Database.use((db) => db.delete(WorkspaceTable).where(eq(WorkspaceTable.id, id)).run()) + return info + } +}) + +const connections = new Map() +const aborts = new Map() +const TIMEOUT = 5000 + +function setStatus(id: WorkspaceID, status: 
ConnectionStatus["status"], error?: string) { + const prev = connections.get(id) + if (prev?.status === status && prev?.error === error) return + const next = { workspaceID: id, status, error } + connections.set(id, next) + + if (status === "error") { + aborts.delete(id) + } + + GlobalBus.emit("event", { + directory: "global", + workspace: id, + payload: { + type: Event.Status.type, + properties: next, + }, + }) +} + +export function status(): ConnectionStatus[] { + return [...connections.values()] +} + +function synced(state: Record) { + const ids = Object.keys(state) + if (ids.length === 0) return true + + const done = Object.fromEntries( + Database.use((db) => + db + .select({ + id: EventSequenceTable.aggregate_id, + seq: EventSequenceTable.seq, }) - if (target.type === "local") { - SyncEvent.replayAll(events) - log.info("session restore batch replayed locally", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - step: i + 1, - total, - events: events.length, - }) - } else { - const url = route(target.url, "/sync/replay") - const headers = new Headers(target.headers) - headers.set("content-type", "application/json") - const res = await fetch(url, { - method: "POST", - headers, - body: JSON.stringify({ - directory: space.directory ?? 
"", - events, - }), - }) - if (!res.ok) { - const body = await res.text() - log.error("session restore batch failed", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - step: i + 1, - total, - status: res.status, - body, - }) - throw new Error( - `Failed to replay session ${input.sessionID} into workspace ${input.workspaceID}: HTTP ${res.status} ${body}`, - ) - } - log.info("session restore batch posted", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - step: i + 1, - total, - status: res.status, - }) + .from(EventSequenceTable) + .where(inArray(EventSequenceTable.aggregate_id, ids)) + .all(), + ).map((row) => [row.id, row.seq]), + ) as Record + + return ids.every((id) => { + return (done[id] ?? -1) >= state[id] + }) +} + +export async function isSyncing(workspaceID: WorkspaceID) { + return aborts.has(workspaceID) +} + +export async function waitForSync(workspaceID: WorkspaceID, state: Record, signal?: AbortSignal) { + if (synced(state)) return + + try { + await waitEvent({ + timeout: TIMEOUT, + signal, + fn(event) { + if (event.workspace !== workspaceID && event.payload.type !== "sync") { + return false } - GlobalBus.emit("event", { - directory: "global", - workspace: input.workspaceID, - payload: { - type: Event.Restore.type, - properties: { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - total, - step: i + 1, - }, - }, - }) - } - - log.info("session restore complete", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - batches: total, - }) - - return { - total, - } - } catch (err) { - log.error("session restore failed", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - error: errorData(err), - }) - throw err - } - }) - - export function list(project: Project.Info) { - const rows = Database.use((db) => - db.select().from(WorkspaceTable).where(eq(WorkspaceTable.project_id, project.id)).all(), - ) - const spaces = rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id)) - - 
for (const space of spaces) startSync(space) - return spaces - } - - function lookup(id: WorkspaceID) { - const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) - if (!row) return - return fromRow(row) - } - - export const get = fn(WorkspaceID.zod, async (id) => { - const space = lookup(id) - if (!space) return - startSync(space) - return space - }) - - export const remove = fn(WorkspaceID.zod, async (id) => { - const sessions = Database.use((db) => - db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.workspace_id, id)).all(), - ) - for (const session of sessions) { - await AppRuntime.runPromise(Session.Service.use((svc) => svc.remove(session.id))) - } - - const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) - - if (row) { - stopSync(id) - - const info = fromRow(row) - try { - const adaptor = await getAdaptor(info.projectID, row.type) - await adaptor.remove(info) - } catch { - log.error("adaptor not available when removing workspace", { type: row.type }) - } - Database.use((db) => db.delete(WorkspaceTable).where(eq(WorkspaceTable.id, id)).run()) - return info - } - }) - - const connections = new Map() - const aborts = new Map() - const TIMEOUT = 5000 - - function setStatus(id: WorkspaceID, status: ConnectionStatus["status"], error?: string) { - const prev = connections.get(id) - if (prev?.status === status && prev?.error === error) return - const next = { workspaceID: id, status, error } - connections.set(id, next) - - if (status === "error") { - aborts.delete(id) - } - - GlobalBus.emit("event", { - directory: "global", - workspace: id, - payload: { - type: Event.Status.type, - properties: next, + return synced(state) }, }) + } catch { + if (signal?.aborted) throw signal.reason ?? 
new Error("Request aborted") + throw new Error(`Timed out waiting for sync fence: ${JSON.stringify(state)}`) } +} - export function status(): ConnectionStatus[] { - return [...connections.values()] - } +const log = Log.create({ service: "workspace-sync" }) - function synced(state: Record) { - const ids = Object.keys(state) - if (ids.length === 0) return true +function route(url: string | URL, path: string) { + const next = new URL(url) + next.pathname = `${next.pathname.replace(/\/$/, "")}${path}` + next.search = "" + next.hash = "" + return next +} - const done = Object.fromEntries( - Database.use((db) => - db - .select({ - id: EventSequenceTable.aggregate_id, - seq: EventSequenceTable.seq, - }) - .from(EventSequenceTable) - .where(inArray(EventSequenceTable.aggregate_id, ids)) - .all(), - ).map((row) => [row.id, row.seq]), - ) as Record - - return ids.every((id) => { - return (done[id] ?? -1) >= state[id] - }) - } - - export async function isSyncing(workspaceID: WorkspaceID) { - return aborts.has(workspaceID) - } - - export async function waitForSync(workspaceID: WorkspaceID, state: Record, signal?: AbortSignal) { - if (synced(state)) return - - try { - await waitEvent({ - timeout: TIMEOUT, - signal, - fn(event) { - if (event.workspace !== workspaceID && event.payload.type !== "sync") { - return false - } - return synced(state) - }, - }) - } catch { - if (signal?.aborted) throw signal.reason ?? 
new Error("Request aborted") - throw new Error(`Timed out waiting for sync fence: ${JSON.stringify(state)}`) - } - } - - const log = Log.create({ service: "workspace-sync" }) - - function route(url: string | URL, path: string) { - const next = new URL(url) - next.pathname = `${next.pathname.replace(/\/$/, "")}${path}` - next.search = "" - next.hash = "" - return next - } - - async function syncWorkspace(space: Info, signal: AbortSignal) { - while (!signal.aborted) { - log.info("connecting to global sync", { workspace: space.name }) - setStatus(space.id, "connecting") - - const adaptor = await getAdaptor(space.projectID, space.type) - const target = await adaptor.target(space) - - if (target.type === "local") return - - const res = await fetch(route(target.url, "/global/event"), { - method: "GET", - headers: target.headers, - signal, - }).catch((err: unknown) => { - setStatus(space.id, "error", err instanceof Error ? err.message : String(err)) - - log.info("failed to connect to global sync", { - workspace: space.name, - error: err, - }) - return undefined - }) - - if (!res || !res.ok || !res.body) { - const error = !res ? 
"No response from global sync" : `Global sync HTTP ${res.status}` - log.info("failed to connect to global sync", { workspace: space.name, error }) - setStatus(space.id, "error", error) - await sleep(1000) - continue - } - - log.info("global sync connected", { workspace: space.name }) - setStatus(space.id, "connected") - - await parseSSE(res.body, signal, (evt: any) => { - try { - if (!("payload" in evt)) return - - if (evt.payload.type === "sync") { - SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) - } - - GlobalBus.emit("event", { - directory: evt.directory, - project: evt.project, - workspace: space.id, - payload: evt.payload, - }) - } catch (err) { - log.info("failed to replay global event", { - workspaceID: space.id, - error: err, - }) - } - }) - - log.info("disconnected from global sync: " + space.id) - setStatus(space.id, "disconnected") - - // TODO: Implement exponential backoff - await sleep(1000) - } - } - - async function startSync(space: Info) { - if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) return +async function syncWorkspace(space: Info, signal: AbortSignal) { + while (!signal.aborted) { + log.info("connecting to global sync", { workspace: space.name }) + setStatus(space.id, "connecting") const adaptor = await getAdaptor(space.projectID, space.type) const target = await adaptor.target(space) - if (target.type === "local") { - void Filesystem.exists(target.directory).then((exists) => { - setStatus(space.id, exists ? "connected" : "error", exists ? undefined : "directory does not exist") + if (target.type === "local") return + + const res = await fetch(route(target.url, "/global/event"), { + method: "GET", + headers: target.headers, + signal, + }).catch((err: unknown) => { + setStatus(space.id, "error", err instanceof Error ? 
err.message : String(err)) + + log.info("failed to connect to global sync", { + workspace: space.name, + error: err, }) - return + return undefined + }) + + if (!res || !res.ok || !res.body) { + const error = !res ? "No response from global sync" : `Global sync HTTP ${res.status}` + log.info("failed to connect to global sync", { workspace: space.name, error }) + setStatus(space.id, "error", error) + await sleep(1000) + continue } - if (aborts.has(space.id)) return true + log.info("global sync connected", { workspace: space.name }) + setStatus(space.id, "connected") + await parseSSE(res.body, signal, (evt: any) => { + try { + if (!("payload" in evt)) return + + if (evt.payload.type === "sync") { + SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) + } + + GlobalBus.emit("event", { + directory: evt.directory, + project: evt.project, + workspace: space.id, + payload: evt.payload, + }) + } catch (err) { + log.info("failed to replay global event", { + workspaceID: space.id, + error: err, + }) + } + }) + + log.info("disconnected from global sync: " + space.id) setStatus(space.id, "disconnected") - const abort = new AbortController() - aborts.set(space.id, abort) - - void syncWorkspace(space, abort.signal).catch((error) => { - aborts.delete(space.id) - - setStatus(space.id, "error", String(error)) - log.warn("workspace listener failed", { - workspaceID: space.id, - error, - }) - }) - } - - function stopSync(id: WorkspaceID) { - aborts.get(id)?.abort() - aborts.delete(id) - connections.delete(id) + // TODO: Implement exponential backoff + await sleep(1000) } } + +async function startSync(space: Info) { + if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) return + + const adaptor = await getAdaptor(space.projectID, space.type) + const target = await adaptor.target(space) + + if (target.type === "local") { + void Filesystem.exists(target.directory).then((exists) => { + setStatus(space.id, exists ? "connected" : "error", exists ? 
undefined : "directory does not exist") + }) + return + } + + if (aborts.has(space.id)) return true + + setStatus(space.id, "disconnected") + + const abort = new AbortController() + aborts.set(space.id, abort) + + void syncWorkspace(space, abort.signal).catch((error) => { + aborts.delete(space.id) + + setStatus(space.id, "error", String(error)) + log.warn("workspace listener failed", { + workspaceID: space.id, + error, + }) + }) +} + +function stopSync(id: WorkspaceID) { + aborts.get(id)?.abort() + aborts.delete(id) + connections.delete(id) +} + +export * as Workspace from "./workspace" From 4e27804160e7df606c27bdc72c1a8acae2304629 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:00:46 -0400 Subject: [PATCH 074/120] refactor: unwrap McpOAuthCallback namespace + self-reexport (#22943) --- packages/opencode/src/mcp/oauth-callback.ts | 338 ++++++++++---------- 1 file changed, 169 insertions(+), 169 deletions(-) diff --git a/packages/opencode/src/mcp/oauth-callback.ts b/packages/opencode/src/mcp/oauth-callback.ts index 3e6169517f..fbb43d3921 100644 --- a/packages/opencode/src/mcp/oauth-callback.ts +++ b/packages/opencode/src/mcp/oauth-callback.ts @@ -56,177 +56,177 @@ interface PendingAuth { timeout: ReturnType } -export namespace McpOAuthCallback { - let server: ReturnType | undefined - const pendingAuths = new Map() - // Reverse index: mcpName → oauthState, so cancelPending(mcpName) can - // find the right entry in pendingAuths (which is keyed by oauthState). - const mcpNameToState = new Map() +let server: ReturnType | undefined +const pendingAuths = new Map() +// Reverse index: mcpName → oauthState, so cancelPending(mcpName) can +// find the right entry in pendingAuths (which is keyed by oauthState). 
+const mcpNameToState = new Map() - const CALLBACK_TIMEOUT_MS = 5 * 60 * 1000 // 5 minutes +const CALLBACK_TIMEOUT_MS = 5 * 60 * 1000 // 5 minutes - function cleanupStateIndex(oauthState: string) { - for (const [name, state] of mcpNameToState) { - if (state === oauthState) { - mcpNameToState.delete(name) - break - } +function cleanupStateIndex(oauthState: string) { + for (const [name, state] of mcpNameToState) { + if (state === oauthState) { + mcpNameToState.delete(name) + break } } - - function handleRequest(req: import("http").IncomingMessage, res: import("http").ServerResponse) { - const url = new URL(req.url || "/", `http://localhost:${currentPort}`) - - if (url.pathname !== currentPath) { - res.writeHead(404) - res.end("Not found") - return - } - - const code = url.searchParams.get("code") - const state = url.searchParams.get("state") - const error = url.searchParams.get("error") - const errorDescription = url.searchParams.get("error_description") - - log.info("received oauth callback", { hasCode: !!code, state, error }) - - // Enforce state parameter presence - if (!state) { - const errorMsg = "Missing required state parameter - potential CSRF attack" - log.error("oauth callback missing state parameter", { url: url.toString() }) - res.writeHead(400, { "Content-Type": "text/html" }) - res.end(HTML_ERROR(errorMsg)) - return - } - - if (error) { - const errorMsg = errorDescription || error - if (pendingAuths.has(state)) { - const pending = pendingAuths.get(state)! 
- clearTimeout(pending.timeout) - pendingAuths.delete(state) - cleanupStateIndex(state) - pending.reject(new Error(errorMsg)) - } - res.writeHead(200, { "Content-Type": "text/html" }) - res.end(HTML_ERROR(errorMsg)) - return - } - - if (!code) { - res.writeHead(400, { "Content-Type": "text/html" }) - res.end(HTML_ERROR("No authorization code provided")) - return - } - - // Validate state parameter - if (!pendingAuths.has(state)) { - const errorMsg = "Invalid or expired state parameter - potential CSRF attack" - log.error("oauth callback with invalid state", { state, pendingStates: Array.from(pendingAuths.keys()) }) - res.writeHead(400, { "Content-Type": "text/html" }) - res.end(HTML_ERROR(errorMsg)) - return - } - - const pending = pendingAuths.get(state)! - - clearTimeout(pending.timeout) - pendingAuths.delete(state) - cleanupStateIndex(state) - pending.resolve(code) - - res.writeHead(200, { "Content-Type": "text/html" }) - res.end(HTML_SUCCESS) - } - - export async function ensureRunning(redirectUri?: string): Promise { - // Parse the redirect URI to get port and path (uses defaults if not provided) - const { port, path } = parseRedirectUri(redirectUri) - - // If server is running on a different port/path, stop it first - if (server && (currentPort !== port || currentPath !== path)) { - log.info("stopping oauth callback server to reconfigure", { oldPort: currentPort, newPort: port }) - await stop() - } - - if (server) return - - const running = await isPortInUse(port) - if (running) { - log.info("oauth callback server already running on another instance", { port }) - return - } - - currentPort = port - currentPath = path - - server = createServer(handleRequest) - await new Promise((resolve, reject) => { - server!.listen(currentPort, () => { - log.info("oauth callback server started", { port: currentPort, path: currentPath }) - resolve() - }) - server!.on("error", reject) - }) - } - - export function waitForCallback(oauthState: string, mcpName?: string): Promise { 
- if (mcpName) mcpNameToState.set(mcpName, oauthState) - return new Promise((resolve, reject) => { - const timeout = setTimeout(() => { - if (pendingAuths.has(oauthState)) { - pendingAuths.delete(oauthState) - if (mcpName) mcpNameToState.delete(mcpName) - reject(new Error("OAuth callback timeout - authorization took too long")) - } - }, CALLBACK_TIMEOUT_MS) - - pendingAuths.set(oauthState, { resolve, reject, timeout }) - }) - } - - export function cancelPending(mcpName: string): void { - // Look up the oauthState for this mcpName via the reverse index - const oauthState = mcpNameToState.get(mcpName) - const key = oauthState ?? mcpName - const pending = pendingAuths.get(key) - if (pending) { - clearTimeout(pending.timeout) - pendingAuths.delete(key) - mcpNameToState.delete(mcpName) - pending.reject(new Error("Authorization cancelled")) - } - } - - export async function isPortInUse(port: number = OAUTH_CALLBACK_PORT): Promise { - return new Promise((resolve) => { - const socket = createConnection(port, "127.0.0.1") - socket.on("connect", () => { - socket.destroy() - resolve(true) - }) - socket.on("error", () => { - resolve(false) - }) - }) - } - - export async function stop(): Promise { - if (server) { - await new Promise((resolve) => server!.close(() => resolve())) - server = undefined - log.info("oauth callback server stopped") - } - - for (const [_name, pending] of pendingAuths) { - clearTimeout(pending.timeout) - pending.reject(new Error("OAuth callback server stopped")) - } - pendingAuths.clear() - mcpNameToState.clear() - } - - export function isRunning(): boolean { - return server !== undefined - } } + +function handleRequest(req: import("http").IncomingMessage, res: import("http").ServerResponse) { + const url = new URL(req.url || "/", `http://localhost:${currentPort}`) + + if (url.pathname !== currentPath) { + res.writeHead(404) + res.end("Not found") + return + } + + const code = url.searchParams.get("code") + const state = url.searchParams.get("state") + 
const error = url.searchParams.get("error") + const errorDescription = url.searchParams.get("error_description") + + log.info("received oauth callback", { hasCode: !!code, state, error }) + + // Enforce state parameter presence + if (!state) { + const errorMsg = "Missing required state parameter - potential CSRF attack" + log.error("oauth callback missing state parameter", { url: url.toString() }) + res.writeHead(400, { "Content-Type": "text/html" }) + res.end(HTML_ERROR(errorMsg)) + return + } + + if (error) { + const errorMsg = errorDescription || error + if (pendingAuths.has(state)) { + const pending = pendingAuths.get(state)! + clearTimeout(pending.timeout) + pendingAuths.delete(state) + cleanupStateIndex(state) + pending.reject(new Error(errorMsg)) + } + res.writeHead(200, { "Content-Type": "text/html" }) + res.end(HTML_ERROR(errorMsg)) + return + } + + if (!code) { + res.writeHead(400, { "Content-Type": "text/html" }) + res.end(HTML_ERROR("No authorization code provided")) + return + } + + // Validate state parameter + if (!pendingAuths.has(state)) { + const errorMsg = "Invalid or expired state parameter - potential CSRF attack" + log.error("oauth callback with invalid state", { state, pendingStates: Array.from(pendingAuths.keys()) }) + res.writeHead(400, { "Content-Type": "text/html" }) + res.end(HTML_ERROR(errorMsg)) + return + } + + const pending = pendingAuths.get(state)! 
+ + clearTimeout(pending.timeout) + pendingAuths.delete(state) + cleanupStateIndex(state) + pending.resolve(code) + + res.writeHead(200, { "Content-Type": "text/html" }) + res.end(HTML_SUCCESS) +} + +export async function ensureRunning(redirectUri?: string): Promise { + // Parse the redirect URI to get port and path (uses defaults if not provided) + const { port, path } = parseRedirectUri(redirectUri) + + // If server is running on a different port/path, stop it first + if (server && (currentPort !== port || currentPath !== path)) { + log.info("stopping oauth callback server to reconfigure", { oldPort: currentPort, newPort: port }) + await stop() + } + + if (server) return + + const running = await isPortInUse(port) + if (running) { + log.info("oauth callback server already running on another instance", { port }) + return + } + + currentPort = port + currentPath = path + + server = createServer(handleRequest) + await new Promise((resolve, reject) => { + server!.listen(currentPort, () => { + log.info("oauth callback server started", { port: currentPort, path: currentPath }) + resolve() + }) + server!.on("error", reject) + }) +} + +export function waitForCallback(oauthState: string, mcpName?: string): Promise { + if (mcpName) mcpNameToState.set(mcpName, oauthState) + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + if (pendingAuths.has(oauthState)) { + pendingAuths.delete(oauthState) + if (mcpName) mcpNameToState.delete(mcpName) + reject(new Error("OAuth callback timeout - authorization took too long")) + } + }, CALLBACK_TIMEOUT_MS) + + pendingAuths.set(oauthState, { resolve, reject, timeout }) + }) +} + +export function cancelPending(mcpName: string): void { + // Look up the oauthState for this mcpName via the reverse index + const oauthState = mcpNameToState.get(mcpName) + const key = oauthState ?? 
mcpName + const pending = pendingAuths.get(key) + if (pending) { + clearTimeout(pending.timeout) + pendingAuths.delete(key) + mcpNameToState.delete(mcpName) + pending.reject(new Error("Authorization cancelled")) + } +} + +export async function isPortInUse(port: number = OAUTH_CALLBACK_PORT): Promise { + return new Promise((resolve) => { + const socket = createConnection(port, "127.0.0.1") + socket.on("connect", () => { + socket.destroy() + resolve(true) + }) + socket.on("error", () => { + resolve(false) + }) + }) +} + +export async function stop(): Promise { + if (server) { + await new Promise((resolve) => server!.close(() => resolve())) + server = undefined + log.info("oauth callback server stopped") + } + + for (const [_name, pending] of pendingAuths) { + clearTimeout(pending.timeout) + pending.reject(new Error("OAuth callback server stopped")) + } + pendingAuths.clear() + mcpNameToState.clear() +} + +export function isRunning(): boolean { + return server !== undefined +} + +export * as McpOAuthCallback from "./oauth-callback" From 19d15d9ff7826db276219bccce278f78b654a431 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:00:48 -0400 Subject: [PATCH 075/120] refactor: unwrap ConfigProvider namespace + self-reexport (#22949) --- packages/opencode/src/config/provider.ts | 230 +++++++++++------------ 1 file changed, 115 insertions(+), 115 deletions(-) diff --git a/packages/opencode/src/config/provider.ts b/packages/opencode/src/config/provider.ts index 09efedf497..877677519f 100644 --- a/packages/opencode/src/config/provider.ts +++ b/packages/opencode/src/config/provider.ts @@ -1,120 +1,120 @@ import z from "zod" -export namespace ConfigProvider { - export const Model = z - .object({ - id: z.string(), - name: z.string(), - family: z.string().optional(), - release_date: z.string(), - attachment: z.boolean(), - reasoning: z.boolean(), - temperature: z.boolean(), - tool_call: z.boolean(), - interleaved: z - .union([ - z.literal(true), - z - .object({ 
- field: z.enum(["reasoning_content", "reasoning_details"]), - }) - .strict(), - ]) - .optional(), - cost: z - .object({ - input: z.number(), - output: z.number(), - cache_read: z.number().optional(), - cache_write: z.number().optional(), - context_over_200k: z - .object({ - input: z.number(), - output: z.number(), - cache_read: z.number().optional(), - cache_write: z.number().optional(), - }) - .optional(), - }) - .optional(), - limit: z.object({ - context: z.number(), - input: z.number().optional(), +export const Model = z + .object({ + id: z.string(), + name: z.string(), + family: z.string().optional(), + release_date: z.string(), + attachment: z.boolean(), + reasoning: z.boolean(), + temperature: z.boolean(), + tool_call: z.boolean(), + interleaved: z + .union([ + z.literal(true), + z + .object({ + field: z.enum(["reasoning_content", "reasoning_details"]), + }) + .strict(), + ]) + .optional(), + cost: z + .object({ + input: z.number(), output: z.number(), - }), - modalities: z - .object({ - input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - }) - .optional(), - experimental: z.boolean().optional(), - status: z.enum(["alpha", "beta", "deprecated"]).optional(), - provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(), - options: z.record(z.string(), z.any()), - headers: z.record(z.string(), z.string()).optional(), - variants: z - .record( - z.string(), - z - .object({ - disabled: z.boolean().optional().describe("Disable this variant for the model"), - }) - .catchall(z.any()), - ) - .optional() - .describe("Variant-specific configuration"), - }) - .partial() + cache_read: z.number().optional(), + cache_write: z.number().optional(), + context_over_200k: z + .object({ + input: z.number(), + output: z.number(), + cache_read: z.number().optional(), + cache_write: z.number().optional(), + }) + .optional(), + }) + .optional(), + limit: z.object({ + 
context: z.number(), + input: z.number().optional(), + output: z.number(), + }), + modalities: z + .object({ + input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), + output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), + }) + .optional(), + experimental: z.boolean().optional(), + status: z.enum(["alpha", "beta", "deprecated"]).optional(), + provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(), + options: z.record(z.string(), z.any()), + headers: z.record(z.string(), z.string()).optional(), + variants: z + .record( + z.string(), + z + .object({ + disabled: z.boolean().optional().describe("Disable this variant for the model"), + }) + .catchall(z.any()), + ) + .optional() + .describe("Variant-specific configuration"), + }) + .partial() - export const Info = z - .object({ - api: z.string().optional(), - name: z.string(), - env: z.array(z.string()), - id: z.string(), - npm: z.string().optional(), - whitelist: z.array(z.string()).optional(), - blacklist: z.array(z.string()).optional(), - options: z - .object({ - apiKey: z.string().optional(), - baseURL: z.string().optional(), - enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"), - setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"), - timeout: z - .union([ - z - .number() - .int() - .positive() - .describe( - "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", - ), - z.literal(false).describe("Disable timeout for this provider entirely."), - ]) - .optional() - .describe( - "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", - ), - chunkTimeout: z - .number() - .int() - .positive() - .optional() - .describe( - "Timeout in milliseconds between streamed SSE chunks for this provider. 
If no chunk arrives within this window, the request is aborted.", - ), - }) - .catchall(z.any()) - .optional(), - models: z.record(z.string(), Model).optional(), - }) - .partial() - .strict() - .meta({ - ref: "ProviderConfig", - }) +export const Info = z + .object({ + api: z.string().optional(), + name: z.string(), + env: z.array(z.string()), + id: z.string(), + npm: z.string().optional(), + whitelist: z.array(z.string()).optional(), + blacklist: z.array(z.string()).optional(), + options: z + .object({ + apiKey: z.string().optional(), + baseURL: z.string().optional(), + enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"), + setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"), + timeout: z + .union([ + z + .number() + .int() + .positive() + .describe( + "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", + ), + z.literal(false).describe("Disable timeout for this provider entirely."), + ]) + .optional() + .describe( + "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", + ), + chunkTimeout: z + .number() + .int() + .positive() + .optional() + .describe( + "Timeout in milliseconds between streamed SSE chunks for this provider. 
If no chunk arrives within this window, the request is aborted.", + ), + }) + .catchall(z.any()) + .optional(), + models: z.record(z.string(), Model).optional(), + }) + .partial() + .strict() + .meta({ + ref: "ProviderConfig", + }) - export type Info = z.infer -} +export type Info = z.infer + +export * as ConfigProvider from "./provider" From 1291e82bb4d881a1c0ac5cb882da2b97a169ae9d Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:00:50 -0400 Subject: [PATCH 076/120] refactor: unwrap ACP namespace + self-reexport (#22936) --- packages/opencode/src/acp/agent.ts | 3030 ++++++++++++++-------------- 1 file changed, 1515 insertions(+), 1515 deletions(-) diff --git a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts index 5d8c723ea7..7180feabcb 100644 --- a/packages/opencode/src/acp/agent.ts +++ b/packages/opencode/src/acp/agent.ts @@ -57,793 +57,262 @@ type ModelOption = { modelId: string; name: string } const DEFAULT_VARIANT_VALUE = "default" -export namespace ACP { - const log = Log.create({ service: "acp-agent" }) +const log = Log.create({ service: "acp-agent" }) - async function getContextLimit( - sdk: OpencodeClient, - providerID: ProviderID, - modelID: ModelID, - directory: string, - ): Promise { - const providers = await sdk.config - .providers({ directory }) - .then((x) => x.data?.providers ?? []) - .catch((error) => { - log.error("failed to get providers for context limit", { error }) - return [] - }) +async function getContextLimit( + sdk: OpencodeClient, + providerID: ProviderID, + modelID: ModelID, + directory: string, +): Promise { + const providers = await sdk.config + .providers({ directory }) + .then((x) => x.data?.providers ?? []) + .catch((error) => { + log.error("failed to get providers for context limit", { error }) + return [] + }) - const provider = providers.find((p) => p.id === providerID) - const model = provider?.models[modelID] - return model?.limit.context ?? 
null + const provider = providers.find((p) => p.id === providerID) + const model = provider?.models[modelID] + return model?.limit.context ?? null +} + +async function sendUsageUpdate( + connection: AgentSideConnection, + sdk: OpencodeClient, + sessionID: string, + directory: string, +): Promise { + const messages = await sdk.session + .messages({ sessionID, directory }, { throwOnError: true }) + .then((x) => x.data) + .catch((error) => { + log.error("failed to fetch messages for usage update", { error }) + return undefined + }) + + if (!messages) return + + const assistantMessages = messages.filter( + (m): m is { info: AssistantMessage; parts: SessionMessageResponse["parts"] } => m.info.role === "assistant", + ) + + const lastAssistant = assistantMessages[assistantMessages.length - 1] + if (!lastAssistant) return + + const msg = lastAssistant.info + if (!msg.providerID || !msg.modelID) return + const size = await getContextLimit(sdk, ProviderID.make(msg.providerID), ModelID.make(msg.modelID), directory) + + if (!size) { + // Cannot calculate usage without known context size + return } - async function sendUsageUpdate( - connection: AgentSideConnection, - sdk: OpencodeClient, - sessionID: string, - directory: string, - ): Promise { - const messages = await sdk.session - .messages({ sessionID, directory }, { throwOnError: true }) - .then((x) => x.data) - .catch((error) => { - log.error("failed to fetch messages for usage update", { error }) - return undefined - }) + const used = msg.tokens.input + (msg.tokens.cache?.read ?? 
0) + const totalCost = assistantMessages.reduce((sum, m) => sum + m.info.cost, 0) - if (!messages) return - - const assistantMessages = messages.filter( - (m): m is { info: AssistantMessage; parts: SessionMessageResponse["parts"] } => m.info.role === "assistant", - ) - - const lastAssistant = assistantMessages[assistantMessages.length - 1] - if (!lastAssistant) return - - const msg = lastAssistant.info - if (!msg.providerID || !msg.modelID) return - const size = await getContextLimit(sdk, ProviderID.make(msg.providerID), ModelID.make(msg.modelID), directory) - - if (!size) { - // Cannot calculate usage without known context size - return - } - - const used = msg.tokens.input + (msg.tokens.cache?.read ?? 0) - const totalCost = assistantMessages.reduce((sum, m) => sum + m.info.cost, 0) - - await connection - .sessionUpdate({ - sessionId: sessionID, - update: { - sessionUpdate: "usage_update", - used, - size, - cost: { amount: totalCost, currency: "USD" }, - }, - }) - .catch((error) => { - log.error("failed to send usage update", { error }) - }) - } - - export async function init({ sdk: _sdk }: { sdk: OpencodeClient }) { - return { - create: (connection: AgentSideConnection, fullConfig: ACPConfig) => { - return new Agent(connection, fullConfig) + await connection + .sessionUpdate({ + sessionId: sessionID, + update: { + sessionUpdate: "usage_update", + used, + size, + cost: { amount: totalCost, currency: "USD" }, }, - } + }) + .catch((error) => { + log.error("failed to send usage update", { error }) + }) +} + +export async function init({ sdk: _sdk }: { sdk: OpencodeClient }) { + return { + create: (connection: AgentSideConnection, fullConfig: ACPConfig) => { + return new Agent(connection, fullConfig) + }, + } +} + +export class Agent implements ACPAgent { + private connection: AgentSideConnection + private config: ACPConfig + private sdk: OpencodeClient + private sessionManager: ACPSessionManager + private eventAbort = new AbortController() + private eventStarted = 
false + private bashSnapshots = new Map() + private toolStarts = new Set() + private permissionQueues = new Map>() + private permissionOptions: PermissionOption[] = [ + { optionId: "once", kind: "allow_once", name: "Allow once" }, + { optionId: "always", kind: "allow_always", name: "Always allow" }, + { optionId: "reject", kind: "reject_once", name: "Reject" }, + ] + + constructor(connection: AgentSideConnection, config: ACPConfig) { + this.connection = connection + this.config = config + this.sdk = config.sdk + this.sessionManager = new ACPSessionManager(this.sdk) + this.startEventSubscription() } - export class Agent implements ACPAgent { - private connection: AgentSideConnection - private config: ACPConfig - private sdk: OpencodeClient - private sessionManager: ACPSessionManager - private eventAbort = new AbortController() - private eventStarted = false - private bashSnapshots = new Map() - private toolStarts = new Set() - private permissionQueues = new Map>() - private permissionOptions: PermissionOption[] = [ - { optionId: "once", kind: "allow_once", name: "Allow once" }, - { optionId: "always", kind: "allow_always", name: "Always allow" }, - { optionId: "reject", kind: "reject_once", name: "Reject" }, - ] + private startEventSubscription() { + if (this.eventStarted) return + this.eventStarted = true + this.runEventSubscription().catch((error) => { + if (this.eventAbort.signal.aborted) return + log.error("event subscription failed", { error }) + }) + } - constructor(connection: AgentSideConnection, config: ACPConfig) { - this.connection = connection - this.config = config - this.sdk = config.sdk - this.sessionManager = new ACPSessionManager(this.sdk) - this.startEventSubscription() - } - - private startEventSubscription() { - if (this.eventStarted) return - this.eventStarted = true - this.runEventSubscription().catch((error) => { - if (this.eventAbort.signal.aborted) return - log.error("event subscription failed", { error }) + private async 
runEventSubscription() { + while (true) { + if (this.eventAbort.signal.aborted) return + const events = await this.sdk.global.event({ + signal: this.eventAbort.signal, }) - } - - private async runEventSubscription() { - while (true) { + for await (const event of events.stream) { if (this.eventAbort.signal.aborted) return - const events = await this.sdk.global.event({ - signal: this.eventAbort.signal, + const payload = event?.payload + if (!payload) continue + await this.handleEvent(payload as Event).catch((error) => { + log.error("failed to handle event", { error, type: payload.type }) }) - for await (const event of events.stream) { - if (this.eventAbort.signal.aborted) return - const payload = event?.payload - if (!payload) continue - await this.handleEvent(payload as Event).catch((error) => { - log.error("failed to handle event", { error, type: payload.type }) - }) - } } } + } - private async handleEvent(event: Event) { - switch (event.type) { - case "permission.asked": { - const permission = event.properties - const session = this.sessionManager.tryGet(permission.sessionID) - if (!session) return + private async handleEvent(event: Event) { + switch (event.type) { + case "permission.asked": { + const permission = event.properties + const session = this.sessionManager.tryGet(permission.sessionID) + if (!session) return - const prev = this.permissionQueues.get(permission.sessionID) ?? Promise.resolve() - const next = prev - .then(async () => { - const directory = session.cwd + const prev = this.permissionQueues.get(permission.sessionID) ?? Promise.resolve() + const next = prev + .then(async () => { + const directory = session.cwd - const res = await this.connection - .requestPermission({ - sessionId: permission.sessionID, - toolCall: { - toolCallId: permission.tool?.callID ?? 
permission.id, - status: "pending", - title: permission.permission, - rawInput: permission.metadata, - kind: toToolKind(permission.permission), - locations: toLocations(permission.permission, permission.metadata), - }, - options: this.permissionOptions, + const res = await this.connection + .requestPermission({ + sessionId: permission.sessionID, + toolCall: { + toolCallId: permission.tool?.callID ?? permission.id, + status: "pending", + title: permission.permission, + rawInput: permission.metadata, + kind: toToolKind(permission.permission), + locations: toLocations(permission.permission, permission.metadata), + }, + options: this.permissionOptions, + }) + .catch(async (error) => { + log.error("failed to request permission from ACP", { + error, + permissionID: permission.id, + sessionID: permission.sessionID, }) - .catch(async (error) => { - log.error("failed to request permission from ACP", { - error, - permissionID: permission.id, - sessionID: permission.sessionID, - }) - await this.sdk.permission.reply({ - requestID: permission.id, - reply: "reject", - directory, - }) - return undefined - }) - - if (!res) return - if (res.outcome.outcome !== "selected") { await this.sdk.permission.reply({ requestID: permission.id, reply: "reject", directory, }) - return - } - - if (res.outcome.optionId !== "reject" && permission.permission == "edit") { - const metadata = permission.metadata || {} - const filepath = typeof metadata["filepath"] === "string" ? metadata["filepath"] : "" - const diff = typeof metadata["diff"] === "string" ? metadata["diff"] : "" - const content = (await Filesystem.exists(filepath)) ? 
await Filesystem.readText(filepath) : "" - const newContent = getNewContent(content, diff) - - if (newContent) { - void this.connection.writeTextFile({ - sessionId: session.id, - path: filepath, - content: newContent, - }) - } - } + return undefined + }) + if (!res) return + if (res.outcome.outcome !== "selected") { await this.sdk.permission.reply({ requestID: permission.id, - reply: res.outcome.optionId as "once" | "always" | "reject", + reply: "reject", directory, }) - }) - .catch((error) => { - log.error("failed to handle permission", { error, permissionID: permission.id }) - }) - .finally(() => { - if (this.permissionQueues.get(permission.sessionID) === next) { - this.permissionQueues.delete(permission.sessionID) + return + } + + if (res.outcome.optionId !== "reject" && permission.permission == "edit") { + const metadata = permission.metadata || {} + const filepath = typeof metadata["filepath"] === "string" ? metadata["filepath"] : "" + const diff = typeof metadata["diff"] === "string" ? metadata["diff"] : "" + const content = (await Filesystem.exists(filepath)) ? 
await Filesystem.readText(filepath) : "" + const newContent = getNewContent(content, diff) + + if (newContent) { + void this.connection.writeTextFile({ + sessionId: session.id, + path: filepath, + content: newContent, + }) } + } + + await this.sdk.permission.reply({ + requestID: permission.id, + reply: res.outcome.optionId as "once" | "always" | "reject", + directory, }) - this.permissionQueues.set(permission.sessionID, next) - return - } + }) + .catch((error) => { + log.error("failed to handle permission", { error, permissionID: permission.id }) + }) + .finally(() => { + if (this.permissionQueues.get(permission.sessionID) === next) { + this.permissionQueues.delete(permission.sessionID) + } + }) + this.permissionQueues.set(permission.sessionID, next) + return + } - case "message.part.updated": { - log.info("message part updated", { event: event.properties }) - const props = event.properties - const part = props.part - const session = this.sessionManager.tryGet(part.sessionID) - if (!session) return - const sessionId = session.id + case "message.part.updated": { + log.info("message part updated", { event: event.properties }) + const props = event.properties + const part = props.part + const session = this.sessionManager.tryGet(part.sessionID) + if (!session) return + const sessionId = session.id - if (part.type === "tool") { - await this.toolStart(sessionId, part) + if (part.type === "tool") { + await this.toolStart(sessionId, part) - switch (part.state.status) { - case "pending": - this.bashSnapshots.delete(part.callID) - return + switch (part.state.status) { + case "pending": + this.bashSnapshots.delete(part.callID) + return - case "running": - const output = this.bashOutput(part) - const content: ToolCallContent[] = [] - if (output) { - const hash = Hash.fast(output) - if (part.tool === "bash") { - if (this.bashSnapshots.get(part.callID) === hash) { - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call_update", - 
toolCallId: part.callID, - status: "in_progress", - kind: toToolKind(part.tool), - title: part.tool, - locations: toLocations(part.tool, part.state.input), - rawInput: part.state.input, - }, - }) - .catch((error) => { - log.error("failed to send tool in_progress to ACP", { error }) - }) - return - } - this.bashSnapshots.set(part.callID, hash) - } - content.push({ - type: "content", - content: { - type: "text", - text: output, - }, - }) - } - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call_update", - toolCallId: part.callID, - status: "in_progress", - kind: toToolKind(part.tool), - title: part.tool, - locations: toLocations(part.tool, part.state.input), - rawInput: part.state.input, - ...(content.length > 0 && { content }), - }, - }) - .catch((error) => { - log.error("failed to send tool in_progress to ACP", { error }) - }) - return - - case "completed": { - this.toolStarts.delete(part.callID) - this.bashSnapshots.delete(part.callID) - const kind = toToolKind(part.tool) - const content: ToolCallContent[] = [ - { - type: "content", - content: { - type: "text", - text: part.state.output, - }, - }, - ] - - if (kind === "edit") { - const input = part.state.input - const filePath = typeof input["filePath"] === "string" ? input["filePath"] : "" - const oldText = typeof input["oldString"] === "string" ? input["oldString"] : "" - const newText = - typeof input["newString"] === "string" - ? input["newString"] - : typeof input["content"] === "string" - ? 
input["content"] - : "" - content.push({ - type: "diff", - path: filePath, - oldText, - newText, - }) - } - - if (part.tool === "todowrite") { - const parsedTodos = z.array(Todo.Info).safeParse(JSON.parse(part.state.output)) - if (parsedTodos.success) { + case "running": + const output = this.bashOutput(part) + const content: ToolCallContent[] = [] + if (output) { + const hash = Hash.fast(output) + if (part.tool === "bash") { + if (this.bashSnapshots.get(part.callID) === hash) { await this.connection .sessionUpdate({ sessionId, update: { - sessionUpdate: "plan", - entries: parsedTodos.data.map((todo) => { - const status: PlanEntry["status"] = - todo.status === "cancelled" ? "completed" : (todo.status as PlanEntry["status"]) - return { - priority: "medium", - status, - content: todo.content, - } - }), + sessionUpdate: "tool_call_update", + toolCallId: part.callID, + status: "in_progress", + kind: toToolKind(part.tool), + title: part.tool, + locations: toLocations(part.tool, part.state.input), + rawInput: part.state.input, }, }) .catch((error) => { - log.error("failed to send session update for todo", { error }) + log.error("failed to send tool in_progress to ACP", { error }) }) - } else { - log.error("failed to parse todo output", { error: parsedTodos.error }) + return } + this.bashSnapshots.set(part.callID, hash) } - - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call_update", - toolCallId: part.callID, - status: "completed", - kind, - content, - title: part.state.title, - rawInput: part.state.input, - rawOutput: { - output: part.state.output, - metadata: part.state.metadata, - }, - }, - }) - .catch((error) => { - log.error("failed to send tool completed to ACP", { error }) - }) - return - } - case "error": - this.toolStarts.delete(part.callID) - this.bashSnapshots.delete(part.callID) - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call_update", - toolCallId: part.callID, - 
status: "failed", - kind: toToolKind(part.tool), - title: part.tool, - rawInput: part.state.input, - content: [ - { - type: "content", - content: { - type: "text", - text: part.state.error, - }, - }, - ], - rawOutput: { - error: part.state.error, - metadata: part.state.metadata, - }, - }, - }) - .catch((error) => { - log.error("failed to send tool error to ACP", { error }) - }) - return - } - } - - // ACP clients already know the prompt they just submitted, so replaying - // live user parts duplicates the message. We still replay user history in - // loadSession() and forkSession() via processMessage(). - if (part.type !== "text" && part.type !== "file") return - - return - } - - case "message.part.delta": { - const props = event.properties - const session = this.sessionManager.tryGet(props.sessionID) - if (!session) return - const sessionId = session.id - - const message = await this.sdk.session - .message( - { - sessionID: props.sessionID, - messageID: props.messageID, - directory: session.cwd, - }, - { throwOnError: true }, - ) - .then((x) => x.data) - .catch((error) => { - log.error("unexpected error when fetching message", { error }) - return undefined - }) - - if (!message || message.info.role !== "assistant") return - - const part = message.parts.find((p) => p.id === props.partID) - if (!part) return - - if (part.type === "text" && props.field === "text" && part.ignored !== true) { - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "agent_message_chunk", - messageId: props.messageID, - content: { - type: "text", - text: props.delta, - }, - }, - }) - .catch((error) => { - log.error("failed to send text delta to ACP", { error }) - }) - return - } - - if (part.type === "reasoning" && props.field === "text") { - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "agent_thought_chunk", - messageId: props.messageID, - content: { - type: "text", - text: props.delta, - }, - }, - }) - .catch((error) => 
{ - log.error("failed to send reasoning delta to ACP", { error }) - }) - } - return - } - } - } - - async initialize(params: InitializeRequest): Promise { - log.info("initialize", { protocolVersion: params.protocolVersion }) - - const authMethod: AuthMethod = { - description: "Run `opencode auth login` in the terminal", - name: "Login with opencode", - id: "opencode-login", - } - - // If client supports terminal-auth capability, use that instead. - if (params.clientCapabilities?._meta?.["terminal-auth"] === true) { - authMethod._meta = { - "terminal-auth": { - command: "opencode", - args: ["auth", "login"], - label: "OpenCode Login", - }, - } - } - - return { - protocolVersion: 1, - agentCapabilities: { - loadSession: true, - mcpCapabilities: { - http: true, - sse: true, - }, - promptCapabilities: { - embeddedContext: true, - image: true, - }, - sessionCapabilities: { - fork: {}, - list: {}, - resume: {}, - }, - }, - authMethods: [authMethod], - agentInfo: { - name: "OpenCode", - version: InstallationVersion, - }, - } - } - - async authenticate(_params: AuthenticateRequest) { - throw new Error("Authentication not implemented") - } - - async newSession(params: NewSessionRequest) { - const directory = params.cwd - try { - const model = await defaultModel(this.config, directory) - - // Store ACP session state - const state = await this.sessionManager.create(params.cwd, params.mcpServers, model) - const sessionId = state.id - - log.info("creating_session", { sessionId, mcpServers: params.mcpServers.length }) - - const load = await this.loadSessionMode({ - cwd: directory, - mcpServers: params.mcpServers, - sessionId, - }) - - return { - sessionId, - configOptions: load.configOptions, - models: load.models, - modes: load.modes, - _meta: load._meta, - } - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? 
"unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - async loadSession(params: LoadSessionRequest) { - const directory = params.cwd - const sessionId = params.sessionId - - try { - const model = await defaultModel(this.config, directory) - - // Store ACP session state - await this.sessionManager.load(sessionId, params.cwd, params.mcpServers, model) - - log.info("load_session", { sessionId, mcpServers: params.mcpServers.length }) - - const result = await this.loadSessionMode({ - cwd: directory, - mcpServers: params.mcpServers, - sessionId, - }) - - // Replay session history - const messages = await this.sdk.session - .messages( - { - sessionID: sessionId, - directory, - }, - { throwOnError: true }, - ) - .then((x) => x.data) - .catch((err) => { - log.error("unexpected error when fetching message", { error: err }) - return undefined - }) - - const lastUser = messages?.findLast((m) => m.info.role === "user")?.info - if (lastUser?.role === "user") { - result.models.currentModelId = `${lastUser.model.providerID}/${lastUser.model.modelID}` - this.sessionManager.setModel(sessionId, { - providerID: ProviderID.make(lastUser.model.providerID), - modelID: ModelID.make(lastUser.model.modelID), - }) - if (result.modes?.availableModes.some((m) => m.id === lastUser.agent)) { - result.modes.currentModeId = lastUser.agent - this.sessionManager.setMode(sessionId, lastUser.agent) - } - result.configOptions = buildConfigOptions({ - currentModelId: result.models.currentModelId, - availableModels: result.models.availableModels, - modes: result.modes, - }) - } - - for (const msg of messages ?? []) { - log.debug("replay message", msg) - await this.processMessage(msg) - } - - await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) - - return result - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? 
"unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - async listSessions(params: ListSessionsRequest): Promise { - try { - const cursor = params.cursor ? Number(params.cursor) : undefined - const limit = 100 - - const sessions = await this.sdk.session - .list( - { - directory: params.cwd ?? undefined, - roots: true, - }, - { throwOnError: true }, - ) - .then((x) => x.data ?? []) - - const sorted = sessions.toSorted((a, b) => b.time.updated - a.time.updated) - const filtered = cursor ? sorted.filter((s) => s.time.updated < cursor) : sorted - const page = filtered.slice(0, limit) - - const entries: SessionInfo[] = page.map((session) => ({ - sessionId: session.id, - cwd: session.directory, - title: session.title, - updatedAt: new Date(session.time.updated).toISOString(), - })) - - const last = page[page.length - 1] - const next = filtered.length > limit && last ? String(last.time.updated) : undefined - - const response: ListSessionsResponse = { - sessions: entries, - } - if (next) response.nextCursor = next - return response - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - async unstable_forkSession(params: ForkSessionRequest): Promise { - const directory = params.cwd - const mcpServers = params.mcpServers ?? 
[] - - try { - const model = await defaultModel(this.config, directory) - - const forked = await this.sdk.session - .fork( - { - sessionID: params.sessionId, - directory, - }, - { throwOnError: true }, - ) - .then((x) => x.data) - - if (!forked) { - throw new Error("Fork session returned no data") - } - - const sessionId = forked.id - await this.sessionManager.load(sessionId, directory, mcpServers, model) - - log.info("fork_session", { sessionId, mcpServers: mcpServers.length }) - - const mode = await this.loadSessionMode({ - cwd: directory, - mcpServers, - sessionId, - }) - - const messages = await this.sdk.session - .messages( - { - sessionID: sessionId, - directory, - }, - { throwOnError: true }, - ) - .then((x) => x.data) - .catch((err) => { - log.error("unexpected error when fetching message", { error: err }) - return undefined - }) - - for (const msg of messages ?? []) { - log.debug("replay message", msg) - await this.processMessage(msg) - } - - await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) - - return mode - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - async unstable_resumeSession(params: ResumeSessionRequest): Promise { - const directory = params.cwd - const sessionId = params.sessionId - const mcpServers = params.mcpServers ?? 
[] - - try { - const model = await defaultModel(this.config, directory) - await this.sessionManager.load(sessionId, directory, mcpServers, model) - - log.info("resume_session", { sessionId, mcpServers: mcpServers.length }) - - const result = await this.loadSessionMode({ - cwd: directory, - mcpServers, - sessionId, - }) - - await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) - - return result - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - private async processMessage(message: SessionMessageResponse) { - log.debug("process message", message) - if (message.info.role !== "assistant" && message.info.role !== "user") return - const sessionId = message.info.sessionID - - for (const part of message.parts) { - if (part.type === "tool") { - await this.toolStart(sessionId, part) - switch (part.state.status) { - case "pending": - this.bashSnapshots.delete(part.callID) - break - case "running": - const output = this.bashOutput(part) - const runningContent: ToolCallContent[] = [] - if (output) { - runningContent.push({ + content.push({ type: "content", content: { type: "text", @@ -862,14 +331,15 @@ export namespace ACP { title: part.tool, locations: toLocations(part.tool, part.state.input), rawInput: part.state.input, - ...(runningContent.length > 0 && { content: runningContent }), + ...(content.length > 0 && { content }), }, }) - .catch((err) => { - log.error("failed to send tool in_progress to ACP", { error: err }) + .catch((error) => { + log.error("failed to send tool in_progress to ACP", { error }) }) - break - case "completed": + return + + case "completed": { this.toolStarts.delete(part.callID) this.bashSnapshots.delete(part.callID) const kind = toToolKind(part.tool) @@ -920,8 +390,8 @@ export namespace ACP { }), }, }) - .catch((err) => { - 
log.error("failed to send session update for todo", { error: err }) + .catch((error) => { + log.error("failed to send session update for todo", { error }) }) } else { log.error("failed to parse todo output", { error: parsedTodos.error }) @@ -945,10 +415,11 @@ export namespace ACP { }, }, }) - .catch((err) => { - log.error("failed to send tool completed to ACP", { error: err }) + .catch((error) => { + log.error("failed to send tool completed to ACP", { error }) }) - break + return + } case "error": this.toolStarts.delete(part.callID) this.bashSnapshots.delete(part.callID) @@ -977,865 +448,1394 @@ export namespace ACP { }, }, }) - .catch((err) => { - log.error("failed to send tool error to ACP", { error: err }) + .catch((error) => { + log.error("failed to send tool error to ACP", { error }) }) - break + return } - } else if (part.type === "text") { - if (part.text) { - const audience: Role[] | undefined = part.synthetic ? ["assistant"] : part.ignored ? ["user"] : undefined + } + + // ACP clients already know the prompt they just submitted, so replaying + // live user parts duplicates the message. We still replay user history in + // loadSession() and forkSession() via processMessage(). 
+ if (part.type !== "text" && part.type !== "file") return + + return + } + + case "message.part.delta": { + const props = event.properties + const session = this.sessionManager.tryGet(props.sessionID) + if (!session) return + const sessionId = session.id + + const message = await this.sdk.session + .message( + { + sessionID: props.sessionID, + messageID: props.messageID, + directory: session.cwd, + }, + { throwOnError: true }, + ) + .then((x) => x.data) + .catch((error) => { + log.error("unexpected error when fetching message", { error }) + return undefined + }) + + if (!message || message.info.role !== "assistant") return + + const part = message.parts.find((p) => p.id === props.partID) + if (!part) return + + if (part.type === "text" && props.field === "text" && part.ignored !== true) { + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "agent_message_chunk", + messageId: props.messageID, + content: { + type: "text", + text: props.delta, + }, + }, + }) + .catch((error) => { + log.error("failed to send text delta to ACP", { error }) + }) + return + } + + if (part.type === "reasoning" && props.field === "text") { + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "agent_thought_chunk", + messageId: props.messageID, + content: { + type: "text", + text: props.delta, + }, + }, + }) + .catch((error) => { + log.error("failed to send reasoning delta to ACP", { error }) + }) + } + return + } + } + } + + async initialize(params: InitializeRequest): Promise { + log.info("initialize", { protocolVersion: params.protocolVersion }) + + const authMethod: AuthMethod = { + description: "Run `opencode auth login` in the terminal", + name: "Login with opencode", + id: "opencode-login", + } + + // If client supports terminal-auth capability, use that instead. 
+ if (params.clientCapabilities?._meta?.["terminal-auth"] === true) { + authMethod._meta = { + "terminal-auth": { + command: "opencode", + args: ["auth", "login"], + label: "OpenCode Login", + }, + } + } + + return { + protocolVersion: 1, + agentCapabilities: { + loadSession: true, + mcpCapabilities: { + http: true, + sse: true, + }, + promptCapabilities: { + embeddedContext: true, + image: true, + }, + sessionCapabilities: { + fork: {}, + list: {}, + resume: {}, + }, + }, + authMethods: [authMethod], + agentInfo: { + name: "OpenCode", + version: InstallationVersion, + }, + } + } + + async authenticate(_params: AuthenticateRequest) { + throw new Error("Authentication not implemented") + } + + async newSession(params: NewSessionRequest) { + const directory = params.cwd + try { + const model = await defaultModel(this.config, directory) + + // Store ACP session state + const state = await this.sessionManager.create(params.cwd, params.mcpServers, model) + const sessionId = state.id + + log.info("creating_session", { sessionId, mcpServers: params.mcpServers.length }) + + const load = await this.loadSessionMode({ + cwd: directory, + mcpServers: params.mcpServers, + sessionId, + }) + + return { + sessionId, + configOptions: load.configOptions, + models: load.models, + modes: load.modes, + _meta: load._meta, + } + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? 
"unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + async loadSession(params: LoadSessionRequest) { + const directory = params.cwd + const sessionId = params.sessionId + + try { + const model = await defaultModel(this.config, directory) + + // Store ACP session state + await this.sessionManager.load(sessionId, params.cwd, params.mcpServers, model) + + log.info("load_session", { sessionId, mcpServers: params.mcpServers.length }) + + const result = await this.loadSessionMode({ + cwd: directory, + mcpServers: params.mcpServers, + sessionId, + }) + + // Replay session history + const messages = await this.sdk.session + .messages( + { + sessionID: sessionId, + directory, + }, + { throwOnError: true }, + ) + .then((x) => x.data) + .catch((err) => { + log.error("unexpected error when fetching message", { error: err }) + return undefined + }) + + const lastUser = messages?.findLast((m) => m.info.role === "user")?.info + if (lastUser?.role === "user") { + result.models.currentModelId = `${lastUser.model.providerID}/${lastUser.model.modelID}` + this.sessionManager.setModel(sessionId, { + providerID: ProviderID.make(lastUser.model.providerID), + modelID: ModelID.make(lastUser.model.modelID), + }) + if (result.modes?.availableModes.some((m) => m.id === lastUser.agent)) { + result.modes.currentModeId = lastUser.agent + this.sessionManager.setMode(sessionId, lastUser.agent) + } + result.configOptions = buildConfigOptions({ + currentModelId: result.models.currentModelId, + availableModels: result.models.availableModels, + modes: result.modes, + }) + } + + for (const msg of messages ?? []) { + log.debug("replay message", msg) + await this.processMessage(msg) + } + + await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) + + return result + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? 
"unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + async listSessions(params: ListSessionsRequest): Promise { + try { + const cursor = params.cursor ? Number(params.cursor) : undefined + const limit = 100 + + const sessions = await this.sdk.session + .list( + { + directory: params.cwd ?? undefined, + roots: true, + }, + { throwOnError: true }, + ) + .then((x) => x.data ?? []) + + const sorted = sessions.toSorted((a, b) => b.time.updated - a.time.updated) + const filtered = cursor ? sorted.filter((s) => s.time.updated < cursor) : sorted + const page = filtered.slice(0, limit) + + const entries: SessionInfo[] = page.map((session) => ({ + sessionId: session.id, + cwd: session.directory, + title: session.title, + updatedAt: new Date(session.time.updated).toISOString(), + })) + + const last = page[page.length - 1] + const next = filtered.length > limit && last ? String(last.time.updated) : undefined + + const response: ListSessionsResponse = { + sessions: entries, + } + if (next) response.nextCursor = next + return response + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + async unstable_forkSession(params: ForkSessionRequest): Promise { + const directory = params.cwd + const mcpServers = params.mcpServers ?? 
[] + + try { + const model = await defaultModel(this.config, directory) + + const forked = await this.sdk.session + .fork( + { + sessionID: params.sessionId, + directory, + }, + { throwOnError: true }, + ) + .then((x) => x.data) + + if (!forked) { + throw new Error("Fork session returned no data") + } + + const sessionId = forked.id + await this.sessionManager.load(sessionId, directory, mcpServers, model) + + log.info("fork_session", { sessionId, mcpServers: mcpServers.length }) + + const mode = await this.loadSessionMode({ + cwd: directory, + mcpServers, + sessionId, + }) + + const messages = await this.sdk.session + .messages( + { + sessionID: sessionId, + directory, + }, + { throwOnError: true }, + ) + .then((x) => x.data) + .catch((err) => { + log.error("unexpected error when fetching message", { error: err }) + return undefined + }) + + for (const msg of messages ?? []) { + log.debug("replay message", msg) + await this.processMessage(msg) + } + + await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) + + return mode + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + async unstable_resumeSession(params: ResumeSessionRequest): Promise { + const directory = params.cwd + const sessionId = params.sessionId + const mcpServers = params.mcpServers ?? 
[] + + try { + const model = await defaultModel(this.config, directory) + await this.sessionManager.load(sessionId, directory, mcpServers, model) + + log.info("resume_session", { sessionId, mcpServers: mcpServers.length }) + + const result = await this.loadSessionMode({ + cwd: directory, + mcpServers, + sessionId, + }) + + await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) + + return result + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + private async processMessage(message: SessionMessageResponse) { + log.debug("process message", message) + if (message.info.role !== "assistant" && message.info.role !== "user") return + const sessionId = message.info.sessionID + + for (const part of message.parts) { + if (part.type === "tool") { + await this.toolStart(sessionId, part) + switch (part.state.status) { + case "pending": + this.bashSnapshots.delete(part.callID) + break + case "running": + const output = this.bashOutput(part) + const runningContent: ToolCallContent[] = [] + if (output) { + runningContent.push({ + type: "content", + content: { + type: "text", + text: output, + }, + }) + } await this.connection .sessionUpdate({ sessionId, update: { - sessionUpdate: message.info.role === "user" ? 
"user_message_chunk" : "agent_message_chunk", - messageId: message.info.id, - content: { - type: "text", - text: part.text, - ...(audience && { annotations: { audience } }), + sessionUpdate: "tool_call_update", + toolCallId: part.callID, + status: "in_progress", + kind: toToolKind(part.tool), + title: part.tool, + locations: toLocations(part.tool, part.state.input), + rawInput: part.state.input, + ...(runningContent.length > 0 && { content: runningContent }), + }, + }) + .catch((err) => { + log.error("failed to send tool in_progress to ACP", { error: err }) + }) + break + case "completed": + this.toolStarts.delete(part.callID) + this.bashSnapshots.delete(part.callID) + const kind = toToolKind(part.tool) + const content: ToolCallContent[] = [ + { + type: "content", + content: { + type: "text", + text: part.state.output, + }, + }, + ] + + if (kind === "edit") { + const input = part.state.input + const filePath = typeof input["filePath"] === "string" ? input["filePath"] : "" + const oldText = typeof input["oldString"] === "string" ? input["oldString"] : "" + const newText = + typeof input["newString"] === "string" + ? input["newString"] + : typeof input["content"] === "string" + ? input["content"] + : "" + content.push({ + type: "diff", + path: filePath, + oldText, + newText, + }) + } + + if (part.tool === "todowrite") { + const parsedTodos = z.array(Todo.Info).safeParse(JSON.parse(part.state.output)) + if (parsedTodos.success) { + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "plan", + entries: parsedTodos.data.map((todo) => { + const status: PlanEntry["status"] = + todo.status === "cancelled" ? 
"completed" : (todo.status as PlanEntry["status"]) + return { + priority: "medium", + status, + content: todo.content, + } + }), + }, + }) + .catch((err) => { + log.error("failed to send session update for todo", { error: err }) + }) + } else { + log.error("failed to parse todo output", { error: parsedTodos.error }) + } + } + + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "tool_call_update", + toolCallId: part.callID, + status: "completed", + kind, + content, + title: part.state.title, + rawInput: part.state.input, + rawOutput: { + output: part.state.output, + metadata: part.state.metadata, }, }, }) .catch((err) => { - log.error("failed to send text to ACP", { error: err }) + log.error("failed to send tool completed to ACP", { error: err }) }) - } - } else if (part.type === "file") { - // Replay file attachments as appropriate ACP content blocks. - // OpenCode stores files internally as { type: "file", url, filename, mime }. - // We convert these back to ACP blocks based on the URL scheme and MIME type: - // - file:// URLs → resource_link - // - data: URLs with image/* → image block - // - data: URLs with text/* or application/json → resource with text - // - data: URLs with other types → resource with blob - const url = part.url - const filename = part.filename ?? "file" - const mime = part.mime || "application/octet-stream" - const messageChunk = message.info.role === "user" ? 
"user_message_chunk" : "agent_message_chunk" + break + case "error": + this.toolStarts.delete(part.callID) + this.bashSnapshots.delete(part.callID) + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "tool_call_update", + toolCallId: part.callID, + status: "failed", + kind: toToolKind(part.tool), + title: part.tool, + rawInput: part.state.input, + content: [ + { + type: "content", + content: { + type: "text", + text: part.state.error, + }, + }, + ], + rawOutput: { + error: part.state.error, + metadata: part.state.metadata, + }, + }, + }) + .catch((err) => { + log.error("failed to send tool error to ACP", { error: err }) + }) + break + } + } else if (part.type === "text") { + if (part.text) { + const audience: Role[] | undefined = part.synthetic ? ["assistant"] : part.ignored ? ["user"] : undefined + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: message.info.role === "user" ? "user_message_chunk" : "agent_message_chunk", + messageId: message.info.id, + content: { + type: "text", + text: part.text, + ...(audience && { annotations: { audience } }), + }, + }, + }) + .catch((err) => { + log.error("failed to send text to ACP", { error: err }) + }) + } + } else if (part.type === "file") { + // Replay file attachments as appropriate ACP content blocks. + // OpenCode stores files internally as { type: "file", url, filename, mime }. + // We convert these back to ACP blocks based on the URL scheme and MIME type: + // - file:// URLs → resource_link + // - data: URLs with image/* → image block + // - data: URLs with text/* or application/json → resource with text + // - data: URLs with other types → resource with blob + const url = part.url + const filename = part.filename ?? "file" + const mime = part.mime || "application/octet-stream" + const messageChunk = message.info.role === "user" ? 
"user_message_chunk" : "agent_message_chunk" - if (url.startsWith("file://")) { - // Local file reference - send as resource_link + if (url.startsWith("file://")) { + // Local file reference - send as resource_link + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: messageChunk, + messageId: message.info.id, + content: { type: "resource_link", uri: url, name: filename, mimeType: mime }, + }, + }) + .catch((err) => { + log.error("failed to send resource_link to ACP", { error: err }) + }) + } else if (url.startsWith("data:")) { + // Embedded content - parse data URL and send as appropriate block type + const base64Match = url.match(/^data:([^;]+);base64,(.*)$/) + const dataMime = base64Match?.[1] + const base64Data = base64Match?.[2] ?? "" + + const effectiveMime = dataMime || mime + + if (effectiveMime.startsWith("image/")) { + // Image - send as image block await this.connection .sessionUpdate({ sessionId, update: { sessionUpdate: messageChunk, messageId: message.info.id, - content: { type: "resource_link", uri: url, name: filename, mimeType: mime }, + content: { + type: "image", + mimeType: effectiveMime, + data: base64Data, + uri: pathToFileURL(filename).href, + }, }, }) .catch((err) => { - log.error("failed to send resource_link to ACP", { error: err }) + log.error("failed to send image to ACP", { error: err }) }) - } else if (url.startsWith("data:")) { - // Embedded content - parse data URL and send as appropriate block type - const base64Match = url.match(/^data:([^;]+);base64,(.*)$/) - const dataMime = base64Match?.[1] - const base64Data = base64Match?.[2] ?? "" + } else { + // Non-image: text types get decoded, binary types stay as blob + const isText = effectiveMime.startsWith("text/") || effectiveMime === "application/json" + const fileUri = pathToFileURL(filename).href + const resource = isText + ? 
{ + uri: fileUri, + mimeType: effectiveMime, + text: Buffer.from(base64Data, "base64").toString("utf-8"), + } + : { uri: fileUri, mimeType: effectiveMime, blob: base64Data } - const effectiveMime = dataMime || mime - - if (effectiveMime.startsWith("image/")) { - // Image - send as image block - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: messageChunk, - messageId: message.info.id, - content: { - type: "image", - mimeType: effectiveMime, - data: base64Data, - uri: pathToFileURL(filename).href, - }, - }, - }) - .catch((err) => { - log.error("failed to send image to ACP", { error: err }) - }) - } else { - // Non-image: text types get decoded, binary types stay as blob - const isText = effectiveMime.startsWith("text/") || effectiveMime === "application/json" - const fileUri = pathToFileURL(filename).href - const resource = isText - ? { - uri: fileUri, - mimeType: effectiveMime, - text: Buffer.from(base64Data, "base64").toString("utf-8"), - } - : { uri: fileUri, mimeType: effectiveMime, blob: base64Data } - - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: messageChunk, - messageId: message.info.id, - content: { type: "resource", resource }, - }, - }) - .catch((err) => { - log.error("failed to send resource to ACP", { error: err }) - }) - } - } - // URLs that don't match file:// or data: are skipped (unsupported) - } else if (part.type === "reasoning") { - if (part.text) { await this.connection .sessionUpdate({ sessionId, update: { - sessionUpdate: "agent_thought_chunk", + sessionUpdate: messageChunk, messageId: message.info.id, - content: { - type: "text", - text: part.text, - }, + content: { type: "resource", resource }, }, }) .catch((err) => { - log.error("failed to send reasoning to ACP", { error: err }) + log.error("failed to send resource to ACP", { error: err }) }) } } - } - } - - private bashOutput(part: ToolPart) { - if (part.tool !== "bash") return - if (!("metadata" in part.state) || 
!part.state.metadata || typeof part.state.metadata !== "object") return - const output = part.state.metadata["output"] - if (typeof output !== "string") return - return output - } - - private async toolStart(sessionId: string, part: ToolPart) { - if (this.toolStarts.has(part.callID)) return - this.toolStarts.add(part.callID) - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call", - toolCallId: part.callID, - title: part.tool, - kind: toToolKind(part.tool), - status: "pending", - locations: [], - rawInput: {}, - }, - }) - .catch((error) => { - log.error("failed to send tool pending to ACP", { error }) - }) - } - - private async loadAvailableModes(directory: string): Promise { - const agents = await this.config.sdk.app - .agents( - { - directory, - }, - { throwOnError: true }, - ) - .then((resp) => resp.data!) - - return agents - .filter((agent) => agent.mode !== "subagent" && !agent.hidden) - .map((agent) => ({ - id: agent.name, - name: agent.name, - description: agent.description, - })) - } - - private async resolveModeState( - directory: string, - sessionId: string, - ): Promise<{ availableModes: ModeOption[]; currentModeId?: string }> { - const availableModes = await this.loadAvailableModes(directory) - const currentModeId = - this.sessionManager.get(sessionId).modeId || - (await (async () => { - if (!availableModes.length) return undefined - const defaultAgentName = await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent())) - const resolvedModeId = - availableModes.find((mode) => mode.name === defaultAgentName)?.id ?? 
availableModes[0].id - this.sessionManager.setMode(sessionId, resolvedModeId) - return resolvedModeId - })()) - - return { availableModes, currentModeId } - } - - private async loadSessionMode(params: LoadSessionRequest) { - const directory = params.cwd - const model = await defaultModel(this.config, directory) - const sessionId = params.sessionId - - const providers = await this.sdk.config.providers({ directory }).then((x) => x.data!.providers) - const entries = sortProvidersByName(providers) - const availableVariants = modelVariantsFromProviders(entries, model) - const currentVariant = this.sessionManager.getVariant(sessionId) - if (currentVariant && !availableVariants.includes(currentVariant)) { - this.sessionManager.setVariant(sessionId, undefined) - } - const availableModels = buildAvailableModels(entries, { includeVariants: true }) - const modeState = await this.resolveModeState(directory, sessionId) - const currentModeId = modeState.currentModeId - const modes = currentModeId - ? { - availableModes: modeState.availableModes, - currentModeId, - } - : undefined - - const commands = await this.config.sdk.command - .list( - { - directory, - }, - { throwOnError: true }, - ) - .then((resp) => resp.data!) - - const availableCommands = commands.map((command) => ({ - name: command.name, - description: command.description ?? 
"", - })) - const names = new Set(availableCommands.map((c) => c.name)) - if (!names.has("compact")) - availableCommands.push({ - name: "compact", - description: "compact the session", - }) - - const mcpServers: Record = {} - for (const server of params.mcpServers) { - if ("type" in server) { - mcpServers[server.name] = { - url: server.url, - headers: server.headers.reduce>((acc, { name, value }) => { - acc[name] = value - return acc - }, {}), - type: "remote", - } - } else { - mcpServers[server.name] = { - type: "local", - command: [server.command, ...server.args], - environment: server.env.reduce>((acc, { name, value }) => { - acc[name] = value - return acc - }, {}), - } - } - } - - await Promise.all( - Object.entries(mcpServers).map(async ([key, mcp]) => { - await this.sdk.mcp - .add( - { - directory, - name: key, - config: mcp, + // URLs that don't match file:// or data: are skipped (unsupported) + } else if (part.type === "reasoning") { + if (part.text) { + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "agent_thought_chunk", + messageId: message.info.id, + content: { + type: "text", + text: part.text, + }, }, - { throwOnError: true }, - ) - .catch((error) => { - log.error("failed to add mcp server", { name: key, error }) }) - }), - ) + .catch((err) => { + log.error("failed to send reasoning to ACP", { error: err }) + }) + } + } + } + } - setTimeout(() => { - void this.connection.sessionUpdate({ - sessionId, - update: { - sessionUpdate: "available_commands_update", - availableCommands, - }, - }) - }, 0) + private bashOutput(part: ToolPart) { + if (part.tool !== "bash") return + if (!("metadata" in part.state) || !part.state.metadata || typeof part.state.metadata !== "object") return + const output = part.state.metadata["output"] + if (typeof output !== "string") return + return output + } - return { + private async toolStart(sessionId: string, part: ToolPart) { + if (this.toolStarts.has(part.callID)) return + 
this.toolStarts.add(part.callID) + await this.connection + .sessionUpdate({ sessionId, - models: { - currentModelId: formatModelIdWithVariant(model, currentVariant, availableVariants, true), - availableModels, + update: { + sessionUpdate: "tool_call", + toolCallId: part.callID, + title: part.tool, + kind: toToolKind(part.tool), + status: "pending", + locations: [], + rawInput: {}, }, - modes, - configOptions: buildConfigOptions({ - currentModelId: formatModelIdWithVariant(model, currentVariant, availableVariants, true), - availableModels, - modes, - }), - _meta: buildVariantMeta({ - model, - variant: this.sessionManager.getVariant(sessionId), - availableVariants, - }), - } + }) + .catch((error) => { + log.error("failed to send tool pending to ACP", { error }) + }) + } + + private async loadAvailableModes(directory: string): Promise { + const agents = await this.config.sdk.app + .agents( + { + directory, + }, + { throwOnError: true }, + ) + .then((resp) => resp.data!) + + return agents + .filter((agent) => agent.mode !== "subagent" && !agent.hidden) + .map((agent) => ({ + id: agent.name, + name: agent.name, + description: agent.description, + })) + } + + private async resolveModeState( + directory: string, + sessionId: string, + ): Promise<{ availableModes: ModeOption[]; currentModeId?: string }> { + const availableModes = await this.loadAvailableModes(directory) + const currentModeId = + this.sessionManager.get(sessionId).modeId || + (await (async () => { + if (!availableModes.length) return undefined + const defaultAgentName = await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent())) + const resolvedModeId = + availableModes.find((mode) => mode.name === defaultAgentName)?.id ?? 
availableModes[0].id + this.sessionManager.setMode(sessionId, resolvedModeId) + return resolvedModeId + })()) + + return { availableModes, currentModeId } + } + + private async loadSessionMode(params: LoadSessionRequest) { + const directory = params.cwd + const model = await defaultModel(this.config, directory) + const sessionId = params.sessionId + + const providers = await this.sdk.config.providers({ directory }).then((x) => x.data!.providers) + const entries = sortProvidersByName(providers) + const availableVariants = modelVariantsFromProviders(entries, model) + const currentVariant = this.sessionManager.getVariant(sessionId) + if (currentVariant && !availableVariants.includes(currentVariant)) { + this.sessionManager.setVariant(sessionId, undefined) } - - async unstable_setSessionModel(params: SetSessionModelRequest) { - const session = this.sessionManager.get(params.sessionId) - const providers = await this.sdk.config - .providers({ directory: session.cwd }, { throwOnError: true }) - .then((x) => x.data!.providers) - - const selection = parseModelSelection(params.modelId, providers) - this.sessionManager.setModel(session.id, selection.model) - this.sessionManager.setVariant(session.id, selection.variant) - - const entries = sortProvidersByName(providers) - const availableVariants = modelVariantsFromProviders(entries, selection.model) - - return { - _meta: buildVariantMeta({ - model: selection.model, - variant: selection.variant, - availableVariants, - }), - } - } - - async setSessionMode(params: SetSessionModeRequest): Promise { - const session = this.sessionManager.get(params.sessionId) - const availableModes = await this.loadAvailableModes(session.cwd) - if (!availableModes.some((mode) => mode.id === params.modeId)) { - throw new Error(`Agent not found: ${params.modeId}`) - } - this.sessionManager.setMode(params.sessionId, params.modeId) - } - - async setSessionConfigOption(params: SetSessionConfigOptionRequest): Promise { - const session = 
this.sessionManager.get(params.sessionId) - const providers = await this.sdk.config - .providers({ directory: session.cwd }, { throwOnError: true }) - .then((x) => x.data!.providers) - const entries = sortProvidersByName(providers) - - if (params.configId === "model") { - if (typeof params.value !== "string") throw RequestError.invalidParams("model value must be a string") - const selection = parseModelSelection(params.value, providers) - this.sessionManager.setModel(session.id, selection.model) - this.sessionManager.setVariant(session.id, selection.variant) - } else if (params.configId === "mode") { - if (typeof params.value !== "string") throw RequestError.invalidParams("mode value must be a string") - const availableModes = await this.loadAvailableModes(session.cwd) - if (!availableModes.some((mode) => mode.id === params.value)) { - throw RequestError.invalidParams(JSON.stringify({ error: `Mode not found: ${params.value}` })) + const availableModels = buildAvailableModels(entries, { includeVariants: true }) + const modeState = await this.resolveModeState(directory, sessionId) + const currentModeId = modeState.currentModeId + const modes = currentModeId + ? { + availableModes: modeState.availableModes, + currentModeId, } - this.sessionManager.setMode(session.id, params.value) - } else { - throw RequestError.invalidParams(JSON.stringify({ error: `Unknown config option: ${params.configId}` })) - } + : undefined - const updatedSession = this.sessionManager.get(session.id) - const model = updatedSession.model ?? (await defaultModel(this.config, session.cwd)) - const availableVariants = modelVariantsFromProviders(entries, model) - const currentModelId = formatModelIdWithVariant(model, updatedSession.variant, availableVariants, true) - const availableModels = buildAvailableModels(entries, { includeVariants: true }) - const modeState = await this.resolveModeState(session.cwd, session.id) - const modes = modeState.currentModeId - ? 
{ availableModes: modeState.availableModes, currentModeId: modeState.currentModeId } - : undefined + const commands = await this.config.sdk.command + .list( + { + directory, + }, + { throwOnError: true }, + ) + .then((resp) => resp.data!) - return { - configOptions: buildConfigOptions({ currentModelId, availableModels, modes }), - } - } - - async prompt(params: PromptRequest) { - const sessionID = params.sessionId - const session = this.sessionManager.get(sessionID) - const directory = session.cwd - - const current = session.model - const model = current ?? (await defaultModel(this.config, directory)) - if (!current) { - this.sessionManager.setModel(session.id, model) - } - const agent = - session.modeId ?? (await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent()))) - - const parts: Array< - | { type: "text"; text: string; synthetic?: boolean; ignored?: boolean } - | { type: "file"; url: string; filename: string; mime: string } - > = [] - for (const part of params.prompt) { - switch (part.type) { - case "text": - const audience = part.annotations?.audience - const forAssistant = audience?.length === 1 && audience[0] === "assistant" - const forUser = audience?.length === 1 && audience[0] === "user" - parts.push({ - type: "text" as const, - text: part.text, - ...(forAssistant && { synthetic: true }), - ...(forUser && { ignored: true }), - }) - break - case "image": { - const parsed = parseUri(part.uri ?? "") - const filename = parsed.type === "file" ? 
parsed.filename : "image" - if (part.data) { - parts.push({ - type: "file", - url: `data:${part.mimeType};base64,${part.data}`, - filename, - mime: part.mimeType, - }) - } else if (part.uri && part.uri.startsWith("http:")) { - parts.push({ - type: "file", - url: part.uri, - filename, - mime: part.mimeType, - }) - } - break - } - - case "resource_link": - const parsed = parseUri(part.uri) - // Use the name from resource_link if available - if (part.name && parsed.type === "file") { - parsed.filename = part.name - } - parts.push(parsed) - - break - - case "resource": { - const resource = part.resource - if ("text" in resource && resource.text) { - parts.push({ - type: "text", - text: resource.text, - }) - } else if ("blob" in resource && resource.blob && resource.mimeType) { - // Binary resource (PDFs, etc.): store as file part with data URL - const parsed = parseUri(resource.uri ?? "") - const filename = parsed.type === "file" ? parsed.filename : "file" - parts.push({ - type: "file", - url: `data:${resource.mimeType};base64,${resource.blob}`, - filename, - mime: resource.mimeType, - }) - } - break - } - - default: - break - } - } - - log.info("parts", { parts }) - - const cmd = (() => { - const text = parts - .filter((p): p is { type: "text"; text: string } => p.type === "text") - .map((p) => p.text) - .join("") - .trim() - - if (!text.startsWith("/")) return - - const [name, ...rest] = text.slice(1).split(/\s+/) - return { name, args: rest.join(" ").trim() } - })() - - const buildUsage = (msg: AssistantMessage): Usage => ({ - totalTokens: - msg.tokens.input + - msg.tokens.output + - msg.tokens.reasoning + - (msg.tokens.cache?.read ?? 0) + - (msg.tokens.cache?.write ?? 
0), - inputTokens: msg.tokens.input, - outputTokens: msg.tokens.output, - thoughtTokens: msg.tokens.reasoning || undefined, - cachedReadTokens: msg.tokens.cache?.read || undefined, - cachedWriteTokens: msg.tokens.cache?.write || undefined, + const availableCommands = commands.map((command) => ({ + name: command.name, + description: command.description ?? "", + })) + const names = new Set(availableCommands.map((c) => c.name)) + if (!names.has("compact")) + availableCommands.push({ + name: "compact", + description: "compact the session", }) - if (!cmd) { - const response = await this.sdk.session.prompt({ - sessionID, - model: { - providerID: model.providerID, - modelID: model.modelID, - }, - variant: this.sessionManager.getVariant(sessionID), - parts, - agent, - directory, - }) - const msg = response.data?.info - - await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) - - return { - stopReason: "end_turn" as const, - usage: msg ? buildUsage(msg) : undefined, - _meta: {}, + const mcpServers: Record = {} + for (const server of params.mcpServers) { + if ("type" in server) { + mcpServers[server.name] = { + url: server.url, + headers: server.headers.reduce>((acc, { name, value }) => { + acc[name] = value + return acc + }, {}), + type: "remote", + } + } else { + mcpServers[server.name] = { + type: "local", + command: [server.command, ...server.args], + environment: server.env.reduce>((acc, { name, value }) => { + acc[name] = value + return acc + }, {}), } } + } - const command = await this.config.sdk.command - .list({ directory }, { throwOnError: true }) - .then((x) => x.data!.find((c) => c.name === cmd.name)) - if (command) { - const response = await this.sdk.session.command({ - sessionID, - command: command.name, - arguments: cmd.args, - model: model.providerID + "/" + model.modelID, - agent, - directory, - }) - const msg = response.data?.info - - await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) - - return { - stopReason: "end_turn" 
as const, - usage: msg ? buildUsage(msg) : undefined, - _meta: {}, - } - } - - switch (cmd.name) { - case "compact": - await this.config.sdk.session.summarize( + await Promise.all( + Object.entries(mcpServers).map(async ([key, mcp]) => { + await this.sdk.mcp + .add( { - sessionID, directory, - providerID: model.providerID, - modelID: model.modelID, + name: key, + config: mcp, }, { throwOnError: true }, ) + .catch((error) => { + log.error("failed to add mcp server", { name: key, error }) + }) + }), + ) + + setTimeout(() => { + void this.connection.sessionUpdate({ + sessionId, + update: { + sessionUpdate: "available_commands_update", + availableCommands, + }, + }) + }, 0) + + return { + sessionId, + models: { + currentModelId: formatModelIdWithVariant(model, currentVariant, availableVariants, true), + availableModels, + }, + modes, + configOptions: buildConfigOptions({ + currentModelId: formatModelIdWithVariant(model, currentVariant, availableVariants, true), + availableModels, + modes, + }), + _meta: buildVariantMeta({ + model, + variant: this.sessionManager.getVariant(sessionId), + availableVariants, + }), + } + } + + async unstable_setSessionModel(params: SetSessionModelRequest) { + const session = this.sessionManager.get(params.sessionId) + const providers = await this.sdk.config + .providers({ directory: session.cwd }, { throwOnError: true }) + .then((x) => x.data!.providers) + + const selection = parseModelSelection(params.modelId, providers) + this.sessionManager.setModel(session.id, selection.model) + this.sessionManager.setVariant(session.id, selection.variant) + + const entries = sortProvidersByName(providers) + const availableVariants = modelVariantsFromProviders(entries, selection.model) + + return { + _meta: buildVariantMeta({ + model: selection.model, + variant: selection.variant, + availableVariants, + }), + } + } + + async setSessionMode(params: SetSessionModeRequest): Promise { + const session = this.sessionManager.get(params.sessionId) + const 
availableModes = await this.loadAvailableModes(session.cwd) + if (!availableModes.some((mode) => mode.id === params.modeId)) { + throw new Error(`Agent not found: ${params.modeId}`) + } + this.sessionManager.setMode(params.sessionId, params.modeId) + } + + async setSessionConfigOption(params: SetSessionConfigOptionRequest): Promise { + const session = this.sessionManager.get(params.sessionId) + const providers = await this.sdk.config + .providers({ directory: session.cwd }, { throwOnError: true }) + .then((x) => x.data!.providers) + const entries = sortProvidersByName(providers) + + if (params.configId === "model") { + if (typeof params.value !== "string") throw RequestError.invalidParams("model value must be a string") + const selection = parseModelSelection(params.value, providers) + this.sessionManager.setModel(session.id, selection.model) + this.sessionManager.setVariant(session.id, selection.variant) + } else if (params.configId === "mode") { + if (typeof params.value !== "string") throw RequestError.invalidParams("mode value must be a string") + const availableModes = await this.loadAvailableModes(session.cwd) + if (!availableModes.some((mode) => mode.id === params.value)) { + throw RequestError.invalidParams(JSON.stringify({ error: `Mode not found: ${params.value}` })) + } + this.sessionManager.setMode(session.id, params.value) + } else { + throw RequestError.invalidParams(JSON.stringify({ error: `Unknown config option: ${params.configId}` })) + } + + const updatedSession = this.sessionManager.get(session.id) + const model = updatedSession.model ?? 
(await defaultModel(this.config, session.cwd)) + const availableVariants = modelVariantsFromProviders(entries, model) + const currentModelId = formatModelIdWithVariant(model, updatedSession.variant, availableVariants, true) + const availableModels = buildAvailableModels(entries, { includeVariants: true }) + const modeState = await this.resolveModeState(session.cwd, session.id) + const modes = modeState.currentModeId + ? { availableModes: modeState.availableModes, currentModeId: modeState.currentModeId } + : undefined + + return { + configOptions: buildConfigOptions({ currentModelId, availableModels, modes }), + } + } + + async prompt(params: PromptRequest) { + const sessionID = params.sessionId + const session = this.sessionManager.get(sessionID) + const directory = session.cwd + + const current = session.model + const model = current ?? (await defaultModel(this.config, directory)) + if (!current) { + this.sessionManager.setModel(session.id, model) + } + const agent = + session.modeId ?? (await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent()))) + + const parts: Array< + | { type: "text"; text: string; synthetic?: boolean; ignored?: boolean } + | { type: "file"; url: string; filename: string; mime: string } + > = [] + for (const part of params.prompt) { + switch (part.type) { + case "text": + const audience = part.annotations?.audience + const forAssistant = audience?.length === 1 && audience[0] === "assistant" + const forUser = audience?.length === 1 && audience[0] === "user" + parts.push({ + type: "text" as const, + text: part.text, + ...(forAssistant && { synthetic: true }), + ...(forUser && { ignored: true }), + }) + break + case "image": { + const parsed = parseUri(part.uri ?? "") + const filename = parsed.type === "file" ? 
parsed.filename : "image" + if (part.data) { + parts.push({ + type: "file", + url: `data:${part.mimeType};base64,${part.data}`, + filename, + mime: part.mimeType, + }) + } else if (part.uri && part.uri.startsWith("http:")) { + parts.push({ + type: "file", + url: part.uri, + filename, + mime: part.mimeType, + }) + } + break + } + + case "resource_link": + const parsed = parseUri(part.uri) + // Use the name from resource_link if available + if (part.name && parsed.type === "file") { + parsed.filename = part.name + } + parts.push(parsed) + + break + + case "resource": { + const resource = part.resource + if ("text" in resource && resource.text) { + parts.push({ + type: "text", + text: resource.text, + }) + } else if ("blob" in resource && resource.blob && resource.mimeType) { + // Binary resource (PDFs, etc.): store as file part with data URL + const parsed = parseUri(resource.uri ?? "") + const filename = parsed.type === "file" ? parsed.filename : "file" + parts.push({ + type: "file", + url: `data:${resource.mimeType};base64,${resource.blob}`, + filename, + mime: resource.mimeType, + }) + } + break + } + + default: break } + } + + log.info("parts", { parts }) + + const cmd = (() => { + const text = parts + .filter((p): p is { type: "text"; text: string } => p.type === "text") + .map((p) => p.text) + .join("") + .trim() + + if (!text.startsWith("/")) return + + const [name, ...rest] = text.slice(1).split(/\s+/) + return { name, args: rest.join(" ").trim() } + })() + + const buildUsage = (msg: AssistantMessage): Usage => ({ + totalTokens: + msg.tokens.input + + msg.tokens.output + + msg.tokens.reasoning + + (msg.tokens.cache?.read ?? 0) + + (msg.tokens.cache?.write ?? 
0), + inputTokens: msg.tokens.input, + outputTokens: msg.tokens.output, + thoughtTokens: msg.tokens.reasoning || undefined, + cachedReadTokens: msg.tokens.cache?.read || undefined, + cachedWriteTokens: msg.tokens.cache?.write || undefined, + }) + + if (!cmd) { + const response = await this.sdk.session.prompt({ + sessionID, + model: { + providerID: model.providerID, + modelID: model.modelID, + }, + variant: this.sessionManager.getVariant(sessionID), + parts, + agent, + directory, + }) + const msg = response.data?.info await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) return { stopReason: "end_turn" as const, + usage: msg ? buildUsage(msg) : undefined, _meta: {}, } } - async cancel(params: CancelNotification) { - const session = this.sessionManager.get(params.sessionId) - await this.config.sdk.session.abort( - { - sessionID: params.sessionId, - directory: session.cwd, - }, - { throwOnError: true }, - ) - } - } - - function toToolKind(toolName: string): ToolKind { - const tool = toolName.toLocaleLowerCase() - switch (tool) { - case "bash": - return "execute" - case "webfetch": - return "fetch" - - case "edit": - case "patch": - case "write": - return "edit" - - case "grep": - case "glob": - case "context7_resolve_library_id": - case "context7_get_library_docs": - return "search" - - case "read": - return "read" - - default: - return "other" - } - } - - function toLocations(toolName: string, input: Record): { path: string }[] { - const tool = toolName.toLocaleLowerCase() - switch (tool) { - case "read": - case "edit": - case "write": - return input["filePath"] ? [{ path: input["filePath"] }] : [] - case "glob": - case "grep": - return input["path"] ? 
[{ path: input["path"] }] : [] - case "bash": - return [] - default: - return [] - } - } - - async function defaultModel(config: ACPConfig, cwd?: string): Promise<{ providerID: ProviderID; modelID: ModelID }> { - const sdk = config.sdk - const configured = config.defaultModel - if (configured) return configured - - const directory = cwd ?? process.cwd() - - const specified = await sdk.config - .get({ directory }, { throwOnError: true }) - .then((resp) => { - const cfg = resp.data - if (!cfg || !cfg.model) return undefined - return Provider.parseModel(cfg.model) - }) - .catch((error) => { - log.error("failed to load user config for default model", { error }) - return undefined + const command = await this.config.sdk.command + .list({ directory }, { throwOnError: true }) + .then((x) => x.data!.find((c) => c.name === cmd.name)) + if (command) { + const response = await this.sdk.session.command({ + sessionID, + command: command.name, + arguments: cmd.args, + model: model.providerID + "/" + model.modelID, + agent, + directory, }) + const msg = response.data?.info - const providers = await sdk.config - .providers({ directory }, { throwOnError: true }) - .then((x) => x.data?.providers ?? 
[]) - .catch((error) => { - log.error("failed to list providers for default model", { error }) - return [] - }) + await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) - if (specified && providers.length) { - const provider = providers.find((p) => p.id === specified.providerID) - if (provider && provider.models[specified.modelID]) return specified - } - - if (specified && !providers.length) return specified - - const opencodeProvider = providers.find((p) => p.id === "opencode") - if (opencodeProvider) { - if (opencodeProvider.models["big-pickle"]) { - return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } - } - const [best] = Provider.sort(Object.values(opencodeProvider.models)) - if (best) { - return { - providerID: ProviderID.make(best.providerID), - modelID: ModelID.make(best.id), - } + return { + stopReason: "end_turn" as const, + usage: msg ? buildUsage(msg) : undefined, + _meta: {}, } } - const models = providers.flatMap((p) => Object.values(p.models)) - const [best] = Provider.sort(models) + switch (cmd.name) { + case "compact": + await this.config.sdk.session.summarize( + { + sessionID, + directory, + providerID: model.providerID, + modelID: model.modelID, + }, + { throwOnError: true }, + ) + break + } + + await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) + + return { + stopReason: "end_turn" as const, + _meta: {}, + } + } + + async cancel(params: CancelNotification) { + const session = this.sessionManager.get(params.sessionId) + await this.config.sdk.session.abort( + { + sessionID: params.sessionId, + directory: session.cwd, + }, + { throwOnError: true }, + ) + } +} + +function toToolKind(toolName: string): ToolKind { + const tool = toolName.toLocaleLowerCase() + switch (tool) { + case "bash": + return "execute" + case "webfetch": + return "fetch" + + case "edit": + case "patch": + case "write": + return "edit" + + case "grep": + case "glob": + case "context7_resolve_library_id": + case 
"context7_get_library_docs": + return "search" + + case "read": + return "read" + + default: + return "other" + } +} + +function toLocations(toolName: string, input: Record): { path: string }[] { + const tool = toolName.toLocaleLowerCase() + switch (tool) { + case "read": + case "edit": + case "write": + return input["filePath"] ? [{ path: input["filePath"] }] : [] + case "glob": + case "grep": + return input["path"] ? [{ path: input["path"] }] : [] + case "bash": + return [] + default: + return [] + } +} + +async function defaultModel(config: ACPConfig, cwd?: string): Promise<{ providerID: ProviderID; modelID: ModelID }> { + const sdk = config.sdk + const configured = config.defaultModel + if (configured) return configured + + const directory = cwd ?? process.cwd() + + const specified = await sdk.config + .get({ directory }, { throwOnError: true }) + .then((resp) => { + const cfg = resp.data + if (!cfg || !cfg.model) return undefined + return Provider.parseModel(cfg.model) + }) + .catch((error) => { + log.error("failed to load user config for default model", { error }) + return undefined + }) + + const providers = await sdk.config + .providers({ directory }, { throwOnError: true }) + .then((x) => x.data?.providers ?? 
[]) + .catch((error) => { + log.error("failed to list providers for default model", { error }) + return [] + }) + + if (specified && providers.length) { + const provider = providers.find((p) => p.id === specified.providerID) + if (provider && provider.models[specified.modelID]) return specified + } + + if (specified && !providers.length) return specified + + const opencodeProvider = providers.find((p) => p.id === "opencode") + if (opencodeProvider) { + if (opencodeProvider.models["big-pickle"]) { + return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } + } + const [best] = Provider.sort(Object.values(opencodeProvider.models)) if (best) { return { providerID: ProviderID.make(best.providerID), modelID: ModelID.make(best.id), } } - - if (specified) return specified - - return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } } - function parseUri( - uri: string, - ): { type: "file"; url: string; filename: string; mime: string } | { type: "text"; text: string } { - try { - if (uri.startsWith("file://")) { - const path = uri.slice(7) + const models = providers.flatMap((p) => Object.values(p.models)) + const [best] = Provider.sort(models) + if (best) { + return { + providerID: ProviderID.make(best.providerID), + modelID: ModelID.make(best.id), + } + } + + if (specified) return specified + + return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } +} + +function parseUri( + uri: string, +): { type: "file"; url: string; filename: string; mime: string } | { type: "text"; text: string } { + try { + if (uri.startsWith("file://")) { + const path = uri.slice(7) + const name = path.split("/").pop() || path + return { + type: "file", + url: uri, + filename: name, + mime: "text/plain", + } + } + if (uri.startsWith("zed://")) { + const url = new URL(uri) + const path = url.searchParams.get("path") + if (path) { const name = path.split("/").pop() || path return { type: "file", - url: uri, + url: 
pathToFileURL(path).href, filename: name, mime: "text/plain", } } - if (uri.startsWith("zed://")) { - const url = new URL(uri) - const path = url.searchParams.get("path") - if (path) { - const name = path.split("/").pop() || path - return { - type: "file", - url: pathToFileURL(path).href, - filename: name, - mime: "text/plain", - } - } - } - return { - type: "text", - text: uri, - } - } catch { - return { - type: "text", - text: uri, - } } - } - - function getNewContent(fileOriginal: string, unifiedDiff: string): string | undefined { - const result = applyPatch(fileOriginal, unifiedDiff) - if (result === false) { - log.error("Failed to apply unified diff (context mismatch)") - return undefined - } - return result - } - - function sortProvidersByName(providers: T[]): T[] { - return [...providers].sort((a, b) => { - const nameA = a.name.toLowerCase() - const nameB = b.name.toLowerCase() - if (nameA < nameB) return -1 - if (nameA > nameB) return 1 - return 0 - }) - } - - function modelVariantsFromProviders( - providers: Array<{ id: string; models: Record }> }>, - model: { providerID: ProviderID; modelID: ModelID }, - ): string[] { - const provider = providers.find((entry) => entry.id === model.providerID) - if (!provider) return [] - const modelInfo = provider.models[model.modelID] - if (!modelInfo?.variants) return [] - return Object.keys(modelInfo.variants) - } - - function buildAvailableModels( - providers: Array<{ id: string; name: string; models: Record }>, - options: { includeVariants?: boolean } = {}, - ): ModelOption[] { - const includeVariants = options.includeVariants ?? 
false - return providers.flatMap((provider) => { - const unsorted: Array<{ id: string; name: string; variants?: Record }> = Object.values( - provider.models, - ) - const models = Provider.sort(unsorted) - return models.flatMap((model) => { - const base: ModelOption = { - modelId: `${provider.id}/${model.id}`, - name: `${provider.name}/${model.name}`, - } - if (!includeVariants || !model.variants) return [base] - const variants = Object.keys(model.variants).filter((variant) => variant !== DEFAULT_VARIANT_VALUE) - const variantOptions = variants.map((variant) => ({ - modelId: `${provider.id}/${model.id}/${variant}`, - name: `${provider.name}/${model.name} (${variant})`, - })) - return [base, ...variantOptions] - }) - }) - } - - function formatModelIdWithVariant( - model: { providerID: ProviderID; modelID: ModelID }, - variant: string | undefined, - availableVariants: string[], - includeVariant: boolean, - ) { - const base = `${model.providerID}/${model.modelID}` - if (!includeVariant || !variant || !availableVariants.includes(variant)) return base - return `${base}/${variant}` - } - - function buildVariantMeta(input: { - model: { providerID: ProviderID; modelID: ModelID } - variant?: string - availableVariants: string[] - }) { return { - opencode: { - modelId: `${input.model.providerID}/${input.model.modelID}`, - variant: input.variant ?? 
null, - availableVariants: input.availableVariants, - }, + type: "text", + text: uri, + } + } catch { + return { + type: "text", + text: uri, } } +} - function parseModelSelection( - modelId: string, - providers: Array<{ id: string; models: Record }> }>, - ): { model: { providerID: ProviderID; modelID: ModelID }; variant?: string } { - const parsed = Provider.parseModel(modelId) - const provider = providers.find((p) => p.id === parsed.providerID) - if (!provider) { - return { model: parsed, variant: undefined } - } +function getNewContent(fileOriginal: string, unifiedDiff: string): string | undefined { + const result = applyPatch(fileOriginal, unifiedDiff) + if (result === false) { + log.error("Failed to apply unified diff (context mismatch)") + return undefined + } + return result +} - // Check if modelID exists directly - if (provider.models[parsed.modelID]) { - return { model: parsed, variant: undefined } - } +function sortProvidersByName(providers: T[]): T[] { + return [...providers].sort((a, b) => { + const nameA = a.name.toLowerCase() + const nameB = b.name.toLowerCase() + if (nameA < nameB) return -1 + if (nameA > nameB) return 1 + return 0 + }) +} - // Try to extract variant from end of modelID (e.g., "claude-sonnet-4/high" -> model: "claude-sonnet-4", variant: "high") - const segments = parsed.modelID.split("/") - if (segments.length > 1) { - const candidateVariant = segments[segments.length - 1] - const baseModelId = segments.slice(0, -1).join("/") - const baseModelInfo = provider.models[baseModelId] - if (baseModelInfo?.variants && candidateVariant in baseModelInfo.variants) { - return { - model: { providerID: parsed.providerID, modelID: ModelID.make(baseModelId) }, - variant: candidateVariant, - } +function modelVariantsFromProviders( + providers: Array<{ id: string; models: Record }> }>, + model: { providerID: ProviderID; modelID: ModelID }, +): string[] { + const provider = providers.find((entry) => entry.id === model.providerID) + if (!provider) 
return [] + const modelInfo = provider.models[model.modelID] + if (!modelInfo?.variants) return [] + return Object.keys(modelInfo.variants) +} + +function buildAvailableModels( + providers: Array<{ id: string; name: string; models: Record }>, + options: { includeVariants?: boolean } = {}, +): ModelOption[] { + const includeVariants = options.includeVariants ?? false + return providers.flatMap((provider) => { + const unsorted: Array<{ id: string; name: string; variants?: Record }> = Object.values( + provider.models, + ) + const models = Provider.sort(unsorted) + return models.flatMap((model) => { + const base: ModelOption = { + modelId: `${provider.id}/${model.id}`, + name: `${provider.name}/${model.name}`, } - } + if (!includeVariants || !model.variants) return [base] + const variants = Object.keys(model.variants).filter((variant) => variant !== DEFAULT_VARIANT_VALUE) + const variantOptions = variants.map((variant) => ({ + modelId: `${provider.id}/${model.id}/${variant}`, + name: `${provider.name}/${model.name} (${variant})`, + })) + return [base, ...variantOptions] + }) + }) +} +function formatModelIdWithVariant( + model: { providerID: ProviderID; modelID: ModelID }, + variant: string | undefined, + availableVariants: string[], + includeVariant: boolean, +) { + const base = `${model.providerID}/${model.modelID}` + if (!includeVariant || !variant || !availableVariants.includes(variant)) return base + return `${base}/${variant}` +} + +function buildVariantMeta(input: { + model: { providerID: ProviderID; modelID: ModelID } + variant?: string + availableVariants: string[] +}) { + return { + opencode: { + modelId: `${input.model.providerID}/${input.model.modelID}`, + variant: input.variant ?? 
null, + availableVariants: input.availableVariants, + }, + } +} + +function parseModelSelection( + modelId: string, + providers: Array<{ id: string; models: Record }> }>, +): { model: { providerID: ProviderID; modelID: ModelID }; variant?: string } { + const parsed = Provider.parseModel(modelId) + const provider = providers.find((p) => p.id === parsed.providerID) + if (!provider) { return { model: parsed, variant: undefined } } - function buildConfigOptions(input: { - currentModelId: string - availableModels: ModelOption[] - modes?: { availableModes: ModeOption[]; currentModeId: string } | undefined - }): SessionConfigOption[] { - const options: SessionConfigOption[] = [ - { - id: "model", - name: "Model", - category: "model", - type: "select", - currentValue: input.currentModelId, - options: input.availableModels.map((m) => ({ value: m.modelId, name: m.name })), - }, - ] - if (input.modes) { - options.push({ - id: "mode", - name: "Session Mode", - category: "mode", - type: "select", - currentValue: input.modes.currentModeId, - options: input.modes.availableModes.map((m) => ({ - value: m.id, - name: m.name, - ...(m.description ? 
{ description: m.description } : {}), - })), - }) - } - return options + // Check if modelID exists directly + if (provider.models[parsed.modelID]) { + return { model: parsed, variant: undefined } } + + // Try to extract variant from end of modelID (e.g., "claude-sonnet-4/high" -> model: "claude-sonnet-4", variant: "high") + const segments = parsed.modelID.split("/") + if (segments.length > 1) { + const candidateVariant = segments[segments.length - 1] + const baseModelId = segments.slice(0, -1).join("/") + const baseModelInfo = provider.models[baseModelId] + if (baseModelInfo?.variants && candidateVariant in baseModelInfo.variants) { + return { + model: { providerID: parsed.providerID, modelID: ModelID.make(baseModelId) }, + variant: candidateVariant, + } + } + } + + return { model: parsed, variant: undefined } } + +function buildConfigOptions(input: { + currentModelId: string + availableModels: ModelOption[] + modes?: { availableModes: ModeOption[]; currentModeId: string } | undefined +}): SessionConfigOption[] { + const options: SessionConfigOption[] = [ + { + id: "model", + name: "Model", + category: "model", + type: "select", + currentValue: input.currentModelId, + options: input.availableModels.map((m) => ({ value: m.modelId, name: m.name })), + }, + ] + if (input.modes) { + options.push({ + id: "mode", + name: "Session Mode", + category: "mode", + type: "select", + currentValue: input.modes.currentModeId, + options: input.modes.availableModes.map((m) => ({ + value: m.id, + name: m.name, + ...(m.description ? 
{ description: m.description } : {}), + })), + }) + } + return options +} + +export * as ACP from "./agent" From cde105e7a8832b9c6d9d0a43d5699b4768156533 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:09 -0400 Subject: [PATCH 077/120] refactor: unwrap CopilotModels namespace + self-reexport (#22947) --- .../src/plugin/github-copilot/models.ts | 266 +++++++++--------- 1 file changed, 133 insertions(+), 133 deletions(-) diff --git a/packages/opencode/src/plugin/github-copilot/models.ts b/packages/opencode/src/plugin/github-copilot/models.ts index dfd6ceceaa..71d21afbe4 100644 --- a/packages/opencode/src/plugin/github-copilot/models.ts +++ b/packages/opencode/src/plugin/github-copilot/models.ts @@ -1,146 +1,146 @@ import { z } from "zod" import type { Model } from "@opencode-ai/sdk/v2" -export namespace CopilotModels { - export const schema = z.object({ - data: z.array( - z.object({ - model_picker_enabled: z.boolean(), - id: z.string(), - name: z.string(), - // every version looks like: `{model.id}-YYYY-MM-DD` - version: z.string(), - supported_endpoints: z.array(z.string()).optional(), - capabilities: z.object({ - family: z.string(), - limits: z.object({ - max_context_window_tokens: z.number(), - max_output_tokens: z.number(), - max_prompt_tokens: z.number(), - vision: z - .object({ - max_prompt_image_size: z.number(), - max_prompt_images: z.number(), - supported_media_types: z.array(z.string()), - }) - .optional(), - }), - supports: z.object({ - adaptive_thinking: z.boolean().optional(), - max_thinking_budget: z.number().optional(), - min_thinking_budget: z.number().optional(), - reasoning_effort: z.array(z.string()).optional(), - streaming: z.boolean(), - structured_outputs: z.boolean().optional(), - tool_calls: z.boolean(), - vision: z.boolean().optional(), - }), +export const schema = z.object({ + data: z.array( + z.object({ + model_picker_enabled: z.boolean(), + id: z.string(), + name: z.string(), + // every version looks like: 
`{model.id}-YYYY-MM-DD` + version: z.string(), + supported_endpoints: z.array(z.string()).optional(), + capabilities: z.object({ + family: z.string(), + limits: z.object({ + max_context_window_tokens: z.number(), + max_output_tokens: z.number(), + max_prompt_tokens: z.number(), + vision: z + .object({ + max_prompt_image_size: z.number(), + max_prompt_images: z.number(), + supported_media_types: z.array(z.string()), + }) + .optional(), + }), + supports: z.object({ + adaptive_thinking: z.boolean().optional(), + max_thinking_budget: z.number().optional(), + min_thinking_budget: z.number().optional(), + reasoning_effort: z.array(z.string()).optional(), + streaming: z.boolean(), + structured_outputs: z.boolean().optional(), + tool_calls: z.boolean(), + vision: z.boolean().optional(), }), }), - ), - }) + }), + ), +}) - type Item = z.infer["data"][number] +type Item = z.infer["data"][number] - function build(key: string, remote: Item, url: string, prev?: Model): Model { - const reasoning = - !!remote.capabilities.supports.adaptive_thinking || - !!remote.capabilities.supports.reasoning_effort?.length || - remote.capabilities.supports.max_thinking_budget !== undefined || - remote.capabilities.supports.min_thinking_budget !== undefined - const image = - (remote.capabilities.supports.vision ?? false) || - (remote.capabilities.limits.vision?.supported_media_types ?? []).some((item) => item.startsWith("image/")) +function build(key: string, remote: Item, url: string, prev?: Model): Model { + const reasoning = + !!remote.capabilities.supports.adaptive_thinking || + !!remote.capabilities.supports.reasoning_effort?.length || + remote.capabilities.supports.max_thinking_budget !== undefined || + remote.capabilities.supports.min_thinking_budget !== undefined + const image = + (remote.capabilities.supports.vision ?? false) || + (remote.capabilities.limits.vision?.supported_media_types ?? 
[]).some((item) => item.startsWith("image/")) - const isMsgApi = remote.supported_endpoints?.includes("/v1/messages") + const isMsgApi = remote.supported_endpoints?.includes("/v1/messages") - return { - id: key, - providerID: "github-copilot", - api: { - id: remote.id, - url: isMsgApi ? `${url}/v1` : url, - npm: isMsgApi ? "@ai-sdk/anthropic" : "@ai-sdk/github-copilot", + return { + id: key, + providerID: "github-copilot", + api: { + id: remote.id, + url: isMsgApi ? `${url}/v1` : url, + npm: isMsgApi ? "@ai-sdk/anthropic" : "@ai-sdk/github-copilot", + }, + // API response wins + status: "active", + limit: { + context: remote.capabilities.limits.max_context_window_tokens, + input: remote.capabilities.limits.max_prompt_tokens, + output: remote.capabilities.limits.max_output_tokens, + }, + capabilities: { + temperature: prev?.capabilities.temperature ?? true, + reasoning: prev?.capabilities.reasoning ?? reasoning, + attachment: prev?.capabilities.attachment ?? true, + toolcall: remote.capabilities.supports.tool_calls, + input: { + text: true, + audio: false, + image, + video: false, + pdf: false, }, - // API response wins - status: "active", - limit: { - context: remote.capabilities.limits.max_context_window_tokens, - input: remote.capabilities.limits.max_prompt_tokens, - output: remote.capabilities.limits.max_output_tokens, + output: { + text: true, + audio: false, + image: false, + video: false, + pdf: false, }, - capabilities: { - temperature: prev?.capabilities.temperature ?? true, - reasoning: prev?.capabilities.reasoning ?? reasoning, - attachment: prev?.capabilities.attachment ?? true, - toolcall: remote.capabilities.supports.tool_calls, - input: { - text: true, - audio: false, - image, - video: false, - pdf: false, - }, - output: { - text: true, - audio: false, - image: false, - video: false, - pdf: false, - }, - interleaved: false, - }, - // existing wins - family: prev?.family ?? remote.capabilities.family, - name: prev?.name ?? 
remote.name, - cost: { - input: 0, - output: 0, - cache: { read: 0, write: 0 }, - }, - options: prev?.options ?? {}, - headers: prev?.headers ?? {}, - release_date: - prev?.release_date ?? - (remote.version.startsWith(`${remote.id}-`) ? remote.version.slice(remote.id.length + 1) : remote.version), - variants: prev?.variants ?? {}, - } - } - - export async function get( - baseURL: string, - headers: HeadersInit = {}, - existing: Record = {}, - ): Promise> { - const data = await fetch(`${baseURL}/models`, { - headers, - signal: AbortSignal.timeout(5_000), - }).then(async (res) => { - if (!res.ok) { - throw new Error(`Failed to fetch models: ${res.status}`) - } - return schema.parse(await res.json()) - }) - - const result = { ...existing } - const remote = new Map(data.data.filter((m) => m.model_picker_enabled).map((m) => [m.id, m] as const)) - - // prune existing models whose api.id isn't in the endpoint response - for (const [key, model] of Object.entries(result)) { - const m = remote.get(model.api.id) - if (!m) { - delete result[key] - continue - } - result[key] = build(key, m, baseURL, model) - } - - // add new endpoint models not already keyed in result - for (const [id, m] of remote) { - if (id in result) continue - result[id] = build(id, m, baseURL) - } - - return result + interleaved: false, + }, + // existing wins + family: prev?.family ?? remote.capabilities.family, + name: prev?.name ?? remote.name, + cost: { + input: 0, + output: 0, + cache: { read: 0, write: 0 }, + }, + options: prev?.options ?? {}, + headers: prev?.headers ?? {}, + release_date: + prev?.release_date ?? + (remote.version.startsWith(`${remote.id}-`) ? remote.version.slice(remote.id.length + 1) : remote.version), + variants: prev?.variants ?? 
{}, } } + +export async function get( + baseURL: string, + headers: HeadersInit = {}, + existing: Record = {}, +): Promise> { + const data = await fetch(`${baseURL}/models`, { + headers, + signal: AbortSignal.timeout(5_000), + }).then(async (res) => { + if (!res.ok) { + throw new Error(`Failed to fetch models: ${res.status}`) + } + return schema.parse(await res.json()) + }) + + const result = { ...existing } + const remote = new Map(data.data.filter((m) => m.model_picker_enabled).map((m) => [m.id, m] as const)) + + // prune existing models whose api.id isn't in the endpoint response + for (const [key, model] of Object.entries(result)) { + const m = remote.get(model.api.id) + if (!m) { + delete result[key] + continue + } + result[key] = build(key, m, baseURL, model) + } + + // add new endpoint models not already keyed in result + for (const [id, m] of remote) { + if (id in result) continue + result[id] = build(id, m, baseURL) + } + + return result +} + +export * as CopilotModels from "./models" From fdd5b77bfd9c525a2ae6656a011c1419748761e3 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:12 -0400 Subject: [PATCH 078/120] refactor: unwrap McpAuth namespace + self-reexport (#22942) --- packages/opencode/src/mcp/auth.ts | 266 +++++++++++++++--------------- 1 file changed, 133 insertions(+), 133 deletions(-) diff --git a/packages/opencode/src/mcp/auth.ts b/packages/opencode/src/mcp/auth.ts index 85f9e1d8c9..efb046d7a7 100644 --- a/packages/opencode/src/mcp/auth.ts +++ b/packages/opencode/src/mcp/auth.ts @@ -4,141 +4,141 @@ import { Global } from "../global" import { Effect, Layer, Context } from "effect" import { AppFileSystem } from "@opencode-ai/shared/filesystem" -export namespace McpAuth { - export const Tokens = z.object({ - accessToken: z.string(), - refreshToken: z.string().optional(), - expiresAt: z.number().optional(), - scope: z.string().optional(), - }) - export type Tokens = z.infer +export const Tokens = z.object({ + accessToken: 
z.string(), + refreshToken: z.string().optional(), + expiresAt: z.number().optional(), + scope: z.string().optional(), +}) +export type Tokens = z.infer - export const ClientInfo = z.object({ - clientId: z.string(), - clientSecret: z.string().optional(), - clientIdIssuedAt: z.number().optional(), - clientSecretExpiresAt: z.number().optional(), - }) - export type ClientInfo = z.infer +export const ClientInfo = z.object({ + clientId: z.string(), + clientSecret: z.string().optional(), + clientIdIssuedAt: z.number().optional(), + clientSecretExpiresAt: z.number().optional(), +}) +export type ClientInfo = z.infer - export const Entry = z.object({ - tokens: Tokens.optional(), - clientInfo: ClientInfo.optional(), - codeVerifier: z.string().optional(), - oauthState: z.string().optional(), - serverUrl: z.string().optional(), - }) - export type Entry = z.infer +export const Entry = z.object({ + tokens: Tokens.optional(), + clientInfo: ClientInfo.optional(), + codeVerifier: z.string().optional(), + oauthState: z.string().optional(), + serverUrl: z.string().optional(), +}) +export type Entry = z.infer - const filepath = path.join(Global.Path.data, "mcp-auth.json") +const filepath = path.join(Global.Path.data, "mcp-auth.json") - export interface Interface { - readonly all: () => Effect.Effect> - readonly get: (mcpName: string) => Effect.Effect - readonly getForUrl: (mcpName: string, serverUrl: string) => Effect.Effect - readonly set: (mcpName: string, entry: Entry, serverUrl?: string) => Effect.Effect - readonly remove: (mcpName: string) => Effect.Effect - readonly updateTokens: (mcpName: string, tokens: Tokens, serverUrl?: string) => Effect.Effect - readonly updateClientInfo: (mcpName: string, clientInfo: ClientInfo, serverUrl?: string) => Effect.Effect - readonly updateCodeVerifier: (mcpName: string, codeVerifier: string) => Effect.Effect - readonly clearCodeVerifier: (mcpName: string) => Effect.Effect - readonly updateOAuthState: (mcpName: string, oauthState: string) => 
Effect.Effect - readonly getOAuthState: (mcpName: string) => Effect.Effect - readonly clearOAuthState: (mcpName: string) => Effect.Effect - readonly isTokenExpired: (mcpName: string) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/McpAuth") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const fs = yield* AppFileSystem.Service - - const all = Effect.fn("McpAuth.all")(function* () { - return yield* fs.readJson(filepath).pipe( - Effect.map((data) => data as Record), - Effect.catch(() => Effect.succeed({} as Record)), - ) - }) - - const get = Effect.fn("McpAuth.get")(function* (mcpName: string) { - const data = yield* all() - return data[mcpName] - }) - - const getForUrl = Effect.fn("McpAuth.getForUrl")(function* (mcpName: string, serverUrl: string) { - const entry = yield* get(mcpName) - if (!entry) return undefined - if (!entry.serverUrl) return undefined - if (entry.serverUrl !== serverUrl) return undefined - return entry - }) - - const set = Effect.fn("McpAuth.set")(function* (mcpName: string, entry: Entry, serverUrl?: string) { - const data = yield* all() - if (serverUrl) entry.serverUrl = serverUrl - yield* fs.writeJson(filepath, { ...data, [mcpName]: entry }, 0o600).pipe(Effect.orDie) - }) - - const remove = Effect.fn("McpAuth.remove")(function* (mcpName: string) { - const data = yield* all() - delete data[mcpName] - yield* fs.writeJson(filepath, data, 0o600).pipe(Effect.orDie) - }) - - const updateField = (field: K, spanName: string) => - Effect.fn(`McpAuth.${spanName}`)(function* (mcpName: string, value: NonNullable, serverUrl?: string) { - const entry = (yield* get(mcpName)) ?? 
{} - entry[field] = value - yield* set(mcpName, entry, serverUrl) - }) - - const clearField = (field: K, spanName: string) => - Effect.fn(`McpAuth.${spanName}`)(function* (mcpName: string) { - const entry = yield* get(mcpName) - if (entry) { - delete entry[field] - yield* set(mcpName, entry) - } - }) - - const updateTokens = updateField("tokens", "updateTokens") - const updateClientInfo = updateField("clientInfo", "updateClientInfo") - const updateCodeVerifier = updateField("codeVerifier", "updateCodeVerifier") - const updateOAuthState = updateField("oauthState", "updateOAuthState") - const clearCodeVerifier = clearField("codeVerifier", "clearCodeVerifier") - const clearOAuthState = clearField("oauthState", "clearOAuthState") - - const getOAuthState = Effect.fn("McpAuth.getOAuthState")(function* (mcpName: string) { - const entry = yield* get(mcpName) - return entry?.oauthState - }) - - const isTokenExpired = Effect.fn("McpAuth.isTokenExpired")(function* (mcpName: string) { - const entry = yield* get(mcpName) - if (!entry?.tokens) return null - if (!entry.tokens.expiresAt) return false - return entry.tokens.expiresAt < Date.now() / 1000 - }) - - return Service.of({ - all, - get, - getForUrl, - set, - remove, - updateTokens, - updateClientInfo, - updateCodeVerifier, - clearCodeVerifier, - updateOAuthState, - getOAuthState, - clearOAuthState, - isTokenExpired, - }) - }), - ) - - export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) +export interface Interface { + readonly all: () => Effect.Effect> + readonly get: (mcpName: string) => Effect.Effect + readonly getForUrl: (mcpName: string, serverUrl: string) => Effect.Effect + readonly set: (mcpName: string, entry: Entry, serverUrl?: string) => Effect.Effect + readonly remove: (mcpName: string) => Effect.Effect + readonly updateTokens: (mcpName: string, tokens: Tokens, serverUrl?: string) => Effect.Effect + readonly updateClientInfo: (mcpName: string, clientInfo: ClientInfo, serverUrl?: 
string) => Effect.Effect + readonly updateCodeVerifier: (mcpName: string, codeVerifier: string) => Effect.Effect + readonly clearCodeVerifier: (mcpName: string) => Effect.Effect + readonly updateOAuthState: (mcpName: string, oauthState: string) => Effect.Effect + readonly getOAuthState: (mcpName: string) => Effect.Effect + readonly clearOAuthState: (mcpName: string) => Effect.Effect + readonly isTokenExpired: (mcpName: string) => Effect.Effect } + +export class Service extends Context.Service()("@opencode/McpAuth") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const fs = yield* AppFileSystem.Service + + const all = Effect.fn("McpAuth.all")(function* () { + return yield* fs.readJson(filepath).pipe( + Effect.map((data) => data as Record), + Effect.catch(() => Effect.succeed({} as Record)), + ) + }) + + const get = Effect.fn("McpAuth.get")(function* (mcpName: string) { + const data = yield* all() + return data[mcpName] + }) + + const getForUrl = Effect.fn("McpAuth.getForUrl")(function* (mcpName: string, serverUrl: string) { + const entry = yield* get(mcpName) + if (!entry) return undefined + if (!entry.serverUrl) return undefined + if (entry.serverUrl !== serverUrl) return undefined + return entry + }) + + const set = Effect.fn("McpAuth.set")(function* (mcpName: string, entry: Entry, serverUrl?: string) { + const data = yield* all() + if (serverUrl) entry.serverUrl = serverUrl + yield* fs.writeJson(filepath, { ...data, [mcpName]: entry }, 0o600).pipe(Effect.orDie) + }) + + const remove = Effect.fn("McpAuth.remove")(function* (mcpName: string) { + const data = yield* all() + delete data[mcpName] + yield* fs.writeJson(filepath, data, 0o600).pipe(Effect.orDie) + }) + + const updateField = (field: K, spanName: string) => + Effect.fn(`McpAuth.${spanName}`)(function* (mcpName: string, value: NonNullable, serverUrl?: string) { + const entry = (yield* get(mcpName)) ?? 
{} + entry[field] = value + yield* set(mcpName, entry, serverUrl) + }) + + const clearField = (field: K, spanName: string) => + Effect.fn(`McpAuth.${spanName}`)(function* (mcpName: string) { + const entry = yield* get(mcpName) + if (entry) { + delete entry[field] + yield* set(mcpName, entry) + } + }) + + const updateTokens = updateField("tokens", "updateTokens") + const updateClientInfo = updateField("clientInfo", "updateClientInfo") + const updateCodeVerifier = updateField("codeVerifier", "updateCodeVerifier") + const updateOAuthState = updateField("oauthState", "updateOAuthState") + const clearCodeVerifier = clearField("codeVerifier", "clearCodeVerifier") + const clearOAuthState = clearField("oauthState", "clearOAuthState") + + const getOAuthState = Effect.fn("McpAuth.getOAuthState")(function* (mcpName: string) { + const entry = yield* get(mcpName) + return entry?.oauthState + }) + + const isTokenExpired = Effect.fn("McpAuth.isTokenExpired")(function* (mcpName: string) { + const entry = yield* get(mcpName) + if (!entry?.tokens) return null + if (!entry.tokens.expiresAt) return false + return entry.tokens.expiresAt < Date.now() / 1000 + }) + + return Service.of({ + all, + get, + getForUrl, + set, + remove, + updateTokens, + updateClientInfo, + updateCodeVerifier, + clearCodeVerifier, + updateOAuthState, + getOAuthState, + clearOAuthState, + isTokenExpired, + }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) + +export * as McpAuth from "./auth" From f6dbb2f3e0de46d2a5e07618548c94259889a22a Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:37 -0400 Subject: [PATCH 079/120] refactor: unwrap Heap namespace + self-reexport (#22931) --- packages/opencode/src/cli/heap.ts | 82 +++++++++++++++---------------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/packages/opencode/src/cli/heap.ts b/packages/opencode/src/cli/heap.ts index cf1cffa800..87b7b2ebf9 100644 --- 
a/packages/opencode/src/cli/heap.ts +++ b/packages/opencode/src/cli/heap.ts @@ -8,52 +8,52 @@ const log = Log.create({ service: "heap" }) const MINUTE = 60_000 const LIMIT = 2 * 1024 * 1024 * 1024 -export namespace Heap { - let timer: Timer | undefined - let lock = false - let armed = true +let timer: Timer | undefined +let lock = false +let armed = true - export function start() { - if (!Flag.OPENCODE_AUTO_HEAP_SNAPSHOT) return - if (timer) return +export function start() { + if (!Flag.OPENCODE_AUTO_HEAP_SNAPSHOT) return + if (timer) return - const run = async () => { - if (lock) return + const run = async () => { + if (lock) return - const stat = process.memoryUsage() - if (stat.rss <= LIMIT) { - armed = true - return - } - if (!armed) return + const stat = process.memoryUsage() + if (stat.rss <= LIMIT) { + armed = true + return + } + if (!armed) return - lock = true - armed = false - const file = path.join( - Global.Path.log, - `heap-${process.pid}-${new Date().toISOString().replace(/[:.]/g, "")}.heapsnapshot`, - ) - log.warn("heap usage exceeded limit", { - rss: stat.rss, - heap: stat.heapUsed, - file, + lock = true + armed = false + const file = path.join( + Global.Path.log, + `heap-${process.pid}-${new Date().toISOString().replace(/[:.]/g, "")}.heapsnapshot`, + ) + log.warn("heap usage exceeded limit", { + rss: stat.rss, + heap: stat.heapUsed, + file, + }) + + await Promise.resolve() + .then(() => writeHeapSnapshot(file)) + .catch((err) => { + log.error("failed to write heap snapshot", { + error: err instanceof Error ? err.message : String(err), + file, + }) }) - await Promise.resolve() - .then(() => writeHeapSnapshot(file)) - .catch((err) => { - log.error("failed to write heap snapshot", { - error: err instanceof Error ? 
err.message : String(err), - file, - }) - }) - - lock = false - } - - timer = setInterval(() => { - void run() - }, MINUTE) - timer.unref?.() + lock = false } + + timer = setInterval(() => { + void run() + }, MINUTE) + timer.unref?.() } + +export * as Heap from "./heap" From 79732ab17560c59745eef6d151b4b6cc69e23163 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:41 -0400 Subject: [PATCH 080/120] refactor: unwrap UI namespace + self-reexport (#22951) --- packages/opencode/src/cli/ui.ts | 226 ++++++++++++++++---------------- 1 file changed, 113 insertions(+), 113 deletions(-) diff --git a/packages/opencode/src/cli/ui.ts b/packages/opencode/src/cli/ui.ts index d735a55417..46335d24a8 100644 --- a/packages/opencode/src/cli/ui.ts +++ b/packages/opencode/src/cli/ui.ts @@ -3,131 +3,131 @@ import { EOL } from "os" import { NamedError } from "@opencode-ai/shared/util/error" import { logo as glyphs } from "./logo" -export namespace UI { - const wordmark = [ - `⠀ ▄ `, - `█▀▀█ █▀▀█ █▀▀█ █▀▀▄ █▀▀▀ █▀▀█ █▀▀█ █▀▀█`, - `█ █ █ █ █▀▀▀ █ █ █ █ █ █ █ █▀▀▀`, - `▀▀▀▀ █▀▀▀ ▀▀▀▀ ▀ ▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀`, - ] +const wordmark = [ + `⠀ ▄ `, + `█▀▀█ █▀▀█ █▀▀█ █▀▀▄ █▀▀▀ █▀▀█ █▀▀█ █▀▀█`, + `█ █ █ █ █▀▀▀ █ █ █ █ █ █ █ █▀▀▀`, + `▀▀▀▀ █▀▀▀ ▀▀▀▀ ▀ ▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀`, +] - export const CancelledError = NamedError.create("UICancelledError", z.void()) +export const CancelledError = NamedError.create("UICancelledError", z.void()) - export const Style = { - TEXT_HIGHLIGHT: "\x1b[96m", - TEXT_HIGHLIGHT_BOLD: "\x1b[96m\x1b[1m", - TEXT_DIM: "\x1b[90m", - TEXT_DIM_BOLD: "\x1b[90m\x1b[1m", - TEXT_NORMAL: "\x1b[0m", - TEXT_NORMAL_BOLD: "\x1b[1m", - TEXT_WARNING: "\x1b[93m", - TEXT_WARNING_BOLD: "\x1b[93m\x1b[1m", - TEXT_DANGER: "\x1b[91m", - TEXT_DANGER_BOLD: "\x1b[91m\x1b[1m", - TEXT_SUCCESS: "\x1b[92m", - TEXT_SUCCESS_BOLD: "\x1b[92m\x1b[1m", - TEXT_INFO: "\x1b[94m", - TEXT_INFO_BOLD: "\x1b[94m\x1b[1m", - } +export const Style = { + TEXT_HIGHLIGHT: "\x1b[96m", + 
TEXT_HIGHLIGHT_BOLD: "\x1b[96m\x1b[1m", + TEXT_DIM: "\x1b[90m", + TEXT_DIM_BOLD: "\x1b[90m\x1b[1m", + TEXT_NORMAL: "\x1b[0m", + TEXT_NORMAL_BOLD: "\x1b[1m", + TEXT_WARNING: "\x1b[93m", + TEXT_WARNING_BOLD: "\x1b[93m\x1b[1m", + TEXT_DANGER: "\x1b[91m", + TEXT_DANGER_BOLD: "\x1b[91m\x1b[1m", + TEXT_SUCCESS: "\x1b[92m", + TEXT_SUCCESS_BOLD: "\x1b[92m\x1b[1m", + TEXT_INFO: "\x1b[94m", + TEXT_INFO_BOLD: "\x1b[94m\x1b[1m", +} - export function println(...message: string[]) { - print(...message) - process.stderr.write(EOL) - } +export function println(...message: string[]) { + print(...message) + process.stderr.write(EOL) +} - export function print(...message: string[]) { - blank = false - process.stderr.write(message.join(" ")) - } +export function print(...message: string[]) { + blank = false + process.stderr.write(message.join(" ")) +} - let blank = false - export function empty() { - if (blank) return - println("" + Style.TEXT_NORMAL) - blank = true - } +let blank = false +export function empty() { + if (blank) return + println("" + Style.TEXT_NORMAL) + blank = true +} - export function logo(pad?: string) { - if (!process.stdout.isTTY && !process.stderr.isTTY) { - const result = [] - for (const row of wordmark) { - if (pad) result.push(pad) - result.push(row) - result.push(EOL) - } - return result.join("").trimEnd() - } - - const result: string[] = [] - const reset = "\x1b[0m" - const left = { - fg: "\x1b[90m", - shadow: "\x1b[38;5;235m", - bg: "\x1b[48;5;235m", - } - const right = { - fg: reset, - shadow: "\x1b[38;5;238m", - bg: "\x1b[48;5;238m", - } - const gap = " " - const draw = (line: string, fg: string, shadow: string, bg: string) => { - const parts: string[] = [] - for (const char of line) { - if (char === "_") { - parts.push(bg, " ", reset) - continue - } - if (char === "^") { - parts.push(fg, bg, "▀", reset) - continue - } - if (char === "~") { - parts.push(shadow, "▀", reset) - continue - } - if (char === " ") { - parts.push(" ") - continue - } - 
parts.push(fg, char, reset) - } - return parts.join("") - } - glyphs.left.forEach((row, index) => { +export function logo(pad?: string) { + if (!process.stdout.isTTY && !process.stderr.isTTY) { + const result = [] + for (const row of wordmark) { if (pad) result.push(pad) - result.push(draw(row, left.fg, left.shadow, left.bg)) - result.push(gap) - const other = glyphs.right[index] ?? "" - result.push(draw(other, right.fg, right.shadow, right.bg)) + result.push(row) result.push(EOL) - }) + } return result.join("").trimEnd() } - export async function input(prompt: string): Promise { - const readline = require("readline") - const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout, - }) - - return new Promise((resolve) => { - rl.question(prompt, (answer: string) => { - rl.close() - resolve(answer.trim()) - }) - }) + const result: string[] = [] + const reset = "\x1b[0m" + const left = { + fg: "\x1b[90m", + shadow: "\x1b[38;5;235m", + bg: "\x1b[48;5;235m", } - - export function error(message: string) { - if (message.startsWith("Error: ")) { - message = message.slice("Error: ".length) + const right = { + fg: reset, + shadow: "\x1b[38;5;238m", + bg: "\x1b[48;5;238m", + } + const gap = " " + const draw = (line: string, fg: string, shadow: string, bg: string) => { + const parts: string[] = [] + for (const char of line) { + if (char === "_") { + parts.push(bg, " ", reset) + continue + } + if (char === "^") { + parts.push(fg, bg, "▀", reset) + continue + } + if (char === "~") { + parts.push(shadow, "▀", reset) + continue + } + if (char === " ") { + parts.push(" ") + continue + } + parts.push(fg, char, reset) } - println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message) - } - - export function markdown(text: string): string { - return text + return parts.join("") } + glyphs.left.forEach((row, index) => { + if (pad) result.push(pad) + result.push(draw(row, left.fg, left.shadow, left.bg)) + result.push(gap) + const other = 
glyphs.right[index] ?? "" + result.push(draw(other, right.fg, right.shadow, right.bg)) + result.push(EOL) + }) + return result.join("").trimEnd() } + +export async function input(prompt: string): Promise { + const readline = require("readline") + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + }) + + return new Promise((resolve) => { + rl.question(prompt, (answer: string) => { + rl.close() + resolve(answer.trim()) + }) + }) +} + +export function error(message: string) { + if (message.startsWith("Error: ")) { + message = message.slice("Error: ".length) + } + println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message) +} + +export function markdown(text: string): string { + return text +} + +export * as UI from "./ui" From fb0274446043401d48fae0aefb8e21f75a080ee8 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:44 -0400 Subject: [PATCH 081/120] refactor: unwrap Agent namespace + self-reexport (#22935) --- packages/opencode/src/agent/agent.ts | 766 +++++++++++++-------------- 1 file changed, 383 insertions(+), 383 deletions(-) diff --git a/packages/opencode/src/agent/agent.ts b/packages/opencode/src/agent/agent.ts index 54ca484555..07f742fe12 100644 --- a/packages/opencode/src/agent/agent.ts +++ b/packages/opencode/src/agent/agent.ts @@ -24,389 +24,389 @@ import { InstanceState } from "@/effect" import * as Option from "effect/Option" import * as OtelTracer from "@effect/opentelemetry/Tracer" -export namespace Agent { - export const Info = z - .object({ - name: z.string(), - description: z.string().optional(), - mode: z.enum(["subagent", "primary", "all"]), - native: z.boolean().optional(), - hidden: z.boolean().optional(), - topP: z.number().optional(), - temperature: z.number().optional(), - color: z.string().optional(), - permission: Permission.Ruleset.zod, - model: z - .object({ - modelID: ModelID.zod, - providerID: ProviderID.zod, - }) - .optional(), - variant: z.string().optional(), - 
prompt: z.string().optional(), - options: z.record(z.string(), z.any()), - steps: z.number().int().positive().optional(), - }) - .meta({ - ref: "Agent", - }) - export type Info = z.infer - - export interface Interface { - readonly get: (agent: string) => Effect.Effect - readonly list: () => Effect.Effect - readonly defaultAgent: () => Effect.Effect - readonly generate: (input: { - description: string - model?: { providerID: ProviderID; modelID: ModelID } - }) => Effect.Effect<{ - identifier: string - whenToUse: string - systemPrompt: string - }> - } - - type State = Omit - - export class Service extends Context.Service()("@opencode/Agent") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const config = yield* Config.Service - const auth = yield* Auth.Service - const plugin = yield* Plugin.Service - const skill = yield* Skill.Service - const provider = yield* Provider.Service - - const state = yield* InstanceState.make( - Effect.fn("Agent.state")(function* (_ctx) { - const cfg = yield* config.get() - const skillDirs = yield* skill.dirs() - const whitelistedDirs = [Truncate.GLOB, ...skillDirs.map((dir) => path.join(dir, "*"))] - - const defaults = Permission.fromConfig({ - "*": "allow", - doom_loop: "ask", - external_directory: { - "*": "ask", - ...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])), - }, - question: "deny", - plan_enter: "deny", - plan_exit: "deny", - // mirrors github.com/github/gitignore Node.gitignore pattern for .env files - read: { - "*": "allow", - "*.env": "ask", - "*.env.*": "ask", - "*.env.example": "allow", - }, - }) - - const user = Permission.fromConfig(cfg.permission ?? {}) - - const agents: Record = { - build: { - name: "build", - description: "The default agent. 
Executes tools based on configured permissions.", - options: {}, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - question: "allow", - plan_enter: "allow", - }), - user, - ), - mode: "primary", - native: true, - }, - plan: { - name: "plan", - description: "Plan mode. Disallows all edit tools.", - options: {}, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - question: "allow", - plan_exit: "allow", - external_directory: { - [path.join(Global.Path.data, "plans", "*")]: "allow", - }, - edit: { - "*": "deny", - [path.join(".opencode", "plans", "*.md")]: "allow", - [path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]: - "allow", - }, - }), - user, - ), - mode: "primary", - native: true, - }, - general: { - name: "general", - description: `General-purpose agent for researching complex questions and executing multi-step tasks. Use this agent to execute multiple units of work in parallel.`, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - todowrite: "deny", - }), - user, - ), - options: {}, - mode: "subagent", - native: true, - }, - explore: { - name: "explore", - permission: Permission.merge( - defaults, - Permission.fromConfig({ - "*": "deny", - grep: "allow", - glob: "allow", - list: "allow", - bash: "allow", - webfetch: "allow", - websearch: "allow", - codesearch: "allow", - read: "allow", - external_directory: { - "*": "ask", - ...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])), - }, - }), - user, - ), - description: `Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). 
When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.`, - prompt: PROMPT_EXPLORE, - options: {}, - mode: "subagent", - native: true, - }, - compaction: { - name: "compaction", - mode: "primary", - native: true, - hidden: true, - prompt: PROMPT_COMPACTION, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - "*": "deny", - }), - user, - ), - options: {}, - }, - title: { - name: "title", - mode: "primary", - options: {}, - native: true, - hidden: true, - temperature: 0.5, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - "*": "deny", - }), - user, - ), - prompt: PROMPT_TITLE, - }, - summary: { - name: "summary", - mode: "primary", - options: {}, - native: true, - hidden: true, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - "*": "deny", - }), - user, - ), - prompt: PROMPT_SUMMARY, - }, - } - - for (const [key, value] of Object.entries(cfg.agent ?? {})) { - if (value.disable) { - delete agents[key] - continue - } - let item = agents[key] - if (!item) - item = agents[key] = { - name: key, - mode: "all", - permission: Permission.merge(defaults, user), - options: {}, - native: false, - } - if (value.model) item.model = Provider.parseModel(value.model) - item.variant = value.variant ?? item.variant - item.prompt = value.prompt ?? item.prompt - item.description = value.description ?? item.description - item.temperature = value.temperature ?? item.temperature - item.topP = value.top_p ?? item.topP - item.mode = value.mode ?? item.mode - item.color = value.color ?? item.color - item.hidden = value.hidden ?? item.hidden - item.name = value.name ?? item.name - item.steps = value.steps ?? item.steps - item.options = mergeDeep(item.options, value.options ?? 
{}) - item.permission = Permission.merge(item.permission, Permission.fromConfig(value.permission ?? {})) - } - - // Ensure Truncate.GLOB is allowed unless explicitly configured - for (const name in agents) { - const agent = agents[name] - const explicit = agent.permission.some((r) => { - if (r.permission !== "external_directory") return false - if (r.action !== "deny") return false - return r.pattern === Truncate.GLOB - }) - if (explicit) continue - - agents[name].permission = Permission.merge( - agents[name].permission, - Permission.fromConfig({ external_directory: { [Truncate.GLOB]: "allow" } }), - ) - } - - const get = Effect.fnUntraced(function* (agent: string) { - return agents[agent] - }) - - const list = Effect.fnUntraced(function* () { - const cfg = yield* config.get() - return pipe( - agents, - values(), - sortBy( - [(x) => (cfg.default_agent ? x.name === cfg.default_agent : x.name === "build"), "desc"], - [(x) => x.name, "asc"], - ), - ) - }) - - const defaultAgent = Effect.fnUntraced(function* () { - const c = yield* config.get() - if (c.default_agent) { - const agent = agents[c.default_agent] - if (!agent) throw new Error(`default agent "${c.default_agent}" not found`) - if (agent.mode === "subagent") throw new Error(`default agent "${c.default_agent}" is a subagent`) - if (agent.hidden === true) throw new Error(`default agent "${c.default_agent}" is hidden`) - return agent.name - } - const visible = Object.values(agents).find((a) => a.mode !== "subagent" && a.hidden !== true) - if (!visible) throw new Error("no primary visible agent found") - return visible.name - }) - - return { - get, - list, - defaultAgent, - } satisfies State - }), - ) - - return Service.of({ - get: Effect.fn("Agent.get")(function* (agent: string) { - return yield* InstanceState.useEffect(state, (s) => s.get(agent)) - }), - list: Effect.fn("Agent.list")(function* () { - return yield* InstanceState.useEffect(state, (s) => s.list()) - }), - defaultAgent: 
Effect.fn("Agent.defaultAgent")(function* () { - return yield* InstanceState.useEffect(state, (s) => s.defaultAgent()) - }), - generate: Effect.fn("Agent.generate")(function* (input: { - description: string - model?: { providerID: ProviderID; modelID: ModelID } - }) { - const cfg = yield* config.get() - const model = input.model ?? (yield* provider.defaultModel()) - const resolved = yield* provider.getModel(model.providerID, model.modelID) - const language = yield* provider.getLanguage(resolved) - const tracer = cfg.experimental?.openTelemetry - ? Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer)) - : undefined - - const system = [PROMPT_GENERATE] - yield* plugin.trigger("experimental.chat.system.transform", { model: resolved }, { system }) - const existing = yield* InstanceState.useEffect(state, (s) => s.list()) - - // TODO: clean this up so provider specific logic doesnt bleed over - const authInfo = yield* auth.get(model.providerID).pipe(Effect.orDie) - const isOpenaiOauth = model.providerID === "openai" && authInfo?.type === "oauth" - - const params = { - experimental_telemetry: { - isEnabled: cfg.experimental?.openTelemetry, - tracer, - metadata: { - userId: cfg.username ?? "unknown", - }, - }, - temperature: 0.3, - messages: [ - ...(isOpenaiOauth - ? 
[] - : system.map( - (item): ModelMessage => ({ - role: "system", - content: item, - }), - )), - { - role: "user", - content: `Create an agent configuration based on this request: "${input.description}".\n\nIMPORTANT: The following identifiers already exist and must NOT be used: ${existing.map((i) => i.name).join(", ")}\n Return ONLY the JSON object, no other text, do not wrap in backticks`, - }, - ], - model: language, - schema: z.object({ - identifier: z.string(), - whenToUse: z.string(), - systemPrompt: z.string(), - }), - } satisfies Parameters[0] - - if (isOpenaiOauth) { - return yield* Effect.promise(async () => { - const result = streamObject({ - ...params, - providerOptions: ProviderTransform.providerOptions(resolved, { - instructions: system.join("\n"), - store: false, - }), - onError: () => {}, - }) - for await (const part of result.fullStream) { - if (part.type === "error") throw part.error - } - return result.object - }) - } - - return yield* Effect.promise(() => generateObject(params).then((r) => r.object)) - }), +export const Info = z + .object({ + name: z.string(), + description: z.string().optional(), + mode: z.enum(["subagent", "primary", "all"]), + native: z.boolean().optional(), + hidden: z.boolean().optional(), + topP: z.number().optional(), + temperature: z.number().optional(), + color: z.string().optional(), + permission: Permission.Ruleset.zod, + model: z + .object({ + modelID: ModelID.zod, + providerID: ProviderID.zod, }) - }), - ) + .optional(), + variant: z.string().optional(), + prompt: z.string().optional(), + options: z.record(z.string(), z.any()), + steps: z.number().int().positive().optional(), + }) + .meta({ + ref: "Agent", + }) +export type Info = z.infer - export const defaultLayer = layer.pipe( - Layer.provide(Plugin.defaultLayer), - Layer.provide(Provider.defaultLayer), - Layer.provide(Auth.defaultLayer), - Layer.provide(Config.defaultLayer), - Layer.provide(Skill.defaultLayer), - ) +export interface Interface { + readonly get: 
(agent: string) => Effect.Effect + readonly list: () => Effect.Effect + readonly defaultAgent: () => Effect.Effect + readonly generate: (input: { + description: string + model?: { providerID: ProviderID; modelID: ModelID } + }) => Effect.Effect<{ + identifier: string + whenToUse: string + systemPrompt: string + }> } + +type State = Omit + +export class Service extends Context.Service()("@opencode/Agent") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const config = yield* Config.Service + const auth = yield* Auth.Service + const plugin = yield* Plugin.Service + const skill = yield* Skill.Service + const provider = yield* Provider.Service + + const state = yield* InstanceState.make( + Effect.fn("Agent.state")(function* (_ctx) { + const cfg = yield* config.get() + const skillDirs = yield* skill.dirs() + const whitelistedDirs = [Truncate.GLOB, ...skillDirs.map((dir) => path.join(dir, "*"))] + + const defaults = Permission.fromConfig({ + "*": "allow", + doom_loop: "ask", + external_directory: { + "*": "ask", + ...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])), + }, + question: "deny", + plan_enter: "deny", + plan_exit: "deny", + // mirrors github.com/github/gitignore Node.gitignore pattern for .env files + read: { + "*": "allow", + "*.env": "ask", + "*.env.*": "ask", + "*.env.example": "allow", + }, + }) + + const user = Permission.fromConfig(cfg.permission ?? {}) + + const agents: Record = { + build: { + name: "build", + description: "The default agent. Executes tools based on configured permissions.", + options: {}, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + question: "allow", + plan_enter: "allow", + }), + user, + ), + mode: "primary", + native: true, + }, + plan: { + name: "plan", + description: "Plan mode. 
Disallows all edit tools.", + options: {}, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + question: "allow", + plan_exit: "allow", + external_directory: { + [path.join(Global.Path.data, "plans", "*")]: "allow", + }, + edit: { + "*": "deny", + [path.join(".opencode", "plans", "*.md")]: "allow", + [path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]: + "allow", + }, + }), + user, + ), + mode: "primary", + native: true, + }, + general: { + name: "general", + description: `General-purpose agent for researching complex questions and executing multi-step tasks. Use this agent to execute multiple units of work in parallel.`, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + todowrite: "deny", + }), + user, + ), + options: {}, + mode: "subagent", + native: true, + }, + explore: { + name: "explore", + permission: Permission.merge( + defaults, + Permission.fromConfig({ + "*": "deny", + grep: "allow", + glob: "allow", + list: "allow", + bash: "allow", + webfetch: "allow", + websearch: "allow", + codesearch: "allow", + read: "allow", + external_directory: { + "*": "ask", + ...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])), + }, + }), + user, + ), + description: `Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). 
When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.`, + prompt: PROMPT_EXPLORE, + options: {}, + mode: "subagent", + native: true, + }, + compaction: { + name: "compaction", + mode: "primary", + native: true, + hidden: true, + prompt: PROMPT_COMPACTION, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + "*": "deny", + }), + user, + ), + options: {}, + }, + title: { + name: "title", + mode: "primary", + options: {}, + native: true, + hidden: true, + temperature: 0.5, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + "*": "deny", + }), + user, + ), + prompt: PROMPT_TITLE, + }, + summary: { + name: "summary", + mode: "primary", + options: {}, + native: true, + hidden: true, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + "*": "deny", + }), + user, + ), + prompt: PROMPT_SUMMARY, + }, + } + + for (const [key, value] of Object.entries(cfg.agent ?? {})) { + if (value.disable) { + delete agents[key] + continue + } + let item = agents[key] + if (!item) + item = agents[key] = { + name: key, + mode: "all", + permission: Permission.merge(defaults, user), + options: {}, + native: false, + } + if (value.model) item.model = Provider.parseModel(value.model) + item.variant = value.variant ?? item.variant + item.prompt = value.prompt ?? item.prompt + item.description = value.description ?? item.description + item.temperature = value.temperature ?? item.temperature + item.topP = value.top_p ?? item.topP + item.mode = value.mode ?? item.mode + item.color = value.color ?? item.color + item.hidden = value.hidden ?? item.hidden + item.name = value.name ?? item.name + item.steps = value.steps ?? item.steps + item.options = mergeDeep(item.options, value.options ?? 
{}) + item.permission = Permission.merge(item.permission, Permission.fromConfig(value.permission ?? {})) + } + + // Ensure Truncate.GLOB is allowed unless explicitly configured + for (const name in agents) { + const agent = agents[name] + const explicit = agent.permission.some((r) => { + if (r.permission !== "external_directory") return false + if (r.action !== "deny") return false + return r.pattern === Truncate.GLOB + }) + if (explicit) continue + + agents[name].permission = Permission.merge( + agents[name].permission, + Permission.fromConfig({ external_directory: { [Truncate.GLOB]: "allow" } }), + ) + } + + const get = Effect.fnUntraced(function* (agent: string) { + return agents[agent] + }) + + const list = Effect.fnUntraced(function* () { + const cfg = yield* config.get() + return pipe( + agents, + values(), + sortBy( + [(x) => (cfg.default_agent ? x.name === cfg.default_agent : x.name === "build"), "desc"], + [(x) => x.name, "asc"], + ), + ) + }) + + const defaultAgent = Effect.fnUntraced(function* () { + const c = yield* config.get() + if (c.default_agent) { + const agent = agents[c.default_agent] + if (!agent) throw new Error(`default agent "${c.default_agent}" not found`) + if (agent.mode === "subagent") throw new Error(`default agent "${c.default_agent}" is a subagent`) + if (agent.hidden === true) throw new Error(`default agent "${c.default_agent}" is hidden`) + return agent.name + } + const visible = Object.values(agents).find((a) => a.mode !== "subagent" && a.hidden !== true) + if (!visible) throw new Error("no primary visible agent found") + return visible.name + }) + + return { + get, + list, + defaultAgent, + } satisfies State + }), + ) + + return Service.of({ + get: Effect.fn("Agent.get")(function* (agent: string) { + return yield* InstanceState.useEffect(state, (s) => s.get(agent)) + }), + list: Effect.fn("Agent.list")(function* () { + return yield* InstanceState.useEffect(state, (s) => s.list()) + }), + defaultAgent: 
Effect.fn("Agent.defaultAgent")(function* () { + return yield* InstanceState.useEffect(state, (s) => s.defaultAgent()) + }), + generate: Effect.fn("Agent.generate")(function* (input: { + description: string + model?: { providerID: ProviderID; modelID: ModelID } + }) { + const cfg = yield* config.get() + const model = input.model ?? (yield* provider.defaultModel()) + const resolved = yield* provider.getModel(model.providerID, model.modelID) + const language = yield* provider.getLanguage(resolved) + const tracer = cfg.experimental?.openTelemetry + ? Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer)) + : undefined + + const system = [PROMPT_GENERATE] + yield* plugin.trigger("experimental.chat.system.transform", { model: resolved }, { system }) + const existing = yield* InstanceState.useEffect(state, (s) => s.list()) + + // TODO: clean this up so provider specific logic doesnt bleed over + const authInfo = yield* auth.get(model.providerID).pipe(Effect.orDie) + const isOpenaiOauth = model.providerID === "openai" && authInfo?.type === "oauth" + + const params = { + experimental_telemetry: { + isEnabled: cfg.experimental?.openTelemetry, + tracer, + metadata: { + userId: cfg.username ?? "unknown", + }, + }, + temperature: 0.3, + messages: [ + ...(isOpenaiOauth + ? 
[] + : system.map( + (item): ModelMessage => ({ + role: "system", + content: item, + }), + )), + { + role: "user", + content: `Create an agent configuration based on this request: "${input.description}".\n\nIMPORTANT: The following identifiers already exist and must NOT be used: ${existing.map((i) => i.name).join(", ")}\n Return ONLY the JSON object, no other text, do not wrap in backticks`, + }, + ], + model: language, + schema: z.object({ + identifier: z.string(), + whenToUse: z.string(), + systemPrompt: z.string(), + }), + } satisfies Parameters[0] + + if (isOpenaiOauth) { + return yield* Effect.promise(async () => { + const result = streamObject({ + ...params, + providerOptions: ProviderTransform.providerOptions(resolved, { + instructions: system.join("\n"), + store: false, + }), + onError: () => {}, + }) + for await (const part of result.fullStream) { + if (part.type === "error") throw part.error + } + return result.object + }) + } + + return yield* Effect.promise(() => generateObject(params).then((r) => r.object)) + }), + }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Plugin.defaultLayer), + Layer.provide(Provider.defaultLayer), + Layer.provide(Auth.defaultLayer), + Layer.provide(Config.defaultLayer), + Layer.provide(Skill.defaultLayer), +) + +export * as Agent from "./agent" From 974fa1b8b1990c2e51172e32ef321e5fdce0843d Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:05 -0400 Subject: [PATCH 082/120] refactor: unwrap PluginMeta namespace + self-reexport (#22945) --- packages/opencode/src/plugin/meta.ts | 312 +++++++++++++-------------- 1 file changed, 156 insertions(+), 156 deletions(-) diff --git a/packages/opencode/src/plugin/meta.ts b/packages/opencode/src/plugin/meta.ts index 89955d1dfb..86ad8fbab1 100644 --- a/packages/opencode/src/plugin/meta.ts +++ b/packages/opencode/src/plugin/meta.ts @@ -8,181 +8,181 @@ import { Flock } from "@opencode-ai/shared/util/flock" import { parsePluginSpecifier, pluginSource 
} from "./shared" -export namespace PluginMeta { - type Source = "file" | "npm" +type Source = "file" | "npm" - export type Theme = { - src: string - dest: string - mtime?: number - size?: number - } +export type Theme = { + src: string + dest: string + mtime?: number + size?: number +} - export type Entry = { - id: string - source: Source - spec: string - target: string - requested?: string - version?: string - modified?: number - first_time: number - last_time: number - time_changed: number - load_count: number - fingerprint: string - themes?: Record - } +export type Entry = { + id: string + source: Source + spec: string + target: string + requested?: string + version?: string + modified?: number + first_time: number + last_time: number + time_changed: number + load_count: number + fingerprint: string + themes?: Record +} - export type State = "first" | "updated" | "same" +export type State = "first" | "updated" | "same" - export type Touch = { - spec: string - target: string - id: string - } +export type Touch = { + spec: string + target: string + id: string +} - type Store = Record - type Core = Omit - type Row = Touch & { core: Core } +type Store = Record +type Core = Omit +type Row = Touch & { core: Core } - function storePath() { - return Flag.OPENCODE_PLUGIN_META_FILE ?? path.join(Global.Path.state, "plugin-meta.json") - } +function storePath() { + return Flag.OPENCODE_PLUGIN_META_FILE ?? 
path.join(Global.Path.state, "plugin-meta.json") +} - function lock(file: string) { - return `plugin-meta:${file}` - } +function lock(file: string) { + return `plugin-meta:${file}` +} - function fileTarget(spec: string, target: string) { - if (spec.startsWith("file://")) return fileURLToPath(spec) - if (target.startsWith("file://")) return fileURLToPath(target) - return - } +function fileTarget(spec: string, target: string) { + if (spec.startsWith("file://")) return fileURLToPath(spec) + if (target.startsWith("file://")) return fileURLToPath(target) + return +} - async function modifiedAt(file: string) { - const stat = await Filesystem.statAsync(file) - if (!stat) return - const mtime = stat.mtimeMs - return Math.floor(typeof mtime === "bigint" ? Number(mtime) : mtime) - } +async function modifiedAt(file: string) { + const stat = await Filesystem.statAsync(file) + if (!stat) return + const mtime = stat.mtimeMs + return Math.floor(typeof mtime === "bigint" ? Number(mtime) : mtime) +} - function resolvedTarget(target: string) { - if (target.startsWith("file://")) return fileURLToPath(target) - return target - } +function resolvedTarget(target: string) { + if (target.startsWith("file://")) return fileURLToPath(target) + return target +} - async function npmVersion(target: string) { - const resolved = resolvedTarget(target) - const stat = await Filesystem.statAsync(resolved) - const dir = stat?.isDirectory() ? resolved : path.dirname(resolved) - return Filesystem.readJson<{ version?: string }>(path.join(dir, "package.json")) - .then((item) => item.version) - .catch(() => undefined) - } - - async function entryCore(item: Touch): Promise { - const spec = item.spec - const target = item.target - const source = pluginSource(spec) - if (source === "file") { - const file = fileTarget(spec, target) - return { - id: item.id, - source, - spec, - target, - modified: file ? 
await modifiedAt(file) : undefined, - } - } +async function npmVersion(target: string) { + const resolved = resolvedTarget(target) + const stat = await Filesystem.statAsync(resolved) + const dir = stat?.isDirectory() ? resolved : path.dirname(resolved) + return Filesystem.readJson<{ version?: string }>(path.join(dir, "package.json")) + .then((item) => item.version) + .catch(() => undefined) +} +async function entryCore(item: Touch): Promise { + const spec = item.spec + const target = item.target + const source = pluginSource(spec) + if (source === "file") { + const file = fileTarget(spec, target) return { id: item.id, source, spec, target, - requested: parsePluginSpecifier(spec).version, - version: await npmVersion(target), + modified: file ? await modifiedAt(file) : undefined, } } - function fingerprint(value: Core) { - if (value.source === "file") return [value.target, value.modified ?? ""].join("|") - return [value.target, value.requested ?? "", value.version ?? ""].join("|") - } - - async function read(file: string): Promise { - return Filesystem.readJson(file).catch(() => ({}) as Store) - } - - async function row(item: Touch): Promise { - return { - ...item, - core: await entryCore(item), - } - } - - function next(prev: Entry | undefined, core: Core, now: number): { state: State; entry: Entry } { - const entry: Entry = { - ...core, - first_time: prev?.first_time ?? now, - last_time: now, - time_changed: prev?.time_changed ?? now, - load_count: (prev?.load_count ?? 0) + 1, - fingerprint: fingerprint(core), - themes: prev?.themes, - } - const state: State = !prev ? "first" : prev.fingerprint === entry.fingerprint ? 
"same" : "updated" - if (state === "updated") entry.time_changed = now - return { - state, - entry, - } - } - - export async function touchMany(items: Touch[]): Promise> { - if (!items.length) return [] - const file = storePath() - const rows = await Promise.all(items.map((item) => row(item))) - - return Flock.withLock(lock(file), async () => { - const store = await read(file) - const now = Date.now() - const out: Array<{ state: State; entry: Entry }> = [] - for (const item of rows) { - const hit = next(store[item.id], item.core, now) - store[item.id] = hit.entry - out.push(hit) - } - await Filesystem.writeJson(file, store) - return out - }) - } - - export async function touch(spec: string, target: string, id: string): Promise<{ state: State; entry: Entry }> { - return touchMany([{ spec, target, id }]).then((item) => { - const hit = item[0] - if (hit) return hit - throw new Error("Failed to touch plugin metadata.") - }) - } - - export async function setTheme(id: string, name: string, theme: Theme): Promise { - const file = storePath() - await Flock.withLock(lock(file), async () => { - const store = await read(file) - const entry = store[id] - if (!entry) return - entry.themes = { - ...entry.themes, - [name]: theme, - } - await Filesystem.writeJson(file, store) - }) - } - - export async function list(): Promise { - const file = storePath() - return Flock.withLock(lock(file), async () => read(file)) + return { + id: item.id, + source, + spec, + target, + requested: parsePluginSpecifier(spec).version, + version: await npmVersion(target), } } + +function fingerprint(value: Core) { + if (value.source === "file") return [value.target, value.modified ?? ""].join("|") + return [value.target, value.requested ?? "", value.version ?? 
""].join("|") +} + +async function read(file: string): Promise { + return Filesystem.readJson(file).catch(() => ({}) as Store) +} + +async function row(item: Touch): Promise { + return { + ...item, + core: await entryCore(item), + } +} + +function next(prev: Entry | undefined, core: Core, now: number): { state: State; entry: Entry } { + const entry: Entry = { + ...core, + first_time: prev?.first_time ?? now, + last_time: now, + time_changed: prev?.time_changed ?? now, + load_count: (prev?.load_count ?? 0) + 1, + fingerprint: fingerprint(core), + themes: prev?.themes, + } + const state: State = !prev ? "first" : prev.fingerprint === entry.fingerprint ? "same" : "updated" + if (state === "updated") entry.time_changed = now + return { + state, + entry, + } +} + +export async function touchMany(items: Touch[]): Promise> { + if (!items.length) return [] + const file = storePath() + const rows = await Promise.all(items.map((item) => row(item))) + + return Flock.withLock(lock(file), async () => { + const store = await read(file) + const now = Date.now() + const out: Array<{ state: State; entry: Entry }> = [] + for (const item of rows) { + const hit = next(store[item.id], item.core, now) + store[item.id] = hit.entry + out.push(hit) + } + await Filesystem.writeJson(file, store) + return out + }) +} + +export async function touch(spec: string, target: string, id: string): Promise<{ state: State; entry: Entry }> { + return touchMany([{ spec, target, id }]).then((item) => { + const hit = item[0] + if (hit) return hit + throw new Error("Failed to touch plugin metadata.") + }) +} + +export async function setTheme(id: string, name: string, theme: Theme): Promise { + const file = storePath() + await Flock.withLock(lock(file), async () => { + const store = await read(file) + const entry = store[id] + if (!entry) return + entry.themes = { + ...entry.themes, + [name]: theme, + } + await Filesystem.writeJson(file, store) + }) +} + +export async function list(): Promise { + const file 
= storePath() + return Flock.withLock(lock(file), async () => read(file)) +} + +export * as PluginMeta from "./meta" From 06d247c70982b9bba0bb25f4b990fb59e3374650 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:08 -0400 Subject: [PATCH 083/120] refactor: unwrap FileIgnore namespace + self-reexport (#22937) --- packages/opencode/src/file/ignore.ts | 140 +++++++++++++-------------- 1 file changed, 70 insertions(+), 70 deletions(-) diff --git a/packages/opencode/src/file/ignore.ts b/packages/opencode/src/file/ignore.ts index 63f2f594eb..efce872808 100644 --- a/packages/opencode/src/file/ignore.ts +++ b/packages/opencode/src/file/ignore.ts @@ -1,81 +1,81 @@ import { Glob } from "@opencode-ai/shared/util/glob" -export namespace FileIgnore { - const FOLDERS = new Set([ - "node_modules", - "bower_components", - ".pnpm-store", - "vendor", - ".npm", - "dist", - "build", - "out", - ".next", - "target", - "bin", - "obj", - ".git", - ".svn", - ".hg", - ".vscode", - ".idea", - ".turbo", - ".output", - "desktop", - ".sst", - ".cache", - ".webkit-cache", - "__pycache__", - ".pytest_cache", - "mypy_cache", - ".history", - ".gradle", - ]) +const FOLDERS = new Set([ + "node_modules", + "bower_components", + ".pnpm-store", + "vendor", + ".npm", + "dist", + "build", + "out", + ".next", + "target", + "bin", + "obj", + ".git", + ".svn", + ".hg", + ".vscode", + ".idea", + ".turbo", + ".output", + "desktop", + ".sst", + ".cache", + ".webkit-cache", + "__pycache__", + ".pytest_cache", + "mypy_cache", + ".history", + ".gradle", +]) - const FILES = [ - "**/*.swp", - "**/*.swo", +const FILES = [ + "**/*.swp", + "**/*.swo", - "**/*.pyc", + "**/*.pyc", - // OS - "**/.DS_Store", - "**/Thumbs.db", + // OS + "**/.DS_Store", + "**/Thumbs.db", - // Logs & temp - "**/logs/**", - "**/tmp/**", - "**/temp/**", - "**/*.log", + // Logs & temp + "**/logs/**", + "**/tmp/**", + "**/temp/**", + "**/*.log", - // Coverage/test outputs - "**/coverage/**", - "**/.nyc_output/**", - ] + // 
Coverage/test outputs + "**/coverage/**", + "**/.nyc_output/**", +] - export const PATTERNS = [...FILES, ...FOLDERS] +export const PATTERNS = [...FILES, ...FOLDERS] - export function match( - filepath: string, - opts?: { - extra?: string[] - whitelist?: string[] - }, - ) { - for (const pattern of opts?.whitelist || []) { - if (Glob.match(pattern, filepath)) return false - } - - const parts = filepath.split(/[/\\]/) - for (let i = 0; i < parts.length; i++) { - if (FOLDERS.has(parts[i])) return true - } - - const extra = opts?.extra || [] - for (const pattern of [...FILES, ...extra]) { - if (Glob.match(pattern, filepath)) return true - } - - return false +export function match( + filepath: string, + opts?: { + extra?: string[] + whitelist?: string[] + }, +) { + for (const pattern of opts?.whitelist || []) { + if (Glob.match(pattern, filepath)) return false } + + const parts = filepath.split(/[/\\]/) + for (let i = 0; i < parts.length; i++) { + if (FOLDERS.has(parts[i])) return true + } + + const extra = opts?.extra || [] + for (const pattern of [...FILES, ...extra]) { + if (Glob.match(pattern, filepath)) return true + } + + return false } + +export * as FileIgnore from "./ignore" From 2704ad9110e6705dacd1b018a7245c1950a3ae80 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:24 -0400 Subject: [PATCH 084/120] refactor: unwrap TuiConfig namespace + self-reexport (#22952) --- .../opencode/src/cli/cmd/tui/config/tui.ts | 360 +++++++++--------- 1 file changed, 180 insertions(+), 180 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index e8eb9ff5d3..d264273bca 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -17,197 +17,197 @@ import { InstallationLocal, InstallationVersion } from "@/installation/version" import { makeRuntime } from "@/cli/effect/runtime" import { Filesystem, Log } from "@/util" -export 
namespace TuiConfig { - const log = Log.create({ service: "tui.config" }) +const log = Log.create({ service: "tui.config" }) - export const Info = TuiInfo +export const Info = TuiInfo - type Acc = { - result: Info - } +type Acc = { + result: Info +} - type State = { - config: Info - deps: Array> - } +type State = { + config: Info + deps: Array> +} - export type Info = z.output & { - // Internal resolved plugin list used by runtime loading. - plugin_origins?: ConfigPlugin.Origin[] - } +export type Info = z.output & { + // Internal resolved plugin list used by runtime loading. + plugin_origins?: ConfigPlugin.Origin[] +} - export interface Interface { - readonly get: () => Effect.Effect - readonly waitForDependencies: () => Effect.Effect - } +export interface Interface { + readonly get: () => Effect.Effect + readonly waitForDependencies: () => Effect.Effect +} - export class Service extends Context.Service()("@opencode/TuiConfig") {} +export class Service extends Context.Service()("@opencode/TuiConfig") {} - function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { - if (Filesystem.contains(ctx.directory, file)) return "local" - // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" - return "global" - } +function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { + if (Filesystem.contains(ctx.directory, file)) return "local" + // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" + return "global" +} - function customPath() { - return Flag.OPENCODE_TUI_CONFIG - } +function customPath() { + return Flag.OPENCODE_TUI_CONFIG +} - function normalize(raw: Record) { - const data = { ...raw } - if (!("tui" in data)) return data - if (!isRecord(data.tui)) { - delete data.tui - return data - } - - const tui = data.tui +function normalize(raw: Record) { + const data = { ...raw } + if (!("tui" in data)) return data + if (!isRecord(data.tui)) { delete data.tui - 
return { - ...tui, - ...data, - } + return data } - async function resolvePlugins(config: Info, configFilepath: string) { - if (!config.plugin) return config - for (let i = 0; i < config.plugin.length; i++) { - config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) - } - return config - } - - async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { - const data = await loadFile(file) - acc.result = mergeDeep(acc.result, data) - if (!data.plugin?.length) return - - const scope = pluginScope(file, ctx) - const plugins = ConfigPlugin.deduplicatePluginOrigins([ - ...(acc.result.plugin_origins ?? []), - ...data.plugin.map((spec) => ({ spec, scope, source: file })), - ]) - acc.result.plugin = plugins.map((item) => item.spec) - acc.result.plugin_origins = plugins - } - - async function loadState(ctx: { directory: string }) { - let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) - const directories = await ConfigPaths.directories(ctx.directory) - const custom = customPath() - await migrateTuiConfig({ directories, custom, cwd: ctx.directory }) - // Re-compute after migration since migrateTuiConfig may have created new tui.json files - projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? 
[] : await ConfigPaths.projectFiles("tui", ctx.directory) - - const acc: Acc = { - result: {}, - } - - for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { - await mergeFile(acc, file, ctx) - } - - if (custom) { - await mergeFile(acc, custom, ctx) - log.debug("loaded custom tui config", { path: custom }) - } - - for (const file of projectFiles) { - await mergeFile(acc, file, ctx) - } - - const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) - - for (const dir of dirs) { - if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue - for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { - await mergeFile(acc, file, ctx) - } - } - - const keybinds = { ...(acc.result.keybinds ?? {}) } - if (process.platform === "win32") { - // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. - keybinds.terminal_suspend = "none" - keybinds.input_undo ??= unique([ - "ctrl+z", - ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), - ]).join(",") - } - acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) - - return { - config: acc.result, - dirs: acc.result.plugin?.length ? dirs : [], - } - } - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const directory = yield* CurrentWorkingDirectory - const npm = yield* Npm.Service - const data = yield* Effect.promise(() => loadState({ directory })) - const deps = yield* Effect.forEach( - data.dirs, - (dir) => - npm - .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? 
"" : "@" + InstallationVersion)], - }) - .pipe(Effect.forkScoped), - { - concurrency: "unbounded", - }, - ) - - const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) - - const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => - Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), - ) - return Service.of({ get, waitForDependencies }) - }).pipe(Effect.withSpan("TuiConfig.layer")), - ) - - export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) - - const { runPromise } = makeRuntime(Service, defaultLayer) - - export async function waitForDependencies() { - await runPromise((svc) => svc.waitForDependencies()) - } - - export async function get() { - return runPromise((svc) => svc.get()) - } - - async function loadFile(filepath: string): Promise { - const text = await ConfigPaths.readFile(filepath) - if (!text) return {} - return load(text, filepath).catch((error) => { - log.warn("failed to load tui config", { path: filepath, error }) - return {} - }) - } - - async function load(text: string, configFilepath: string): Promise { - return ConfigParse.load(Info, text, { - type: "path", - path: configFilepath, - missing: "empty", - normalize: (data) => { - if (!isRecord(data)) return {} - - // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json - // (mirroring the old opencode.json shape) still get their settings applied. 
- return normalize(data) - }, - }) - .then((data) => resolvePlugins(data, configFilepath)) - .catch((error) => { - log.warn("invalid tui config", { path: configFilepath, error }) - return {} - }) + const tui = data.tui + delete data.tui + return { + ...tui, + ...data, } } + +async function resolvePlugins(config: Info, configFilepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) + } + return config +} + +async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { + const data = await loadFile(file) + acc.result = mergeDeep(acc.result, data) + if (!data.plugin?.length) return + + const scope = pluginScope(file, ctx) + const plugins = ConfigPlugin.deduplicatePluginOrigins([ + ...(acc.result.plugin_origins ?? []), + ...data.plugin.map((spec) => ({ spec, scope, source: file })), + ]) + acc.result.plugin = plugins.map((item) => item.spec) + acc.result.plugin_origins = plugins +} + +async function loadState(ctx: { directory: string }) { + let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) + const directories = await ConfigPaths.directories(ctx.directory) + const custom = customPath() + await migrateTuiConfig({ directories, custom, cwd: ctx.directory }) + // Re-compute after migration since migrateTuiConfig may have created new tui.json files + projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? 
[] : await ConfigPaths.projectFiles("tui", ctx.directory) + + const acc: Acc = { + result: {}, + } + + for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { + await mergeFile(acc, file, ctx) + } + + if (custom) { + await mergeFile(acc, custom, ctx) + log.debug("loaded custom tui config", { path: custom }) + } + + for (const file of projectFiles) { + await mergeFile(acc, file, ctx) + } + + const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) + + for (const dir of dirs) { + if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue + for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { + await mergeFile(acc, file, ctx) + } + } + + const keybinds = { ...(acc.result.keybinds ?? {}) } + if (process.platform === "win32") { + // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. + keybinds.terminal_suspend = "none" + keybinds.input_undo ??= unique([ + "ctrl+z", + ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), + ]).join(",") + } + acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) + + return { + config: acc.result, + dirs: acc.result.plugin?.length ? dirs : [], + } +} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const directory = yield* CurrentWorkingDirectory + const npm = yield* Npm.Service + const data = yield* Effect.promise(() => loadState({ directory })) + const deps = yield* Effect.forEach( + data.dirs, + (dir) => + npm + .install(dir, { + add: ["@opencode-ai/plugin" + (InstallationLocal ? 
"" : "@" + InstallationVersion)], + }) + .pipe(Effect.forkScoped), + { + concurrency: "unbounded", + }, + ) + + const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) + + const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => + Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), + ) + return Service.of({ get, waitForDependencies }) + }).pipe(Effect.withSpan("TuiConfig.layer")), +) + +export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) + +const { runPromise } = makeRuntime(Service, defaultLayer) + +export async function waitForDependencies() { + await runPromise((svc) => svc.waitForDependencies()) +} + +export async function get() { + return runPromise((svc) => svc.get()) +} + +async function loadFile(filepath: string): Promise { + const text = await ConfigPaths.readFile(filepath) + if (!text) return {} + return load(text, filepath).catch((error) => { + log.warn("failed to load tui config", { path: filepath, error }) + return {} + }) +} + +async function load(text: string, configFilepath: string): Promise { + return ConfigParse.load(Info, text, { + type: "path", + path: configFilepath, + missing: "empty", + normalize: (data) => { + if (!isRecord(data)) return {} + + // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json + // (mirroring the old opencode.json shape) still get their settings applied. 
+ return normalize(data) + }, + }) + .then((data) => resolvePlugins(data, configFilepath)) + .catch((error) => { + log.warn("invalid tui config", { path: configFilepath, error }) + return {} + }) +} + +export * as TuiConfig from "./tui" From 059b32c2124da9bb7a9e3cc1e9a49e3a72f29740 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:51 -0400 Subject: [PATCH 085/120] refactor: unwrap Protected namespace + self-reexport (#22938) --- packages/opencode/src/file/protected.ts | 38 ++++++++++++------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/packages/opencode/src/file/protected.ts b/packages/opencode/src/file/protected.ts index d519746193..a316e790b8 100644 --- a/packages/opencode/src/file/protected.ts +++ b/packages/opencode/src/file/protected.ts @@ -37,23 +37,23 @@ const DARWIN_ROOT = ["/.DocumentRevisions-V100", "/.Spotlight-V100", "/.Trashes" const WIN32_HOME = ["AppData", "Downloads", "Desktop", "Documents", "Pictures", "Music", "Videos", "OneDrive"] -export namespace Protected { - /** Directory basenames to skip when scanning the home directory. */ - export function names(): ReadonlySet { - if (process.platform === "darwin") return new Set(DARWIN_HOME) - if (process.platform === "win32") return new Set(WIN32_HOME) - return new Set() - } - - /** Absolute paths that should never be watched, stated, or scanned. */ - export function paths(): string[] { - if (process.platform === "darwin") - return [ - ...DARWIN_HOME.map((n) => path.join(home, n)), - ...DARWIN_LIBRARY.map((n) => path.join(home, "Library", n)), - ...DARWIN_ROOT, - ] - if (process.platform === "win32") return WIN32_HOME.map((n) => path.join(home, n)) - return [] - } +/** Directory basenames to skip when scanning the home directory. 
*/ +export function names(): ReadonlySet { + if (process.platform === "darwin") return new Set(DARWIN_HOME) + if (process.platform === "win32") return new Set(WIN32_HOME) + return new Set() } + +/** Absolute paths that should never be watched, stated, or scanned. */ +export function paths(): string[] { + if (process.platform === "darwin") + return [ + ...DARWIN_HOME.map((n) => path.join(home, n)), + ...DARWIN_LIBRARY.map((n) => path.join(home, "Library", n)), + ...DARWIN_ROOT, + ] + if (process.platform === "win32") return WIN32_HOME.map((n) => path.join(home, n)) + return [] +} + +export * as Protected from "./protected" From 635970b0a117481603d9894975ca502bc3887224 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:53 -0400 Subject: [PATCH 086/120] refactor: unwrap ConfigSkills namespace + self-reexport (#22950) --- packages/opencode/src/config/skills.ts | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/opencode/src/config/skills.ts b/packages/opencode/src/config/skills.ts index bdc63f5d6a..38cbf99e7d 100644 --- a/packages/opencode/src/config/skills.ts +++ b/packages/opencode/src/config/skills.ts @@ -1,13 +1,13 @@ import z from "zod" -export namespace ConfigSkills { - export const Info = z.object({ - paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), - urls: z - .array(z.string()) - .optional() - .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), - }) +export const Info = z.object({ + paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), + urls: z + .array(z.string()) + .optional() + .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), +}) - export type Info = z.infer -} +export type Info = z.infer + +export * as ConfigSkills from "./skills" From 53dc7b164940edbc5793bac83f91d7fca7b78fe5 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 
00:04:01 +0000 Subject: [PATCH 087/120] chore: generate --- packages/opencode/src/acp/agent.ts | 10 +++------- packages/opencode/src/agent/agent.ts | 3 +-- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts index 7180feabcb..f12328153b 100644 --- a/packages/opencode/src/acp/agent.ts +++ b/packages/opencode/src/acp/agent.ts @@ -1162,8 +1162,7 @@ export class Agent implements ACPAgent { (await (async () => { if (!availableModes.length) return undefined const defaultAgentName = await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent())) - const resolvedModeId = - availableModes.find((mode) => mode.name === defaultAgentName)?.id ?? availableModes[0].id + const resolvedModeId = availableModes.find((mode) => mode.name === defaultAgentName)?.id ?? availableModes[0].id this.sessionManager.setMode(sessionId, resolvedModeId) return resolvedModeId })()) @@ -1362,8 +1361,7 @@ export class Agent implements ACPAgent { if (!current) { this.sessionManager.setModel(session.id, model) } - const agent = - session.modeId ?? (await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent()))) + const agent = session.modeId ?? (await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent()))) const parts: Array< | { type: "text"; text: string; synthetic?: boolean; ignored?: boolean } @@ -1729,9 +1727,7 @@ function buildAvailableModels( ): ModelOption[] { const includeVariants = options.includeVariants ?? 
false return providers.flatMap((provider) => { - const unsorted: Array<{ id: string; name: string; variants?: Record }> = Object.values( - provider.models, - ) + const unsorted: Array<{ id: string; name: string; variants?: Record }> = Object.values(provider.models) const models = Provider.sort(unsorted) return models.flatMap((model) => { const base: ModelOption = { diff --git a/packages/opencode/src/agent/agent.ts b/packages/opencode/src/agent/agent.ts index 07f742fe12..355718b6bf 100644 --- a/packages/opencode/src/agent/agent.ts +++ b/packages/opencode/src/agent/agent.ts @@ -136,8 +136,7 @@ export const layer = Layer.effect( edit: { "*": "deny", [path.join(".opencode", "plans", "*.md")]: "allow", - [path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]: - "allow", + [path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]: "allow", }, }), user, From c0bfccc15ea6e2baea1d5b67f73d689317caa2af Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:11:17 -0400 Subject: [PATCH 088/120] tooling: add unwrap-and-self-reexport + batch-unwrap-pr scripts (#22929) --- packages/opencode/script/batch-unwrap-pr.ts | 230 +++++++++++++++++ .../script/unwrap-and-self-reexport.ts | 241 ++++++++++++++++++ 2 files changed, 471 insertions(+) create mode 100644 packages/opencode/script/batch-unwrap-pr.ts create mode 100644 packages/opencode/script/unwrap-and-self-reexport.ts diff --git a/packages/opencode/script/batch-unwrap-pr.ts b/packages/opencode/script/batch-unwrap-pr.ts new file mode 100644 index 0000000000..5730501412 --- /dev/null +++ b/packages/opencode/script/batch-unwrap-pr.ts @@ -0,0 +1,230 @@ +#!/usr/bin/env bun +/** + * Automate the full per-file namespace→self-reexport migration: + * + * 1. Create a worktree at ../opencode-worktrees/ns- on a new branch + * `kit/ns-` off `origin/dev`. + * 2. 
Symlink `node_modules` from the main repo into the worktree root so + * builds work without a fresh `bun install`. + * 3. Run `script/unwrap-and-self-reexport.ts` on the target file inside the worktree. + * 4. Verify: + * - `bunx --bun tsgo --noEmit` (pre-existing plugin.ts cross-worktree + * noise ignored — we compare against a pre-change baseline captured + * via `git stash`, so only NEW errors fail). + * - `bun run --conditions=browser ./src/index.ts generate`. + * - Relevant tests under `test/` if that directory exists. + * 5. Commit, push with `--no-verify`, and open a PR titled after the + * namespace. + * + * Usage: + * + * bun script/batch-unwrap-pr.ts src/file/ignore.ts + * bun script/batch-unwrap-pr.ts src/file/ignore.ts src/file/watcher.ts # multiple + * bun script/batch-unwrap-pr.ts --dry-run src/file/ignore.ts # plan only + * + * Repo assumptions: + * + * - Main checkout at /Users/kit/code/open-source/opencode (configurable via + * --repo-root=...). + * - Worktree root at /Users/kit/code/open-source/opencode-worktrees + * (configurable via --worktree-root=...). + * + * The script does NOT enable auto-merge; that's a separate manual step if we + * want it. + */ + +import fs from "node:fs" +import path from "node:path" +import { spawnSync, type SpawnSyncReturns } from "node:child_process" + +type Cmd = string[] + +function run( + cwd: string, + cmd: Cmd, + opts: { capture?: boolean; allowFail?: boolean; stdin?: string } = {}, +): SpawnSyncReturns { + const result = spawnSync(cmd[0], cmd.slice(1), { + cwd, + stdio: opts.capture ? ["pipe", "pipe", "pipe"] : ["inherit", "inherit", "inherit"], + encoding: "utf-8", + input: opts.stdin, + }) + if (!opts.allowFail && result.status !== 0) { + const label = `${path.basename(cmd[0])} ${cmd.slice(1).join(" ")}` + console.error(`[fail] ${label} (cwd=${cwd})`) + if (opts.capture) { + if (result.stdout) console.error(result.stdout) + if (result.stderr) console.error(result.stderr) + } + process.exit(result.status ?? 
1) + } + return result +} + +function fileSlug(fileArg: string): string { + // src/file/ignore.ts → file-ignore + return fileArg + .replace(/^src\//, "") + .replace(/\.tsx?$/, "") + .replace(/[\/_]/g, "-") +} + +function readNamespace(absFile: string): string { + const content = fs.readFileSync(absFile, "utf-8") + const match = content.match(/^export\s+namespace\s+(\w+)\s*\{/m) + if (!match) { + console.error(`no \`export namespace\` found in ${absFile}`) + process.exit(1) + } + return match[1] +} + +// --------------------------------------------------------------------------- + +const args = process.argv.slice(2) +const dryRun = args.includes("--dry-run") +const repoRoot = ( + args.find((a) => a.startsWith("--repo-root=")) ?? "--repo-root=/Users/kit/code/open-source/opencode" +).split("=")[1] +const worktreeRoot = ( + args.find((a) => a.startsWith("--worktree-root=")) ?? "--worktree-root=/Users/kit/code/open-source/opencode-worktrees" +).split("=")[1] +const targets = args.filter((a) => !a.startsWith("--")) + +if (targets.length === 0) { + console.error("Usage: bun script/batch-unwrap-pr.ts [more files...] [--dry-run]") + process.exit(1) +} + +if (!fs.existsSync(worktreeRoot)) fs.mkdirSync(worktreeRoot, { recursive: true }) + +for (const rel of targets) { + const absSrc = path.join(repoRoot, "packages", "opencode", rel) + if (!fs.existsSync(absSrc)) { + console.error(`skip ${rel}: file does not exist under ${repoRoot}/packages/opencode`) + continue + } + const slug = fileSlug(rel) + const branch = `kit/ns-${slug}` + const wt = path.join(worktreeRoot, `ns-${slug}`) + const ns = readNamespace(absSrc) + + console.log(`\n=== ${rel} → ${ns} (branch=${branch} wt=${path.basename(wt)}) ===`) + + if (dryRun) { + console.log(` would create worktree ${wt}`) + console.log(` would run unwrap on packages/opencode/${rel}`) + console.log(` would commit, push, and open PR`) + continue + } + + // Sync dev (fetch only; we branch off origin/dev directly). 
+ run(repoRoot, ["git", "fetch", "origin", "dev", "--quiet"]) + + // Create worktree + branch. + if (fs.existsSync(wt)) { + console.log(` worktree already exists at ${wt}; skipping`) + continue + } + run(repoRoot, ["git", "worktree", "add", "-b", branch, wt, "origin/dev"]) + + // Symlink node_modules so bun/tsgo work without a full install. + // We link both the repo root and packages/opencode, since the opencode + // package has its own local node_modules (including bunfig.toml preload deps + // like @opentui/solid) that aren't hoisted to the root. + const wtRootNodeModules = path.join(wt, "node_modules") + if (!fs.existsSync(wtRootNodeModules)) { + fs.symlinkSync(path.join(repoRoot, "node_modules"), wtRootNodeModules) + } + const wtOpencode = path.join(wt, "packages", "opencode") + const wtOpencodeNodeModules = path.join(wtOpencode, "node_modules") + if (!fs.existsSync(wtOpencodeNodeModules)) { + fs.symlinkSync(path.join(repoRoot, "packages", "opencode", "node_modules"), wtOpencodeNodeModules) + } + const wtTarget = path.join(wt, "packages", "opencode", rel) + + // Baseline tsgo output (pre-change). + const baselinePath = path.join(wt, ".ns-baseline.txt") + const baseline = run(wtOpencode, ["bunx", "--bun", "tsgo", "--noEmit"], { capture: true, allowFail: true }) + fs.writeFileSync(baselinePath, (baseline.stdout ?? "") + (baseline.stderr ?? "")) + + // Run the unwrap script from the MAIN repo checkout (where the tooling + // lives) targeting the worktree's file by absolute path. We run from the + // worktree root (not `packages/opencode`) to avoid triggering the + // bunfig.toml preload, which needs `@opentui/solid` that only the TUI + // workspace has installed. + const unwrapScript = path.join(repoRoot, "packages", "opencode", "script", "unwrap-and-self-reexport.ts") + run(wt, ["bun", unwrapScript, wtTarget]) + + // Post-change tsgo. 
+ const after = run(wtOpencode, ["bunx", "--bun", "tsgo", "--noEmit"], { capture: true, allowFail: true }) + const afterText = (after.stdout ?? "") + (after.stderr ?? "") + + // Compare line-sets to detect NEW tsgo errors. + const sanitize = (s: string) => + s + .split("\n") + .map((l) => l.replace(/\s+$/, "")) + .filter(Boolean) + .sort() + .join("\n") + const baselineSorted = sanitize(fs.readFileSync(baselinePath, "utf-8")) + const afterSorted = sanitize(afterText) + if (baselineSorted !== afterSorted) { + console.log(` tsgo output differs from baseline. Showing diff:`) + const diffResult = spawnSync("diff", ["-u", baselinePath, "-"], { input: afterText, encoding: "utf-8" }) + if (diffResult.stdout) console.log(diffResult.stdout) + if (diffResult.stderr) console.log(diffResult.stderr) + console.error(` aborting ${rel}; investigate manually in ${wt}`) + process.exit(1) + } + + // SDK build. + run(wtOpencode, ["bun", "run", "--conditions=browser", "./src/index.ts", "generate"], { capture: true }) + + // Run tests for the directory, if a matching test dir exists. + const dirName = path.basename(path.dirname(rel)) + const testDir = path.join(wt, "packages", "opencode", "test", dirName) + if (fs.existsSync(testDir)) { + const testResult = run(wtOpencode, ["bun", "run", "test", `test/${dirName}`], { capture: true, allowFail: true }) + const combined = (testResult.stdout ?? "") + (testResult.stderr ?? "") + if (testResult.status !== 0) { + console.error(combined) + console.error(` tests failed for ${rel}; aborting`) + process.exit(1) + } + // Surface the summary line if present. + const summary = combined + .split("\n") + .filter((l) => /\bpass\b|\bfail\b/.test(l)) + .slice(-3) + .join("\n") + if (summary) console.log(` tests: ${summary.replace(/\n/g, " | ")}`) + } else { + console.log(` tests: no test/${dirName} directory, skipping`) + } + + // Clean up baseline file before committing. + fs.unlinkSync(baselinePath) + + // Commit, push, open PR. 
+ const commitMsg = `refactor: unwrap ${ns} namespace + self-reexport` + run(wt, ["git", "add", "-A"]) + run(wt, ["git", "commit", "-m", commitMsg]) + run(wt, ["git", "push", "-u", "origin", branch, "--no-verify"]) + + const prBody = [ + "## Summary", + `- Unwrap the \`${ns}\` namespace in \`packages/opencode/${rel}\` to flat top-level exports.`, + `- Append \`export * as ${ns} from "./${path.basename(rel, ".ts")}"\` so consumers keep the same \`${ns}.x\` import ergonomics.`, + "", + "## Verification (local)", + "- `bunx --bun tsgo --noEmit` — no new errors vs baseline.", + "- `bun run --conditions=browser ./src/index.ts generate` — clean.", + `- \`bun run test test/${dirName}\` — all pass (if applicable).`, + ].join("\n") + run(wt, ["gh", "pr", "create", "--title", commitMsg, "--base", "dev", "--body", prBody]) + + console.log(` PR opened for ${rel}`) +} diff --git a/packages/opencode/script/unwrap-and-self-reexport.ts b/packages/opencode/script/unwrap-and-self-reexport.ts new file mode 100644 index 0000000000..5ae703182e --- /dev/null +++ b/packages/opencode/script/unwrap-and-self-reexport.ts @@ -0,0 +1,241 @@ +#!/usr/bin/env bun +/** + * Unwrap a single `export namespace` in a file into flat top-level exports + * plus a self-reexport at the bottom of the same file. + * + * Usage: + * + * bun script/unwrap-and-self-reexport.ts src/file/ignore.ts + * bun script/unwrap-and-self-reexport.ts src/file/ignore.ts --dry-run + * + * Input file shape: + * + * // imports ... + * + * export namespace FileIgnore { + * export function ...(...) { ... } + * const helper = ... + * } + * + * Output shape: + * + * // imports ... + * + * export function ...(...) { ... } + * const helper = ... + * + * export * as FileIgnore from "./ignore" + * + * What the script does: + * + * 1. Uses ast-grep to locate the single `export namespace Foo { ... }` block. + * 2. Removes the `export namespace Foo {` line and the matching closing `}`. + * 3. Dedents the body by one indent level (2 spaces). 
+ * 4. Rewrites `Foo.Bar` self-references inside the file to just `Bar` + * (but only for names that are actually exported from the namespace — + * non-exported members get the same treatment so references remain valid). + * 5. Appends `export * as Foo from "./"` at the end of the file. + * + * What it does NOT do: + * + * - Does not create or modify barrel `index.ts` files. + * - Does not rewrite any consumer imports. Consumers already import from + * the file path itself (e.g. `import { FileIgnore } from "../file/ignore"`); + * the self-reexport keeps that import working unchanged. + * - Does not handle files with more than one `export namespace` declaration. + * The script refuses that case. + * + * Requires: ast-grep (`brew install ast-grep`). + */ + +import fs from "node:fs" +import path from "node:path" + +const args = process.argv.slice(2) +const dryRun = args.includes("--dry-run") +const targetArg = args.find((a) => !a.startsWith("--")) + +if (!targetArg) { + console.error("Usage: bun script/unwrap-and-self-reexport.ts [--dry-run]") + process.exit(1) +} + +const absPath = path.resolve(targetArg) +if (!fs.existsSync(absPath) || !fs.statSync(absPath).isFile()) { + console.error(`Not a file: ${absPath}`) + process.exit(1) +} + +// Locate the namespace block with ast-grep (accurate AST boundaries). 
+const ast = Bun.spawnSync( + ["ast-grep", "run", "--pattern", "export namespace $NAME { $$$BODY }", "--lang", "typescript", "--json", absPath], + { stdout: "pipe", stderr: "pipe" }, +) +if (ast.exitCode !== 0) { + console.error("ast-grep failed:", ast.stderr.toString()) + process.exit(1) +} + +type AstMatch = { + range: { start: { line: number; column: number }; end: { line: number; column: number } } + metaVariables: { single: Record } +} +const matches = JSON.parse(ast.stdout.toString()) as AstMatch[] +if (matches.length === 0) { + console.error(`No \`export namespace\` found in ${path.relative(process.cwd(), absPath)}`) + process.exit(1) +} +if (matches.length > 1) { + console.error(`File has ${matches.length} \`export namespace\` declarations — this script handles one per file.`) + for (const m of matches) console.error(` ${m.metaVariables.single.NAME.text} (line ${m.range.start.line + 1})`) + process.exit(1) +} + +const match = matches[0] +const nsName = match.metaVariables.single.NAME.text +const startLine = match.range.start.line +const endLine = match.range.end.line + +const original = fs.readFileSync(absPath, "utf-8") +const lines = original.split("\n") + +// Split the file into before/body/after. +const before = lines.slice(0, startLine) +const body = lines.slice(startLine + 1, endLine) +const after = lines.slice(endLine + 1) + +// Dedent body by one indent level (2 spaces). +const dedented = body.map((line) => { + if (line === "") return "" + if (line.startsWith(" ")) return line.slice(2) + return line +}) + +// Collect all top-level declared identifiers inside the namespace body so we can +// rewrite `Foo.X` → `X` when X is one of them. We gather BOTH exported and +// non-exported names because the namespace body might reference its own +// non-exported helpers via `Foo.helper` too. 
+const declaredNames = new Set() +const declRe = + /^\s*(?:export\s+)?(?:abstract\s+)?(?:async\s+)?(?:const|let|var|function|class|interface|type|enum)\s+(\w+)/ +for (const line of dedented) { + const m = line.match(declRe) + if (m) declaredNames.add(m[1]) +} +// Also capture `export { X, Y }` re-exports inside the namespace. +const reExportRe = /export\s*\{\s*([^}]+)\}/g +for (const line of dedented) { + for (const reExport of line.matchAll(reExportRe)) { + for (const part of reExport[1].split(",")) { + const name = part + .trim() + .split(/\s+as\s+/) + .pop()! + .trim() + if (name) declaredNames.add(name) + } + } +} + +// Rewrite `Foo.X` → `X` inside the body, avoiding matches in strings, comments, +// templates. We walk the line char-by-char rather than using a regex so we can +// skip over those segments cleanly. +let rewriteCount = 0 +function rewriteLine(line: string): string { + const out: string[] = [] + let i = 0 + let stringQuote: string | null = null + while (i < line.length) { + const ch = line[i] + // String / template literal pass-through. + if (stringQuote) { + out.push(ch) + if (ch === "\\" && i + 1 < line.length) { + out.push(line[i + 1]) + i += 2 + continue + } + if (ch === stringQuote) stringQuote = null + i++ + continue + } + if (ch === '"' || ch === "'" || ch === "`") { + stringQuote = ch + out.push(ch) + i++ + continue + } + // Line comment: emit the rest of the line untouched. + if (ch === "/" && line[i + 1] === "/") { + out.push(line.slice(i)) + i = line.length + continue + } + // Block comment: emit until "*/" if present on same line; else rest of line. + if (ch === "/" && line[i + 1] === "*") { + const end = line.indexOf("*/", i + 2) + if (end === -1) { + out.push(line.slice(i)) + i = line.length + } else { + out.push(line.slice(i, end + 2)) + i = end + 2 + } + continue + } + // Try to match `Foo.` at this position. 
+ if (line.startsWith(nsName + ".", i)) { + // Make sure the char before is NOT a word character (otherwise we'd be in the middle of another identifier). + const prev = i === 0 ? "" : line[i - 1] + if (!/\w/.test(prev)) { + const after = line.slice(i + nsName.length + 1) + const nameMatch = after.match(/^([A-Za-z_$][\w$]*)/) + if (nameMatch && declaredNames.has(nameMatch[1])) { + out.push(nameMatch[1]) + i += nsName.length + 1 + nameMatch[1].length + rewriteCount++ + continue + } + } + } + out.push(ch) + i++ + } + return out.join("") +} +const rewrittenBody = dedented.map(rewriteLine) + +// Assemble the new file. Collapse multiple trailing blank lines so the +// self-reexport sits cleanly at the end. +const basename = path.basename(absPath, ".ts") +const assembled = [...before, ...rewrittenBody, ...after].join("\n") +const trimmed = assembled.replace(/\s+$/g, "") +const output = `${trimmed}\n\nexport * as ${nsName} from "./${basename}"\n` + +if (dryRun) { + console.log(`--- dry run: ${path.relative(process.cwd(), absPath)} ---`) + console.log(`namespace: ${nsName}`) + console.log(`body lines: ${body.length}`) + console.log(`declared names: ${Array.from(declaredNames).join(", ") || "(none)"}`) + console.log(`self-refs rewr: ${rewriteCount}`) + console.log(`self-reexport: export * as ${nsName} from "./${basename}"`) + console.log(`output preview (last 10 lines):`) + const outputLines = output.split("\n") + for (const l of outputLines.slice(Math.max(0, outputLines.length - 10))) { + console.log(` ${l}`) + } + process.exit(0) +} + +fs.writeFileSync(absPath, output) +console.log(`unwrapped ${path.relative(process.cwd(), absPath)} → ${nsName}`) +console.log(` body lines: ${body.length}`) +console.log(` self-refs rewr: ${rewriteCount}`) +console.log(` self-reexport: export * as ${nsName} from "./${basename}"`) +console.log("") +console.log("Next: verify with") +console.log(" bunx --bun tsgo --noEmit") +console.log(" bun run --conditions=browser ./src/index.ts generate") 
+console.log( + ` bun run test test/${path.relative(path.join(path.dirname(absPath), "..", ".."), absPath).replace(/\.ts$/, "")}*`, +) From 54078c4caea1adadea25ca1c4ec1479f3ab4e423 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:11:19 -0400 Subject: [PATCH 089/120] refactor: unwrap Shell namespace + self-reexport (#22964) --- packages/opencode/src/shell/shell.ts | 186 +++++++++++++-------------- 1 file changed, 93 insertions(+), 93 deletions(-) diff --git a/packages/opencode/src/shell/shell.ts b/packages/opencode/src/shell/shell.ts index 056a794dc8..60643c10b0 100644 --- a/packages/opencode/src/shell/shell.ts +++ b/packages/opencode/src/shell/shell.ts @@ -8,103 +8,103 @@ import { setTimeout as sleep } from "node:timers/promises" const SIGKILL_TIMEOUT_MS = 200 -export namespace Shell { - const BLACKLIST = new Set(["fish", "nu"]) - const LOGIN = new Set(["bash", "dash", "fish", "ksh", "sh", "zsh"]) - const POSIX = new Set(["bash", "dash", "ksh", "sh", "zsh"]) +const BLACKLIST = new Set(["fish", "nu"]) +const LOGIN = new Set(["bash", "dash", "fish", "ksh", "sh", "zsh"]) +const POSIX = new Set(["bash", "dash", "ksh", "sh", "zsh"]) - export async function killTree(proc: ChildProcess, opts?: { exited?: () => boolean }): Promise { - const pid = proc.pid - if (!pid || opts?.exited?.()) return +export async function killTree(proc: ChildProcess, opts?: { exited?: () => boolean }): Promise { + const pid = proc.pid + if (!pid || opts?.exited?.()) return - if (process.platform === "win32") { - await new Promise((resolve) => { - const killer = spawn("taskkill", ["/pid", String(pid), "/f", "/t"], { - stdio: "ignore", - windowsHide: true, - }) - killer.once("exit", () => resolve()) - killer.once("error", () => resolve()) + if (process.platform === "win32") { + await new Promise((resolve) => { + const killer = spawn("taskkill", ["/pid", String(pid), "/f", "/t"], { + stdio: "ignore", + windowsHide: true, }) - return - } + killer.once("exit", () => resolve()) + 
killer.once("error", () => resolve()) + }) + return + } - try { - process.kill(-pid, "SIGTERM") - await sleep(SIGKILL_TIMEOUT_MS) - if (!opts?.exited?.()) { - process.kill(-pid, "SIGKILL") - } - } catch (_e) { - proc.kill("SIGTERM") - await sleep(SIGKILL_TIMEOUT_MS) - if (!opts?.exited?.()) { - proc.kill("SIGKILL") - } + try { + process.kill(-pid, "SIGTERM") + await sleep(SIGKILL_TIMEOUT_MS) + if (!opts?.exited?.()) { + process.kill(-pid, "SIGKILL") + } + } catch (_e) { + proc.kill("SIGTERM") + await sleep(SIGKILL_TIMEOUT_MS) + if (!opts?.exited?.()) { + proc.kill("SIGKILL") } } - - function full(file: string) { - if (process.platform !== "win32") return file - const shell = Filesystem.windowsPath(file) - if (path.win32.dirname(shell) !== ".") { - if (shell.startsWith("/") && name(shell) === "bash") return gitbash() || shell - return shell - } - return which(shell) || shell - } - - function pick() { - const pwsh = which("pwsh.exe") - if (pwsh) return pwsh - const powershell = which("powershell.exe") - if (powershell) return powershell - } - - function select(file: string | undefined, opts?: { acceptable?: boolean }) { - if (file && (!opts?.acceptable || !BLACKLIST.has(name(file)))) return full(file) - if (process.platform === "win32") { - const shell = pick() - if (shell) return shell - } - return fallback() - } - - export function gitbash() { - if (process.platform !== "win32") return - if (Flag.OPENCODE_GIT_BASH_PATH) return Flag.OPENCODE_GIT_BASH_PATH - const git = which("git") - if (!git) return - const file = path.join(git, "..", "..", "bin", "bash.exe") - if (Filesystem.stat(file)?.size) return file - } - - function fallback() { - if (process.platform === "win32") { - const file = gitbash() - if (file) return file - return process.env.COMSPEC || "cmd.exe" - } - if (process.platform === "darwin") return "/bin/zsh" - const bash = which("bash") - if (bash) return bash - return "/bin/sh" - } - - export function name(file: string) { - if (process.platform === 
"win32") return path.win32.parse(Filesystem.windowsPath(file)).name.toLowerCase() - return path.basename(file).toLowerCase() - } - - export function login(file: string) { - return LOGIN.has(name(file)) - } - - export function posix(file: string) { - return POSIX.has(name(file)) - } - - export const preferred = lazy(() => select(process.env.SHELL)) - - export const acceptable = lazy(() => select(process.env.SHELL, { acceptable: true })) } + +function full(file: string) { + if (process.platform !== "win32") return file + const shell = Filesystem.windowsPath(file) + if (path.win32.dirname(shell) !== ".") { + if (shell.startsWith("/") && name(shell) === "bash") return gitbash() || shell + return shell + } + return which(shell) || shell +} + +function pick() { + const pwsh = which("pwsh.exe") + if (pwsh) return pwsh + const powershell = which("powershell.exe") + if (powershell) return powershell +} + +function select(file: string | undefined, opts?: { acceptable?: boolean }) { + if (file && (!opts?.acceptable || !BLACKLIST.has(name(file)))) return full(file) + if (process.platform === "win32") { + const shell = pick() + if (shell) return shell + } + return fallback() +} + +export function gitbash() { + if (process.platform !== "win32") return + if (Flag.OPENCODE_GIT_BASH_PATH) return Flag.OPENCODE_GIT_BASH_PATH + const git = which("git") + if (!git) return + const file = path.join(git, "..", "..", "bin", "bash.exe") + if (Filesystem.stat(file)?.size) return file +} + +function fallback() { + if (process.platform === "win32") { + const file = gitbash() + if (file) return file + return process.env.COMSPEC || "cmd.exe" + } + if (process.platform === "darwin") return "/bin/zsh" + const bash = which("bash") + if (bash) return bash + return "/bin/sh" +} + +export function name(file: string) { + if (process.platform === "win32") return path.win32.parse(Filesystem.windowsPath(file)).name.toLowerCase() + return path.basename(file).toLowerCase() +} + +export function login(file: 
string) { + return LOGIN.has(name(file)) +} + +export function posix(file: string) { + return POSIX.has(name(file)) +} + +export const preferred = lazy(() => select(process.env.SHELL)) + +export const acceptable = lazy(() => select(process.env.SHELL, { acceptable: true })) + +export * as Shell from "./shell" From 39342b0e759a265045a2b49f34af8ae5da773a8c Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 20:28:08 -0400 Subject: [PATCH 090/120] tui: fix Windows terminal suspend and input undo keybindings On Windows, native terminals don't support POSIX suspend (ctrl+z), so we now assign ctrl+z to input undo instead of terminal suspend. Terminal suspend is disabled on Windows to avoid conflicts with the undo functionality. --- .../src/cli/cmd/tui/config/tui-migrate.ts | 1 - .../opencode/src/cli/cmd/tui/config/tui.ts | 336 +++++++++--------- packages/opencode/src/config/config.ts | 1 - packages/opencode/src/config/keybinds.ts | 14 +- 4 files changed, 183 insertions(+), 169 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts b/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts index 3ce5c4b739..9323dd979a 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts @@ -26,7 +26,6 @@ const TuiLegacy = z interface MigrateInput { cwd: string directories: string[] - custom?: string } /** diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index d264273bca..6e5296db87 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -17,197 +17,203 @@ import { InstallationLocal, InstallationVersion } from "@/installation/version" import { makeRuntime } from "@/cli/effect/runtime" import { Filesystem, Log } from "@/util" -const log = Log.create({ service: "tui.config" }) +export namespace TuiConfig { + const log = Log.create({ service: "tui.config" }) 
-export const Info = TuiInfo + export const Info = TuiInfo -type Acc = { - result: Info -} + type Acc = { + result: Info + } -type State = { - config: Info - deps: Array> -} + type State = { + config: Info + deps: Array> + } -export type Info = z.output & { - // Internal resolved plugin list used by runtime loading. - plugin_origins?: ConfigPlugin.Origin[] -} + export type Info = z.output & { + // Internal resolved plugin list used by runtime loading. + plugin_origins?: ConfigPlugin.Origin[] + } -export interface Interface { - readonly get: () => Effect.Effect - readonly waitForDependencies: () => Effect.Effect -} + export interface Interface { + readonly get: () => Effect.Effect + readonly waitForDependencies: () => Effect.Effect + } -export class Service extends Context.Service()("@opencode/TuiConfig") {} + export class Service extends Context.Service()("@opencode/TuiConfig") {} -function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { - if (Filesystem.contains(ctx.directory, file)) return "local" - // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" - return "global" -} + function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { + if (Filesystem.contains(ctx.directory, file)) return "local" + // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" + return "global" + } -function customPath() { - return Flag.OPENCODE_TUI_CONFIG -} + function normalize(raw: Record) { + const data = { ...raw } + if (!("tui" in data)) return data + if (!isRecord(data.tui)) { + delete data.tui + return data + } -function normalize(raw: Record) { - const data = { ...raw } - if (!("tui" in data)) return data - if (!isRecord(data.tui)) { + const tui = data.tui delete data.tui - return data - } - - const tui = data.tui - delete data.tui - return { - ...tui, - ...data, - } -} - -async function resolvePlugins(config: Info, configFilepath: string) { - if (!config.plugin) 
return config - for (let i = 0; i < config.plugin.length; i++) { - config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) - } - return config -} - -async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { - const data = await loadFile(file) - acc.result = mergeDeep(acc.result, data) - if (!data.plugin?.length) return - - const scope = pluginScope(file, ctx) - const plugins = ConfigPlugin.deduplicatePluginOrigins([ - ...(acc.result.plugin_origins ?? []), - ...data.plugin.map((spec) => ({ spec, scope, source: file })), - ]) - acc.result.plugin = plugins.map((item) => item.spec) - acc.result.plugin_origins = plugins -} - -async function loadState(ctx: { directory: string }) { - let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) - const directories = await ConfigPaths.directories(ctx.directory) - const custom = customPath() - await migrateTuiConfig({ directories, custom, cwd: ctx.directory }) - // Re-compute after migration since migrateTuiConfig may have created new tui.json files - projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) - - const acc: Acc = { - result: {}, - } - - for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { - await mergeFile(acc, file, ctx) - } - - if (custom) { - await mergeFile(acc, custom, ctx) - log.debug("loaded custom tui config", { path: custom }) - } - - for (const file of projectFiles) { - await mergeFile(acc, file, ctx) - } - - const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) - - for (const dir of dirs) { - if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue - for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { - await mergeFile(acc, file, ctx) + return { + ...tui, + ...data, } } - const keybinds = { ...(acc.result.keybinds ?? 
{}) } - if (process.platform === "win32") { - // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. - keybinds.terminal_suspend = "none" - keybinds.input_undo ??= unique([ - "ctrl+z", - ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), - ]).join(",") + async function resolvePlugins(config: Info, configFilepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) + } + return config } - acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) - return { - config: acc.result, - dirs: acc.result.plugin?.length ? dirs : [], + async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { + const data = await loadFile(file) + acc.result = mergeDeep(acc.result, data) + if (!data.plugin?.length) return + + const scope = pluginScope(file, ctx) + const plugins = ConfigPlugin.deduplicatePluginOrigins([ + ...(acc.result.plugin_origins ?? []), + ...data.plugin.map((spec) => ({ spec, scope, source: file })), + ]) + acc.result.plugin = plugins.map((item) => item.spec) + acc.result.plugin_origins = plugins } -} -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const directory = yield* CurrentWorkingDirectory - const npm = yield* Npm.Service - const data = yield* Effect.promise(() => loadState({ directory })) - const deps = yield* Effect.forEach( - data.dirs, - (dir) => - npm - .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], - }) - .pipe(Effect.forkScoped), - { - concurrency: "unbounded", - }, - ) + async function loadState(ctx: { directory: string }) { + // Every config dir we may read from: global config dir, any `.opencode` + // folders between cwd and home, and OPENCODE_CONFIG_DIR. 
+ const directories = await ConfigPaths.directories(ctx.directory) + // One-time migration: extract tui keys (theme/keybinds/tui) from existing + // opencode.json files into sibling tui.json files. + await migrateTuiConfig({ directories, cwd: ctx.directory }) - const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) + const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG + ? [] + : await ConfigPaths.projectFiles("tui", ctx.directory) - const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => - Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), - ) - return Service.of({ get, waitForDependencies }) - }).pipe(Effect.withSpan("TuiConfig.layer")), -) + const acc: Acc = { + result: {}, + } -export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) + // 1. Global tui config (lowest precedence). + for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { + await mergeFile(acc, file, ctx) + } -const { runPromise } = makeRuntime(Service, defaultLayer) + // 2. Explicit OPENCODE_TUI_CONFIG override, if set. + if (Flag.OPENCODE_TUI_CONFIG) { + await mergeFile(acc, Flag.OPENCODE_TUI_CONFIG, ctx) + log.debug("loaded custom tui config", { path: Flag.OPENCODE_TUI_CONFIG }) + } -export async function waitForDependencies() { - await runPromise((svc) => svc.waitForDependencies()) -} + // 3. Project tui files, applied root-first so the closest file wins. + for (const file of projectFiles) { + await mergeFile(acc, file, ctx) + } -export async function get() { - return runPromise((svc) => svc.get()) -} + // 4. `.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while + // walking up the tree. Also returned below so callers can install plugin + // dependencies from each location. 
+ const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) -async function loadFile(filepath: string): Promise { - const text = await ConfigPaths.readFile(filepath) - if (!text) return {} - return load(text, filepath).catch((error) => { - log.warn("failed to load tui config", { path: filepath, error }) - return {} - }) -} + for (const dir of dirs) { + if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue + for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { + await mergeFile(acc, file, ctx) + } + } -async function load(text: string, configFilepath: string): Promise { - return ConfigParse.load(Info, text, { - type: "path", - path: configFilepath, - missing: "empty", - normalize: (data) => { - if (!isRecord(data)) return {} + const keybinds = { ...(acc.result.keybinds ?? {}) } + if (process.platform === "win32") { + // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. + keybinds.terminal_suspend = "none" + keybinds.input_undo ??= unique([ + "ctrl+z", + ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), + ]).join(",") + } + acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) - // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json - // (mirroring the old opencode.json shape) still get their settings applied. - return normalize(data) - }, - }) - .then((data) => resolvePlugins(data, configFilepath)) - .catch((error) => { - log.warn("invalid tui config", { path: configFilepath, error }) + return { + config: acc.result, + dirs: acc.result.plugin?.length ? 
dirs : [], + } + } + + export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const directory = yield* CurrentWorkingDirectory + const npm = yield* Npm.Service + const data = yield* Effect.promise(() => loadState({ directory })) + const deps = yield* Effect.forEach( + data.dirs, + (dir) => + npm + .install(dir, { + add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], + }) + .pipe(Effect.forkScoped), + { + concurrency: "unbounded", + }, + ) + + const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) + + const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => + Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), + ) + return Service.of({ get, waitForDependencies }) + }).pipe(Effect.withSpan("TuiConfig.layer")), + ) + + export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) + + const { runPromise } = makeRuntime(Service, defaultLayer) + + export async function waitForDependencies() { + await runPromise((svc) => svc.waitForDependencies()) + } + + export async function get() { + return runPromise((svc) => svc.get()) + } + + async function loadFile(filepath: string): Promise { + const text = await ConfigPaths.readFile(filepath) + if (!text) return {} + return load(text, filepath).catch((error) => { + log.warn("failed to load tui config", { path: filepath, error }) return {} }) -} + } -export * as TuiConfig from "./tui" + async function load(text: string, configFilepath: string): Promise { + return ConfigParse.load(Info, text, { + type: "path", + path: configFilepath, + missing: "empty", + normalize: (data) => { + if (!isRecord(data)) return {} + + // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json + // (mirroring the old opencode.json shape) still get their settings applied. 
+ return normalize(data) + }, + }) + .then((data) => resolvePlugins(data, configFilepath)) + .catch((error) => { + log.warn("invalid tui config", { path: configFilepath, error }) + return {} + }) + } +} diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 3cbc539600..adccb6353b 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -19,7 +19,6 @@ import { GlobalBus } from "@/bus/global" import { Event } from "../server/event" import { Account } from "@/account" import { isRecord } from "@/util/record" -import { InvalidError, JsonError } from "./error" import type { ConsoleState } from "./console-state" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { InstanceState } from "@/effect" diff --git a/packages/opencode/src/config/keybinds.ts b/packages/opencode/src/config/keybinds.ts index cb146b7cae..8a22289d2a 100644 --- a/packages/opencode/src/config/keybinds.ts +++ b/packages/opencode/src/config/keybinds.ts @@ -106,7 +106,12 @@ export const Keybinds = z input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"), input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"), input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"), - input_undo: z.string().optional().default("ctrl+-,super+z").describe("Undo in input"), + input_undo: z + .string() + .optional() + // On Windows prepend ctrl+z since terminal_suspend releases the binding. + .default(process.platform === "win32" ? 
"ctrl+z,ctrl+-,super+z" : "ctrl+-,super+z") + .describe("Undo in input"), input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"), input_word_forward: z .string() @@ -144,7 +149,12 @@ export const Keybinds = z session_child_cycle: z.string().optional().default("right").describe("Go to next child session"), session_child_cycle_reverse: z.string().optional().default("left").describe("Go to previous child session"), session_parent: z.string().optional().default("up").describe("Go to parent session"), - terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"), + terminal_suspend: z + .string() + .optional() + .default("ctrl+z") + .transform((v) => (process.platform === "win32" ? "none" : v)) + .describe("Suspend terminal"), terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"), tips_toggle: z.string().optional().default("h").describe("Toggle tips on home screen"), plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"), From d6af5a686cd45dc68504283645de990bbeaf2005 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 20:46:40 -0400 Subject: [PATCH 091/120] tui: convert TuiConfig namespace to ES module exports --- .../opencode/src/cli/cmd/tui/config/tui.ts | 372 +++++++++--------- 1 file changed, 185 insertions(+), 187 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index 6e5296db87..b55cf3b83f 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -1,3 +1,5 @@ +export * as TuiConfig from "./tui" + import z from "zod" import { mergeDeep, unique } from "remeda" import { Context, Effect, Fiber, Layer } from "effect" @@ -17,203 +19,199 @@ import { InstallationLocal, InstallationVersion } from "@/installation/version" import { makeRuntime } from "@/cli/effect/runtime" import { 
Filesystem, Log } from "@/util" -export namespace TuiConfig { - const log = Log.create({ service: "tui.config" }) +const log = Log.create({ service: "tui.config" }) - export const Info = TuiInfo +export const Info = TuiInfo - type Acc = { - result: Info - } +type Acc = { + result: Info +} - type State = { - config: Info - deps: Array> - } +type State = { + config: Info + deps: Array> +} - export type Info = z.output & { - // Internal resolved plugin list used by runtime loading. - plugin_origins?: ConfigPlugin.Origin[] - } +export type Info = z.output & { + // Internal resolved plugin list used by runtime loading. + plugin_origins?: ConfigPlugin.Origin[] +} - export interface Interface { - readonly get: () => Effect.Effect - readonly waitForDependencies: () => Effect.Effect - } +export interface Interface { + readonly get: () => Effect.Effect + readonly waitForDependencies: () => Effect.Effect +} - export class Service extends Context.Service()("@opencode/TuiConfig") {} +export class Service extends Context.Service()("@opencode/TuiConfig") {} - function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { - if (Filesystem.contains(ctx.directory, file)) return "local" - // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" - return "global" - } +function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { + if (Filesystem.contains(ctx.directory, file)) return "local" + // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" + return "global" +} - function normalize(raw: Record) { - const data = { ...raw } - if (!("tui" in data)) return data - if (!isRecord(data.tui)) { - delete data.tui - return data - } - - const tui = data.tui +function normalize(raw: Record) { + const data = { ...raw } + if (!("tui" in data)) return data + if (!isRecord(data.tui)) { delete data.tui - return { - ...tui, - ...data, - } + return data } - async function 
resolvePlugins(config: Info, configFilepath: string) { - if (!config.plugin) return config - for (let i = 0; i < config.plugin.length; i++) { - config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) - } - return config - } - - async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { - const data = await loadFile(file) - acc.result = mergeDeep(acc.result, data) - if (!data.plugin?.length) return - - const scope = pluginScope(file, ctx) - const plugins = ConfigPlugin.deduplicatePluginOrigins([ - ...(acc.result.plugin_origins ?? []), - ...data.plugin.map((spec) => ({ spec, scope, source: file })), - ]) - acc.result.plugin = plugins.map((item) => item.spec) - acc.result.plugin_origins = plugins - } - - async function loadState(ctx: { directory: string }) { - // Every config dir we may read from: global config dir, any `.opencode` - // folders between cwd and home, and OPENCODE_CONFIG_DIR. - const directories = await ConfigPaths.directories(ctx.directory) - // One-time migration: extract tui keys (theme/keybinds/tui) from existing - // opencode.json files into sibling tui.json files. - await migrateTuiConfig({ directories, cwd: ctx.directory }) - - const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG - ? [] - : await ConfigPaths.projectFiles("tui", ctx.directory) - - const acc: Acc = { - result: {}, - } - - // 1. Global tui config (lowest precedence). - for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { - await mergeFile(acc, file, ctx) - } - - // 2. Explicit OPENCODE_TUI_CONFIG override, if set. - if (Flag.OPENCODE_TUI_CONFIG) { - await mergeFile(acc, Flag.OPENCODE_TUI_CONFIG, ctx) - log.debug("loaded custom tui config", { path: Flag.OPENCODE_TUI_CONFIG }) - } - - // 3. Project tui files, applied root-first so the closest file wins. - for (const file of projectFiles) { - await mergeFile(acc, file, ctx) - } - - // 4. 
`.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while - // walking up the tree. Also returned below so callers can install plugin - // dependencies from each location. - const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) - - for (const dir of dirs) { - if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue - for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { - await mergeFile(acc, file, ctx) - } - } - - const keybinds = { ...(acc.result.keybinds ?? {}) } - if (process.platform === "win32") { - // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. - keybinds.terminal_suspend = "none" - keybinds.input_undo ??= unique([ - "ctrl+z", - ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), - ]).join(",") - } - acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) - - return { - config: acc.result, - dirs: acc.result.plugin?.length ? dirs : [], - } - } - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const directory = yield* CurrentWorkingDirectory - const npm = yield* Npm.Service - const data = yield* Effect.promise(() => loadState({ directory })) - const deps = yield* Effect.forEach( - data.dirs, - (dir) => - npm - .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? 
"" : "@" + InstallationVersion)], - }) - .pipe(Effect.forkScoped), - { - concurrency: "unbounded", - }, - ) - - const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) - - const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => - Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), - ) - return Service.of({ get, waitForDependencies }) - }).pipe(Effect.withSpan("TuiConfig.layer")), - ) - - export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) - - const { runPromise } = makeRuntime(Service, defaultLayer) - - export async function waitForDependencies() { - await runPromise((svc) => svc.waitForDependencies()) - } - - export async function get() { - return runPromise((svc) => svc.get()) - } - - async function loadFile(filepath: string): Promise { - const text = await ConfigPaths.readFile(filepath) - if (!text) return {} - return load(text, filepath).catch((error) => { - log.warn("failed to load tui config", { path: filepath, error }) - return {} - }) - } - - async function load(text: string, configFilepath: string): Promise { - return ConfigParse.load(Info, text, { - type: "path", - path: configFilepath, - missing: "empty", - normalize: (data) => { - if (!isRecord(data)) return {} - - // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json - // (mirroring the old opencode.json shape) still get their settings applied. 
- return normalize(data) - }, - }) - .then((data) => resolvePlugins(data, configFilepath)) - .catch((error) => { - log.warn("invalid tui config", { path: configFilepath, error }) - return {} - }) + const tui = data.tui + delete data.tui + return { + ...tui, + ...data, } } + +async function resolvePlugins(config: Info, configFilepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) + } + return config +} + +async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { + const data = await loadFile(file) + acc.result = mergeDeep(acc.result, data) + if (!data.plugin?.length) return + + const scope = pluginScope(file, ctx) + const plugins = ConfigPlugin.deduplicatePluginOrigins([ + ...(acc.result.plugin_origins ?? []), + ...data.plugin.map((spec) => ({ spec, scope, source: file })), + ]) + acc.result.plugin = plugins.map((item) => item.spec) + acc.result.plugin_origins = plugins +} + +async function loadState(ctx: { directory: string }) { + // Every config dir we may read from: global config dir, any `.opencode` + // folders between cwd and home, and OPENCODE_CONFIG_DIR. + const directories = await ConfigPaths.directories(ctx.directory) + // One-time migration: extract tui keys (theme/keybinds/tui) from existing + // opencode.json files into sibling tui.json files. + await migrateTuiConfig({ directories, cwd: ctx.directory }) + + const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) + + const acc: Acc = { + result: {}, + } + + // 1. Global tui config (lowest precedence). + for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { + await mergeFile(acc, file, ctx) + } + + // 2. Explicit OPENCODE_TUI_CONFIG override, if set. 
+ if (Flag.OPENCODE_TUI_CONFIG) { + await mergeFile(acc, Flag.OPENCODE_TUI_CONFIG, ctx) + log.debug("loaded custom tui config", { path: Flag.OPENCODE_TUI_CONFIG }) + } + + // 3. Project tui files, applied root-first so the closest file wins. + for (const file of projectFiles) { + await mergeFile(acc, file, ctx) + } + + // 4. `.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while + // walking up the tree. Also returned below so callers can install plugin + // dependencies from each location. + const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) + + for (const dir of dirs) { + if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue + for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { + await mergeFile(acc, file, ctx) + } + } + + const keybinds = { ...(acc.result.keybinds ?? {}) } + if (process.platform === "win32") { + // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. + keybinds.terminal_suspend = "none" + keybinds.input_undo ??= unique([ + "ctrl+z", + ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), + ]).join(",") + } + acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) + + return { + config: acc.result, + dirs: acc.result.plugin?.length ? dirs : [], + } +} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const directory = yield* CurrentWorkingDirectory + const npm = yield* Npm.Service + const data = yield* Effect.promise(() => loadState({ directory })) + const deps = yield* Effect.forEach( + data.dirs, + (dir) => + npm + .install(dir, { + add: ["@opencode-ai/plugin" + (InstallationLocal ? 
"" : "@" + InstallationVersion)], + }) + .pipe(Effect.forkScoped), + { + concurrency: "unbounded", + }, + ) + + const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) + + const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => + Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), + ) + return Service.of({ get, waitForDependencies }) + }).pipe(Effect.withSpan("TuiConfig.layer")), +) + +export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) + +const { runPromise } = makeRuntime(Service, defaultLayer) + +export async function waitForDependencies() { + await runPromise((svc) => svc.waitForDependencies()) +} + +export async function get() { + return runPromise((svc) => svc.get()) +} + +async function loadFile(filepath: string): Promise { + const text = await ConfigPaths.readFile(filepath) + if (!text) return {} + return load(text, filepath).catch((error) => { + log.warn("failed to load tui config", { path: filepath, error }) + return {} + }) +} + +async function load(text: string, configFilepath: string): Promise { + return ConfigParse.load(Info, text, { + type: "path", + path: configFilepath, + missing: "empty", + normalize: (data) => { + if (!isRecord(data)) return {} + + // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json + // (mirroring the old opencode.json shape) still get their settings applied. 
+ return normalize(data) + }, + }) + .then((data) => resolvePlugins(data, configFilepath)) + .catch((error) => { + log.warn("invalid tui config", { path: configFilepath, error }) + return {} + }) +} From 51d8219c46f902b90cee716f3b8475e163e21e7c Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:49:39 -0400 Subject: [PATCH 092/120] refactor: unwrap session/ tier-2 namespaces + self-reexport (#22973) --- packages/opencode/src/session/compaction.ts | 666 ++-- packages/opencode/src/session/instruction.ts | 318 +- packages/opencode/src/session/llm.ts | 824 ++--- packages/opencode/src/session/message-v2.ts | 1908 +++++------ packages/opencode/src/session/message.ts | 346 +- packages/opencode/src/session/processor.ts | 1158 +++---- packages/opencode/src/session/prompt.ts | 3174 +++++++++--------- packages/opencode/src/session/retry.ts | 232 +- packages/opencode/src/session/revert.ts | 290 +- packages/opencode/src/session/run-state.ts | 198 +- packages/opencode/src/session/status.ts | 156 +- packages/opencode/src/session/summary.ts | 274 +- packages/opencode/src/session/system.ts | 126 +- packages/opencode/src/session/todo.ts | 148 +- 14 files changed, 4909 insertions(+), 4909 deletions(-) diff --git a/packages/opencode/src/session/compaction.ts b/packages/opencode/src/session/compaction.ts index 3ef6977547..212f5fdbab 100644 --- a/packages/opencode/src/session/compaction.ts +++ b/packages/opencode/src/session/compaction.ts @@ -17,173 +17,172 @@ import { Effect, Layer, Context } from "effect" import { InstanceState } from "@/effect" import { isOverflow as overflow } from "./overflow" -export namespace SessionCompaction { - const log = Log.create({ service: "session.compaction" }) +const log = Log.create({ service: "session.compaction" }) - export const Event = { - Compacted: BusEvent.define( - "session.compacted", - z.object({ - sessionID: SessionID.zod, - }), - ), - } +export const Event = { + Compacted: BusEvent.define( + "session.compacted", + 
z.object({ + sessionID: SessionID.zod, + }), + ), +} - export const PRUNE_MINIMUM = 20_000 - export const PRUNE_PROTECT = 40_000 - const PRUNE_PROTECTED_TOOLS = ["skill"] +export const PRUNE_MINIMUM = 20_000 +export const PRUNE_PROTECT = 40_000 +const PRUNE_PROTECTED_TOOLS = ["skill"] - export interface Interface { - readonly isOverflow: (input: { +export interface Interface { + readonly isOverflow: (input: { + tokens: MessageV2.Assistant["tokens"] + model: Provider.Model + }) => Effect.Effect + readonly prune: (input: { sessionID: SessionID }) => Effect.Effect + readonly process: (input: { + parentID: MessageID + messages: MessageV2.WithParts[] + sessionID: SessionID + auto: boolean + overflow?: boolean + }) => Effect.Effect<"continue" | "stop"> + readonly create: (input: { + sessionID: SessionID + agent: string + model: { providerID: ProviderID; modelID: ModelID } + auto: boolean + overflow?: boolean + }) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SessionCompaction") {} + +export const layer: Layer.Layer< + Service, + never, + | Bus.Service + | Config.Service + | Session.Service + | Agent.Service + | Plugin.Service + | SessionProcessor.Service + | Provider.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + const config = yield* Config.Service + const session = yield* Session.Service + const agents = yield* Agent.Service + const plugin = yield* Plugin.Service + const processors = yield* SessionProcessor.Service + const provider = yield* Provider.Service + + const isOverflow = Effect.fn("SessionCompaction.isOverflow")(function* (input: { tokens: MessageV2.Assistant["tokens"] model: Provider.Model - }) => Effect.Effect - readonly prune: (input: { sessionID: SessionID }) => Effect.Effect - readonly process: (input: { + }) { + return overflow({ cfg: yield* config.get(), tokens: input.tokens, model: input.model }) + }) + + // goes backwards through parts until there are 
PRUNE_PROTECT tokens worth of tool + // calls, then erases output of older tool calls to free context space + const prune = Effect.fn("SessionCompaction.prune")(function* (input: { sessionID: SessionID }) { + const cfg = yield* config.get() + if (cfg.compaction?.prune === false) return + log.info("pruning") + + const msgs = yield* session + .messages({ sessionID: input.sessionID }) + .pipe(Effect.catchIf(NotFoundError.isInstance, () => Effect.succeed(undefined))) + if (!msgs) return + + let total = 0 + let pruned = 0 + const toPrune: MessageV2.ToolPart[] = [] + let turns = 0 + + loop: for (let msgIndex = msgs.length - 1; msgIndex >= 0; msgIndex--) { + const msg = msgs[msgIndex] + if (msg.info.role === "user") turns++ + if (turns < 2) continue + if (msg.info.role === "assistant" && msg.info.summary) break loop + for (let partIndex = msg.parts.length - 1; partIndex >= 0; partIndex--) { + const part = msg.parts[partIndex] + if (part.type === "tool") + if (part.state.status === "completed") { + if (PRUNE_PROTECTED_TOOLS.includes(part.tool)) continue + if (part.state.time.compacted) break loop + const estimate = Token.estimate(part.state.output) + total += estimate + if (total > PRUNE_PROTECT) { + pruned += estimate + toPrune.push(part) + } + } + } + } + + log.info("found", { pruned, total }) + if (pruned > PRUNE_MINIMUM) { + for (const part of toPrune) { + if (part.state.status === "completed") { + part.state.time.compacted = Date.now() + yield* session.updatePart(part) + } + } + log.info("pruned", { count: toPrune.length }) + } + }) + + const processCompaction = Effect.fn("SessionCompaction.process")(function* (input: { parentID: MessageID messages: MessageV2.WithParts[] sessionID: SessionID auto: boolean overflow?: boolean - }) => Effect.Effect<"continue" | "stop"> - readonly create: (input: { - sessionID: SessionID - agent: string - model: { providerID: ProviderID; modelID: ModelID } - auto: boolean - overflow?: boolean - }) => Effect.Effect - } + }) { + const 
parent = input.messages.findLast((m) => m.info.id === input.parentID) + if (!parent || parent.info.role !== "user") { + throw new Error(`Compaction parent must be a user message: ${input.parentID}`) + } + const userMessage = parent.info - export class Service extends Context.Service()("@opencode/SessionCompaction") {} - - export const layer: Layer.Layer< - Service, - never, - | Bus.Service - | Config.Service - | Session.Service - | Agent.Service - | Plugin.Service - | SessionProcessor.Service - | Provider.Service - > = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const config = yield* Config.Service - const session = yield* Session.Service - const agents = yield* Agent.Service - const plugin = yield* Plugin.Service - const processors = yield* SessionProcessor.Service - const provider = yield* Provider.Service - - const isOverflow = Effect.fn("SessionCompaction.isOverflow")(function* (input: { - tokens: MessageV2.Assistant["tokens"] - model: Provider.Model - }) { - return overflow({ cfg: yield* config.get(), tokens: input.tokens, model: input.model }) - }) - - // goes backwards through parts until there are PRUNE_PROTECT tokens worth of tool - // calls, then erases output of older tool calls to free context space - const prune = Effect.fn("SessionCompaction.prune")(function* (input: { sessionID: SessionID }) { - const cfg = yield* config.get() - if (cfg.compaction?.prune === false) return - log.info("pruning") - - const msgs = yield* session - .messages({ sessionID: input.sessionID }) - .pipe(Effect.catchIf(NotFoundError.isInstance, () => Effect.succeed(undefined))) - if (!msgs) return - - let total = 0 - let pruned = 0 - const toPrune: MessageV2.ToolPart[] = [] - let turns = 0 - - loop: for (let msgIndex = msgs.length - 1; msgIndex >= 0; msgIndex--) { - const msg = msgs[msgIndex] - if (msg.info.role === "user") turns++ - if (turns < 2) continue - if (msg.info.role === "assistant" && msg.info.summary) break loop - for (let 
partIndex = msg.parts.length - 1; partIndex >= 0; partIndex--) { - const part = msg.parts[partIndex] - if (part.type === "tool") - if (part.state.status === "completed") { - if (PRUNE_PROTECTED_TOOLS.includes(part.tool)) continue - if (part.state.time.compacted) break loop - const estimate = Token.estimate(part.state.output) - total += estimate - if (total > PRUNE_PROTECT) { - pruned += estimate - toPrune.push(part) - } - } + let messages = input.messages + let replay: + | { + info: MessageV2.User + parts: MessageV2.Part[] + } + | undefined + if (input.overflow) { + const idx = input.messages.findIndex((m) => m.info.id === input.parentID) + for (let i = idx - 1; i >= 0; i--) { + const msg = input.messages[i] + if (msg.info.role === "user" && !msg.parts.some((p) => p.type === "compaction")) { + replay = { info: msg.info, parts: msg.parts } + messages = input.messages.slice(0, i) + break } } - - log.info("found", { pruned, total }) - if (pruned > PRUNE_MINIMUM) { - for (const part of toPrune) { - if (part.state.status === "completed") { - part.state.time.compacted = Date.now() - yield* session.updatePart(part) - } - } - log.info("pruned", { count: toPrune.length }) + const hasContent = + replay && messages.some((m) => m.info.role === "user" && !m.parts.some((p) => p.type === "compaction")) + if (!hasContent) { + replay = undefined + messages = input.messages } - }) + } - const processCompaction = Effect.fn("SessionCompaction.process")(function* (input: { - parentID: MessageID - messages: MessageV2.WithParts[] - sessionID: SessionID - auto: boolean - overflow?: boolean - }) { - const parent = input.messages.findLast((m) => m.info.id === input.parentID) - if (!parent || parent.info.role !== "user") { - throw new Error(`Compaction parent must be a user message: ${input.parentID}`) - } - const userMessage = parent.info - - let messages = input.messages - let replay: - | { - info: MessageV2.User - parts: MessageV2.Part[] - } - | undefined - if (input.overflow) { - const 
idx = input.messages.findIndex((m) => m.info.id === input.parentID) - for (let i = idx - 1; i >= 0; i--) { - const msg = input.messages[i] - if (msg.info.role === "user" && !msg.parts.some((p) => p.type === "compaction")) { - replay = { info: msg.info, parts: msg.parts } - messages = input.messages.slice(0, i) - break - } - } - const hasContent = - replay && messages.some((m) => m.info.role === "user" && !m.parts.some((p) => p.type === "compaction")) - if (!hasContent) { - replay = undefined - messages = input.messages - } - } - - const agent = yield* agents.get("compaction") - const model = agent.model - ? yield* provider.getModel(agent.model.providerID, agent.model.modelID) - : yield* provider.getModel(userMessage.model.providerID, userMessage.model.modelID) - // Allow plugins to inject context or replace compaction prompt. - const compacting = yield* plugin.trigger( - "experimental.session.compacting", - { sessionID: input.sessionID }, - { context: [], prompt: undefined }, - ) - const defaultPrompt = `Provide a detailed prompt for continuing our conversation above. + const agent = yield* agents.get("compaction") + const model = agent.model + ? yield* provider.getModel(agent.model.providerID, agent.model.modelID) + : yield* provider.getModel(userMessage.model.providerID, userMessage.model.modelID) + // Allow plugins to inject context or replace compaction prompt. + const compacting = yield* plugin.trigger( + "experimental.session.compacting", + { sessionID: input.sessionID }, + { context: [], prompt: undefined }, + ) + const defaultPrompt = `Provide a detailed prompt for continuing our conversation above. Focus on information that would be helpful for continuing the conversation, including what we did, what we're doing, which files we're working on, and what we're going to do next. The summary that you construct will be used so that another agent can read it and continue the work. Do not call any tools. Respond only with the summary text. 
@@ -213,200 +212,201 @@ When constructing the summary, try to stick to this template: [Construct a structured list of relevant files that have been read, edited, or created that pertain to the task at hand. If all the files in a directory are relevant, include the path to the directory.] ---` - const prompt = compacting.prompt ?? [defaultPrompt, ...compacting.context].join("\n\n") - const msgs = structuredClone(messages) - yield* plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs }) - const modelMessages = yield* MessageV2.toModelMessagesEffect(msgs, model, { stripMedia: true }) - const ctx = yield* InstanceState.context - const msg: MessageV2.Assistant = { - id: MessageID.ascending(), - role: "assistant", - parentID: input.parentID, - sessionID: input.sessionID, - mode: "compaction", - agent: "compaction", - variant: userMessage.model.variant, - summary: true, - path: { - cwd: ctx.directory, - root: ctx.worktree, + const prompt = compacting.prompt ?? [defaultPrompt, ...compacting.context].join("\n\n") + const msgs = structuredClone(messages) + yield* plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs }) + const modelMessages = yield* MessageV2.toModelMessagesEffect(msgs, model, { stripMedia: true }) + const ctx = yield* InstanceState.context + const msg: MessageV2.Assistant = { + id: MessageID.ascending(), + role: "assistant", + parentID: input.parentID, + sessionID: input.sessionID, + mode: "compaction", + agent: "compaction", + variant: userMessage.model.variant, + summary: true, + path: { + cwd: ctx.directory, + root: ctx.worktree, + }, + cost: 0, + tokens: { + output: 0, + input: 0, + reasoning: 0, + cache: { read: 0, write: 0 }, + }, + modelID: model.id, + providerID: model.providerID, + time: { + created: Date.now(), + }, + } + yield* session.updateMessage(msg) + const processor = yield* processors.create({ + assistantMessage: msg, + sessionID: input.sessionID, + model, + }) + const result = yield* 
processor.process({ + user: userMessage, + agent, + sessionID: input.sessionID, + tools: {}, + system: [], + messages: [ + ...modelMessages, + { + role: "user", + content: [{ type: "text", text: prompt }], }, - cost: 0, - tokens: { - output: 0, - input: 0, - reasoning: 0, - cache: { read: 0, write: 0 }, - }, - modelID: model.id, - providerID: model.providerID, - time: { - created: Date.now(), - }, - } - yield* session.updateMessage(msg) - const processor = yield* processors.create({ - assistantMessage: msg, - sessionID: input.sessionID, - model, - }) - const result = yield* processor.process({ - user: userMessage, - agent, - sessionID: input.sessionID, - tools: {}, - system: [], - messages: [ - ...modelMessages, - { - role: "user", - content: [{ type: "text", text: prompt }], - }, - ], - model, - }) + ], + model, + }) - if (result === "compact") { - processor.message.error = new MessageV2.ContextOverflowError({ - message: replay - ? "Conversation history too large to compact - exceeds model context limit" - : "Session too large to compact - context exceeds model limit even after stripping media", - }).toObject() - processor.message.finish = "error" - yield* session.updateMessage(processor.message) - return "stop" + if (result === "compact") { + processor.message.error = new MessageV2.ContextOverflowError({ + message: replay + ? 
"Conversation history too large to compact - exceeds model context limit" + : "Session too large to compact - context exceeds model limit even after stripping media", + }).toObject() + processor.message.finish = "error" + yield* session.updateMessage(processor.message) + return "stop" + } + + if (result === "continue" && input.auto) { + if (replay) { + const original = replay.info + const replayMsg = yield* session.updateMessage({ + id: MessageID.ascending(), + role: "user", + sessionID: input.sessionID, + time: { created: Date.now() }, + agent: original.agent, + model: original.model, + format: original.format, + tools: original.tools, + system: original.system, + }) + for (const part of replay.parts) { + if (part.type === "compaction") continue + const replayPart = + part.type === "file" && MessageV2.isMedia(part.mime) + ? { type: "text" as const, text: `[Attached ${part.mime}: ${part.filename ?? "file"}]` } + : part + yield* session.updatePart({ + ...replayPart, + id: PartID.ascending(), + messageID: replayMsg.id, + sessionID: input.sessionID, + }) + } } - if (result === "continue" && input.auto) { - if (replay) { - const original = replay.info - const replayMsg = yield* session.updateMessage({ + if (!replay) { + const info = yield* provider.getProvider(userMessage.model.providerID) + if ( + (yield* plugin.trigger( + "experimental.compaction.autocontinue", + { + sessionID: input.sessionID, + agent: userMessage.agent, + model: yield* provider.getModel(userMessage.model.providerID, userMessage.model.modelID), + provider: { + source: info.source, + info, + options: info.options, + }, + message: userMessage, + overflow: input.overflow === true, + }, + { enabled: true }, + )).enabled + ) { + const continueMsg = yield* session.updateMessage({ id: MessageID.ascending(), role: "user", sessionID: input.sessionID, time: { created: Date.now() }, - agent: original.agent, - model: original.model, - format: original.format, - tools: original.tools, - system: original.system, 
+ agent: userMessage.agent, + model: userMessage.model, + }) + const text = + (input.overflow + ? "The previous request exceeded the provider's size limit due to large media attachments. The conversation was compacted and media files were removed from context. If the user was asking about attached images or files, explain that the attachments were too large to process and suggest they try again with smaller or fewer files.\n\n" + : "") + + "Continue if you have next steps, or stop and ask for clarification if you are unsure how to proceed." + yield* session.updatePart({ + id: PartID.ascending(), + messageID: continueMsg.id, + sessionID: input.sessionID, + type: "text", + // Internal marker for auto-compaction followups so provider plugins + // can distinguish them from manual post-compaction user prompts. + // This is not a stable plugin contract and may change or disappear. + metadata: { compaction_continue: true }, + synthetic: true, + text, + time: { + start: Date.now(), + end: Date.now(), + }, }) - for (const part of replay.parts) { - if (part.type === "compaction") continue - const replayPart = - part.type === "file" && MessageV2.isMedia(part.mime) - ? { type: "text" as const, text: `[Attached ${part.mime}: ${part.filename ?? 
"file"}]` } - : part - yield* session.updatePart({ - ...replayPart, - id: PartID.ascending(), - messageID: replayMsg.id, - sessionID: input.sessionID, - }) - } - } - - if (!replay) { - const info = yield* provider.getProvider(userMessage.model.providerID) - if ( - (yield* plugin.trigger( - "experimental.compaction.autocontinue", - { - sessionID: input.sessionID, - agent: userMessage.agent, - model: yield* provider.getModel(userMessage.model.providerID, userMessage.model.modelID), - provider: { - source: info.source, - info, - options: info.options, - }, - message: userMessage, - overflow: input.overflow === true, - }, - { enabled: true }, - )).enabled - ) { - const continueMsg = yield* session.updateMessage({ - id: MessageID.ascending(), - role: "user", - sessionID: input.sessionID, - time: { created: Date.now() }, - agent: userMessage.agent, - model: userMessage.model, - }) - const text = - (input.overflow - ? "The previous request exceeded the provider's size limit due to large media attachments. The conversation was compacted and media files were removed from context. If the user was asking about attached images or files, explain that the attachments were too large to process and suggest they try again with smaller or fewer files.\n\n" - : "") + - "Continue if you have next steps, or stop and ask for clarification if you are unsure how to proceed." - yield* session.updatePart({ - id: PartID.ascending(), - messageID: continueMsg.id, - sessionID: input.sessionID, - type: "text", - // Internal marker for auto-compaction followups so provider plugins - // can distinguish them from manual post-compaction user prompts. - // This is not a stable plugin contract and may change or disappear. 
- metadata: { compaction_continue: true }, - synthetic: true, - text, - time: { - start: Date.now(), - end: Date.now(), - }, - }) - } } } + } - if (processor.message.error) return "stop" - if (result === "continue") yield* bus.publish(Event.Compacted, { sessionID: input.sessionID }) - return result + if (processor.message.error) return "stop" + if (result === "continue") yield* bus.publish(Event.Compacted, { sessionID: input.sessionID }) + return result + }) + + const create = Effect.fn("SessionCompaction.create")(function* (input: { + sessionID: SessionID + agent: string + model: { providerID: ProviderID; modelID: ModelID } + auto: boolean + overflow?: boolean + }) { + const msg = yield* session.updateMessage({ + id: MessageID.ascending(), + role: "user", + model: input.model, + sessionID: input.sessionID, + agent: input.agent, + time: { created: Date.now() }, }) - - const create = Effect.fn("SessionCompaction.create")(function* (input: { - sessionID: SessionID - agent: string - model: { providerID: ProviderID; modelID: ModelID } - auto: boolean - overflow?: boolean - }) { - const msg = yield* session.updateMessage({ - id: MessageID.ascending(), - role: "user", - model: input.model, - sessionID: input.sessionID, - agent: input.agent, - time: { created: Date.now() }, - }) - yield* session.updatePart({ - id: PartID.ascending(), - messageID: msg.id, - sessionID: msg.sessionID, - type: "compaction", - auto: input.auto, - overflow: input.overflow, - }) + yield* session.updatePart({ + id: PartID.ascending(), + messageID: msg.id, + sessionID: msg.sessionID, + type: "compaction", + auto: input.auto, + overflow: input.overflow, }) + }) - return Service.of({ - isOverflow, - prune, - process: processCompaction, - create, - }) - }), - ) + return Service.of({ + isOverflow, + prune, + process: processCompaction, + create, + }) + }), +) - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(Provider.defaultLayer), - Layer.provide(Session.defaultLayer), - 
Layer.provide(SessionProcessor.defaultLayer), - Layer.provide(Agent.defaultLayer), - Layer.provide(Plugin.defaultLayer), - Layer.provide(Bus.layer), - Layer.provide(Config.defaultLayer), - ), - ) -} +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(Provider.defaultLayer), + Layer.provide(Session.defaultLayer), + Layer.provide(SessionProcessor.defaultLayer), + Layer.provide(Agent.defaultLayer), + Layer.provide(Plugin.defaultLayer), + Layer.provide(Bus.layer), + Layer.provide(Config.defaultLayer), + ), +) + +export * as SessionCompaction from "./compaction" diff --git a/packages/opencode/src/session/instruction.ts b/packages/opencode/src/session/instruction.ts index cd2050adf5..768f352d93 100644 --- a/packages/opencode/src/session/instruction.ts +++ b/packages/opencode/src/session/instruction.ts @@ -50,194 +50,194 @@ function extract(messages: MessageV2.WithParts[]) { return paths } -export namespace Instruction { - export interface Interface { - readonly clear: (messageID: MessageID) => Effect.Effect - readonly systemPaths: () => Effect.Effect, AppFileSystem.Error> - readonly system: () => Effect.Effect - readonly find: (dir: string) => Effect.Effect - readonly resolve: ( - messages: MessageV2.WithParts[], - filepath: string, - messageID: MessageID, - ) => Effect.Effect<{ filepath: string; content: string }[], AppFileSystem.Error> - } +export interface Interface { + readonly clear: (messageID: MessageID) => Effect.Effect + readonly systemPaths: () => Effect.Effect, AppFileSystem.Error> + readonly system: () => Effect.Effect + readonly find: (dir: string) => Effect.Effect + readonly resolve: ( + messages: MessageV2.WithParts[], + filepath: string, + messageID: MessageID, + ) => Effect.Effect<{ filepath: string; content: string }[], AppFileSystem.Error> +} - export class Service extends Context.Service()("@opencode/Instruction") {} +export class Service extends Context.Service()("@opencode/Instruction") {} - export const layer: Layer.Layer 
= - Layer.effect( - Service, - Effect.gen(function* () { - const cfg = yield* Config.Service - const fs = yield* AppFileSystem.Service - const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient)) +export const layer: Layer.Layer = + Layer.effect( + Service, + Effect.gen(function* () { + const cfg = yield* Config.Service + const fs = yield* AppFileSystem.Service + const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient)) - const state = yield* InstanceState.make( - Effect.fn("Instruction.state")(() => - Effect.succeed({ - // Track which instruction files have already been attached for a given assistant message. - claims: new Map>(), - }), - ), - ) + const state = yield* InstanceState.make( + Effect.fn("Instruction.state")(() => + Effect.succeed({ + // Track which instruction files have already been attached for a given assistant message. + claims: new Map>(), + }), + ), + ) - const relative = Effect.fnUntraced(function* (instruction: string) { - if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { - return yield* fs - .globUp(instruction, Instance.directory, Instance.worktree) - .pipe(Effect.catch(() => Effect.succeed([] as string[]))) - } - if (!Flag.OPENCODE_CONFIG_DIR) { - log.warn( - `Skipping relative instruction "${instruction}" - no OPENCODE_CONFIG_DIR set while project config is disabled`, - ) - return [] - } + const relative = Effect.fnUntraced(function* (instruction: string) { + if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { return yield* fs - .globUp(instruction, Flag.OPENCODE_CONFIG_DIR, Flag.OPENCODE_CONFIG_DIR) + .globUp(instruction, Instance.directory, Instance.worktree) .pipe(Effect.catch(() => Effect.succeed([] as string[]))) - }) - - const read = Effect.fnUntraced(function* (filepath: string) { - return yield* fs.readFileString(filepath).pipe(Effect.catch(() => Effect.succeed(""))) - }) - - const fetch = Effect.fnUntraced(function* (url: string) { - const res = yield* 
http.execute(HttpClientRequest.get(url)).pipe( - Effect.timeout(5000), - Effect.catch(() => Effect.succeed(null)), + } + if (!Flag.OPENCODE_CONFIG_DIR) { + log.warn( + `Skipping relative instruction "${instruction}" - no OPENCODE_CONFIG_DIR set while project config is disabled`, ) - if (!res) return "" - const body = yield* res.arrayBuffer.pipe(Effect.catch(() => Effect.succeed(new ArrayBuffer(0)))) - return new TextDecoder().decode(body) - }) + return [] + } + return yield* fs + .globUp(instruction, Flag.OPENCODE_CONFIG_DIR, Flag.OPENCODE_CONFIG_DIR) + .pipe(Effect.catch(() => Effect.succeed([] as string[]))) + }) - const clear = Effect.fn("Instruction.clear")(function* (messageID: MessageID) { - const s = yield* InstanceState.get(state) - s.claims.delete(messageID) - }) + const read = Effect.fnUntraced(function* (filepath: string) { + return yield* fs.readFileString(filepath).pipe(Effect.catch(() => Effect.succeed(""))) + }) - const systemPaths = Effect.fn("Instruction.systemPaths")(function* () { - const config = yield* cfg.get() - const paths = new Set() + const fetch = Effect.fnUntraced(function* (url: string) { + const res = yield* http.execute(HttpClientRequest.get(url)).pipe( + Effect.timeout(5000), + Effect.catch(() => Effect.succeed(null)), + ) + if (!res) return "" + const body = yield* res.arrayBuffer.pipe(Effect.catch(() => Effect.succeed(new ArrayBuffer(0)))) + return new TextDecoder().decode(body) + }) - // The first project-level match wins so we don't stack AGENTS.md/CLAUDE.md from every ancestor. 
- if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { - for (const file of FILES) { - const matches = yield* fs.findUp(file, Instance.directory, Instance.worktree) - if (matches.length > 0) { - matches.forEach((item) => paths.add(path.resolve(item))) - break - } - } - } + const clear = Effect.fn("Instruction.clear")(function* (messageID: MessageID) { + const s = yield* InstanceState.get(state) + s.claims.delete(messageID) + }) - for (const file of globalFiles()) { - if (yield* fs.existsSafe(file)) { - paths.add(path.resolve(file)) + const systemPaths = Effect.fn("Instruction.systemPaths")(function* () { + const config = yield* cfg.get() + const paths = new Set() + + // The first project-level match wins so we don't stack AGENTS.md/CLAUDE.md from every ancestor. + if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { + for (const file of FILES) { + const matches = yield* fs.findUp(file, Instance.directory, Instance.worktree) + if (matches.length > 0) { + matches.forEach((item) => paths.add(path.resolve(item))) break } } + } - if (config.instructions) { - for (const raw of config.instructions) { - if (raw.startsWith("https://") || raw.startsWith("http://")) continue - const instruction = raw.startsWith("~/") ? path.join(os.homedir(), raw.slice(2)) : raw - const matches = yield* ( - path.isAbsolute(instruction) - ? fs.glob(path.basename(instruction), { - cwd: path.dirname(instruction), - absolute: true, - include: "file", - }) - : relative(instruction) - ).pipe(Effect.catch(() => Effect.succeed([] as string[]))) - matches.forEach((item) => paths.add(path.resolve(item))) - } + for (const file of globalFiles()) { + if (yield* fs.existsSafe(file)) { + paths.add(path.resolve(file)) + break } + } - return paths - }) - - const system = Effect.fn("Instruction.system")(function* () { - const config = yield* cfg.get() - const paths = yield* systemPaths() - const urls = (config.instructions ?? 
[]).filter( - (item) => item.startsWith("https://") || item.startsWith("http://"), - ) - - const files = yield* Effect.forEach(Array.from(paths), read, { concurrency: 8 }) - const remote = yield* Effect.forEach(urls, fetch, { concurrency: 4 }) - - return [ - ...Array.from(paths).flatMap((item, i) => (files[i] ? [`Instructions from: ${item}\n${files[i]}`] : [])), - ...urls.flatMap((item, i) => (remote[i] ? [`Instructions from: ${item}\n${remote[i]}`] : [])), - ] - }) - - const find = Effect.fn("Instruction.find")(function* (dir: string) { - for (const file of FILES) { - const filepath = path.resolve(path.join(dir, file)) - if (yield* fs.existsSafe(filepath)) return filepath + if (config.instructions) { + for (const raw of config.instructions) { + if (raw.startsWith("https://") || raw.startsWith("http://")) continue + const instruction = raw.startsWith("~/") ? path.join(os.homedir(), raw.slice(2)) : raw + const matches = yield* ( + path.isAbsolute(instruction) + ? fs.glob(path.basename(instruction), { + cwd: path.dirname(instruction), + absolute: true, + include: "file", + }) + : relative(instruction) + ).pipe(Effect.catch(() => Effect.succeed([] as string[]))) + matches.forEach((item) => paths.add(path.resolve(item))) } - }) + } - const resolve = Effect.fn("Instruction.resolve")(function* ( - messages: MessageV2.WithParts[], - filepath: string, - messageID: MessageID, - ) { - const sys = yield* systemPaths() - const already = extract(messages) - const results: { filepath: string; content: string }[] = [] - const s = yield* InstanceState.get(state) + return paths + }) - const target = path.resolve(filepath) - const root = path.resolve(Instance.directory) - let current = path.dirname(target) + const system = Effect.fn("Instruction.system")(function* () { + const config = yield* cfg.get() + const paths = yield* systemPaths() + const urls = (config.instructions ?? 
[]).filter( + (item) => item.startsWith("https://") || item.startsWith("http://"), + ) - // Walk upward from the file being read and attach nearby instruction files once per message. - while (current.startsWith(root) && current !== root) { - const found = yield* find(current) - if (!found || found === target || sys.has(found) || already.has(found)) { - current = path.dirname(current) - continue - } + const files = yield* Effect.forEach(Array.from(paths), read, { concurrency: 8 }) + const remote = yield* Effect.forEach(urls, fetch, { concurrency: 4 }) - let set = s.claims.get(messageID) - if (!set) { - set = new Set() - s.claims.set(messageID, set) - } - if (set.has(found)) { - current = path.dirname(current) - continue - } + return [ + ...Array.from(paths).flatMap((item, i) => (files[i] ? [`Instructions from: ${item}\n${files[i]}`] : [])), + ...urls.flatMap((item, i) => (remote[i] ? [`Instructions from: ${item}\n${remote[i]}`] : [])), + ] + }) - set.add(found) - const content = yield* read(found) - if (content) { - results.push({ filepath: found, content: `Instructions from: ${found}\n${content}` }) - } + const find = Effect.fn("Instruction.find")(function* (dir: string) { + for (const file of FILES) { + const filepath = path.resolve(path.join(dir, file)) + if (yield* fs.existsSafe(filepath)) return filepath + } + }) + const resolve = Effect.fn("Instruction.resolve")(function* ( + messages: MessageV2.WithParts[], + filepath: string, + messageID: MessageID, + ) { + const sys = yield* systemPaths() + const already = extract(messages) + const results: { filepath: string; content: string }[] = [] + const s = yield* InstanceState.get(state) + + const target = path.resolve(filepath) + const root = path.resolve(Instance.directory) + let current = path.dirname(target) + + // Walk upward from the file being read and attach nearby instruction files once per message. 
+ while (current.startsWith(root) && current !== root) { + const found = yield* find(current) + if (!found || found === target || sys.has(found) || already.has(found)) { current = path.dirname(current) + continue } - return results - }) + let set = s.claims.get(messageID) + if (!set) { + set = new Set() + s.claims.set(messageID, set) + } + if (set.has(found)) { + current = path.dirname(current) + continue + } - return Service.of({ clear, systemPaths, system, find, resolve }) - }), - ) + set.add(found) + const content = yield* read(found) + if (content) { + results.push({ filepath: found, content: `Instructions from: ${found}\n${content}` }) + } - export const defaultLayer = layer.pipe( - Layer.provide(Config.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(FetchHttpClient.layer), + current = path.dirname(current) + } + + return results + }) + + return Service.of({ clear, systemPaths, system, find, resolve }) + }), ) - export function loaded(messages: MessageV2.WithParts[]) { - return extract(messages) - } +export const defaultLayer = layer.pipe( + Layer.provide(Config.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(FetchHttpClient.layer), +) + +export function loaded(messages: MessageV2.WithParts[]) { + return extract(messages) } + +export * as Instruction from "./instruction" diff --git a/packages/opencode/src/session/llm.ts b/packages/opencode/src/session/llm.ts index d38c29765a..b66e99fc82 100644 --- a/packages/opencode/src/session/llm.ts +++ b/packages/opencode/src/session/llm.ts @@ -25,429 +25,429 @@ import { EffectBridge } from "@/effect" import * as Option from "effect/Option" import * as OtelTracer from "@effect/opentelemetry/Tracer" -export namespace LLM { - const log = Log.create({ service: "llm" }) - export const OUTPUT_TOKEN_MAX = ProviderTransform.OUTPUT_TOKEN_MAX - type Result = Awaited> +const log = Log.create({ service: "llm" }) +export const OUTPUT_TOKEN_MAX = ProviderTransform.OUTPUT_TOKEN_MAX 
+type Result = Awaited> - export type StreamInput = { - user: MessageV2.User - sessionID: string - parentSessionID?: string - model: Provider.Model - agent: Agent.Info - permission?: Permission.Ruleset - system: string[] - messages: ModelMessage[] - small?: boolean - tools: Record - retries?: number - toolChoice?: "auto" | "required" | "none" - } +export type StreamInput = { + user: MessageV2.User + sessionID: string + parentSessionID?: string + model: Provider.Model + agent: Agent.Info + permission?: Permission.Ruleset + system: string[] + messages: ModelMessage[] + small?: boolean + tools: Record + retries?: number + toolChoice?: "auto" | "required" | "none" +} - export type StreamRequest = StreamInput & { - abort: AbortSignal - } +export type StreamRequest = StreamInput & { + abort: AbortSignal +} - export type Event = Result["fullStream"] extends AsyncIterable ? T : never +export type Event = Result["fullStream"] extends AsyncIterable ? T : never - export interface Interface { - readonly stream: (input: StreamInput) => Stream.Stream - } +export interface Interface { + readonly stream: (input: StreamInput) => Stream.Stream +} - export class Service extends Context.Service()("@opencode/LLM") {} +export class Service extends Context.Service()("@opencode/LLM") {} - const live: Layer.Layer< - Service, - never, - Auth.Service | Config.Service | Provider.Service | Plugin.Service | Permission.Service - > = Layer.effect( - Service, - Effect.gen(function* () { - const auth = yield* Auth.Service - const config = yield* Config.Service - const provider = yield* Provider.Service - const plugin = yield* Plugin.Service - const perm = yield* Permission.Service +const live: Layer.Layer< + Service, + never, + Auth.Service | Config.Service | Provider.Service | Plugin.Service | Permission.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const auth = yield* Auth.Service + const config = yield* Config.Service + const provider = yield* Provider.Service + const 
plugin = yield* Plugin.Service + const perm = yield* Permission.Service - const run = Effect.fn("LLM.run")(function* (input: StreamRequest) { - const l = log - .clone() - .tag("providerID", input.model.providerID) - .tag("modelID", input.model.id) - .tag("sessionID", input.sessionID) - .tag("small", (input.small ?? false).toString()) - .tag("agent", input.agent.name) - .tag("mode", input.agent.mode) - l.info("stream", { - modelID: input.model.id, - providerID: input.model.providerID, - }) - - const [language, cfg, item, info] = yield* Effect.all( - [ - provider.getLanguage(input.model), - config.get(), - provider.getProvider(input.model.providerID), - auth.get(input.model.providerID), - ], - { concurrency: "unbounded" }, - ) - - // TODO: move this to a proper hook - const isOpenaiOauth = item.id === "openai" && info?.type === "oauth" - - const system: string[] = [] - system.push( - [ - // use agent prompt otherwise provider prompt - ...(input.agent.prompt ? [input.agent.prompt] : SystemPrompt.provider(input.model)), - // any custom prompt passed into this call - ...input.system, - // any custom prompt from last user message - ...(input.user.system ? [input.user.system] : []), - ] - .filter((x) => x) - .join("\n"), - ) - - const header = system[0] - yield* plugin.trigger( - "experimental.chat.system.transform", - { sessionID: input.sessionID, model: input.model }, - { system }, - ) - // rejoin to maintain 2-part structure for caching if header unchanged - if (system.length > 2 && system[0] === header) { - const rest = system.slice(1) - system.length = 0 - system.push(header, rest.join("\n")) - } - - const variant = - !input.small && input.model.variants && input.user.model.variant - ? input.model.variants[input.user.model.variant] - : {} - const base = input.small - ? 
ProviderTransform.smallOptions(input.model) - : ProviderTransform.options({ - model: input.model, - sessionID: input.sessionID, - providerOptions: item.options, - }) - const options: Record = pipe( - base, - mergeDeep(input.model.options), - mergeDeep(input.agent.options), - mergeDeep(variant), - ) - if (isOpenaiOauth) { - options.instructions = system.join("\n") - } - - const isWorkflow = language instanceof GitLabWorkflowLanguageModel - const messages = isOpenaiOauth - ? input.messages - : isWorkflow - ? input.messages - : [ - ...system.map( - (x): ModelMessage => ({ - role: "system", - content: x, - }), - ), - ...input.messages, - ] - - const params = yield* plugin.trigger( - "chat.params", - { - sessionID: input.sessionID, - agent: input.agent.name, - model: input.model, - provider: item, - message: input.user, - }, - { - temperature: input.model.capabilities.temperature - ? (input.agent.temperature ?? ProviderTransform.temperature(input.model)) - : undefined, - topP: input.agent.topP ?? ProviderTransform.topP(input.model), - topK: ProviderTransform.topK(input.model), - maxOutputTokens: ProviderTransform.maxOutputTokens(input.model), - options, - }, - ) - - const { headers } = yield* plugin.trigger( - "chat.headers", - { - sessionID: input.sessionID, - agent: input.agent.name, - model: input.model, - provider: item, - message: input.user, - }, - { - headers: {}, - }, - ) - - const tools = resolveTools(input) - - // LiteLLM and some Anthropic proxies require the tools parameter to be present - // when message history contains tool calls, even if no tools are being used. - // Add a dummy tool that is never called to satisfy this validation. - // This is enabled for: - // 1. Providers with "litellm" in their ID or API ID (auto-detected) - // 2. 
Providers with explicit "litellmProxy: true" option (opt-in for custom gateways) - const isLiteLLMProxy = - item.options?.["litellmProxy"] === true || - input.model.providerID.toLowerCase().includes("litellm") || - input.model.api.id.toLowerCase().includes("litellm") - - // LiteLLM/Bedrock rejects requests where the message history contains tool - // calls but no tools param is present. When there are no active tools (e.g. - // during compaction), inject a stub tool to satisfy the validation requirement. - // The stub description explicitly tells the model not to call it. - if ( - (isLiteLLMProxy || input.model.providerID.includes("github-copilot")) && - Object.keys(tools).length === 0 && - hasToolCalls(input.messages) - ) { - tools["_noop"] = tool({ - description: "Do not call this tool. It exists only for API compatibility and must never be invoked.", - inputSchema: jsonSchema({ - type: "object", - properties: { - reason: { type: "string", description: "Unused" }, - }, - }), - execute: async () => ({ output: "", title: "", metadata: {} }), - }) - } - - // Wire up toolExecutor for DWS workflow models so that tool calls - // from the workflow service are executed via opencode's tool system - // and results sent back over the WebSocket. 
- if (language instanceof GitLabWorkflowLanguageModel) { - const workflowModel = language as GitLabWorkflowLanguageModel & { - sessionID?: string - sessionPreapprovedTools?: string[] - approvalHandler?: (approvalTools: { name: string; args: string }[]) => Promise<{ approved: boolean }> - } - workflowModel.sessionID = input.sessionID - workflowModel.systemPrompt = system.join("\n") - workflowModel.toolExecutor = async (toolName, argsJson, _requestID) => { - const t = tools[toolName] - if (!t || !t.execute) { - return { result: "", error: `Unknown tool: ${toolName}` } - } - try { - const result = await t.execute!(JSON.parse(argsJson), { - toolCallId: _requestID, - messages: input.messages, - abortSignal: input.abort, - }) - const output = typeof result === "string" ? result : (result?.output ?? JSON.stringify(result)) - return { - result: output, - metadata: typeof result === "object" ? result?.metadata : undefined, - title: typeof result === "object" ? result?.title : undefined, - } - } catch (e: any) { - return { result: "", error: e.message ?? String(e) } - } - } - - const ruleset = Permission.merge(input.agent.permission ?? [], input.permission ?? 
[]) - workflowModel.sessionPreapprovedTools = Object.keys(tools).filter((name) => { - const match = ruleset.findLast((rule) => Wildcard.match(name, rule.permission)) - return !match || match.action !== "ask" - }) - - const bridge = yield* EffectBridge.make() - const approvedToolsForSession = new Set() - workflowModel.approvalHandler = Instance.bind(async (approvalTools) => { - const uniqueNames = [...new Set(approvalTools.map((t: { name: string }) => t.name))] as string[] - // Auto-approve tools that were already approved in this session - // (prevents infinite approval loops for server-side MCP tools) - if (uniqueNames.every((name) => approvedToolsForSession.has(name))) { - return { approved: true } - } - - const id = PermissionID.ascending() - let unsub: (() => void) | undefined - try { - unsub = Bus.subscribe(Permission.Event.Replied, (evt) => { - if (evt.properties.requestID === id) void evt.properties.reply - }) - const toolPatterns = approvalTools.map((t: { name: string; args: string }) => { - try { - const parsed = JSON.parse(t.args) as Record - const title = (parsed?.title ?? parsed?.name ?? "") as string - return title ? `${t.name}: ${title}` : t.name - } catch { - return t.name - } - }) - const uniquePatterns = [...new Set(toolPatterns)] as string[] - await bridge.promise( - perm.ask({ - id, - sessionID: SessionID.make(input.sessionID), - permission: "workflow_tool_approval", - patterns: uniquePatterns, - metadata: { tools: approvalTools }, - always: uniquePatterns, - ruleset: [], - }), - ) - for (const name of uniqueNames) approvedToolsForSession.add(name) - workflowModel.sessionPreapprovedTools = [...(workflowModel.sessionPreapprovedTools ?? []), ...uniqueNames] - return { approved: true } - } catch { - return { approved: false } - } finally { - unsub?.() - } - }) - } - - const tracer = cfg.experimental?.openTelemetry - ? 
Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer)) - : undefined - - return streamText({ - onError(error) { - l.error("stream error", { - error, - }) - }, - async experimental_repairToolCall(failed) { - const lower = failed.toolCall.toolName.toLowerCase() - if (lower !== failed.toolCall.toolName && tools[lower]) { - l.info("repairing tool call", { - tool: failed.toolCall.toolName, - repaired: lower, - }) - return { - ...failed.toolCall, - toolName: lower, - } - } - return { - ...failed.toolCall, - input: JSON.stringify({ - tool: failed.toolCall.toolName, - error: failed.error.message, - }), - toolName: "invalid", - } - }, - temperature: params.temperature, - topP: params.topP, - topK: params.topK, - providerOptions: ProviderTransform.providerOptions(input.model, params.options), - activeTools: Object.keys(tools).filter((x) => x !== "invalid"), - tools, - toolChoice: input.toolChoice, - maxOutputTokens: params.maxOutputTokens, - abortSignal: input.abort, - headers: { - ...(input.model.providerID.startsWith("opencode") - ? { - "x-opencode-project": Instance.project.id, - "x-opencode-session": input.sessionID, - "x-opencode-request": input.user.id, - "x-opencode-client": Flag.OPENCODE_CLIENT, - } - : { - "x-session-affinity": input.sessionID, - ...(input.parentSessionID ? { "x-parent-session-id": input.parentSessionID } : {}), - "User-Agent": `opencode/${InstallationVersion}`, - }), - ...input.model.headers, - ...headers, - }, - maxRetries: input.retries ?? 0, - messages, - model: wrapLanguageModel({ - model: language, - middleware: [ - { - specificationVersion: "v3" as const, - async transformParams(args) { - if (args.type === "stream") { - // @ts-expect-error - args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options) - } - return args.params - }, - }, - ], - }), - experimental_telemetry: { - isEnabled: cfg.experimental?.openTelemetry, - functionId: "session.llm", - tracer, - metadata: { - userId: cfg.username ?? 
"unknown", - sessionId: input.sessionID, - }, - }, - }) + const run = Effect.fn("LLM.run")(function* (input: StreamRequest) { + const l = log + .clone() + .tag("providerID", input.model.providerID) + .tag("modelID", input.model.id) + .tag("sessionID", input.sessionID) + .tag("small", (input.small ?? false).toString()) + .tag("agent", input.agent.name) + .tag("mode", input.agent.mode) + l.info("stream", { + modelID: input.model.id, + providerID: input.model.providerID, }) - const stream: Interface["stream"] = (input) => - Stream.scoped( - Stream.unwrap( - Effect.gen(function* () { - const ctrl = yield* Effect.acquireRelease( - Effect.sync(() => new AbortController()), - (ctrl) => Effect.sync(() => ctrl.abort()), - ) + const [language, cfg, item, info] = yield* Effect.all( + [ + provider.getLanguage(input.model), + config.get(), + provider.getProvider(input.model.providerID), + auth.get(input.model.providerID), + ], + { concurrency: "unbounded" }, + ) - const result = yield* run({ ...input, abort: ctrl.signal }) + // TODO: move this to a proper hook + const isOpenaiOauth = item.id === "openai" && info?.type === "oauth" - return Stream.fromAsyncIterable(result.fullStream, (e) => (e instanceof Error ? e : new Error(String(e)))) - }), - ), - ) + const system: string[] = [] + system.push( + [ + // use agent prompt otherwise provider prompt + ...(input.agent.prompt ? [input.agent.prompt] : SystemPrompt.provider(input.model)), + // any custom prompt passed into this call + ...input.system, + // any custom prompt from last user message + ...(input.user.system ? 
[input.user.system] : []), + ] + .filter((x) => x) + .join("\n"), + ) - return Service.of({ stream }) - }), - ) - - export const layer = live.pipe(Layer.provide(Permission.defaultLayer)) - - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(Auth.defaultLayer), - Layer.provide(Config.defaultLayer), - Layer.provide(Provider.defaultLayer), - Layer.provide(Plugin.defaultLayer), - ), - ) - - function resolveTools(input: Pick) { - const disabled = Permission.disabled( - Object.keys(input.tools), - Permission.merge(input.agent.permission, input.permission ?? []), - ) - return Record.filter(input.tools, (_, k) => input.user.tools?.[k] !== false && !disabled.has(k)) - } - - // Check if messages contain any tool-call content - // Used to determine if a dummy tool should be added for LiteLLM proxy compatibility - export function hasToolCalls(messages: ModelMessage[]): boolean { - for (const msg of messages) { - if (!Array.isArray(msg.content)) continue - for (const part of msg.content) { - if (part.type === "tool-call" || part.type === "tool-result") return true + const header = system[0] + yield* plugin.trigger( + "experimental.chat.system.transform", + { sessionID: input.sessionID, model: input.model }, + { system }, + ) + // rejoin to maintain 2-part structure for caching if header unchanged + if (system.length > 2 && system[0] === header) { + const rest = system.slice(1) + system.length = 0 + system.push(header, rest.join("\n")) } - } - return false - } + + const variant = + !input.small && input.model.variants && input.user.model.variant + ? input.model.variants[input.user.model.variant] + : {} + const base = input.small + ? 
ProviderTransform.smallOptions(input.model) + : ProviderTransform.options({ + model: input.model, + sessionID: input.sessionID, + providerOptions: item.options, + }) + const options: Record = pipe( + base, + mergeDeep(input.model.options), + mergeDeep(input.agent.options), + mergeDeep(variant), + ) + if (isOpenaiOauth) { + options.instructions = system.join("\n") + } + + const isWorkflow = language instanceof GitLabWorkflowLanguageModel + const messages = isOpenaiOauth + ? input.messages + : isWorkflow + ? input.messages + : [ + ...system.map( + (x): ModelMessage => ({ + role: "system", + content: x, + }), + ), + ...input.messages, + ] + + const params = yield* plugin.trigger( + "chat.params", + { + sessionID: input.sessionID, + agent: input.agent.name, + model: input.model, + provider: item, + message: input.user, + }, + { + temperature: input.model.capabilities.temperature + ? (input.agent.temperature ?? ProviderTransform.temperature(input.model)) + : undefined, + topP: input.agent.topP ?? ProviderTransform.topP(input.model), + topK: ProviderTransform.topK(input.model), + maxOutputTokens: ProviderTransform.maxOutputTokens(input.model), + options, + }, + ) + + const { headers } = yield* plugin.trigger( + "chat.headers", + { + sessionID: input.sessionID, + agent: input.agent.name, + model: input.model, + provider: item, + message: input.user, + }, + { + headers: {}, + }, + ) + + const tools = resolveTools(input) + + // LiteLLM and some Anthropic proxies require the tools parameter to be present + // when message history contains tool calls, even if no tools are being used. + // Add a dummy tool that is never called to satisfy this validation. + // This is enabled for: + // 1. Providers with "litellm" in their ID or API ID (auto-detected) + // 2. 
Providers with explicit "litellmProxy: true" option (opt-in for custom gateways) + const isLiteLLMProxy = + item.options?.["litellmProxy"] === true || + input.model.providerID.toLowerCase().includes("litellm") || + input.model.api.id.toLowerCase().includes("litellm") + + // LiteLLM/Bedrock rejects requests where the message history contains tool + // calls but no tools param is present. When there are no active tools (e.g. + // during compaction), inject a stub tool to satisfy the validation requirement. + // The stub description explicitly tells the model not to call it. + if ( + (isLiteLLMProxy || input.model.providerID.includes("github-copilot")) && + Object.keys(tools).length === 0 && + hasToolCalls(input.messages) + ) { + tools["_noop"] = tool({ + description: "Do not call this tool. It exists only for API compatibility and must never be invoked.", + inputSchema: jsonSchema({ + type: "object", + properties: { + reason: { type: "string", description: "Unused" }, + }, + }), + execute: async () => ({ output: "", title: "", metadata: {} }), + }) + } + + // Wire up toolExecutor for DWS workflow models so that tool calls + // from the workflow service are executed via opencode's tool system + // and results sent back over the WebSocket. 
+ if (language instanceof GitLabWorkflowLanguageModel) { + const workflowModel = language as GitLabWorkflowLanguageModel & { + sessionID?: string + sessionPreapprovedTools?: string[] + approvalHandler?: (approvalTools: { name: string; args: string }[]) => Promise<{ approved: boolean }> + } + workflowModel.sessionID = input.sessionID + workflowModel.systemPrompt = system.join("\n") + workflowModel.toolExecutor = async (toolName, argsJson, _requestID) => { + const t = tools[toolName] + if (!t || !t.execute) { + return { result: "", error: `Unknown tool: ${toolName}` } + } + try { + const result = await t.execute!(JSON.parse(argsJson), { + toolCallId: _requestID, + messages: input.messages, + abortSignal: input.abort, + }) + const output = typeof result === "string" ? result : (result?.output ?? JSON.stringify(result)) + return { + result: output, + metadata: typeof result === "object" ? result?.metadata : undefined, + title: typeof result === "object" ? result?.title : undefined, + } + } catch (e: any) { + return { result: "", error: e.message ?? String(e) } + } + } + + const ruleset = Permission.merge(input.agent.permission ?? [], input.permission ?? 
[]) + workflowModel.sessionPreapprovedTools = Object.keys(tools).filter((name) => { + const match = ruleset.findLast((rule) => Wildcard.match(name, rule.permission)) + return !match || match.action !== "ask" + }) + + const bridge = yield* EffectBridge.make() + const approvedToolsForSession = new Set() + workflowModel.approvalHandler = Instance.bind(async (approvalTools) => { + const uniqueNames = [...new Set(approvalTools.map((t: { name: string }) => t.name))] as string[] + // Auto-approve tools that were already approved in this session + // (prevents infinite approval loops for server-side MCP tools) + if (uniqueNames.every((name) => approvedToolsForSession.has(name))) { + return { approved: true } + } + + const id = PermissionID.ascending() + let unsub: (() => void) | undefined + try { + unsub = Bus.subscribe(Permission.Event.Replied, (evt) => { + if (evt.properties.requestID === id) void evt.properties.reply + }) + const toolPatterns = approvalTools.map((t: { name: string; args: string }) => { + try { + const parsed = JSON.parse(t.args) as Record + const title = (parsed?.title ?? parsed?.name ?? "") as string + return title ? `${t.name}: ${title}` : t.name + } catch { + return t.name + } + }) + const uniquePatterns = [...new Set(toolPatterns)] as string[] + await bridge.promise( + perm.ask({ + id, + sessionID: SessionID.make(input.sessionID), + permission: "workflow_tool_approval", + patterns: uniquePatterns, + metadata: { tools: approvalTools }, + always: uniquePatterns, + ruleset: [], + }), + ) + for (const name of uniqueNames) approvedToolsForSession.add(name) + workflowModel.sessionPreapprovedTools = [...(workflowModel.sessionPreapprovedTools ?? []), ...uniqueNames] + return { approved: true } + } catch { + return { approved: false } + } finally { + unsub?.() + } + }) + } + + const tracer = cfg.experimental?.openTelemetry + ? 
Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer)) + : undefined + + return streamText({ + onError(error) { + l.error("stream error", { + error, + }) + }, + async experimental_repairToolCall(failed) { + const lower = failed.toolCall.toolName.toLowerCase() + if (lower !== failed.toolCall.toolName && tools[lower]) { + l.info("repairing tool call", { + tool: failed.toolCall.toolName, + repaired: lower, + }) + return { + ...failed.toolCall, + toolName: lower, + } + } + return { + ...failed.toolCall, + input: JSON.stringify({ + tool: failed.toolCall.toolName, + error: failed.error.message, + }), + toolName: "invalid", + } + }, + temperature: params.temperature, + topP: params.topP, + topK: params.topK, + providerOptions: ProviderTransform.providerOptions(input.model, params.options), + activeTools: Object.keys(tools).filter((x) => x !== "invalid"), + tools, + toolChoice: input.toolChoice, + maxOutputTokens: params.maxOutputTokens, + abortSignal: input.abort, + headers: { + ...(input.model.providerID.startsWith("opencode") + ? { + "x-opencode-project": Instance.project.id, + "x-opencode-session": input.sessionID, + "x-opencode-request": input.user.id, + "x-opencode-client": Flag.OPENCODE_CLIENT, + } + : { + "x-session-affinity": input.sessionID, + ...(input.parentSessionID ? { "x-parent-session-id": input.parentSessionID } : {}), + "User-Agent": `opencode/${InstallationVersion}`, + }), + ...input.model.headers, + ...headers, + }, + maxRetries: input.retries ?? 0, + messages, + model: wrapLanguageModel({ + model: language, + middleware: [ + { + specificationVersion: "v3" as const, + async transformParams(args) { + if (args.type === "stream") { + // @ts-expect-error + args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options) + } + return args.params + }, + }, + ], + }), + experimental_telemetry: { + isEnabled: cfg.experimental?.openTelemetry, + functionId: "session.llm", + tracer, + metadata: { + userId: cfg.username ?? 
"unknown", + sessionId: input.sessionID, + }, + }, + }) + }) + + const stream: Interface["stream"] = (input) => + Stream.scoped( + Stream.unwrap( + Effect.gen(function* () { + const ctrl = yield* Effect.acquireRelease( + Effect.sync(() => new AbortController()), + (ctrl) => Effect.sync(() => ctrl.abort()), + ) + + const result = yield* run({ ...input, abort: ctrl.signal }) + + return Stream.fromAsyncIterable(result.fullStream, (e) => (e instanceof Error ? e : new Error(String(e)))) + }), + ), + ) + + return Service.of({ stream }) + }), +) + +export const layer = live.pipe(Layer.provide(Permission.defaultLayer)) + +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(Auth.defaultLayer), + Layer.provide(Config.defaultLayer), + Layer.provide(Provider.defaultLayer), + Layer.provide(Plugin.defaultLayer), + ), +) + +function resolveTools(input: Pick) { + const disabled = Permission.disabled( + Object.keys(input.tools), + Permission.merge(input.agent.permission, input.permission ?? 
[]), + ) + return Record.filter(input.tools, (_, k) => input.user.tools?.[k] !== false && !disabled.has(k)) } + +// Check if messages contain any tool-call content +// Used to determine if a dummy tool should be added for LiteLLM proxy compatibility +export function hasToolCalls(messages: ModelMessage[]): boolean { + for (const msg of messages) { + if (!Array.isArray(msg.content)) continue + for (const part of msg.content) { + if (part.type === "tool-call" || part.type === "tool-result") return true + } + } + return false +} + +export * as LLM from "./llm" diff --git a/packages/opencode/src/session/message-v2.ts b/packages/opencode/src/session/message-v2.ts index f5ba74826d..5e7e008401 100644 --- a/packages/opencode/src/session/message-v2.ts +++ b/packages/opencode/src/session/message-v2.ts @@ -24,726 +24,738 @@ interface FetchDecompressionError extends Error { path: string } -export namespace MessageV2 { - export const SYNTHETIC_ATTACHMENT_PROMPT = "Attached image(s) from tool result:" +export const SYNTHETIC_ATTACHMENT_PROMPT = "Attached image(s) from tool result:" - export function isMedia(mime: string) { - return mime.startsWith("image/") || mime === "application/pdf" - } +export function isMedia(mime: string) { + return mime.startsWith("image/") || mime === "application/pdf" +} - export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) - export const AbortedError = NamedError.create("MessageAbortedError", z.object({ message: z.string() })) - export const StructuredOutputError = NamedError.create( - "StructuredOutputError", - z.object({ - message: z.string(), - retries: z.number(), - }), - ) - export const AuthError = NamedError.create( - "ProviderAuthError", - z.object({ - providerID: z.string(), - message: z.string(), - }), - ) - export const APIError = NamedError.create( - "APIError", - z.object({ - message: z.string(), - statusCode: z.number().optional(), - isRetryable: z.boolean(), - responseHeaders: z.record(z.string(), 
z.string()).optional(), - responseBody: z.string().optional(), - metadata: z.record(z.string(), z.string()).optional(), - }), - ) - export type APIError = z.infer - export const ContextOverflowError = NamedError.create( - "ContextOverflowError", - z.object({ message: z.string(), responseBody: z.string().optional() }), - ) +export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) +export const AbortedError = NamedError.create("MessageAbortedError", z.object({ message: z.string() })) +export const StructuredOutputError = NamedError.create( + "StructuredOutputError", + z.object({ + message: z.string(), + retries: z.number(), + }), +) +export const AuthError = NamedError.create( + "ProviderAuthError", + z.object({ + providerID: z.string(), + message: z.string(), + }), +) +export const APIError = NamedError.create( + "APIError", + z.object({ + message: z.string(), + statusCode: z.number().optional(), + isRetryable: z.boolean(), + responseHeaders: z.record(z.string(), z.string()).optional(), + responseBody: z.string().optional(), + metadata: z.record(z.string(), z.string()).optional(), + }), +) +export type APIError = z.infer +export const ContextOverflowError = NamedError.create( + "ContextOverflowError", + z.object({ message: z.string(), responseBody: z.string().optional() }), +) - export const OutputFormatText = z - .object({ - type: z.literal("text"), - }) - .meta({ - ref: "OutputFormatText", - }) - - export const OutputFormatJsonSchema = z - .object({ - type: z.literal("json_schema"), - schema: z.record(z.string(), z.any()).meta({ ref: "JSONSchema" }), - retryCount: z.number().int().min(0).default(2), - }) - .meta({ - ref: "OutputFormatJsonSchema", - }) - - export const Format = z.discriminatedUnion("type", [OutputFormatText, OutputFormatJsonSchema]).meta({ - ref: "OutputFormat", - }) - export type OutputFormat = z.infer - - const PartBase = z.object({ - id: PartID.zod, - sessionID: SessionID.zod, - messageID: MessageID.zod, - }) 
- - export const SnapshotPart = PartBase.extend({ - type: z.literal("snapshot"), - snapshot: z.string(), - }).meta({ - ref: "SnapshotPart", - }) - export type SnapshotPart = z.infer - - export const PatchPart = PartBase.extend({ - type: z.literal("patch"), - hash: z.string(), - files: z.string().array(), - }).meta({ - ref: "PatchPart", - }) - export type PatchPart = z.infer - - export const TextPart = PartBase.extend({ +export const OutputFormatText = z + .object({ type: z.literal("text"), - text: z.string(), - synthetic: z.boolean().optional(), - ignored: z.boolean().optional(), - time: z - .object({ - start: z.number(), - end: z.number().optional(), - }) - .optional(), - metadata: z.record(z.string(), z.any()).optional(), - }).meta({ - ref: "TextPart", }) - export type TextPart = z.infer + .meta({ + ref: "OutputFormatText", + }) - export const ReasoningPart = PartBase.extend({ - type: z.literal("reasoning"), - text: z.string(), +export const OutputFormatJsonSchema = z + .object({ + type: z.literal("json_schema"), + schema: z.record(z.string(), z.any()).meta({ ref: "JSONSchema" }), + retryCount: z.number().int().min(0).default(2), + }) + .meta({ + ref: "OutputFormatJsonSchema", + }) + +export const Format = z.discriminatedUnion("type", [OutputFormatText, OutputFormatJsonSchema]).meta({ + ref: "OutputFormat", +}) +export type OutputFormat = z.infer + +const PartBase = z.object({ + id: PartID.zod, + sessionID: SessionID.zod, + messageID: MessageID.zod, +}) + +export const SnapshotPart = PartBase.extend({ + type: z.literal("snapshot"), + snapshot: z.string(), +}).meta({ + ref: "SnapshotPart", +}) +export type SnapshotPart = z.infer + +export const PatchPart = PartBase.extend({ + type: z.literal("patch"), + hash: z.string(), + files: z.string().array(), +}).meta({ + ref: "PatchPart", +}) +export type PatchPart = z.infer + +export const TextPart = PartBase.extend({ + type: z.literal("text"), + text: z.string(), + synthetic: z.boolean().optional(), + ignored: 
z.boolean().optional(), + time: z + .object({ + start: z.number(), + end: z.number().optional(), + }) + .optional(), + metadata: z.record(z.string(), z.any()).optional(), +}).meta({ + ref: "TextPart", +}) +export type TextPart = z.infer + +export const ReasoningPart = PartBase.extend({ + type: z.literal("reasoning"), + text: z.string(), + metadata: z.record(z.string(), z.any()).optional(), + time: z.object({ + start: z.number(), + end: z.number().optional(), + }), +}).meta({ + ref: "ReasoningPart", +}) +export type ReasoningPart = z.infer + +const FilePartSourceBase = z.object({ + text: z + .object({ + value: z.string(), + start: z.number().int(), + end: z.number().int(), + }) + .meta({ + ref: "FilePartSourceText", + }), +}) + +export const FileSource = FilePartSourceBase.extend({ + type: z.literal("file"), + path: z.string(), +}).meta({ + ref: "FileSource", +}) + +export const SymbolSource = FilePartSourceBase.extend({ + type: z.literal("symbol"), + path: z.string(), + range: LSP.Range, + name: z.string(), + kind: z.number().int(), +}).meta({ + ref: "SymbolSource", +}) + +export const ResourceSource = FilePartSourceBase.extend({ + type: z.literal("resource"), + clientName: z.string(), + uri: z.string(), +}).meta({ + ref: "ResourceSource", +}) + +export const FilePartSource = z.discriminatedUnion("type", [FileSource, SymbolSource, ResourceSource]).meta({ + ref: "FilePartSource", +}) + +export const FilePart = PartBase.extend({ + type: z.literal("file"), + mime: z.string(), + filename: z.string().optional(), + url: z.string(), + source: FilePartSource.optional(), +}).meta({ + ref: "FilePart", +}) +export type FilePart = z.infer + +export const AgentPart = PartBase.extend({ + type: z.literal("agent"), + name: z.string(), + source: z + .object({ + value: z.string(), + start: z.number().int(), + end: z.number().int(), + }) + .optional(), +}).meta({ + ref: "AgentPart", +}) +export type AgentPart = z.infer + +export const CompactionPart = PartBase.extend({ + type: 
z.literal("compaction"), + auto: z.boolean(), + overflow: z.boolean().optional(), +}).meta({ + ref: "CompactionPart", +}) +export type CompactionPart = z.infer + +export const SubtaskPart = PartBase.extend({ + type: z.literal("subtask"), + prompt: z.string(), + description: z.string(), + agent: z.string(), + model: z + .object({ + providerID: ProviderID.zod, + modelID: ModelID.zod, + }) + .optional(), + command: z.string().optional(), +}).meta({ + ref: "SubtaskPart", +}) +export type SubtaskPart = z.infer + +export const RetryPart = PartBase.extend({ + type: z.literal("retry"), + attempt: z.number(), + error: APIError.Schema, + time: z.object({ + created: z.number(), + }), +}).meta({ + ref: "RetryPart", +}) +export type RetryPart = z.infer + +export const StepStartPart = PartBase.extend({ + type: z.literal("step-start"), + snapshot: z.string().optional(), +}).meta({ + ref: "StepStartPart", +}) +export type StepStartPart = z.infer + +export const StepFinishPart = PartBase.extend({ + type: z.literal("step-finish"), + reason: z.string(), + snapshot: z.string().optional(), + cost: z.number(), + tokens: z.object({ + total: z.number().optional(), + input: z.number(), + output: z.number(), + reasoning: z.number(), + cache: z.object({ + read: z.number(), + write: z.number(), + }), + }), +}).meta({ + ref: "StepFinishPart", +}) +export type StepFinishPart = z.infer + +export const ToolStatePending = z + .object({ + status: z.literal("pending"), + input: z.record(z.string(), z.any()), + raw: z.string(), + }) + .meta({ + ref: "ToolStatePending", + }) + +export type ToolStatePending = z.infer + +export const ToolStateRunning = z + .object({ + status: z.literal("running"), + input: z.record(z.string(), z.any()), + title: z.string().optional(), metadata: z.record(z.string(), z.any()).optional(), time: z.object({ start: z.number(), - end: z.number().optional(), }), - }).meta({ - ref: "ReasoningPart", }) - export type ReasoningPart = z.infer - - const FilePartSourceBase = 
z.object({ - text: z - .object({ - value: z.string(), - start: z.number().int(), - end: z.number().int(), - }) - .meta({ - ref: "FilePartSourceText", - }), + .meta({ + ref: "ToolStateRunning", }) +export type ToolStateRunning = z.infer - export const FileSource = FilePartSourceBase.extend({ - type: z.literal("file"), - path: z.string(), - }).meta({ - ref: "FileSource", - }) - - export const SymbolSource = FilePartSourceBase.extend({ - type: z.literal("symbol"), - path: z.string(), - range: LSP.Range, - name: z.string(), - kind: z.number().int(), - }).meta({ - ref: "SymbolSource", - }) - - export const ResourceSource = FilePartSourceBase.extend({ - type: z.literal("resource"), - clientName: z.string(), - uri: z.string(), - }).meta({ - ref: "ResourceSource", - }) - - export const FilePartSource = z.discriminatedUnion("type", [FileSource, SymbolSource, ResourceSource]).meta({ - ref: "FilePartSource", - }) - - export const FilePart = PartBase.extend({ - type: z.literal("file"), - mime: z.string(), - filename: z.string().optional(), - url: z.string(), - source: FilePartSource.optional(), - }).meta({ - ref: "FilePart", - }) - export type FilePart = z.infer - - export const AgentPart = PartBase.extend({ - type: z.literal("agent"), - name: z.string(), - source: z - .object({ - value: z.string(), - start: z.number().int(), - end: z.number().int(), - }) - .optional(), - }).meta({ - ref: "AgentPart", - }) - export type AgentPart = z.infer - - export const CompactionPart = PartBase.extend({ - type: z.literal("compaction"), - auto: z.boolean(), - overflow: z.boolean().optional(), - }).meta({ - ref: "CompactionPart", - }) - export type CompactionPart = z.infer - - export const SubtaskPart = PartBase.extend({ - type: z.literal("subtask"), - prompt: z.string(), - description: z.string(), - agent: z.string(), - model: z - .object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, - }) - .optional(), - command: z.string().optional(), - }).meta({ - ref: "SubtaskPart", - }) - 
export type SubtaskPart = z.infer - - export const RetryPart = PartBase.extend({ - type: z.literal("retry"), - attempt: z.number(), - error: APIError.Schema, +export const ToolStateCompleted = z + .object({ + status: z.literal("completed"), + input: z.record(z.string(), z.any()), + output: z.string(), + title: z.string(), + metadata: z.record(z.string(), z.any()), time: z.object({ - created: z.number(), + start: z.number(), + end: z.number(), + compacted: z.number().optional(), }), - }).meta({ - ref: "RetryPart", + attachments: FilePart.array().optional(), }) - export type RetryPart = z.infer - - export const StepStartPart = PartBase.extend({ - type: z.literal("step-start"), - snapshot: z.string().optional(), - }).meta({ - ref: "StepStartPart", + .meta({ + ref: "ToolStateCompleted", }) - export type StepStartPart = z.infer +export type ToolStateCompleted = z.infer - export const StepFinishPart = PartBase.extend({ - type: z.literal("step-finish"), - reason: z.string(), - snapshot: z.string().optional(), - cost: z.number(), - tokens: z.object({ - total: z.number().optional(), - input: z.number(), - output: z.number(), - reasoning: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), - }), - }).meta({ - ref: "StepFinishPart", - }) - export type StepFinishPart = z.infer - - export const ToolStatePending = z - .object({ - status: z.literal("pending"), - input: z.record(z.string(), z.any()), - raw: z.string(), - }) - .meta({ - ref: "ToolStatePending", - }) - - export type ToolStatePending = z.infer - - export const ToolStateRunning = z - .object({ - status: z.literal("running"), - input: z.record(z.string(), z.any()), - title: z.string().optional(), - metadata: z.record(z.string(), z.any()).optional(), - time: z.object({ - start: z.number(), - }), - }) - .meta({ - ref: "ToolStateRunning", - }) - export type ToolStateRunning = z.infer - - export const ToolStateCompleted = z - .object({ - status: z.literal("completed"), - input: 
z.record(z.string(), z.any()), - output: z.string(), - title: z.string(), - metadata: z.record(z.string(), z.any()), - time: z.object({ - start: z.number(), - end: z.number(), - compacted: z.number().optional(), - }), - attachments: FilePart.array().optional(), - }) - .meta({ - ref: "ToolStateCompleted", - }) - export type ToolStateCompleted = z.infer - - export const ToolStateError = z - .object({ - status: z.literal("error"), - input: z.record(z.string(), z.any()), - error: z.string(), - metadata: z.record(z.string(), z.any()).optional(), - time: z.object({ - start: z.number(), - end: z.number(), - }), - }) - .meta({ - ref: "ToolStateError", - }) - export type ToolStateError = z.infer - - export const ToolState = z - .discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]) - .meta({ - ref: "ToolState", - }) - - export const ToolPart = PartBase.extend({ - type: z.literal("tool"), - callID: z.string(), - tool: z.string(), - state: ToolState, +export const ToolStateError = z + .object({ + status: z.literal("error"), + input: z.record(z.string(), z.any()), + error: z.string(), metadata: z.record(z.string(), z.any()).optional(), - }).meta({ - ref: "ToolPart", - }) - export type ToolPart = z.infer - - const Base = z.object({ - id: MessageID.zod, - sessionID: SessionID.zod, - }) - - export const User = Base.extend({ - role: z.literal("user"), time: z.object({ - created: z.number(), + start: z.number(), + end: z.number(), }), - format: Format.optional(), - summary: z - .object({ - title: z.string().optional(), - body: z.string().optional(), - diffs: Snapshot.FileDiff.array(), - }) - .optional(), - agent: z.string(), - model: z.object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, - variant: z.string().optional(), - }), - system: z.string().optional(), - tools: z.record(z.string(), z.boolean()).optional(), - }).meta({ - ref: "UserMessage", }) - export type User = z.infer + .meta({ + ref: "ToolStateError", + }) 
+export type ToolStateError = z.infer - export const Part = z - .discriminatedUnion("type", [ - TextPart, - SubtaskPart, - ReasoningPart, - FilePart, - ToolPart, - StepStartPart, - StepFinishPart, - SnapshotPart, - PatchPart, - AgentPart, - RetryPart, - CompactionPart, - ]) - .meta({ - ref: "Part", +export const ToolState = z + .discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]) + .meta({ + ref: "ToolState", + }) + +export const ToolPart = PartBase.extend({ + type: z.literal("tool"), + callID: z.string(), + tool: z.string(), + state: ToolState, + metadata: z.record(z.string(), z.any()).optional(), +}).meta({ + ref: "ToolPart", +}) +export type ToolPart = z.infer + +const Base = z.object({ + id: MessageID.zod, + sessionID: SessionID.zod, +}) + +export const User = Base.extend({ + role: z.literal("user"), + time: z.object({ + created: z.number(), + }), + format: Format.optional(), + summary: z + .object({ + title: z.string().optional(), + body: z.string().optional(), + diffs: Snapshot.FileDiff.array(), }) - export type Part = z.infer - - export const Assistant = Base.extend({ - role: z.literal("assistant"), - time: z.object({ - created: z.number(), - completed: z.number().optional(), - }), - error: z - .discriminatedUnion("name", [ - AuthError.Schema, - NamedError.Unknown.Schema, - OutputLengthError.Schema, - AbortedError.Schema, - StructuredOutputError.Schema, - ContextOverflowError.Schema, - APIError.Schema, - ]) - .optional(), - parentID: MessageID.zod, - modelID: ModelID.zod, + .optional(), + agent: z.string(), + model: z.object({ providerID: ProviderID.zod, - /** - * @deprecated - */ - mode: z.string(), - agent: z.string(), - path: z.object({ - cwd: z.string(), - root: z.string(), - }), - summary: z.boolean().optional(), - cost: z.number(), - tokens: z.object({ - total: z.number().optional(), - input: z.number(), - output: z.number(), - reasoning: z.number(), - cache: z.object({ - read: z.number(), - write: 
z.number(), - }), - }), - structured: z.any().optional(), + modelID: ModelID.zod, variant: z.string().optional(), - finish: z.string().optional(), - }).meta({ - ref: "AssistantMessage", + }), + system: z.string().optional(), + tools: z.record(z.string(), z.boolean()).optional(), +}).meta({ + ref: "UserMessage", +}) +export type User = z.infer + +export const Part = z + .discriminatedUnion("type", [ + TextPart, + SubtaskPart, + ReasoningPart, + FilePart, + ToolPart, + StepStartPart, + StepFinishPart, + SnapshotPart, + PatchPart, + AgentPart, + RetryPart, + CompactionPart, + ]) + .meta({ + ref: "Part", }) - export type Assistant = z.infer +export type Part = z.infer - export const Info = z.discriminatedUnion("role", [User, Assistant]).meta({ - ref: "Message", - }) - export type Info = z.infer - - export const Event = { - Updated: SyncEvent.define({ - type: "message.updated", - version: 1, - aggregate: "sessionID", - schema: z.object({ - sessionID: SessionID.zod, - info: Info, - }), +export const Assistant = Base.extend({ + role: z.literal("assistant"), + time: z.object({ + created: z.number(), + completed: z.number().optional(), + }), + error: z + .discriminatedUnion("name", [ + AuthError.Schema, + NamedError.Unknown.Schema, + OutputLengthError.Schema, + AbortedError.Schema, + StructuredOutputError.Schema, + ContextOverflowError.Schema, + APIError.Schema, + ]) + .optional(), + parentID: MessageID.zod, + modelID: ModelID.zod, + providerID: ProviderID.zod, + /** + * @deprecated + */ + mode: z.string(), + agent: z.string(), + path: z.object({ + cwd: z.string(), + root: z.string(), + }), + summary: z.boolean().optional(), + cost: z.number(), + tokens: z.object({ + total: z.number().optional(), + input: z.number(), + output: z.number(), + reasoning: z.number(), + cache: z.object({ + read: z.number(), + write: z.number(), }), - Removed: SyncEvent.define({ - type: "message.removed", - version: 1, - aggregate: "sessionID", - schema: z.object({ - sessionID: SessionID.zod, - 
messageID: MessageID.zod, - }), + }), + structured: z.any().optional(), + variant: z.string().optional(), + finish: z.string().optional(), +}).meta({ + ref: "AssistantMessage", +}) +export type Assistant = z.infer + +export const Info = z.discriminatedUnion("role", [User, Assistant]).meta({ + ref: "Message", +}) +export type Info = z.infer + +export const Event = { + Updated: SyncEvent.define({ + type: "message.updated", + version: 1, + aggregate: "sessionID", + schema: z.object({ + sessionID: SessionID.zod, + info: Info, }), - PartUpdated: SyncEvent.define({ - type: "message.part.updated", - version: 1, - aggregate: "sessionID", - schema: z.object({ - sessionID: SessionID.zod, - part: Part, - time: z.number(), - }), + }), + Removed: SyncEvent.define({ + type: "message.removed", + version: 1, + aggregate: "sessionID", + schema: z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod, }), - PartDelta: BusEvent.define( - "message.part.delta", - z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod, - partID: PartID.zod, - field: z.string(), - delta: z.string(), - }), - ), - PartRemoved: SyncEvent.define({ - type: "message.part.removed", - version: 1, - aggregate: "sessionID", - schema: z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod, - partID: PartID.zod, - }), + }), + PartUpdated: SyncEvent.define({ + type: "message.part.updated", + version: 1, + aggregate: "sessionID", + schema: z.object({ + sessionID: SessionID.zod, + part: Part, + time: z.number(), }), - } + }), + PartDelta: BusEvent.define( + "message.part.delta", + z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, + field: z.string(), + delta: z.string(), + }), + ), + PartRemoved: SyncEvent.define({ + type: "message.part.removed", + version: 1, + aggregate: "sessionID", + schema: z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, + }), + }), +} - export const WithParts = z.object({ - info: Info, 
- parts: z.array(Part), - }) - export type WithParts = z.infer +export const WithParts = z.object({ + info: Info, + parts: z.array(Part), +}) +export type WithParts = z.infer - const Cursor = z.object({ - id: MessageID.zod, - time: z.number(), - }) - type Cursor = z.infer +const Cursor = z.object({ + id: MessageID.zod, + time: z.number(), +}) +type Cursor = z.infer - export const cursor = { - encode(input: Cursor) { - return Buffer.from(JSON.stringify(input)).toString("base64url") - }, - decode(input: string) { - return Cursor.parse(JSON.parse(Buffer.from(input, "base64url").toString("utf8"))) - }, - } +export const cursor = { + encode(input: Cursor) { + return Buffer.from(JSON.stringify(input)).toString("base64url") + }, + decode(input: string) { + return Cursor.parse(JSON.parse(Buffer.from(input, "base64url").toString("utf8"))) + }, +} - const info = (row: typeof MessageTable.$inferSelect) => - ({ - ...row.data, - id: row.id, - sessionID: row.session_id, - }) as MessageV2.Info +const info = (row: typeof MessageTable.$inferSelect) => + ({ + ...row.data, + id: row.id, + sessionID: row.session_id, + }) as Info - const part = (row: typeof PartTable.$inferSelect) => - ({ - ...row.data, - id: row.id, - sessionID: row.session_id, - messageID: row.message_id, - }) as MessageV2.Part +const part = (row: typeof PartTable.$inferSelect) => + ({ + ...row.data, + id: row.id, + sessionID: row.session_id, + messageID: row.message_id, + }) as Part - const older = (row: Cursor) => - or( - lt(MessageTable.time_created, row.time), - and(eq(MessageTable.time_created, row.time), lt(MessageTable.id, row.id)), +const older = (row: Cursor) => + or( + lt(MessageTable.time_created, row.time), + and(eq(MessageTable.time_created, row.time), lt(MessageTable.id, row.id)), + ) + +function hydrate(rows: (typeof MessageTable.$inferSelect)[]) { + const ids = rows.map((row) => row.id) + const partByMessage = new Map() + if (ids.length > 0) { + const partRows = Database.use((db) => + db + .select() + 
.from(PartTable) + .where(inArray(PartTable.message_id, ids)) + .orderBy(PartTable.message_id, PartTable.id) + .all(), ) + for (const row of partRows) { + const next = part(row) + const list = partByMessage.get(row.message_id) + if (list) list.push(next) + else partByMessage.set(row.message_id, [next]) + } + } - function hydrate(rows: (typeof MessageTable.$inferSelect)[]) { - const ids = rows.map((row) => row.id) - const partByMessage = new Map() - if (ids.length > 0) { - const partRows = Database.use((db) => - db - .select() - .from(PartTable) - .where(inArray(PartTable.message_id, ids)) - .orderBy(PartTable.message_id, PartTable.id) - .all(), - ) - for (const row of partRows) { - const next = part(row) - const list = partByMessage.get(row.message_id) - if (list) list.push(next) - else partByMessage.set(row.message_id, [next]) + return rows.map((row) => ({ + info: info(row), + parts: partByMessage.get(row.id) ?? [], + })) +} + +function providerMeta(metadata: Record | undefined) { + if (!metadata) return undefined + const { providerExecuted: _, ...rest } = metadata + return Object.keys(rest).length > 0 ? rest : undefined +} + +export const toModelMessagesEffect = Effect.fnUntraced(function* ( + input: WithParts[], + model: Provider.Model, + options?: { stripMedia?: boolean }, +) { + const result: UIMessage[] = [] + const toolNames = new Set() + // Track media from tool results that need to be injected as user messages + // for providers that don't support media in tool results. + // + // OpenAI-compatible APIs only support string content in tool results, so we need + // to extract media and inject as user messages. Other SDKs (anthropic, google, + // bedrock) handle type: "content" with media parts natively. + // + // Only apply this workaround if the model actually supports image input - + // otherwise there's no point extracting images. 
+ const supportsMediaInToolResults = (() => { + if (model.api.npm === "@ai-sdk/anthropic") return true + if (model.api.npm === "@ai-sdk/openai") return true + if (model.api.npm === "@ai-sdk/amazon-bedrock") return true + if (model.api.npm === "@ai-sdk/google-vertex/anthropic") return true + if (model.api.npm === "@ai-sdk/google") { + const id = model.api.id.toLowerCase() + return id.includes("gemini-3") && !id.includes("gemini-2") + } + return false + })() + + const toModelOutput = (options: { toolCallId: string; input: unknown; output: unknown }) => { + const output = options.output + if (typeof output === "string") { + return { type: "text", value: output } + } + + if (typeof output === "object") { + const outputObject = output as { + text: string + attachments?: Array<{ mime: string; url: string }> + } + const attachments = (outputObject.attachments ?? []).filter((attachment) => { + return attachment.url.startsWith("data:") && attachment.url.includes(",") + }) + + return { + type: "content", + value: [ + { type: "text", text: outputObject.text }, + ...attachments.map((attachment) => ({ + type: "media", + mediaType: attachment.mime, + data: iife(() => { + const commaIndex = attachment.url.indexOf(",") + return commaIndex === -1 ? attachment.url : attachment.url.slice(commaIndex + 1) + }), + })), + ], } } - return rows.map((row) => ({ - info: info(row), - parts: partByMessage.get(row.id) ?? [], - })) + return { type: "json", value: output as never } } - function providerMeta(metadata: Record | undefined) { - if (!metadata) return undefined - const { providerExecuted: _, ...rest } = metadata - return Object.keys(rest).length > 0 ? 
rest : undefined - } + for (const msg of input) { + if (msg.parts.length === 0) continue - export const toModelMessagesEffect = Effect.fnUntraced(function* ( - input: WithParts[], - model: Provider.Model, - options?: { stripMedia?: boolean }, - ) { - const result: UIMessage[] = [] - const toolNames = new Set() - // Track media from tool results that need to be injected as user messages - // for providers that don't support media in tool results. - // - // OpenAI-compatible APIs only support string content in tool results, so we need - // to extract media and inject as user messages. Other SDKs (anthropic, google, - // bedrock) handle type: "content" with media parts natively. - // - // Only apply this workaround if the model actually supports image input - - // otherwise there's no point extracting images. - const supportsMediaInToolResults = (() => { - if (model.api.npm === "@ai-sdk/anthropic") return true - if (model.api.npm === "@ai-sdk/openai") return true - if (model.api.npm === "@ai-sdk/amazon-bedrock") return true - if (model.api.npm === "@ai-sdk/google-vertex/anthropic") return true - if (model.api.npm === "@ai-sdk/google") { - const id = model.api.id.toLowerCase() - return id.includes("gemini-3") && !id.includes("gemini-2") + if (msg.info.role === "user") { + const userMessage: UIMessage = { + id: msg.info.id, + role: "user", + parts: [], } - return false - })() - - const toModelOutput = (options: { toolCallId: string; input: unknown; output: unknown }) => { - const output = options.output - if (typeof output === "string") { - return { type: "text", value: output } - } - - if (typeof output === "object") { - const outputObject = output as { - text: string - attachments?: Array<{ mime: string; url: string }> - } - const attachments = (outputObject.attachments ?? 
[]).filter((attachment) => { - return attachment.url.startsWith("data:") && attachment.url.includes(",") - }) - - return { - type: "content", - value: [ - { type: "text", text: outputObject.text }, - ...attachments.map((attachment) => ({ - type: "media", - mediaType: attachment.mime, - data: iife(() => { - const commaIndex = attachment.url.indexOf(",") - return commaIndex === -1 ? attachment.url : attachment.url.slice(commaIndex + 1) - }), - })), - ], - } - } - - return { type: "json", value: output as never } - } - - for (const msg of input) { - if (msg.parts.length === 0) continue - - if (msg.info.role === "user") { - const userMessage: UIMessage = { - id: msg.info.id, - role: "user", - parts: [], - } - result.push(userMessage) - for (const part of msg.parts) { - if (part.type === "text" && !part.ignored) + result.push(userMessage) + for (const part of msg.parts) { + if (part.type === "text" && !part.ignored) + userMessage.parts.push({ + type: "text", + text: part.text, + }) + // text/plain and directory files are converted into text parts, ignore them + if (part.type === "file" && part.mime !== "text/plain" && part.mime !== "application/x-directory") { + if (options?.stripMedia && isMedia(part.mime)) { userMessage.parts.push({ type: "text", - text: part.text, + text: `[Attached ${part.mime}: ${part.filename ?? "file"}]`, }) - // text/plain and directory files are converted into text parts, ignore them - if (part.type === "file" && part.mime !== "text/plain" && part.mime !== "application/x-directory") { - if (options?.stripMedia && isMedia(part.mime)) { - userMessage.parts.push({ - type: "text", - text: `[Attached ${part.mime}: ${part.filename ?? 
"file"}]`, - }) - } else { - userMessage.parts.push({ - type: "file", - url: part.url, - mediaType: part.mime, - filename: part.filename, - }) + } else { + userMessage.parts.push({ + type: "file", + url: part.url, + mediaType: part.mime, + filename: part.filename, + }) + } + } + + if (part.type === "compaction") { + userMessage.parts.push({ + type: "text", + text: "What did we do so far?", + }) + } + if (part.type === "subtask") { + userMessage.parts.push({ + type: "text", + text: "The following tool was executed by the user", + }) + } + } + } + + if (msg.info.role === "assistant") { + const differentModel = `${model.providerID}/${model.id}` !== `${msg.info.providerID}/${msg.info.modelID}` + const media: Array<{ mime: string; url: string }> = [] + + if ( + msg.info.error && + !( + AbortedError.isInstance(msg.info.error) && + msg.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning") + ) + ) { + continue + } + const assistantMessage: UIMessage = { + id: msg.info.id, + role: "assistant", + parts: [], + } + for (const part of msg.parts) { + if (part.type === "text") + assistantMessage.parts.push({ + type: "text", + text: part.text, + ...(differentModel ? {} : { providerMetadata: part.metadata }), + }) + if (part.type === "step-start") + assistantMessage.parts.push({ + type: "step-start", + }) + if (part.type === "tool") { + toolNames.add(part.tool) + if (part.state.status === "completed") { + const outputText = part.state.time.compacted ? "[Old tool result content cleared]" : part.state.output + const attachments = part.state.time.compacted || options?.stripMedia ? [] : (part.state.attachments ?? 
[]) + + // For providers that don't support media in tool results, extract media files + // (images, PDFs) to be sent as a separate user message + const mediaAttachments = attachments.filter((a) => isMedia(a.mime)) + const nonMediaAttachments = attachments.filter((a) => !isMedia(a.mime)) + if (!supportsMediaInToolResults && mediaAttachments.length > 0) { + media.push(...mediaAttachments) } - } + const finalAttachments = supportsMediaInToolResults ? attachments : nonMediaAttachments - if (part.type === "compaction") { - userMessage.parts.push({ - type: "text", - text: "What did we do so far?", - }) - } - if (part.type === "subtask") { - userMessage.parts.push({ - type: "text", - text: "The following tool was executed by the user", - }) - } - } - } + const output = + finalAttachments.length > 0 + ? { + text: outputText, + attachments: finalAttachments, + } + : outputText - if (msg.info.role === "assistant") { - const differentModel = `${model.providerID}/${model.id}` !== `${msg.info.providerID}/${msg.info.modelID}` - const media: Array<{ mime: string; url: string }> = [] - - if ( - msg.info.error && - !( - MessageV2.AbortedError.isInstance(msg.info.error) && - msg.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning") - ) - ) { - continue - } - const assistantMessage: UIMessage = { - id: msg.info.id, - role: "assistant", - parts: [], - } - for (const part of msg.parts) { - if (part.type === "text") assistantMessage.parts.push({ - type: "text", - text: part.text, - ...(differentModel ? {} : { providerMetadata: part.metadata }), + type: ("tool-" + part.tool) as `tool-${string}`, + state: "output-available", + toolCallId: part.callID, + input: part.state.input, + output, + ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), + ...(differentModel ? 
{} : { callProviderMetadata: providerMeta(part.metadata) }), }) - if (part.type === "step-start") - assistantMessage.parts.push({ - type: "step-start", - }) - if (part.type === "tool") { - toolNames.add(part.tool) - if (part.state.status === "completed") { - const outputText = part.state.time.compacted ? "[Old tool result content cleared]" : part.state.output - const attachments = part.state.time.compacted || options?.stripMedia ? [] : (part.state.attachments ?? []) - - // For providers that don't support media in tool results, extract media files - // (images, PDFs) to be sent as a separate user message - const mediaAttachments = attachments.filter((a) => isMedia(a.mime)) - const nonMediaAttachments = attachments.filter((a) => !isMedia(a.mime)) - if (!supportsMediaInToolResults && mediaAttachments.length > 0) { - media.push(...mediaAttachments) - } - const finalAttachments = supportsMediaInToolResults ? attachments : nonMediaAttachments - - const output = - finalAttachments.length > 0 - ? { - text: outputText, - attachments: finalAttachments, - } - : outputText - + } + if (part.state.status === "error") { + const output = part.state.metadata?.interrupted === true ? part.state.metadata.output : undefined + if (typeof output === "string") { assistantMessage.parts.push({ type: ("tool-" + part.tool) as `tool-${string}`, state: "output-available", @@ -753,305 +765,293 @@ export namespace MessageV2 { ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), }) - } - if (part.state.status === "error") { - const output = part.state.metadata?.interrupted === true ? part.state.metadata.output : undefined - if (typeof output === "string") { - assistantMessage.parts.push({ - type: ("tool-" + part.tool) as `tool-${string}`, - state: "output-available", - toolCallId: part.callID, - input: part.state.input, - output, - ...(part.metadata?.providerExecuted ? 
{ providerExecuted: true } : {}), - ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), - }) - } else { - assistantMessage.parts.push({ - type: ("tool-" + part.tool) as `tool-${string}`, - state: "output-error", - toolCallId: part.callID, - input: part.state.input, - errorText: part.state.error, - ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), - ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), - }) - } - } - // Handle pending/running tool calls to prevent dangling tool_use blocks - // Anthropic/Claude APIs require every tool_use to have a corresponding tool_result - if (part.state.status === "pending" || part.state.status === "running") + } else { assistantMessage.parts.push({ type: ("tool-" + part.tool) as `tool-${string}`, state: "output-error", toolCallId: part.callID, input: part.state.input, - errorText: "[Tool execution was interrupted]", + errorText: part.state.error, ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), }) - } - if (part.type === "reasoning") { - assistantMessage.parts.push({ - type: "reasoning", - text: part.text, - ...(differentModel ? 
{} : { providerMetadata: part.metadata }), - }) - } - } - if (assistantMessage.parts.length > 0) { - result.push(assistantMessage) - // Inject pending media as a user message for providers that don't support - // media (images, PDFs) in tool results - if (media.length > 0) { - result.push({ - id: MessageID.ascending(), - role: "user", - parts: [ - { - type: "text" as const, - text: SYNTHETIC_ATTACHMENT_PROMPT, - }, - ...media.map((attachment) => ({ - type: "file" as const, - url: attachment.url, - mediaType: attachment.mime, - })), - ], - }) - } - } - } - } - - const tools = Object.fromEntries(Array.from(toolNames).map((toolName) => [toolName, { toModelOutput }])) - - return yield* Effect.promise(() => - convertToModelMessages( - result.filter((msg) => msg.parts.some((part) => part.type !== "step-start")), - { - //@ts-expect-error (convertToModelMessages expects a ToolSet but only actually needs tools[name]?.toModelOutput) - tools, - }, - ), - ) - }) - - export function toModelMessages( - input: WithParts[], - model: Provider.Model, - options?: { stripMedia?: boolean }, - ): Promise { - return Effect.runPromise(toModelMessagesEffect(input, model, options).pipe(Effect.provide(EffectLogger.layer))) - } - - export function page(input: { sessionID: SessionID; limit: number; before?: string }) { - const before = input.before ? cursor.decode(input.before) : undefined - const where = before - ? 
and(eq(MessageTable.session_id, input.sessionID), older(before)) - : eq(MessageTable.session_id, input.sessionID) - const rows = Database.use((db) => - db - .select() - .from(MessageTable) - .where(where) - .orderBy(desc(MessageTable.time_created), desc(MessageTable.id)) - .limit(input.limit + 1) - .all(), - ) - if (rows.length === 0) { - const row = Database.use((db) => - db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.id, input.sessionID)).get(), - ) - if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` }) - return { - items: [] as MessageV2.WithParts[], - more: false, - } - } - - const more = rows.length > input.limit - const slice = more ? rows.slice(0, input.limit) : rows - const items = hydrate(slice) - items.reverse() - const tail = slice.at(-1) - return { - items, - more, - cursor: more && tail ? cursor.encode({ id: tail.id, time: tail.time_created }) : undefined, - } - } - - export function* stream(sessionID: SessionID) { - const size = 50 - let before: string | undefined - while (true) { - const next = page({ sessionID, limit: size, before }) - if (next.items.length === 0) break - for (let i = next.items.length - 1; i >= 0; i--) { - yield next.items[i] - } - if (!next.more || !next.cursor) break - before = next.cursor - } - } - - export function parts(message_id: MessageID) { - const rows = Database.use((db) => - db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(), - ) - return rows.map( - (row) => - ({ - ...row.data, - id: row.id, - sessionID: row.session_id, - messageID: row.message_id, - }) as MessageV2.Part, - ) - } - - export function get(input: { sessionID: SessionID; messageID: MessageID }): WithParts { - const row = Database.use((db) => - db - .select() - .from(MessageTable) - .where(and(eq(MessageTable.id, input.messageID), eq(MessageTable.session_id, input.sessionID))) - .get(), - ) - if (!row) throw new NotFoundError({ message: `Message 
not found: ${input.messageID}` }) - return { - info: info(row), - parts: parts(input.messageID), - } - } - - export function filterCompacted(msgs: Iterable) { - const result = [] as MessageV2.WithParts[] - const completed = new Set() - for (const msg of msgs) { - result.push(msg) - if ( - msg.info.role === "user" && - completed.has(msg.info.id) && - msg.parts.some((part) => part.type === "compaction") - ) - break - if (msg.info.role === "assistant" && msg.info.summary && msg.info.finish && !msg.info.error) - completed.add(msg.info.parentID) - } - result.reverse() - return result - } - - export const filterCompactedEffect = Effect.fnUntraced(function* (sessionID: SessionID) { - return filterCompacted(stream(sessionID)) - }) - - export function fromError( - e: unknown, - ctx: { providerID: ProviderID; aborted?: boolean }, - ): NonNullable { - switch (true) { - case e instanceof DOMException && e.name === "AbortError": - return new MessageV2.AbortedError( - { message: e.message }, - { - cause: e, - }, - ).toObject() - case MessageV2.OutputLengthError.isInstance(e): - return e - case LoadAPIKeyError.isInstance(e): - return new MessageV2.AuthError( - { - providerID: ctx.providerID, - message: e.message, - }, - { cause: e }, - ).toObject() - case (e as SystemError)?.code === "ECONNRESET": - return new MessageV2.APIError( - { - message: "Connection reset by server", - isRetryable: true, - metadata: { - code: (e as SystemError).code ?? "", - syscall: (e as SystemError).syscall ?? "", - message: (e as SystemError).message ?? 
"", - }, - }, - { cause: e }, - ).toObject() - case e instanceof Error && (e as FetchDecompressionError).code === "ZlibError": - if (ctx.aborted) { - return new MessageV2.AbortedError({ message: e.message }, { cause: e }).toObject() - } - return new MessageV2.APIError( - { - message: "Response decompression failed", - isRetryable: true, - metadata: { - code: (e as FetchDecompressionError).code, - message: e.message, - }, - }, - { cause: e }, - ).toObject() - case APICallError.isInstance(e): - const parsed = ProviderError.parseAPICallError({ - providerID: ctx.providerID, - error: e, - }) - if (parsed.type === "context_overflow") { - return new MessageV2.ContextOverflowError( - { - message: parsed.message, - responseBody: parsed.responseBody, - }, - { cause: e }, - ).toObject() - } - - return new MessageV2.APIError( - { - message: parsed.message, - statusCode: parsed.statusCode, - isRetryable: parsed.isRetryable, - responseHeaders: parsed.responseHeaders, - responseBody: parsed.responseBody, - metadata: parsed.metadata, - }, - { cause: e }, - ).toObject() - case e instanceof Error: - return new NamedError.Unknown({ message: errorMessage(e) }, { cause: e }).toObject() - default: - try { - const parsed = ProviderError.parseStreamError(e) - if (parsed) { - if (parsed.type === "context_overflow") { - return new MessageV2.ContextOverflowError( - { - message: parsed.message, - responseBody: parsed.responseBody, - }, - { cause: e }, - ).toObject() } - return new MessageV2.APIError( - { - message: parsed.message, - isRetryable: parsed.isRetryable, - responseBody: parsed.responseBody, - }, - { - cause: e, - }, - ).toObject() } - } catch {} - return new NamedError.Unknown({ message: JSON.stringify(e) }, { cause: e }).toObject() + // Handle pending/running tool calls to prevent dangling tool_use blocks + // Anthropic/Claude APIs require every tool_use to have a corresponding tool_result + if (part.state.status === "pending" || part.state.status === "running") + 
assistantMessage.parts.push({ + type: ("tool-" + part.tool) as `tool-${string}`, + state: "output-error", + toolCallId: part.callID, + input: part.state.input, + errorText: "[Tool execution was interrupted]", + ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), + ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), + }) + } + if (part.type === "reasoning") { + assistantMessage.parts.push({ + type: "reasoning", + text: part.text, + ...(differentModel ? {} : { providerMetadata: part.metadata }), + }) + } + } + if (assistantMessage.parts.length > 0) { + result.push(assistantMessage) + // Inject pending media as a user message for providers that don't support + // media (images, PDFs) in tool results + if (media.length > 0) { + result.push({ + id: MessageID.ascending(), + role: "user", + parts: [ + { + type: "text" as const, + text: SYNTHETIC_ATTACHMENT_PROMPT, + }, + ...media.map((attachment) => ({ + type: "file" as const, + url: attachment.url, + mediaType: attachment.mime, + })), + ], + }) + } + } } } + + const tools = Object.fromEntries(Array.from(toolNames).map((toolName) => [toolName, { toModelOutput }])) + + return yield* Effect.promise(() => + convertToModelMessages( + result.filter((msg) => msg.parts.some((part) => part.type !== "step-start")), + { + //@ts-expect-error (convertToModelMessages expects a ToolSet but only actually needs tools[name]?.toModelOutput) + tools, + }, + ), + ) +}) + +export function toModelMessages( + input: WithParts[], + model: Provider.Model, + options?: { stripMedia?: boolean }, +): Promise { + return Effect.runPromise(toModelMessagesEffect(input, model, options).pipe(Effect.provide(EffectLogger.layer))) +} + +export function page(input: { sessionID: SessionID; limit: number; before?: string }) { + const before = input.before ? cursor.decode(input.before) : undefined + const where = before + ? 
and(eq(MessageTable.session_id, input.sessionID), older(before)) + : eq(MessageTable.session_id, input.sessionID) + const rows = Database.use((db) => + db + .select() + .from(MessageTable) + .where(where) + .orderBy(desc(MessageTable.time_created), desc(MessageTable.id)) + .limit(input.limit + 1) + .all(), + ) + if (rows.length === 0) { + const row = Database.use((db) => + db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.id, input.sessionID)).get(), + ) + if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` }) + return { + items: [] as WithParts[], + more: false, + } + } + + const more = rows.length > input.limit + const slice = more ? rows.slice(0, input.limit) : rows + const items = hydrate(slice) + items.reverse() + const tail = slice.at(-1) + return { + items, + more, + cursor: more && tail ? cursor.encode({ id: tail.id, time: tail.time_created }) : undefined, + } } + +export function* stream(sessionID: SessionID) { + const size = 50 + let before: string | undefined + while (true) { + const next = page({ sessionID, limit: size, before }) + if (next.items.length === 0) break + for (let i = next.items.length - 1; i >= 0; i--) { + yield next.items[i] + } + if (!next.more || !next.cursor) break + before = next.cursor + } +} + +export function parts(message_id: MessageID) { + const rows = Database.use((db) => + db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(), + ) + return rows.map( + (row) => + ({ + ...row.data, + id: row.id, + sessionID: row.session_id, + messageID: row.message_id, + }) as Part, + ) +} + +export function get(input: { sessionID: SessionID; messageID: MessageID }): WithParts { + const row = Database.use((db) => + db + .select() + .from(MessageTable) + .where(and(eq(MessageTable.id, input.messageID), eq(MessageTable.session_id, input.sessionID))) + .get(), + ) + if (!row) throw new NotFoundError({ message: `Message not found: 
${input.messageID}` }) + return { + info: info(row), + parts: parts(input.messageID), + } +} + +export function filterCompacted(msgs: Iterable) { + const result = [] as WithParts[] + const completed = new Set() + for (const msg of msgs) { + result.push(msg) + if ( + msg.info.role === "user" && + completed.has(msg.info.id) && + msg.parts.some((part) => part.type === "compaction") + ) + break + if (msg.info.role === "assistant" && msg.info.summary && msg.info.finish && !msg.info.error) + completed.add(msg.info.parentID) + } + result.reverse() + return result +} + +export const filterCompactedEffect = Effect.fnUntraced(function* (sessionID: SessionID) { + return filterCompacted(stream(sessionID)) +}) + +export function fromError( + e: unknown, + ctx: { providerID: ProviderID; aborted?: boolean }, +): NonNullable { + switch (true) { + case e instanceof DOMException && e.name === "AbortError": + return new AbortedError( + { message: e.message }, + { + cause: e, + }, + ).toObject() + case OutputLengthError.isInstance(e): + return e + case LoadAPIKeyError.isInstance(e): + return new AuthError( + { + providerID: ctx.providerID, + message: e.message, + }, + { cause: e }, + ).toObject() + case (e as SystemError)?.code === "ECONNRESET": + return new APIError( + { + message: "Connection reset by server", + isRetryable: true, + metadata: { + code: (e as SystemError).code ?? "", + syscall: (e as SystemError).syscall ?? "", + message: (e as SystemError).message ?? 
"", + }, + }, + { cause: e }, + ).toObject() + case e instanceof Error && (e as FetchDecompressionError).code === "ZlibError": + if (ctx.aborted) { + return new AbortedError({ message: e.message }, { cause: e }).toObject() + } + return new APIError( + { + message: "Response decompression failed", + isRetryable: true, + metadata: { + code: (e as FetchDecompressionError).code, + message: e.message, + }, + }, + { cause: e }, + ).toObject() + case APICallError.isInstance(e): + const parsed = ProviderError.parseAPICallError({ + providerID: ctx.providerID, + error: e, + }) + if (parsed.type === "context_overflow") { + return new ContextOverflowError( + { + message: parsed.message, + responseBody: parsed.responseBody, + }, + { cause: e }, + ).toObject() + } + + return new APIError( + { + message: parsed.message, + statusCode: parsed.statusCode, + isRetryable: parsed.isRetryable, + responseHeaders: parsed.responseHeaders, + responseBody: parsed.responseBody, + metadata: parsed.metadata, + }, + { cause: e }, + ).toObject() + case e instanceof Error: + return new NamedError.Unknown({ message: errorMessage(e) }, { cause: e }).toObject() + default: + try { + const parsed = ProviderError.parseStreamError(e) + if (parsed) { + if (parsed.type === "context_overflow") { + return new ContextOverflowError( + { + message: parsed.message, + responseBody: parsed.responseBody, + }, + { cause: e }, + ).toObject() + } + return new APIError( + { + message: parsed.message, + isRetryable: parsed.isRetryable, + responseBody: parsed.responseBody, + }, + { + cause: e, + }, + ).toObject() + } + } catch {} + return new NamedError.Unknown({ message: JSON.stringify(e) }, { cause: e }).toObject() + } +} + +export * as MessageV2 from "./message-v2" diff --git a/packages/opencode/src/session/message.ts b/packages/opencode/src/session/message.ts index 396034825a..ced04b8e9d 100644 --- a/packages/opencode/src/session/message.ts +++ b/packages/opencode/src/session/message.ts @@ -3,189 +3,189 @@ import { 
SessionID } from "./schema" import { ModelID, ProviderID } from "../provider/schema" import { NamedError } from "@opencode-ai/shared/util/error" -export namespace Message { - export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) - export const AuthError = NamedError.create( - "ProviderAuthError", - z.object({ - providerID: z.string(), - message: z.string(), - }), - ) +export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) +export const AuthError = NamedError.create( + "ProviderAuthError", + z.object({ + providerID: z.string(), + message: z.string(), + }), +) - export const ToolCall = z - .object({ - state: z.literal("call"), - step: z.number().optional(), - toolCallId: z.string(), - toolName: z.string(), - args: z.custom>(), - }) - .meta({ - ref: "ToolCall", - }) - export type ToolCall = z.infer - - export const ToolPartialCall = z - .object({ - state: z.literal("partial-call"), - step: z.number().optional(), - toolCallId: z.string(), - toolName: z.string(), - args: z.custom>(), - }) - .meta({ - ref: "ToolPartialCall", - }) - export type ToolPartialCall = z.infer - - export const ToolResult = z - .object({ - state: z.literal("result"), - step: z.number().optional(), - toolCallId: z.string(), - toolName: z.string(), - args: z.custom>(), - result: z.string(), - }) - .meta({ - ref: "ToolResult", - }) - export type ToolResult = z.infer - - export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).meta({ - ref: "ToolInvocation", +export const ToolCall = z + .object({ + state: z.literal("call"), + step: z.number().optional(), + toolCallId: z.string(), + toolName: z.string(), + args: z.custom>(), }) - export type ToolInvocation = z.infer + .meta({ + ref: "ToolCall", + }) +export type ToolCall = z.infer - export const TextPart = z - .object({ - type: z.literal("text"), - text: z.string(), - }) - .meta({ - ref: "TextPart", - }) - export type TextPart = 
z.infer +export const ToolPartialCall = z + .object({ + state: z.literal("partial-call"), + step: z.number().optional(), + toolCallId: z.string(), + toolName: z.string(), + args: z.custom>(), + }) + .meta({ + ref: "ToolPartialCall", + }) +export type ToolPartialCall = z.infer - export const ReasoningPart = z - .object({ - type: z.literal("reasoning"), - text: z.string(), - providerMetadata: z.record(z.string(), z.any()).optional(), - }) - .meta({ - ref: "ReasoningPart", - }) - export type ReasoningPart = z.infer +export const ToolResult = z + .object({ + state: z.literal("result"), + step: z.number().optional(), + toolCallId: z.string(), + toolName: z.string(), + args: z.custom>(), + result: z.string(), + }) + .meta({ + ref: "ToolResult", + }) +export type ToolResult = z.infer - export const ToolInvocationPart = z - .object({ - type: z.literal("tool-invocation"), - toolInvocation: ToolInvocation, - }) - .meta({ - ref: "ToolInvocationPart", - }) - export type ToolInvocationPart = z.infer +export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).meta({ + ref: "ToolInvocation", +}) +export type ToolInvocation = z.infer - export const SourceUrlPart = z - .object({ - type: z.literal("source-url"), - sourceId: z.string(), - url: z.string(), - title: z.string().optional(), - providerMetadata: z.record(z.string(), z.any()).optional(), - }) - .meta({ - ref: "SourceUrlPart", - }) - export type SourceUrlPart = z.infer +export const TextPart = z + .object({ + type: z.literal("text"), + text: z.string(), + }) + .meta({ + ref: "TextPart", + }) +export type TextPart = z.infer - export const FilePart = z - .object({ - type: z.literal("file"), - mediaType: z.string(), - filename: z.string().optional(), - url: z.string(), - }) - .meta({ - ref: "FilePart", - }) - export type FilePart = z.infer +export const ReasoningPart = z + .object({ + type: z.literal("reasoning"), + text: z.string(), + providerMetadata: z.record(z.string(), 
z.any()).optional(), + }) + .meta({ + ref: "ReasoningPart", + }) +export type ReasoningPart = z.infer - export const StepStartPart = z - .object({ - type: z.literal("step-start"), - }) - .meta({ - ref: "StepStartPart", - }) - export type StepStartPart = z.infer +export const ToolInvocationPart = z + .object({ + type: z.literal("tool-invocation"), + toolInvocation: ToolInvocation, + }) + .meta({ + ref: "ToolInvocationPart", + }) +export type ToolInvocationPart = z.infer - export const MessagePart = z - .discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart]) - .meta({ - ref: "MessagePart", - }) - export type MessagePart = z.infer +export const SourceUrlPart = z + .object({ + type: z.literal("source-url"), + sourceId: z.string(), + url: z.string(), + title: z.string().optional(), + providerMetadata: z.record(z.string(), z.any()).optional(), + }) + .meta({ + ref: "SourceUrlPart", + }) +export type SourceUrlPart = z.infer - export const Info = z - .object({ - id: z.string(), - role: z.enum(["user", "assistant"]), - parts: z.array(MessagePart), - metadata: z - .object({ - time: z.object({ - created: z.number(), - completed: z.number().optional(), - }), - error: z - .discriminatedUnion("name", [AuthError.Schema, NamedError.Unknown.Schema, OutputLengthError.Schema]) - .optional(), - sessionID: SessionID.zod, - tool: z.record( - z.string(), - z - .object({ - title: z.string(), - snapshot: z.string().optional(), - time: z.object({ - start: z.number(), - end: z.number(), - }), - }) - .catchall(z.any()), - ), - assistant: z +export const FilePart = z + .object({ + type: z.literal("file"), + mediaType: z.string(), + filename: z.string().optional(), + url: z.string(), + }) + .meta({ + ref: "FilePart", + }) +export type FilePart = z.infer + +export const StepStartPart = z + .object({ + type: z.literal("step-start"), + }) + .meta({ + ref: "StepStartPart", + }) +export type StepStartPart = z.infer + +export const 
MessagePart = z + .discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart]) + .meta({ + ref: "MessagePart", + }) +export type MessagePart = z.infer + +export const Info = z + .object({ + id: z.string(), + role: z.enum(["user", "assistant"]), + parts: z.array(MessagePart), + metadata: z + .object({ + time: z.object({ + created: z.number(), + completed: z.number().optional(), + }), + error: z + .discriminatedUnion("name", [AuthError.Schema, NamedError.Unknown.Schema, OutputLengthError.Schema]) + .optional(), + sessionID: SessionID.zod, + tool: z.record( + z.string(), + z .object({ - system: z.string().array(), - modelID: ModelID.zod, - providerID: ProviderID.zod, - path: z.object({ - cwd: z.string(), - root: z.string(), - }), - cost: z.number(), - summary: z.boolean().optional(), - tokens: z.object({ - input: z.number(), - output: z.number(), - reasoning: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), + title: z.string(), + snapshot: z.string().optional(), + time: z.object({ + start: z.number(), + end: z.number(), }), }) - .optional(), - snapshot: z.string().optional(), - }) - .meta({ ref: "MessageMetadata" }), - }) - .meta({ - ref: "Message", - }) - export type Info = z.infer -} + .catchall(z.any()), + ), + assistant: z + .object({ + system: z.string().array(), + modelID: ModelID.zod, + providerID: ProviderID.zod, + path: z.object({ + cwd: z.string(), + root: z.string(), + }), + cost: z.number(), + summary: z.boolean().optional(), + tokens: z.object({ + input: z.number(), + output: z.number(), + reasoning: z.number(), + cache: z.object({ + read: z.number(), + write: z.number(), + }), + }), + }) + .optional(), + snapshot: z.string().optional(), + }) + .meta({ ref: "MessageMetadata" }), + }) + .meta({ + ref: "Message", + }) +export type Info = z.infer + +export * as Message from "./message" diff --git a/packages/opencode/src/session/processor.ts 
b/packages/opencode/src/session/processor.ts index 415639fbe5..820c61aa91 100644 --- a/packages/opencode/src/session/processor.ts +++ b/packages/opencode/src/session/processor.ts @@ -21,599 +21,599 @@ import { errorMessage } from "@/util/error" import { Log } from "@/util" import { isRecord } from "@/util/record" -export namespace SessionProcessor { - const DOOM_LOOP_THRESHOLD = 3 - const log = Log.create({ service: "session.processor" }) +const DOOM_LOOP_THRESHOLD = 3 +const log = Log.create({ service: "session.processor" }) - export type Result = "compact" | "stop" | "continue" +export type Result = "compact" | "stop" | "continue" - export type Event = LLM.Event +export type Event = LLM.Event - export interface Handle { - readonly message: MessageV2.Assistant - readonly updateToolCall: ( - toolCallID: string, - update: (part: MessageV2.ToolPart) => MessageV2.ToolPart, - ) => Effect.Effect - readonly completeToolCall: ( - toolCallID: string, - output: { - title: string - metadata: Record - output: string - attachments?: MessageV2.FilePart[] - }, - ) => Effect.Effect - readonly process: (streamInput: LLM.StreamInput) => Effect.Effect - } +export interface Handle { + readonly message: MessageV2.Assistant + readonly updateToolCall: ( + toolCallID: string, + update: (part: MessageV2.ToolPart) => MessageV2.ToolPart, + ) => Effect.Effect + readonly completeToolCall: ( + toolCallID: string, + output: { + title: string + metadata: Record + output: string + attachments?: MessageV2.FilePart[] + }, + ) => Effect.Effect + readonly process: (streamInput: LLM.StreamInput) => Effect.Effect +} - type Input = { - assistantMessage: MessageV2.Assistant - sessionID: SessionID - model: Provider.Model - } +type Input = { + assistantMessage: MessageV2.Assistant + sessionID: SessionID + model: Provider.Model +} - export interface Interface { - readonly create: (input: Input) => Effect.Effect - } +export interface Interface { + readonly create: (input: Input) => Effect.Effect +} - type 
ToolCall = { - partID: MessageV2.ToolPart["id"] - messageID: MessageV2.ToolPart["messageID"] - sessionID: MessageV2.ToolPart["sessionID"] - done: Deferred.Deferred - } +type ToolCall = { + partID: MessageV2.ToolPart["id"] + messageID: MessageV2.ToolPart["messageID"] + sessionID: MessageV2.ToolPart["sessionID"] + done: Deferred.Deferred +} - interface ProcessorContext extends Input { - toolcalls: Record - shouldBreak: boolean - snapshot: string | undefined - blocked: boolean - needsCompaction: boolean - currentText: MessageV2.TextPart | undefined - reasoningMap: Record - } +interface ProcessorContext extends Input { + toolcalls: Record + shouldBreak: boolean + snapshot: string | undefined + blocked: boolean + needsCompaction: boolean + currentText: MessageV2.TextPart | undefined + reasoningMap: Record +} - type StreamEvent = Event +type StreamEvent = Event - export class Service extends Context.Service()("@opencode/SessionProcessor") {} +export class Service extends Context.Service()("@opencode/SessionProcessor") {} - export const layer: Layer.Layer< - Service, - never, - | Session.Service - | Config.Service - | Bus.Service - | Snapshot.Service - | Agent.Service - | LLM.Service - | Permission.Service - | Plugin.Service - | SessionSummary.Service - | SessionStatus.Service - > = Layer.effect( - Service, - Effect.gen(function* () { - const session = yield* Session.Service - const config = yield* Config.Service - const bus = yield* Bus.Service - const snapshot = yield* Snapshot.Service - const agents = yield* Agent.Service - const llm = yield* LLM.Service - const permission = yield* Permission.Service - const plugin = yield* Plugin.Service - const summary = yield* SessionSummary.Service - const scope = yield* Scope.Scope - const status = yield* SessionStatus.Service +export const layer: Layer.Layer< + Service, + never, + | Session.Service + | Config.Service + | Bus.Service + | Snapshot.Service + | Agent.Service + | LLM.Service + | Permission.Service + | Plugin.Service + 
| SessionSummary.Service + | SessionStatus.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const session = yield* Session.Service + const config = yield* Config.Service + const bus = yield* Bus.Service + const snapshot = yield* Snapshot.Service + const agents = yield* Agent.Service + const llm = yield* LLM.Service + const permission = yield* Permission.Service + const plugin = yield* Plugin.Service + const summary = yield* SessionSummary.Service + const scope = yield* Scope.Scope + const status = yield* SessionStatus.Service - const create = Effect.fn("SessionProcessor.create")(function* (input: Input) { - // Pre-capture snapshot before the LLM stream starts. The AI SDK - // may execute tools internally before emitting start-step events, - // so capturing inside the event handler can be too late. - const initialSnapshot = yield* snapshot.track() - const ctx: ProcessorContext = { - assistantMessage: input.assistantMessage, - sessionID: input.sessionID, - model: input.model, - toolcalls: {}, - shouldBreak: false, - snapshot: initialSnapshot, - blocked: false, - needsCompaction: false, - currentText: undefined, - reasoningMap: {}, - } - let aborted = false - const slog = log.clone().tag("sessionID", input.sessionID).tag("messageID", input.assistantMessage.id) + const create = Effect.fn("SessionProcessor.create")(function* (input: Input) { + // Pre-capture snapshot before the LLM stream starts. The AI SDK + // may execute tools internally before emitting start-step events, + // so capturing inside the event handler can be too late. 
+ const initialSnapshot = yield* snapshot.track() + const ctx: ProcessorContext = { + assistantMessage: input.assistantMessage, + sessionID: input.sessionID, + model: input.model, + toolcalls: {}, + shouldBreak: false, + snapshot: initialSnapshot, + blocked: false, + needsCompaction: false, + currentText: undefined, + reasoningMap: {}, + } + let aborted = false + const slog = log.clone().tag("sessionID", input.sessionID).tag("messageID", input.assistantMessage.id) - const parse = (e: unknown) => - MessageV2.fromError(e, { - providerID: input.model.providerID, - aborted, - }) - - const settleToolCall = Effect.fn("SessionProcessor.settleToolCall")(function* (toolCallID: string) { - const done = ctx.toolcalls[toolCallID]?.done - delete ctx.toolcalls[toolCallID] - if (done) yield* Deferred.succeed(done, undefined).pipe(Effect.ignore) + const parse = (e: unknown) => + MessageV2.fromError(e, { + providerID: input.model.providerID, + aborted, }) - const readToolCall = Effect.fn("SessionProcessor.readToolCall")(function* (toolCallID: string) { - const call = ctx.toolcalls[toolCallID] - if (!call) return - const part = yield* session.getPart({ - partID: call.partID, - messageID: call.messageID, - sessionID: call.sessionID, - }) - if (!part || part.type !== "tool") { - delete ctx.toolcalls[toolCallID] - return - } - return { call, part } - }) - - const updateToolCall = Effect.fn("SessionProcessor.updateToolCall")(function* ( - toolCallID: string, - update: (part: MessageV2.ToolPart) => MessageV2.ToolPart, - ) { - const match = yield* readToolCall(toolCallID) - if (!match) return - const part = yield* session.updatePart(update(match.part)) - ctx.toolcalls[toolCallID] = { - ...match.call, - partID: part.id, - messageID: part.messageID, - sessionID: part.sessionID, - } - return part - }) - - const completeToolCall = Effect.fn("SessionProcessor.completeToolCall")(function* ( - toolCallID: string, - output: { - title: string - metadata: Record - output: string - attachments?: 
MessageV2.FilePart[] - }, - ) { - const match = yield* readToolCall(toolCallID) - if (!match || match.part.state.status !== "running") return - yield* session.updatePart({ - ...match.part, - state: { - status: "completed", - input: match.part.state.input, - output: output.output, - metadata: output.metadata, - title: output.title, - time: { start: match.part.state.time.start, end: Date.now() }, - attachments: output.attachments, - }, - }) - yield* settleToolCall(toolCallID) - }) - - const failToolCall = Effect.fn("SessionProcessor.failToolCall")(function* (toolCallID: string, error: unknown) { - const match = yield* readToolCall(toolCallID) - if (!match || match.part.state.status !== "running") return false - yield* session.updatePart({ - ...match.part, - state: { - status: "error", - input: match.part.state.input, - error: errorMessage(error), - time: { start: match.part.state.time.start, end: Date.now() }, - }, - }) - if (error instanceof Permission.RejectedError || error instanceof Question.RejectedError) { - ctx.blocked = ctx.shouldBreak - } - yield* settleToolCall(toolCallID) - return true - }) - - const handleEvent = Effect.fn("SessionProcessor.handleEvent")(function* (value: StreamEvent) { - switch (value.type) { - case "start": - yield* status.set(ctx.sessionID, { type: "busy" }) - return - - case "reasoning-start": - if (value.id in ctx.reasoningMap) return - ctx.reasoningMap[value.id] = { - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.assistantMessage.sessionID, - type: "reasoning", - text: "", - time: { start: Date.now() }, - metadata: value.providerMetadata, - } - yield* session.updatePart(ctx.reasoningMap[value.id]) - return - - case "reasoning-delta": - if (!(value.id in ctx.reasoningMap)) return - ctx.reasoningMap[value.id].text += value.text - if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata - yield* session.updatePartDelta({ - sessionID: ctx.reasoningMap[value.id].sessionID, 
- messageID: ctx.reasoningMap[value.id].messageID, - partID: ctx.reasoningMap[value.id].id, - field: "text", - delta: value.text, - }) - return - - case "reasoning-end": - if (!(value.id in ctx.reasoningMap)) return - // oxlint-disable-next-line no-self-assign -- reactivity trigger - ctx.reasoningMap[value.id].text = ctx.reasoningMap[value.id].text - ctx.reasoningMap[value.id].time = { ...ctx.reasoningMap[value.id].time, end: Date.now() } - if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata - yield* session.updatePart(ctx.reasoningMap[value.id]) - delete ctx.reasoningMap[value.id] - return - - case "tool-input-start": - if (ctx.assistantMessage.summary) { - throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`) - } - const part = yield* session.updatePart({ - id: ctx.toolcalls[value.id]?.partID ?? PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.assistantMessage.sessionID, - type: "tool", - tool: value.toolName, - callID: value.id, - state: { status: "pending", input: {}, raw: "" }, - metadata: value.providerExecuted ? { providerExecuted: true } : undefined, - } satisfies MessageV2.ToolPart) - ctx.toolcalls[value.id] = { - done: yield* Deferred.make(), - partID: part.id, - messageID: part.messageID, - sessionID: part.sessionID, - } - return - - case "tool-input-delta": - return - - case "tool-input-end": - return - - case "tool-call": { - if (ctx.assistantMessage.summary) { - throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`) - } - yield* updateToolCall(value.toolCallId, (match) => ({ - ...match, - tool: value.toolName, - state: { - ...match.state, - status: "running", - input: value.input, - time: { start: Date.now() }, - }, - metadata: match.metadata?.providerExecuted - ? 
{ ...value.providerMetadata, providerExecuted: true } - : value.providerMetadata, - })) - - const parts = MessageV2.parts(ctx.assistantMessage.id) - const recentParts = parts.slice(-DOOM_LOOP_THRESHOLD) - - if ( - recentParts.length !== DOOM_LOOP_THRESHOLD || - !recentParts.every( - (part) => - part.type === "tool" && - part.tool === value.toolName && - part.state.status !== "pending" && - JSON.stringify(part.state.input) === JSON.stringify(value.input), - ) - ) { - return - } - - const agent = yield* agents.get(ctx.assistantMessage.agent) - yield* permission.ask({ - permission: "doom_loop", - patterns: [value.toolName], - sessionID: ctx.assistantMessage.sessionID, - metadata: { tool: value.toolName, input: value.input }, - always: [value.toolName], - ruleset: agent.permission, - }) - return - } - - case "tool-result": { - yield* completeToolCall(value.toolCallId, value.output) - return - } - - case "tool-error": { - yield* failToolCall(value.toolCallId, value.error) - return - } - - case "error": - throw value.error - - case "start-step": - if (!ctx.snapshot) ctx.snapshot = yield* snapshot.track() - yield* session.updatePart({ - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.sessionID, - snapshot: ctx.snapshot, - type: "step-start", - }) - return - - case "finish-step": { - const usage = Session.getUsage({ - model: ctx.model, - usage: value.usage, - metadata: value.providerMetadata, - }) - ctx.assistantMessage.finish = value.finishReason - ctx.assistantMessage.cost += usage.cost - ctx.assistantMessage.tokens = usage.tokens - yield* session.updatePart({ - id: PartID.ascending(), - reason: value.finishReason, - snapshot: yield* snapshot.track(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.assistantMessage.sessionID, - type: "step-finish", - tokens: usage.tokens, - cost: usage.cost, - }) - yield* session.updateMessage(ctx.assistantMessage) - if (ctx.snapshot) { - const patch = yield* snapshot.patch(ctx.snapshot) - if 
(patch.files.length) { - yield* session.updatePart({ - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.sessionID, - type: "patch", - hash: patch.hash, - files: patch.files, - }) - } - ctx.snapshot = undefined - } - yield* summary - .summarize({ - sessionID: ctx.sessionID, - messageID: ctx.assistantMessage.parentID, - }) - .pipe(Effect.ignore, Effect.forkIn(scope)) - if ( - !ctx.assistantMessage.summary && - isOverflow({ cfg: yield* config.get(), tokens: usage.tokens, model: ctx.model }) - ) { - ctx.needsCompaction = true - } - return - } - - case "text-start": - ctx.currentText = { - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.assistantMessage.sessionID, - type: "text", - text: "", - time: { start: Date.now() }, - metadata: value.providerMetadata, - } - yield* session.updatePart(ctx.currentText) - return - - case "text-delta": - if (!ctx.currentText) return - ctx.currentText.text += value.text - if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata - yield* session.updatePartDelta({ - sessionID: ctx.currentText.sessionID, - messageID: ctx.currentText.messageID, - partID: ctx.currentText.id, - field: "text", - delta: value.text, - }) - return - - case "text-end": - if (!ctx.currentText) return - // oxlint-disable-next-line no-self-assign -- reactivity trigger - ctx.currentText.text = ctx.currentText.text - ctx.currentText.text = (yield* plugin.trigger( - "experimental.text.complete", - { - sessionID: ctx.sessionID, - messageID: ctx.assistantMessage.id, - partID: ctx.currentText.id, - }, - { text: ctx.currentText.text }, - )).text - { - const end = Date.now() - ctx.currentText.time = { start: ctx.currentText.time?.start ?? 
end, end } - } - if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata - yield* session.updatePart(ctx.currentText) - ctx.currentText = undefined - return - - case "finish": - return - - default: - slog.info("unhandled", { event: value.type, value }) - return - } - }) - - const cleanup = Effect.fn("SessionProcessor.cleanup")(function* () { - if (ctx.snapshot) { - const patch = yield* snapshot.patch(ctx.snapshot) - if (patch.files.length) { - yield* session.updatePart({ - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.sessionID, - type: "patch", - hash: patch.hash, - files: patch.files, - }) - } - ctx.snapshot = undefined - } - - if (ctx.currentText) { - const end = Date.now() - ctx.currentText.time = { start: ctx.currentText.time?.start ?? end, end } - yield* session.updatePart(ctx.currentText) - ctx.currentText = undefined - } - - for (const part of Object.values(ctx.reasoningMap)) { - const end = Date.now() - yield* session.updatePart({ - ...part, - time: { start: part.time.start ?? end, end }, - }) - } - ctx.reasoningMap = {} - - yield* Effect.forEach( - Object.values(ctx.toolcalls), - (call) => Deferred.await(call.done).pipe(Effect.timeout("250 millis"), Effect.ignore), - { concurrency: "unbounded" }, - ) - - for (const toolCallID of Object.keys(ctx.toolcalls)) { - const match = yield* readToolCall(toolCallID) - if (!match) continue - const part = match.part - const end = Date.now() - const metadata = "metadata" in part.state && isRecord(part.state.metadata) ? part.state.metadata : {} - yield* session.updatePart({ - ...part, - state: { - ...part.state, - status: "error", - error: "Tool execution aborted", - metadata: { ...metadata, interrupted: true }, - time: { start: "time" in part.state ? 
part.state.time.start : end, end }, - }, - }) - } - ctx.toolcalls = {} - ctx.assistantMessage.time.completed = Date.now() - yield* session.updateMessage(ctx.assistantMessage) - }) - - const halt = Effect.fn("SessionProcessor.halt")(function* (e: unknown) { - slog.error("process", { error: errorMessage(e), stack: e instanceof Error ? e.stack : undefined }) - const error = parse(e) - if (MessageV2.ContextOverflowError.isInstance(error)) { - ctx.needsCompaction = true - yield* bus.publish(Session.Event.Error, { sessionID: ctx.sessionID, error }) - return - } - ctx.assistantMessage.error = error - yield* bus.publish(Session.Event.Error, { - sessionID: ctx.assistantMessage.sessionID, - error: ctx.assistantMessage.error, - }) - yield* status.set(ctx.sessionID, { type: "idle" }) - }) - - const process = Effect.fn("SessionProcessor.process")(function* (streamInput: LLM.StreamInput) { - slog.info("process") - ctx.needsCompaction = false - ctx.shouldBreak = (yield* config.get()).experimental?.continue_loop_on_deny !== true - - return yield* Effect.gen(function* () { - yield* Effect.gen(function* () { - ctx.currentText = undefined - ctx.reasoningMap = {} - const stream = llm.stream(streamInput) - - yield* stream.pipe( - Stream.tap((event) => handleEvent(event)), - Stream.takeUntil(() => ctx.needsCompaction), - Stream.runDrain, - ) - }).pipe( - Effect.onInterrupt(() => - Effect.gen(function* () { - aborted = true - if (!ctx.assistantMessage.error) { - yield* halt(new DOMException("Aborted", "AbortError")) - } - }), - ), - Effect.catchCauseIf( - (cause) => !Cause.hasInterruptsOnly(cause), - (cause) => Effect.fail(Cause.squash(cause)), - ), - Effect.retry( - SessionRetry.policy({ - parse, - set: (info) => - status.set(ctx.sessionID, { - type: "retry", - attempt: info.attempt, - message: info.message, - next: info.next, - }), - }), - ), - Effect.catch(halt), - Effect.ensuring(cleanup()), - ) - - if (ctx.needsCompaction) return "compact" - if (ctx.blocked || 
ctx.assistantMessage.error) return "stop" - return "continue" - }) - }) - - return { - get message() { - return ctx.assistantMessage - }, - updateToolCall, - completeToolCall, - process, - } satisfies Handle + const settleToolCall = Effect.fn("SessionProcessor.settleToolCall")(function* (toolCallID: string) { + const done = ctx.toolcalls[toolCallID]?.done + delete ctx.toolcalls[toolCallID] + if (done) yield* Deferred.succeed(done, undefined).pipe(Effect.ignore) }) - return Service.of({ create }) - }), - ) + const readToolCall = Effect.fn("SessionProcessor.readToolCall")(function* (toolCallID: string) { + const call = ctx.toolcalls[toolCallID] + if (!call) return + const part = yield* session.getPart({ + partID: call.partID, + messageID: call.messageID, + sessionID: call.sessionID, + }) + if (!part || part.type !== "tool") { + delete ctx.toolcalls[toolCallID] + return + } + return { call, part } + }) - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(Session.defaultLayer), - Layer.provide(Snapshot.defaultLayer), - Layer.provide(Agent.defaultLayer), - Layer.provide(LLM.defaultLayer), - Layer.provide(Permission.defaultLayer), - Layer.provide(Plugin.defaultLayer), - Layer.provide(SessionSummary.defaultLayer), - Layer.provide(SessionStatus.defaultLayer), - Layer.provide(Bus.layer), - Layer.provide(Config.defaultLayer), - ), - ) -} + const updateToolCall = Effect.fn("SessionProcessor.updateToolCall")(function* ( + toolCallID: string, + update: (part: MessageV2.ToolPart) => MessageV2.ToolPart, + ) { + const match = yield* readToolCall(toolCallID) + if (!match) return + const part = yield* session.updatePart(update(match.part)) + ctx.toolcalls[toolCallID] = { + ...match.call, + partID: part.id, + messageID: part.messageID, + sessionID: part.sessionID, + } + return part + }) + + const completeToolCall = Effect.fn("SessionProcessor.completeToolCall")(function* ( + toolCallID: string, + output: { + title: string + metadata: Record + output: string 
+ attachments?: MessageV2.FilePart[] + }, + ) { + const match = yield* readToolCall(toolCallID) + if (!match || match.part.state.status !== "running") return + yield* session.updatePart({ + ...match.part, + state: { + status: "completed", + input: match.part.state.input, + output: output.output, + metadata: output.metadata, + title: output.title, + time: { start: match.part.state.time.start, end: Date.now() }, + attachments: output.attachments, + }, + }) + yield* settleToolCall(toolCallID) + }) + + const failToolCall = Effect.fn("SessionProcessor.failToolCall")(function* (toolCallID: string, error: unknown) { + const match = yield* readToolCall(toolCallID) + if (!match || match.part.state.status !== "running") return false + yield* session.updatePart({ + ...match.part, + state: { + status: "error", + input: match.part.state.input, + error: errorMessage(error), + time: { start: match.part.state.time.start, end: Date.now() }, + }, + }) + if (error instanceof Permission.RejectedError || error instanceof Question.RejectedError) { + ctx.blocked = ctx.shouldBreak + } + yield* settleToolCall(toolCallID) + return true + }) + + const handleEvent = Effect.fn("SessionProcessor.handleEvent")(function* (value: StreamEvent) { + switch (value.type) { + case "start": + yield* status.set(ctx.sessionID, { type: "busy" }) + return + + case "reasoning-start": + if (value.id in ctx.reasoningMap) return + ctx.reasoningMap[value.id] = { + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.assistantMessage.sessionID, + type: "reasoning", + text: "", + time: { start: Date.now() }, + metadata: value.providerMetadata, + } + yield* session.updatePart(ctx.reasoningMap[value.id]) + return + + case "reasoning-delta": + if (!(value.id in ctx.reasoningMap)) return + ctx.reasoningMap[value.id].text += value.text + if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata + yield* session.updatePartDelta({ + sessionID: 
ctx.reasoningMap[value.id].sessionID, + messageID: ctx.reasoningMap[value.id].messageID, + partID: ctx.reasoningMap[value.id].id, + field: "text", + delta: value.text, + }) + return + + case "reasoning-end": + if (!(value.id in ctx.reasoningMap)) return + // oxlint-disable-next-line no-self-assign -- reactivity trigger + ctx.reasoningMap[value.id].text = ctx.reasoningMap[value.id].text + ctx.reasoningMap[value.id].time = { ...ctx.reasoningMap[value.id].time, end: Date.now() } + if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata + yield* session.updatePart(ctx.reasoningMap[value.id]) + delete ctx.reasoningMap[value.id] + return + + case "tool-input-start": + if (ctx.assistantMessage.summary) { + throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`) + } + const part = yield* session.updatePart({ + id: ctx.toolcalls[value.id]?.partID ?? PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.assistantMessage.sessionID, + type: "tool", + tool: value.toolName, + callID: value.id, + state: { status: "pending", input: {}, raw: "" }, + metadata: value.providerExecuted ? { providerExecuted: true } : undefined, + } satisfies MessageV2.ToolPart) + ctx.toolcalls[value.id] = { + done: yield* Deferred.make(), + partID: part.id, + messageID: part.messageID, + sessionID: part.sessionID, + } + return + + case "tool-input-delta": + return + + case "tool-input-end": + return + + case "tool-call": { + if (ctx.assistantMessage.summary) { + throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`) + } + yield* updateToolCall(value.toolCallId, (match) => ({ + ...match, + tool: value.toolName, + state: { + ...match.state, + status: "running", + input: value.input, + time: { start: Date.now() }, + }, + metadata: match.metadata?.providerExecuted + ? 
{ ...value.providerMetadata, providerExecuted: true } + : value.providerMetadata, + })) + + const parts = MessageV2.parts(ctx.assistantMessage.id) + const recentParts = parts.slice(-DOOM_LOOP_THRESHOLD) + + if ( + recentParts.length !== DOOM_LOOP_THRESHOLD || + !recentParts.every( + (part) => + part.type === "tool" && + part.tool === value.toolName && + part.state.status !== "pending" && + JSON.stringify(part.state.input) === JSON.stringify(value.input), + ) + ) { + return + } + + const agent = yield* agents.get(ctx.assistantMessage.agent) + yield* permission.ask({ + permission: "doom_loop", + patterns: [value.toolName], + sessionID: ctx.assistantMessage.sessionID, + metadata: { tool: value.toolName, input: value.input }, + always: [value.toolName], + ruleset: agent.permission, + }) + return + } + + case "tool-result": { + yield* completeToolCall(value.toolCallId, value.output) + return + } + + case "tool-error": { + yield* failToolCall(value.toolCallId, value.error) + return + } + + case "error": + throw value.error + + case "start-step": + if (!ctx.snapshot) ctx.snapshot = yield* snapshot.track() + yield* session.updatePart({ + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.sessionID, + snapshot: ctx.snapshot, + type: "step-start", + }) + return + + case "finish-step": { + const usage = Session.getUsage({ + model: ctx.model, + usage: value.usage, + metadata: value.providerMetadata, + }) + ctx.assistantMessage.finish = value.finishReason + ctx.assistantMessage.cost += usage.cost + ctx.assistantMessage.tokens = usage.tokens + yield* session.updatePart({ + id: PartID.ascending(), + reason: value.finishReason, + snapshot: yield* snapshot.track(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.assistantMessage.sessionID, + type: "step-finish", + tokens: usage.tokens, + cost: usage.cost, + }) + yield* session.updateMessage(ctx.assistantMessage) + if (ctx.snapshot) { + const patch = yield* snapshot.patch(ctx.snapshot) + if 
(patch.files.length) { + yield* session.updatePart({ + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.sessionID, + type: "patch", + hash: patch.hash, + files: patch.files, + }) + } + ctx.snapshot = undefined + } + yield* summary + .summarize({ + sessionID: ctx.sessionID, + messageID: ctx.assistantMessage.parentID, + }) + .pipe(Effect.ignore, Effect.forkIn(scope)) + if ( + !ctx.assistantMessage.summary && + isOverflow({ cfg: yield* config.get(), tokens: usage.tokens, model: ctx.model }) + ) { + ctx.needsCompaction = true + } + return + } + + case "text-start": + ctx.currentText = { + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.assistantMessage.sessionID, + type: "text", + text: "", + time: { start: Date.now() }, + metadata: value.providerMetadata, + } + yield* session.updatePart(ctx.currentText) + return + + case "text-delta": + if (!ctx.currentText) return + ctx.currentText.text += value.text + if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata + yield* session.updatePartDelta({ + sessionID: ctx.currentText.sessionID, + messageID: ctx.currentText.messageID, + partID: ctx.currentText.id, + field: "text", + delta: value.text, + }) + return + + case "text-end": + if (!ctx.currentText) return + // oxlint-disable-next-line no-self-assign -- reactivity trigger + ctx.currentText.text = ctx.currentText.text + ctx.currentText.text = (yield* plugin.trigger( + "experimental.text.complete", + { + sessionID: ctx.sessionID, + messageID: ctx.assistantMessage.id, + partID: ctx.currentText.id, + }, + { text: ctx.currentText.text }, + )).text + { + const end = Date.now() + ctx.currentText.time = { start: ctx.currentText.time?.start ?? 
end, end } + } + if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata + yield* session.updatePart(ctx.currentText) + ctx.currentText = undefined + return + + case "finish": + return + + default: + slog.info("unhandled", { event: value.type, value }) + return + } + }) + + const cleanup = Effect.fn("SessionProcessor.cleanup")(function* () { + if (ctx.snapshot) { + const patch = yield* snapshot.patch(ctx.snapshot) + if (patch.files.length) { + yield* session.updatePart({ + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.sessionID, + type: "patch", + hash: patch.hash, + files: patch.files, + }) + } + ctx.snapshot = undefined + } + + if (ctx.currentText) { + const end = Date.now() + ctx.currentText.time = { start: ctx.currentText.time?.start ?? end, end } + yield* session.updatePart(ctx.currentText) + ctx.currentText = undefined + } + + for (const part of Object.values(ctx.reasoningMap)) { + const end = Date.now() + yield* session.updatePart({ + ...part, + time: { start: part.time.start ?? end, end }, + }) + } + ctx.reasoningMap = {} + + yield* Effect.forEach( + Object.values(ctx.toolcalls), + (call) => Deferred.await(call.done).pipe(Effect.timeout("250 millis"), Effect.ignore), + { concurrency: "unbounded" }, + ) + + for (const toolCallID of Object.keys(ctx.toolcalls)) { + const match = yield* readToolCall(toolCallID) + if (!match) continue + const part = match.part + const end = Date.now() + const metadata = "metadata" in part.state && isRecord(part.state.metadata) ? part.state.metadata : {} + yield* session.updatePart({ + ...part, + state: { + ...part.state, + status: "error", + error: "Tool execution aborted", + metadata: { ...metadata, interrupted: true }, + time: { start: "time" in part.state ? 
part.state.time.start : end, end }, + }, + }) + } + ctx.toolcalls = {} + ctx.assistantMessage.time.completed = Date.now() + yield* session.updateMessage(ctx.assistantMessage) + }) + + const halt = Effect.fn("SessionProcessor.halt")(function* (e: unknown) { + slog.error("process", { error: errorMessage(e), stack: e instanceof Error ? e.stack : undefined }) + const error = parse(e) + if (MessageV2.ContextOverflowError.isInstance(error)) { + ctx.needsCompaction = true + yield* bus.publish(Session.Event.Error, { sessionID: ctx.sessionID, error }) + return + } + ctx.assistantMessage.error = error + yield* bus.publish(Session.Event.Error, { + sessionID: ctx.assistantMessage.sessionID, + error: ctx.assistantMessage.error, + }) + yield* status.set(ctx.sessionID, { type: "idle" }) + }) + + const process = Effect.fn("SessionProcessor.process")(function* (streamInput: LLM.StreamInput) { + slog.info("process") + ctx.needsCompaction = false + ctx.shouldBreak = (yield* config.get()).experimental?.continue_loop_on_deny !== true + + return yield* Effect.gen(function* () { + yield* Effect.gen(function* () { + ctx.currentText = undefined + ctx.reasoningMap = {} + const stream = llm.stream(streamInput) + + yield* stream.pipe( + Stream.tap((event) => handleEvent(event)), + Stream.takeUntil(() => ctx.needsCompaction), + Stream.runDrain, + ) + }).pipe( + Effect.onInterrupt(() => + Effect.gen(function* () { + aborted = true + if (!ctx.assistantMessage.error) { + yield* halt(new DOMException("Aborted", "AbortError")) + } + }), + ), + Effect.catchCauseIf( + (cause) => !Cause.hasInterruptsOnly(cause), + (cause) => Effect.fail(Cause.squash(cause)), + ), + Effect.retry( + SessionRetry.policy({ + parse, + set: (info) => + status.set(ctx.sessionID, { + type: "retry", + attempt: info.attempt, + message: info.message, + next: info.next, + }), + }), + ), + Effect.catch(halt), + Effect.ensuring(cleanup()), + ) + + if (ctx.needsCompaction) return "compact" + if (ctx.blocked || 
ctx.assistantMessage.error) return "stop" + return "continue" + }) + }) + + return { + get message() { + return ctx.assistantMessage + }, + updateToolCall, + completeToolCall, + process, + } satisfies Handle + }) + + return Service.of({ create }) + }), +) + +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(Session.defaultLayer), + Layer.provide(Snapshot.defaultLayer), + Layer.provide(Agent.defaultLayer), + Layer.provide(LLM.defaultLayer), + Layer.provide(Permission.defaultLayer), + Layer.provide(Plugin.defaultLayer), + Layer.provide(SessionSummary.defaultLayer), + Layer.provide(SessionStatus.defaultLayer), + Layer.provide(Bus.layer), + Layer.provide(Config.defaultLayer), + ), +) + +export * as SessionProcessor from "./processor" diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 004ee19abe..14fdf30780 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -64,221 +64,220 @@ IMPORTANT: const STRUCTURED_OUTPUT_SYSTEM_PROMPT = `IMPORTANT: The user has requested structured output. You MUST use the StructuredOutput tool to provide your final response. 
Do NOT respond with plain text - you MUST call the StructuredOutput tool with your answer formatted according to the schema.` -export namespace SessionPrompt { - const log = Log.create({ service: "session.prompt" }) - const elog = EffectLogger.create({ service: "session.prompt" }) +const log = Log.create({ service: "session.prompt" }) +const elog = EffectLogger.create({ service: "session.prompt" }) - export interface Interface { - readonly cancel: (sessionID: SessionID) => Effect.Effect - readonly prompt: (input: PromptInput) => Effect.Effect - readonly loop: (input: z.infer) => Effect.Effect - readonly shell: (input: ShellInput) => Effect.Effect - readonly command: (input: CommandInput) => Effect.Effect - readonly resolvePromptParts: (template: string) => Effect.Effect - } +export interface Interface { + readonly cancel: (sessionID: SessionID) => Effect.Effect + readonly prompt: (input: PromptInput) => Effect.Effect + readonly loop: (input: z.infer) => Effect.Effect + readonly shell: (input: ShellInput) => Effect.Effect + readonly command: (input: CommandInput) => Effect.Effect + readonly resolvePromptParts: (template: string) => Effect.Effect +} - export class Service extends Context.Service()("@opencode/SessionPrompt") {} +export class Service extends Context.Service()("@opencode/SessionPrompt") {} - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const status = yield* SessionStatus.Service - const sessions = yield* Session.Service - const agents = yield* Agent.Service - const provider = yield* Provider.Service - const processor = yield* SessionProcessor.Service - const compaction = yield* SessionCompaction.Service - const plugin = yield* Plugin.Service - const commands = yield* Command.Service - const permission = yield* Permission.Service - const fsys = yield* AppFileSystem.Service - const mcp = yield* MCP.Service - const lsp = yield* LSP.Service - const filetime = yield* FileTime.Service - const 
registry = yield* ToolRegistry.Service - const truncate = yield* Truncate.Service - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - const scope = yield* Scope.Scope - const instruction = yield* Instruction.Service - const state = yield* SessionRunState.Service - const revert = yield* SessionRevert.Service - const summary = yield* SessionSummary.Service - const sys = yield* SystemPrompt.Service - const llm = yield* LLM.Service - const runner = Effect.fn("SessionPrompt.runner")(function* () { - return yield* EffectBridge.make() - }) - const ops = Effect.fn("SessionPrompt.ops")(function* () { - const run = yield* runner() - return { - cancel: (sessionID: SessionID) => run.fork(cancel(sessionID)), - resolvePromptParts: (template: string) => resolvePromptParts(template), - prompt: (input: PromptInput) => prompt(input), - } satisfies TaskPromptOps - }) +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + const status = yield* SessionStatus.Service + const sessions = yield* Session.Service + const agents = yield* Agent.Service + const provider = yield* Provider.Service + const processor = yield* SessionProcessor.Service + const compaction = yield* SessionCompaction.Service + const plugin = yield* Plugin.Service + const commands = yield* Command.Service + const permission = yield* Permission.Service + const fsys = yield* AppFileSystem.Service + const mcp = yield* MCP.Service + const lsp = yield* LSP.Service + const filetime = yield* FileTime.Service + const registry = yield* ToolRegistry.Service + const truncate = yield* Truncate.Service + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + const scope = yield* Scope.Scope + const instruction = yield* Instruction.Service + const state = yield* SessionRunState.Service + const revert = yield* SessionRevert.Service + const summary = yield* SessionSummary.Service + const sys = yield* SystemPrompt.Service + const llm = yield* LLM.Service + 
const runner = Effect.fn("SessionPrompt.runner")(function* () { + return yield* EffectBridge.make() + }) + const ops = Effect.fn("SessionPrompt.ops")(function* () { + const run = yield* runner() + return { + cancel: (sessionID: SessionID) => run.fork(cancel(sessionID)), + resolvePromptParts: (template: string) => resolvePromptParts(template), + prompt: (input: PromptInput) => prompt(input), + } satisfies TaskPromptOps + }) - const cancel = Effect.fn("SessionPrompt.cancel")(function* (sessionID: SessionID) { - yield* elog.info("cancel", { sessionID }) - yield* state.cancel(sessionID) - }) + const cancel = Effect.fn("SessionPrompt.cancel")(function* (sessionID: SessionID) { + yield* elog.info("cancel", { sessionID }) + yield* state.cancel(sessionID) + }) - const resolvePromptParts = Effect.fn("SessionPrompt.resolvePromptParts")(function* (template: string) { - const ctx = yield* InstanceState.context - const parts: PromptInput["parts"] = [{ type: "text", text: template }] - const files = ConfigMarkdown.files(template) - const seen = new Set() - yield* Effect.forEach( - files, - Effect.fnUntraced(function* (match) { - const name = match[1] - if (seen.has(name)) return - seen.add(name) - const filepath = name.startsWith("~/") - ? path.join(os.homedir(), name.slice(2)) - : path.resolve(ctx.worktree, name) + const resolvePromptParts = Effect.fn("SessionPrompt.resolvePromptParts")(function* (template: string) { + const ctx = yield* InstanceState.context + const parts: PromptInput["parts"] = [{ type: "text", text: template }] + const files = ConfigMarkdown.files(template) + const seen = new Set() + yield* Effect.forEach( + files, + Effect.fnUntraced(function* (match) { + const name = match[1] + if (seen.has(name)) return + seen.add(name) + const filepath = name.startsWith("~/") + ? 
path.join(os.homedir(), name.slice(2)) + : path.resolve(ctx.worktree, name) - const info = yield* fsys.stat(filepath).pipe(Effect.option) - if (Option.isNone(info)) { - const found = yield* agents.get(name) - if (found) parts.push({ type: "agent", name: found.name }) - return - } - const stat = info.value - parts.push({ - type: "file", - url: pathToFileURL(filepath).href, - filename: name, - mime: stat.type === "Directory" ? "application/x-directory" : "text/plain", - }) - }), - { concurrency: "unbounded", discard: true }, - ) - return parts - }) - - const title = Effect.fn("SessionPrompt.ensureTitle")(function* (input: { - session: Session.Info - history: MessageV2.WithParts[] - providerID: ProviderID - modelID: ModelID - }) { - if (input.session.parentID) return - if (!Session.isDefaultTitle(input.session.title)) return - - const real = (m: MessageV2.WithParts) => - m.info.role === "user" && !m.parts.every((p) => "synthetic" in p && p.synthetic) - const idx = input.history.findIndex(real) - if (idx === -1) return - if (input.history.filter(real).length !== 1) return - - const context = input.history.slice(0, idx + 1) - const firstUser = context[idx] - if (!firstUser || firstUser.info.role !== "user") return - const firstInfo = firstUser.info - - const subtasks = firstUser.parts.filter((p): p is MessageV2.SubtaskPart => p.type === "subtask") - const onlySubtasks = subtasks.length > 0 && firstUser.parts.every((p) => p.type === "subtask") - - const ag = yield* agents.get("title") - if (!ag) return - const mdl = ag.model - ? yield* provider.getModel(ag.model.providerID, ag.model.modelID) - : ((yield* provider.getSmallModel(input.providerID)) ?? - (yield* provider.getModel(input.providerID, input.modelID))) - const msgs = onlySubtasks - ? 
[{ role: "user" as const, content: subtasks.map((p) => p.prompt).join("\n") }] - : yield* MessageV2.toModelMessagesEffect(context, mdl) - const text = yield* llm - .stream({ - agent: ag, - user: firstInfo, - system: [], - small: true, - tools: {}, - model: mdl, - sessionID: input.session.id, - retries: 2, - messages: [{ role: "user", content: "Generate a title for this conversation:\n" }, ...msgs], + const info = yield* fsys.stat(filepath).pipe(Effect.option) + if (Option.isNone(info)) { + const found = yield* agents.get(name) + if (found) parts.push({ type: "agent", name: found.name }) + return + } + const stat = info.value + parts.push({ + type: "file", + url: pathToFileURL(filepath).href, + filename: name, + mime: stat.type === "Directory" ? "application/x-directory" : "text/plain", }) - .pipe( - Stream.filter((e): e is Extract => e.type === "text-delta"), - Stream.map((e) => e.text), - Stream.mkString, - Effect.orDie, - ) - const cleaned = text - .replace(/[\s\S]*?<\/think>\s*/g, "") - .split("\n") - .map((line) => line.trim()) - .find((line) => line.length > 0) - if (!cleaned) return - const t = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." 
: cleaned - yield* sessions - .setTitle({ sessionID: input.session.id, title: t }) - .pipe(Effect.catchCause((cause) => elog.error("failed to generate title", { error: Cause.squash(cause) }))) - }) + }), + { concurrency: "unbounded", discard: true }, + ) + return parts + }) - const insertReminders = Effect.fn("SessionPrompt.insertReminders")(function* (input: { - messages: MessageV2.WithParts[] - agent: Agent.Info - session: Session.Info - }) { - const userMessage = input.messages.findLast((msg) => msg.info.role === "user") - if (!userMessage) return input.messages + const title = Effect.fn("SessionPrompt.ensureTitle")(function* (input: { + session: Session.Info + history: MessageV2.WithParts[] + providerID: ProviderID + modelID: ModelID + }) { + if (input.session.parentID) return + if (!Session.isDefaultTitle(input.session.title)) return - if (!Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE) { - if (input.agent.name === "plan") { - userMessage.parts.push({ - id: PartID.ascending(), - messageID: userMessage.info.id, - sessionID: userMessage.info.sessionID, - type: "text", - text: PROMPT_PLAN, - synthetic: true, - }) - } - const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.agent === "plan") - if (wasPlan && input.agent.name === "build") { - userMessage.parts.push({ - id: PartID.ascending(), - messageID: userMessage.info.id, - sessionID: userMessage.info.sessionID, - type: "text", - text: BUILD_SWITCH, - synthetic: true, - }) - } - return input.messages - } + const real = (m: MessageV2.WithParts) => + m.info.role === "user" && !m.parts.every((p) => "synthetic" in p && p.synthetic) + const idx = input.history.findIndex(real) + if (idx === -1) return + if (input.history.filter(real).length !== 1) return - const assistantMessage = input.messages.findLast((msg) => msg.info.role === "assistant") - if (input.agent.name !== "plan" && assistantMessage?.info.agent === "plan") { - const plan = Session.plan(input.session) - if (!(yield* 
fsys.existsSafe(plan))) return input.messages - const part = yield* sessions.updatePart({ + const context = input.history.slice(0, idx + 1) + const firstUser = context[idx] + if (!firstUser || firstUser.info.role !== "user") return + const firstInfo = firstUser.info + + const subtasks = firstUser.parts.filter((p): p is MessageV2.SubtaskPart => p.type === "subtask") + const onlySubtasks = subtasks.length > 0 && firstUser.parts.every((p) => p.type === "subtask") + + const ag = yield* agents.get("title") + if (!ag) return + const mdl = ag.model + ? yield* provider.getModel(ag.model.providerID, ag.model.modelID) + : ((yield* provider.getSmallModel(input.providerID)) ?? + (yield* provider.getModel(input.providerID, input.modelID))) + const msgs = onlySubtasks + ? [{ role: "user" as const, content: subtasks.map((p) => p.prompt).join("\n") }] + : yield* MessageV2.toModelMessagesEffect(context, mdl) + const text = yield* llm + .stream({ + agent: ag, + user: firstInfo, + system: [], + small: true, + tools: {}, + model: mdl, + sessionID: input.session.id, + retries: 2, + messages: [{ role: "user", content: "Generate a title for this conversation:\n" }, ...msgs], + }) + .pipe( + Stream.filter((e): e is Extract => e.type === "text-delta"), + Stream.map((e) => e.text), + Stream.mkString, + Effect.orDie, + ) + const cleaned = text + .replace(/[\s\S]*?<\/think>\s*/g, "") + .split("\n") + .map((line) => line.trim()) + .find((line) => line.length > 0) + if (!cleaned) return + const t = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." 
: cleaned + yield* sessions + .setTitle({ sessionID: input.session.id, title: t }) + .pipe(Effect.catchCause((cause) => elog.error("failed to generate title", { error: Cause.squash(cause) }))) + }) + + const insertReminders = Effect.fn("SessionPrompt.insertReminders")(function* (input: { + messages: MessageV2.WithParts[] + agent: Agent.Info + session: Session.Info + }) { + const userMessage = input.messages.findLast((msg) => msg.info.role === "user") + if (!userMessage) return input.messages + + if (!Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE) { + if (input.agent.name === "plan") { + userMessage.parts.push({ id: PartID.ascending(), messageID: userMessage.info.id, sessionID: userMessage.info.sessionID, type: "text", - text: `${BUILD_SWITCH}\n\nA plan file exists at ${plan}. You should execute on the plan defined within it`, + text: PROMPT_PLAN, synthetic: true, }) - userMessage.parts.push(part) - return input.messages } + const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.agent === "plan") + if (wasPlan && input.agent.name === "build") { + userMessage.parts.push({ + id: PartID.ascending(), + messageID: userMessage.info.id, + sessionID: userMessage.info.sessionID, + type: "text", + text: BUILD_SWITCH, + synthetic: true, + }) + } + return input.messages + } - if (input.agent.name !== "plan" || assistantMessage?.info.agent === "plan") return input.messages - + const assistantMessage = input.messages.findLast((msg) => msg.info.role === "assistant") + if (input.agent.name !== "plan" && assistantMessage?.info.agent === "plan") { const plan = Session.plan(input.session) - const exists = yield* fsys.existsSafe(plan) - if (!exists) yield* fsys.ensureDir(path.dirname(plan)).pipe(Effect.catch(Effect.die)) + if (!(yield* fsys.existsSafe(plan))) return input.messages const part = yield* sessions.updatePart({ id: PartID.ascending(), messageID: userMessage.info.id, sessionID: userMessage.info.sessionID, type: "text", - text: ` + text: 
`${BUILD_SWITCH}\n\nA plan file exists at ${plan}. You should execute on the plan defined within it`, + synthetic: true, + }) + userMessage.parts.push(part) + return input.messages + } + + if (input.agent.name !== "plan" || assistantMessage?.info.agent === "plan") return input.messages + + const plan = Session.plan(input.session) + const exists = yield* fsys.existsSafe(plan) + if (!exists) yield* fsys.ensureDir(path.dirname(plan)).pipe(Effect.catch(Effect.die)) + const part = yield* sessions.updatePart({ + id: PartID.ascending(), + messageID: userMessage.info.id, + sessionID: userMessage.info.sessionID, + type: "text", + text: ` Plan mode is active. The user indicated that they do not want you to execute yet -- you MUST NOT make any edits (with the exception of the plan file mentioned below), run any non-readonly tools (including changing configs or making commits), or otherwise make any changes to the system. This supersedes any other instructions you have received. ## Plan File Info: @@ -293,10 +292,10 @@ Goal: Gain a comprehensive understanding of the user's request by reading throug 1. Focus on understanding the user's request and the code associated with their request 2. **Launch up to 3 explore agents IN PARALLEL** (single message, multiple tool calls) to efficiently explore the codebase. - - Use 1 agent when the task is isolated to known files, the user provided specific file paths, or you're making a small targeted change. - - Use multiple agents when: the scope is uncertain, multiple areas of the codebase are involved, or you need to understand existing patterns before planning. - - Quality over quantity - 3 agents maximum, but you should try to use the minimum number of agents necessary (usually just 1) - - If using multiple agents: Provide each agent with a specific search focus or area to explore. 
Example: One agent searches for existing implementations, another explores related components, a third investigates testing patterns + - Use 1 agent when the task is isolated to known files, the user provided specific file paths, or you're making a small targeted change. + - Use multiple agents when: the scope is uncertain, multiple areas of the codebase are involved, or you need to understand existing patterns before planning. + - Quality over quantity - 3 agents maximum, but you should try to use the minimum number of agents necessary (usually just 1) + - If using multiple agents: Provide each agent with a specific search focus or area to explore. Example: One agent searches for existing implementations, another explores related components, a third investigates testing patterns 3. After exploring the code, use the question tool to clarify ambiguities in the user request up front. @@ -348,1507 +347,1508 @@ This is critical - your turn should only end with either asking the user a quest NOTE: At any point in time through this workflow you should feel free to ask the user questions or clarifications. Don't make large assumptions about user intent. The goal is to present a well researched plan to the user, and tie any loose ends before implementation begins. 
`, - synthetic: true, - }) - userMessage.parts.push(part) - return input.messages + synthetic: true, + }) + userMessage.parts.push(part) + return input.messages + }) + + const resolveTools = Effect.fn("SessionPrompt.resolveTools")(function* (input: { + agent: Agent.Info + model: Provider.Model + session: Session.Info + tools?: Record + processor: Pick + bypassAgentCheck: boolean + messages: MessageV2.WithParts[] + }) { + using _ = log.time("resolveTools") + const tools: Record = {} + const run = yield* runner() + const promptOps = yield* ops() + + const context = (args: any, options: ToolExecutionOptions): Tool.Context => ({ + sessionID: input.session.id, + abort: options.abortSignal!, + messageID: input.processor.message.id, + callID: options.toolCallId, + extra: { model: input.model, bypassAgentCheck: input.bypassAgentCheck, promptOps }, + agent: input.agent.name, + messages: input.messages, + metadata: (val) => + input.processor.updateToolCall(options.toolCallId, (match) => { + if (!["running", "pending"].includes(match.state.status)) return match + return { + ...match, + state: { + title: val.title, + metadata: val.metadata, + status: "running", + input: args, + time: { start: Date.now() }, + }, + } + }), + ask: (req) => + permission + .ask({ + ...req, + sessionID: input.session.id, + tool: { messageID: input.processor.message.id, callID: options.toolCallId }, + ruleset: Permission.merge(input.agent.permission, input.session.permission ?? 
[]), + }) + .pipe(Effect.orDie), }) - const resolveTools = Effect.fn("SessionPrompt.resolveTools")(function* (input: { - agent: Agent.Info - model: Provider.Model - session: Session.Info - tools?: Record - processor: Pick - bypassAgentCheck: boolean - messages: MessageV2.WithParts[] - }) { - using _ = log.time("resolveTools") - const tools: Record = {} - const run = yield* runner() - const promptOps = yield* ops() - - const context = (args: any, options: ToolExecutionOptions): Tool.Context => ({ - sessionID: input.session.id, - abort: options.abortSignal!, - messageID: input.processor.message.id, - callID: options.toolCallId, - extra: { model: input.model, bypassAgentCheck: input.bypassAgentCheck, promptOps }, - agent: input.agent.name, - messages: input.messages, - metadata: (val) => - input.processor.updateToolCall(options.toolCallId, (match) => { - if (!["running", "pending"].includes(match.state.status)) return match - return { - ...match, - state: { - title: val.title, - metadata: val.metadata, - status: "running", - input: args, - time: { start: Date.now() }, - }, - } - }), - ask: (req) => - permission - .ask({ - ...req, - sessionID: input.session.id, - tool: { messageID: input.processor.message.id, callID: options.toolCallId }, - ruleset: Permission.merge(input.agent.permission, input.session.permission ?? 
[]), - }) - .pipe(Effect.orDie), - }) - - for (const item of yield* registry.tools({ - modelID: ModelID.make(input.model.api.id), - providerID: input.model.providerID, - agent: input.agent, - })) { - const schema = ProviderTransform.schema(input.model, z.toJSONSchema(item.parameters)) - tools[item.id] = tool({ - description: item.description, - inputSchema: jsonSchema(schema), - execute(args, options) { - return run.promise( - Effect.gen(function* () { - const ctx = context(args, options) - yield* plugin.trigger( - "tool.execute.before", - { tool: item.id, sessionID: ctx.sessionID, callID: ctx.callID }, - { args }, - ) - const result = yield* item.execute(args, ctx) - const output = { - ...result, - attachments: result.attachments?.map((attachment) => ({ - ...attachment, - id: PartID.ascending(), - sessionID: ctx.sessionID, - messageID: input.processor.message.id, - })), - } - yield* plugin.trigger( - "tool.execute.after", - { tool: item.id, sessionID: ctx.sessionID, callID: ctx.callID, args }, - output, - ) - if (options.abortSignal?.aborted) { - yield* input.processor.completeToolCall(options.toolCallId, output) - } - return output - }), - ) - }, - }) - } - - for (const [key, item] of Object.entries(yield* mcp.tools())) { - const execute = item.execute - if (!execute) continue - - const schema = yield* Effect.promise(() => Promise.resolve(asSchema(item.inputSchema).jsonSchema)) - const transformed = ProviderTransform.schema(input.model, schema) - item.inputSchema = jsonSchema(transformed) - item.execute = (args, opts) => - run.promise( + for (const item of yield* registry.tools({ + modelID: ModelID.make(input.model.api.id), + providerID: input.model.providerID, + agent: input.agent, + })) { + const schema = ProviderTransform.schema(input.model, z.toJSONSchema(item.parameters)) + tools[item.id] = tool({ + description: item.description, + inputSchema: jsonSchema(schema), + execute(args, options) { + return run.promise( Effect.gen(function* () { - const ctx = 
context(args, opts) + const ctx = context(args, options) yield* plugin.trigger( "tool.execute.before", - { tool: key, sessionID: ctx.sessionID, callID: opts.toolCallId }, + { tool: item.id, sessionID: ctx.sessionID, callID: ctx.callID }, { args }, ) - yield* ctx.ask({ permission: key, metadata: {}, patterns: ["*"], always: ["*"] }) - const result: Awaited>> = yield* Effect.promise(() => - execute(args, opts), - ) - yield* plugin.trigger( - "tool.execute.after", - { tool: key, sessionID: ctx.sessionID, callID: opts.toolCallId, args }, - result, - ) - - const textParts: string[] = [] - const attachments: Omit[] = [] - for (const contentItem of result.content) { - if (contentItem.type === "text") textParts.push(contentItem.text) - else if (contentItem.type === "image") { - attachments.push({ - type: "file", - mime: contentItem.mimeType, - url: `data:${contentItem.mimeType};base64,${contentItem.data}`, - }) - } else if (contentItem.type === "resource") { - const { resource } = contentItem - if (resource.text) textParts.push(resource.text) - if (resource.blob) { - attachments.push({ - type: "file", - mime: resource.mimeType ?? "application/octet-stream", - url: `data:${resource.mimeType ?? 
"application/octet-stream"};base64,${resource.blob}`, - filename: resource.uri, - }) - } - } - } - - const truncated = yield* truncate.output(textParts.join("\n\n"), {}, input.agent) - const metadata = { - ...result.metadata, - truncated: truncated.truncated, - ...(truncated.truncated && { outputPath: truncated.outputPath }), - } - + const result = yield* item.execute(args, ctx) const output = { - title: "", - metadata, - output: truncated.content, - attachments: attachments.map((attachment) => ({ + ...result, + attachments: result.attachments?.map((attachment) => ({ ...attachment, id: PartID.ascending(), sessionID: ctx.sessionID, messageID: input.processor.message.id, })), - content: result.content, } - if (opts.abortSignal?.aborted) { - yield* input.processor.completeToolCall(opts.toolCallId, output) + yield* plugin.trigger( + "tool.execute.after", + { tool: item.id, sessionID: ctx.sessionID, callID: ctx.callID, args }, + output, + ) + if (options.abortSignal?.aborted) { + yield* input.processor.completeToolCall(options.toolCallId, output) } return output }), ) - tools[key] = item - } - - return tools - }) - - const handleSubtask = Effect.fn("SessionPrompt.handleSubtask")(function* (input: { - task: MessageV2.SubtaskPart - model: Provider.Model - lastUser: MessageV2.User - sessionID: SessionID - session: Session.Info - msgs: MessageV2.WithParts[] - }) { - const { task, model, lastUser, sessionID, session, msgs } = input - const ctx = yield* InstanceState.context - const promptOps = yield* ops() - const { task: taskTool } = yield* registry.named() - const taskModel = task.model ? 
yield* getModel(task.model.providerID, task.model.modelID, sessionID) : model - const assistantMessage: MessageV2.Assistant = yield* sessions.updateMessage({ - id: MessageID.ascending(), - role: "assistant", - parentID: lastUser.id, - sessionID, - mode: task.agent, - agent: task.agent, - variant: lastUser.model.variant, - path: { cwd: ctx.directory, root: ctx.worktree }, - cost: 0, - tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, - modelID: taskModel.id, - providerID: taskModel.providerID, - time: { created: Date.now() }, - }) - let part: MessageV2.ToolPart = yield* sessions.updatePart({ - id: PartID.ascending(), - messageID: assistantMessage.id, - sessionID: assistantMessage.sessionID, - type: "tool", - callID: ulid(), - tool: TaskTool.id, - state: { - status: "running", - input: { - prompt: task.prompt, - description: task.description, - subagent_type: task.agent, - command: task.command, - }, - time: { start: Date.now() }, }, }) - const taskArgs = { - prompt: task.prompt, - description: task.description, - subagent_type: task.agent, - command: task.command, - } - yield* plugin.trigger( - "tool.execute.before", - { tool: TaskTool.id, sessionID, callID: part.id }, - { args: taskArgs }, - ) + } - const taskAgent = yield* agents.get(task.agent) - if (!taskAgent) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? 
` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${task.agent}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID, error: error.toObject() }) - throw error - } + for (const [key, item] of Object.entries(yield* mcp.tools())) { + const execute = item.execute + if (!execute) continue - let error: Error | undefined - const taskAbort = new AbortController() - const result = yield* taskTool - .execute(taskArgs, { - agent: task.agent, - messageID: assistantMessage.id, - sessionID, - abort: taskAbort.signal, - callID: part.callID, - extra: { bypassAgentCheck: true, promptOps }, - messages: msgs, - metadata: (val: { title?: string; metadata?: Record }) => - Effect.gen(function* () { - part = yield* sessions.updatePart({ - ...part, - type: "tool", - state: { ...part.state, ...val }, - } satisfies MessageV2.ToolPart) - }), - ask: (req: any) => - permission - .ask({ - ...req, - sessionID, - ruleset: Permission.merge(taskAgent.permission, session.permission ?? []), - }) - .pipe(Effect.orDie), - }) - .pipe( - Effect.catchCause((cause) => { - const defect = Cause.squash(cause) - error = defect instanceof Error ? 
defect : new Error(String(defect)) - log.error("subtask execution failed", { error, agent: task.agent, description: task.description }) - return Effect.void - }), - Effect.onInterrupt(() => - Effect.gen(function* () { - taskAbort.abort() - assistantMessage.finish = "tool-calls" - assistantMessage.time.completed = Date.now() - yield* sessions.updateMessage(assistantMessage) - if (part.state.status === "running") { - yield* sessions.updatePart({ - ...part, - state: { - status: "error", - error: "Cancelled", - time: { start: part.state.time.start, end: Date.now() }, - metadata: part.state.metadata, - input: part.state.input, - }, - } satisfies MessageV2.ToolPart) + const schema = yield* Effect.promise(() => Promise.resolve(asSchema(item.inputSchema).jsonSchema)) + const transformed = ProviderTransform.schema(input.model, schema) + item.inputSchema = jsonSchema(transformed) + item.execute = (args, opts) => + run.promise( + Effect.gen(function* () { + const ctx = context(args, opts) + yield* plugin.trigger( + "tool.execute.before", + { tool: key, sessionID: ctx.sessionID, callID: opts.toolCallId }, + { args }, + ) + yield* ctx.ask({ permission: key, metadata: {}, patterns: ["*"], always: ["*"] }) + const result: Awaited>> = yield* Effect.promise(() => + execute(args, opts), + ) + yield* plugin.trigger( + "tool.execute.after", + { tool: key, sessionID: ctx.sessionID, callID: opts.toolCallId, args }, + result, + ) + + const textParts: string[] = [] + const attachments: Omit[] = [] + for (const contentItem of result.content) { + if (contentItem.type === "text") textParts.push(contentItem.text) + else if (contentItem.type === "image") { + attachments.push({ + type: "file", + mime: contentItem.mimeType, + url: `data:${contentItem.mimeType};base64,${contentItem.data}`, + }) + } else if (contentItem.type === "resource") { + const { resource } = contentItem + if (resource.text) textParts.push(resource.text) + if (resource.blob) { + attachments.push({ + type: "file", + mime: 
resource.mimeType ?? "application/octet-stream", + url: `data:${resource.mimeType ?? "application/octet-stream"};base64,${resource.blob}`, + filename: resource.uri, + }) + } } - }), - ), + } + + const truncated = yield* truncate.output(textParts.join("\n\n"), {}, input.agent) + const metadata = { + ...result.metadata, + truncated: truncated.truncated, + ...(truncated.truncated && { outputPath: truncated.outputPath }), + } + + const output = { + title: "", + metadata, + output: truncated.content, + attachments: attachments.map((attachment) => ({ + ...attachment, + id: PartID.ascending(), + sessionID: ctx.sessionID, + messageID: input.processor.message.id, + })), + content: result.content, + } + if (opts.abortSignal?.aborted) { + yield* input.processor.completeToolCall(opts.toolCallId, output) + } + return output + }), ) + tools[key] = item + } - const attachments = result?.attachments?.map((attachment) => ({ - ...attachment, - id: PartID.ascending(), - sessionID, + return tools + }) + + const handleSubtask = Effect.fn("SessionPrompt.handleSubtask")(function* (input: { + task: MessageV2.SubtaskPart + model: Provider.Model + lastUser: MessageV2.User + sessionID: SessionID + session: Session.Info + msgs: MessageV2.WithParts[] + }) { + const { task, model, lastUser, sessionID, session, msgs } = input + const ctx = yield* InstanceState.context + const promptOps = yield* ops() + const { task: taskTool } = yield* registry.named() + const taskModel = task.model ? 
yield* getModel(task.model.providerID, task.model.modelID, sessionID) : model + const assistantMessage: MessageV2.Assistant = yield* sessions.updateMessage({ + id: MessageID.ascending(), + role: "assistant", + parentID: lastUser.id, + sessionID, + mode: task.agent, + agent: task.agent, + variant: lastUser.model.variant, + path: { cwd: ctx.directory, root: ctx.worktree }, + cost: 0, + tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, + modelID: taskModel.id, + providerID: taskModel.providerID, + time: { created: Date.now() }, + }) + let part: MessageV2.ToolPart = yield* sessions.updatePart({ + id: PartID.ascending(), + messageID: assistantMessage.id, + sessionID: assistantMessage.sessionID, + type: "tool", + callID: ulid(), + tool: TaskTool.id, + state: { + status: "running", + input: { + prompt: task.prompt, + description: task.description, + subagent_type: task.agent, + command: task.command, + }, + time: { start: Date.now() }, + }, + }) + const taskArgs = { + prompt: task.prompt, + description: task.description, + subagent_type: task.agent, + command: task.command, + } + yield* plugin.trigger( + "tool.execute.before", + { tool: TaskTool.id, sessionID, callID: part.id }, + { args: taskArgs }, + ) + + const taskAgent = yield* agents.get(task.agent) + if (!taskAgent) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? 
` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${task.agent}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID, error: error.toObject() }) + throw error + } + + let error: Error | undefined + const taskAbort = new AbortController() + const result = yield* taskTool + .execute(taskArgs, { + agent: task.agent, messageID: assistantMessage.id, - })) - - yield* plugin.trigger( - "tool.execute.after", - { tool: TaskTool.id, sessionID, callID: part.id, args: taskArgs }, - result, + sessionID, + abort: taskAbort.signal, + callID: part.callID, + extra: { bypassAgentCheck: true, promptOps }, + messages: msgs, + metadata: (val: { title?: string; metadata?: Record }) => + Effect.gen(function* () { + part = yield* sessions.updatePart({ + ...part, + type: "tool", + state: { ...part.state, ...val }, + } satisfies MessageV2.ToolPart) + }), + ask: (req: any) => + permission + .ask({ + ...req, + sessionID, + ruleset: Permission.merge(taskAgent.permission, session.permission ?? []), + }) + .pipe(Effect.orDie), + }) + .pipe( + Effect.catchCause((cause) => { + const defect = Cause.squash(cause) + error = defect instanceof Error ? 
defect : new Error(String(defect)) + log.error("subtask execution failed", { error, agent: task.agent, description: task.description }) + return Effect.void + }), + Effect.onInterrupt(() => + Effect.gen(function* () { + taskAbort.abort() + assistantMessage.finish = "tool-calls" + assistantMessage.time.completed = Date.now() + yield* sessions.updateMessage(assistantMessage) + if (part.state.status === "running") { + yield* sessions.updatePart({ + ...part, + state: { + status: "error", + error: "Cancelled", + time: { start: part.state.time.start, end: Date.now() }, + metadata: part.state.metadata, + input: part.state.input, + }, + } satisfies MessageV2.ToolPart) + } + }), + ), ) - assistantMessage.finish = "tool-calls" - assistantMessage.time.completed = Date.now() - yield* sessions.updateMessage(assistantMessage) + const attachments = result?.attachments?.map((attachment) => ({ + ...attachment, + id: PartID.ascending(), + sessionID, + messageID: assistantMessage.id, + })) - if (result && part.state.status === "running") { - yield* sessions.updatePart({ - ...part, - state: { - status: "completed", - input: part.state.input, - title: result.title, - metadata: result.metadata, - output: result.output, - attachments, - time: { ...part.state.time, end: Date.now() }, - }, - } satisfies MessageV2.ToolPart) - } + yield* plugin.trigger( + "tool.execute.after", + { tool: TaskTool.id, sessionID, callID: part.id, args: taskArgs }, + result, + ) - if (!result) { - yield* sessions.updatePart({ - ...part, - state: { - status: "error", - error: error ? `Tool execution failed: ${error.message}` : "Tool execution failed", - time: { - start: part.state.status === "running" ? part.state.time.start : Date.now(), - end: Date.now(), - }, - metadata: part.state.status === "pending" ? 
undefined : part.state.metadata, - input: part.state.input, - }, - } satisfies MessageV2.ToolPart) - } + assistantMessage.finish = "tool-calls" + assistantMessage.time.completed = Date.now() + yield* sessions.updateMessage(assistantMessage) - if (!task.command) return - - const summaryUserMsg: MessageV2.User = { - id: MessageID.ascending(), - sessionID, - role: "user", - time: { created: Date.now() }, - agent: lastUser.agent, - model: lastUser.model, - } - yield* sessions.updateMessage(summaryUserMsg) + if (result && part.state.status === "running") { yield* sessions.updatePart({ - id: PartID.ascending(), - messageID: summaryUserMsg.id, - sessionID, - type: "text", - text: "Summarize the task tool output above and continue with your task.", - synthetic: true, - } satisfies MessageV2.TextPart) + ...part, + state: { + status: "completed", + input: part.state.input, + title: result.title, + metadata: result.metadata, + output: result.output, + attachments, + time: { ...part.state.time, end: Date.now() }, + }, + } satisfies MessageV2.ToolPart) + } + + if (!result) { + yield* sessions.updatePart({ + ...part, + state: { + status: "error", + error: error ? `Tool execution failed: ${error.message}` : "Tool execution failed", + time: { + start: part.state.status === "running" ? part.state.time.start : Date.now(), + end: Date.now(), + }, + metadata: part.state.status === "pending" ? 
undefined : part.state.metadata, + input: part.state.input, + }, + } satisfies MessageV2.ToolPart) + } + + if (!task.command) return + + const summaryUserMsg: MessageV2.User = { + id: MessageID.ascending(), + sessionID, + role: "user", + time: { created: Date.now() }, + agent: lastUser.agent, + model: lastUser.model, + } + yield* sessions.updateMessage(summaryUserMsg) + yield* sessions.updatePart({ + id: PartID.ascending(), + messageID: summaryUserMsg.id, + sessionID, + type: "text", + text: "Summarize the task tool output above and continue with your task.", + synthetic: true, + } satisfies MessageV2.TextPart) + }) + + const shellImpl = Effect.fn("SessionPrompt.shellImpl")(function* (input: ShellInput) { + const ctx = yield* InstanceState.context + const run = yield* runner() + const session = yield* sessions.get(input.sessionID) + if (session.revert) { + yield* revert.cleanup(session) + } + const agent = yield* agents.get(input.agent) + if (!agent) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${input.agent}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) + throw error + } + const model = input.model ?? agent.model ?? (yield* lastModel(input.sessionID)) + const userMsg: MessageV2.User = { + id: input.messageID ?? 
MessageID.ascending(), + sessionID: input.sessionID, + time: { created: Date.now() }, + role: "user", + agent: input.agent, + model: { providerID: model.providerID, modelID: model.modelID }, + } + yield* sessions.updateMessage(userMsg) + const userPart: MessageV2.Part = { + type: "text", + id: PartID.ascending(), + messageID: userMsg.id, + sessionID: input.sessionID, + text: "The following tool was executed by the user", + synthetic: true, + } + yield* sessions.updatePart(userPart) + + const msg: MessageV2.Assistant = { + id: MessageID.ascending(), + sessionID: input.sessionID, + parentID: userMsg.id, + mode: input.agent, + agent: input.agent, + cost: 0, + path: { cwd: ctx.directory, root: ctx.worktree }, + time: { created: Date.now() }, + role: "assistant", + tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, + modelID: model.modelID, + providerID: model.providerID, + } + yield* sessions.updateMessage(msg) + const part: MessageV2.ToolPart = { + type: "tool", + id: PartID.ascending(), + messageID: msg.id, + sessionID: input.sessionID, + tool: "bash", + callID: ulid(), + state: { + status: "running", + time: { start: Date.now() }, + input: { command: input.command }, + }, + } + yield* sessions.updatePart(part) + + const sh = Shell.preferred() + const shellName = ( + process.platform === "win32" ? 
path.win32.basename(sh, ".exe") : path.basename(sh) + ).toLowerCase() + const invocations: Record = { + nu: { args: ["-c", input.command] }, + fish: { args: ["-c", input.command] }, + zsh: { + args: [ + "-l", + "-c", + ` + __oc_cwd=$PWD + [[ -f ~/.zshenv ]] && source ~/.zshenv >/dev/null 2>&1 || true + [[ -f "\${ZDOTDIR:-$HOME}/.zshrc" ]] && source "\${ZDOTDIR:-$HOME}/.zshrc" >/dev/null 2>&1 || true + cd "$__oc_cwd" + eval ${JSON.stringify(input.command)} + `, + ], + }, + bash: { + args: [ + "-l", + "-c", + ` + __oc_cwd=$PWD + shopt -s expand_aliases + [[ -f ~/.bashrc ]] && source ~/.bashrc >/dev/null 2>&1 || true + cd "$__oc_cwd" + eval ${JSON.stringify(input.command)} + `, + ], + }, + cmd: { args: ["/c", input.command] }, + powershell: { args: ["-NoProfile", "-Command", input.command] }, + pwsh: { args: ["-NoProfile", "-Command", input.command] }, + "": { args: ["-c", input.command] }, + } + + const args = (invocations[shellName] ?? invocations[""]).args + const cwd = ctx.directory + const shellEnv = yield* plugin.trigger( + "shell.env", + { cwd, sessionID: input.sessionID, callID: part.callID }, + { env: {} }, + ) + + const cmd = ChildProcess.make(sh, args, { + cwd, + extendEnv: true, + env: { ...shellEnv.env, TERM: "dumb" }, + stdin: "ignore", + forceKillAfter: "3 seconds", }) - const shellImpl = Effect.fn("SessionPrompt.shellImpl")(function* (input: ShellInput) { - const ctx = yield* InstanceState.context - const run = yield* runner() - const session = yield* sessions.get(input.sessionID) - if (session.revert) { - yield* revert.cleanup(session) - } - const agent = yield* agents.get(input.agent) - if (!agent) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? 
` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${input.agent}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) - throw error - } - const model = input.model ?? agent.model ?? (yield* lastModel(input.sessionID)) - const userMsg: MessageV2.User = { - id: input.messageID ?? MessageID.ascending(), - sessionID: input.sessionID, - time: { created: Date.now() }, - role: "user", - agent: input.agent, - model: { providerID: model.providerID, modelID: model.modelID }, - } - yield* sessions.updateMessage(userMsg) - const userPart: MessageV2.Part = { - type: "text", - id: PartID.ascending(), - messageID: userMsg.id, - sessionID: input.sessionID, - text: "The following tool was executed by the user", - synthetic: true, - } - yield* sessions.updatePart(userPart) + let output = "" + let aborted = false - const msg: MessageV2.Assistant = { - id: MessageID.ascending(), - sessionID: input.sessionID, - parentID: userMsg.id, - mode: input.agent, - agent: input.agent, - cost: 0, - path: { cwd: ctx.directory, root: ctx.worktree }, - time: { created: Date.now() }, - role: "assistant", - tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, - modelID: model.modelID, - providerID: model.providerID, - } - yield* sessions.updateMessage(msg) - const part: MessageV2.ToolPart = { - type: "tool", - id: PartID.ascending(), - messageID: msg.id, - sessionID: input.sessionID, - tool: "bash", - callID: ulid(), - state: { - status: "running", - time: { start: Date.now() }, - input: { command: input.command }, - }, - } - yield* sessions.updatePart(part) - - const sh = Shell.preferred() - const shellName = ( - process.platform === "win32" ? 
path.win32.basename(sh, ".exe") : path.basename(sh) - ).toLowerCase() - const invocations: Record = { - nu: { args: ["-c", input.command] }, - fish: { args: ["-c", input.command] }, - zsh: { - args: [ - "-l", - "-c", - ` - __oc_cwd=$PWD - [[ -f ~/.zshenv ]] && source ~/.zshenv >/dev/null 2>&1 || true - [[ -f "\${ZDOTDIR:-$HOME}/.zshrc" ]] && source "\${ZDOTDIR:-$HOME}/.zshrc" >/dev/null 2>&1 || true - cd "$__oc_cwd" - eval ${JSON.stringify(input.command)} - `, - ], - }, - bash: { - args: [ - "-l", - "-c", - ` - __oc_cwd=$PWD - shopt -s expand_aliases - [[ -f ~/.bashrc ]] && source ~/.bashrc >/dev/null 2>&1 || true - cd "$__oc_cwd" - eval ${JSON.stringify(input.command)} - `, - ], - }, - cmd: { args: ["/c", input.command] }, - powershell: { args: ["-NoProfile", "-Command", input.command] }, - pwsh: { args: ["-NoProfile", "-Command", input.command] }, - "": { args: ["-c", input.command] }, - } - - const args = (invocations[shellName] ?? invocations[""]).args - const cwd = ctx.directory - const shellEnv = yield* plugin.trigger( - "shell.env", - { cwd, sessionID: input.sessionID, callID: part.callID }, - { env: {} }, - ) - - const cmd = ChildProcess.make(sh, args, { - cwd, - extendEnv: true, - env: { ...shellEnv.env, TERM: "dumb" }, - stdin: "ignore", - forceKillAfter: "3 seconds", - }) - - let output = "" - let aborted = false - - const finish = Effect.uninterruptible( - Effect.gen(function* () { - if (aborted) { - output += "\n\n" + ["", "User aborted the command", ""].join("\n") - } - if (!msg.time.completed) { - msg.time.completed = Date.now() - yield* sessions.updateMessage(msg) + const finish = Effect.uninterruptible( + Effect.gen(function* () { + if (aborted) { + output += "\n\n" + ["", "User aborted the command", ""].join("\n") + } + if (!msg.time.completed) { + msg.time.completed = Date.now() + yield* sessions.updateMessage(msg) + } + if (part.state.status === "running") { + part.state = { + status: "completed", + time: { ...part.state.time, end: Date.now() }, 
+ input: part.state.input, + title: "", + metadata: { output, description: "" }, + output, } + yield* sessions.updatePart(part) + } + }), + ) + + const exit = yield* Effect.gen(function* () { + const handle = yield* spawner.spawn(cmd) + yield* Stream.runForEach(Stream.decodeText(handle.all), (chunk) => + Effect.sync(() => { + output += chunk if (part.state.status === "running") { - part.state = { - status: "completed", - time: { ...part.state.time, end: Date.now() }, - input: part.state.input, - title: "", - metadata: { output, description: "" }, - output, - } - yield* sessions.updatePart(part) + part.state.metadata = { output, description: "" } + void run.fork(sessions.updatePart(part)) } }), ) + yield* handle.exitCode + }).pipe( + Effect.scoped, + Effect.onInterrupt(() => + Effect.sync(() => { + aborted = true + }), + ), + Effect.orDie, + Effect.ensuring(finish), + Effect.exit, + ) - const exit = yield* Effect.gen(function* () { - const handle = yield* spawner.spawn(cmd) - yield* Stream.runForEach(Stream.decodeText(handle.all), (chunk) => - Effect.sync(() => { - output += chunk - if (part.state.status === "running") { - part.state.metadata = { output, description: "" } - void run.fork(sessions.updatePart(part)) - } - }), - ) - yield* handle.exitCode - }).pipe( - Effect.scoped, - Effect.onInterrupt(() => - Effect.sync(() => { - aborted = true - }), - ), - Effect.orDie, - Effect.ensuring(finish), - Effect.exit, - ) - - if (Exit.isFailure(exit) && !Cause.hasInterruptsOnly(exit.cause)) { - return yield* Effect.failCause(exit.cause) - } - - return { info: msg, parts: [part] } - }) - - const getModel = Effect.fn("SessionPrompt.getModel")(function* ( - providerID: ProviderID, - modelID: ModelID, - sessionID: SessionID, - ) { - const exit = yield* provider.getModel(providerID, modelID).pipe(Effect.exit) - if (Exit.isSuccess(exit)) return exit.value - const err = Cause.squash(exit.cause) - if (Provider.ModelNotFoundError.isInstance(err)) { - const hint = 
err.data.suggestions?.length ? ` Did you mean: ${err.data.suggestions.join(", ")}?` : "" - yield* bus.publish(Session.Event.Error, { - sessionID, - error: new NamedError.Unknown({ - message: `Model not found: ${err.data.providerID}/${err.data.modelID}.${hint}`, - }).toObject(), - }) - } + if (Exit.isFailure(exit) && !Cause.hasInterruptsOnly(exit.cause)) { return yield* Effect.failCause(exit.cause) - }) + } - const lastModel = Effect.fnUntraced(function* (sessionID: SessionID) { - const match = yield* sessions.findMessage(sessionID, (m) => m.info.role === "user" && !!m.info.model) - if (Option.isSome(match) && match.value.info.role === "user") return match.value.info.model - return yield* provider.defaultModel() - }) + return { info: msg, parts: [part] } + }) - const createUserMessage = Effect.fn("SessionPrompt.createUserMessage")(function* (input: PromptInput) { - const agentName = input.agent || (yield* agents.defaultAgent()) - const ag = yield* agents.get(agentName) - if (!ag) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${agentName}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) - throw error - } - - const model = input.model ?? ag.model ?? (yield* lastModel(input.sessionID)) - const same = ag.model && model.providerID === ag.model.providerID && model.modelID === ag.model.modelID - const full = - !input.variant && ag.variant && same - ? yield* provider.getModel(model.providerID, model.modelID).pipe(Effect.catchDefect(() => Effect.void)) - : undefined - const variant = input.variant ?? (ag.variant && full?.variants?.[ag.variant] ? ag.variant : undefined) - - const info: MessageV2.User = { - id: input.messageID ?? 
MessageID.ascending(), - role: "user", - sessionID: input.sessionID, - time: { created: Date.now() }, - tools: input.tools, - agent: ag.name, - model: { - providerID: model.providerID, - modelID: model.modelID, - variant, - }, - system: input.system, - format: input.format, - } - - yield* Effect.addFinalizer(() => instruction.clear(info.id)) - - type Draft = T extends MessageV2.Part ? Omit & { id?: string } : never - const assign = (part: Draft): MessageV2.Part => ({ - ...part, - id: part.id ? PartID.make(part.id) : PartID.ascending(), + const getModel = Effect.fn("SessionPrompt.getModel")(function* ( + providerID: ProviderID, + modelID: ModelID, + sessionID: SessionID, + ) { + const exit = yield* provider.getModel(providerID, modelID).pipe(Effect.exit) + if (Exit.isSuccess(exit)) return exit.value + const err = Cause.squash(exit.cause) + if (Provider.ModelNotFoundError.isInstance(err)) { + const hint = err.data.suggestions?.length ? ` Did you mean: ${err.data.suggestions.join(", ")}?` : "" + yield* bus.publish(Session.Event.Error, { + sessionID, + error: new NamedError.Unknown({ + message: `Model not found: ${err.data.providerID}/${err.data.modelID}.${hint}`, + }).toObject(), }) + } + return yield* Effect.failCause(exit.cause) + }) - const resolvePart: (part: PromptInput["parts"][number]) => Effect.Effect[]> = Effect.fn( - "SessionPrompt.resolveUserPart", - )(function* (part) { - if (part.type === "file") { - if (part.source?.type === "resource") { - const { clientName, uri } = part.source - log.info("mcp resource", { clientName, uri, mime: part.mime }) - const pieces: Draft[] = [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Reading MCP resource: ${part.filename} (${uri})`, - }, - ] - const exit = yield* mcp.readResource(clientName, uri).pipe(Effect.exit) - if (Exit.isSuccess(exit)) { - const content = exit.value - if (!content) throw new Error(`Resource not found: ${clientName}/${uri}`) - const items = 
Array.isArray(content.contents) ? content.contents : [content.contents] - for (const c of items) { - if ("text" in c && c.text) { - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: c.text, - }) - } else if ("blob" in c && c.blob) { - const mime = "mimeType" in c ? c.mimeType : part.mime - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `[Binary content: ${mime}]`, - }) - } + const lastModel = Effect.fnUntraced(function* (sessionID: SessionID) { + const match = yield* sessions.findMessage(sessionID, (m) => m.info.role === "user" && !!m.info.model) + if (Option.isSome(match) && match.value.info.role === "user") return match.value.info.model + return yield* provider.defaultModel() + }) + + const createUserMessage = Effect.fn("SessionPrompt.createUserMessage")(function* (input: PromptInput) { + const agentName = input.agent || (yield* agents.defaultAgent()) + const ag = yield* agents.get(agentName) + if (!ag) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${agentName}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) + throw error + } + + const model = input.model ?? ag.model ?? (yield* lastModel(input.sessionID)) + const same = ag.model && model.providerID === ag.model.providerID && model.modelID === ag.model.modelID + const full = + !input.variant && ag.variant && same + ? yield* provider.getModel(model.providerID, model.modelID).pipe(Effect.catchDefect(() => Effect.void)) + : undefined + const variant = input.variant ?? (ag.variant && full?.variants?.[ag.variant] ? ag.variant : undefined) + + const info: MessageV2.User = { + id: input.messageID ?? 
MessageID.ascending(), + role: "user", + sessionID: input.sessionID, + time: { created: Date.now() }, + tools: input.tools, + agent: ag.name, + model: { + providerID: model.providerID, + modelID: model.modelID, + variant, + }, + system: input.system, + format: input.format, + } + + yield* Effect.addFinalizer(() => instruction.clear(info.id)) + + type Draft = T extends MessageV2.Part ? Omit & { id?: string } : never + const assign = (part: Draft): MessageV2.Part => ({ + ...part, + id: part.id ? PartID.make(part.id) : PartID.ascending(), + }) + + const resolvePart: (part: PromptInput["parts"][number]) => Effect.Effect[]> = Effect.fn( + "SessionPrompt.resolveUserPart", + )(function* (part) { + if (part.type === "file") { + if (part.source?.type === "resource") { + const { clientName, uri } = part.source + log.info("mcp resource", { clientName, uri, mime: part.mime }) + const pieces: Draft[] = [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Reading MCP resource: ${part.filename} (${uri})`, + }, + ] + const exit = yield* mcp.readResource(clientName, uri).pipe(Effect.exit) + if (Exit.isSuccess(exit)) { + const content = exit.value + if (!content) throw new Error(`Resource not found: ${clientName}/${uri}`) + const items = Array.isArray(content.contents) ? content.contents : [content.contents] + for (const c of items) { + if ("text" in c && c.text) { + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: c.text, + }) + } else if ("blob" in c && c.blob) { + const mime = "mimeType" in c ? 
c.mimeType : part.mime + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `[Binary content: ${mime}]`, + }) } - pieces.push({ ...part, messageID: info.id, sessionID: input.sessionID }) - } else { - const error = Cause.squash(exit.cause) - log.error("failed to read MCP resource", { error, clientName, uri }) - const message = error instanceof Error ? error.message : String(error) - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Failed to read MCP resource ${part.filename}: ${message}`, - }) } - return pieces + pieces.push({ ...part, messageID: info.id, sessionID: input.sessionID }) + } else { + const error = Cause.squash(exit.cause) + log.error("failed to read MCP resource", { error, clientName, uri }) + const message = error instanceof Error ? error.message : String(error) + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Failed to read MCP resource ${part.filename}: ${message}`, + }) } - const url = new URL(part.url) - switch (url.protocol) { - case "data:": - if (part.mime === "text/plain") { - return [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Called the Read tool with the following input: ${JSON.stringify({ filePath: part.filename })}`, - }, - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: decodeDataUrl(part.url), - }, - { ...part, messageID: info.id, sessionID: input.sessionID }, - ] - } - break - case "file:": { - log.info("file", { mime: part.mime }) - const filepath = fileURLToPath(part.url) - if (yield* fsys.isDir(filepath)) part.mime = "application/x-directory" - - const { read } = yield* registry.named() - const execRead = (args: Parameters[0], extra?: Tool.Context["extra"]) => { - const controller = new AbortController() - return read - .execute(args, { - sessionID: 
input.sessionID, - abort: controller.signal, - agent: input.agent!, - messageID: info.id, - extra: { bypassCwdCheck: true, ...extra }, - messages: [], - metadata: () => Effect.void, - ask: () => Effect.void, - }) - .pipe(Effect.onInterrupt(() => Effect.sync(() => controller.abort()))) - } - - if (part.mime === "text/plain") { - let offset: number | undefined - let limit: number | undefined - const range = { start: url.searchParams.get("start"), end: url.searchParams.get("end") } - if (range.start != null) { - const filePathURI = part.url.split("?")[0] - let start = parseInt(range.start) - let end = range.end ? parseInt(range.end) : undefined - if (start === end) { - const symbols = yield* lsp - .documentSymbol(filePathURI) - .pipe(Effect.catch(() => Effect.succeed([]))) - for (const symbol of symbols) { - let r: LSP.Range | undefined - if ("range" in symbol) r = symbol.range - else if ("location" in symbol) r = symbol.location.range - if (r?.start?.line && r?.start?.line === start) { - start = r.start.line - end = r?.end?.line ?? start - break - } - } - } - offset = Math.max(start, 1) - if (end) limit = end - (offset - 1) - } - const args = { filePath: filepath, offset, limit } - const pieces: Draft[] = [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Called the Read tool with the following input: ${JSON.stringify(args)}`, - }, - ] - const exit = yield* provider.getModel(info.model.providerID, info.model.modelID).pipe( - Effect.flatMap((mdl) => execRead(args, { model: mdl })), - Effect.exit, - ) - if (Exit.isSuccess(exit)) { - const result = exit.value - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: result.output, - }) - if (result.attachments?.length) { - pieces.push( - ...result.attachments.map((a) => ({ - ...a, - synthetic: true, - filename: a.filename ?? 
part.filename, - messageID: info.id, - sessionID: input.sessionID, - })), - ) - } else { - pieces.push({ ...part, messageID: info.id, sessionID: input.sessionID }) - } - } else { - const error = Cause.squash(exit.cause) - log.error("failed to read file", { error }) - const message = error instanceof Error ? error.message : String(error) - yield* bus.publish(Session.Event.Error, { - sessionID: input.sessionID, - error: new NamedError.Unknown({ message }).toObject(), - }) - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Read tool failed to read ${filepath} with the following error: ${message}`, - }) - } - return pieces - } - - if (part.mime === "application/x-directory") { - const args = { filePath: filepath } - const exit = yield* execRead(args).pipe(Effect.exit) - if (Exit.isFailure(exit)) { - const error = Cause.squash(exit.cause) - log.error("failed to read directory", { error }) - const message = error instanceof Error ? error.message : String(error) - yield* bus.publish(Session.Event.Error, { - sessionID: input.sessionID, - error: new NamedError.Unknown({ message }).toObject(), - }) - return [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Read tool failed to read ${filepath} with the following error: ${message}`, - }, - ] - } - return [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Called the Read tool with the following input: ${JSON.stringify(args)}`, - }, - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: exit.value.output, - }, - { ...part, messageID: info.id, sessionID: input.sessionID }, - ] - } - - yield* filetime.read(input.sessionID, filepath) + return pieces + } + const url = new URL(part.url) + switch (url.protocol) { + case "data:": + if (part.mime === "text/plain") { return [ { messageID: info.id, sessionID: input.sessionID, type: 
"text", synthetic: true, - text: `Called the Read tool with the following input: {"filePath":"${filepath}"}`, + text: `Called the Read tool with the following input: ${JSON.stringify({ filePath: part.filename })}`, }, { - id: part.id, messageID: info.id, sessionID: input.sessionID, - type: "file", - url: - `data:${part.mime};base64,` + - Buffer.from(yield* fsys.readFile(filepath).pipe(Effect.catch(Effect.die))).toString("base64"), - mime: part.mime, - filename: part.filename!, - source: part.source, + type: "text", + synthetic: true, + text: decodeDataUrl(part.url), }, + { ...part, messageID: info.id, sessionID: input.sessionID }, ] } + break + case "file:": { + log.info("file", { mime: part.mime }) + const filepath = fileURLToPath(part.url) + if (yield* fsys.isDir(filepath)) part.mime = "application/x-directory" + + const { read } = yield* registry.named() + const execRead = (args: Parameters[0], extra?: Tool.Context["extra"]) => { + const controller = new AbortController() + return read + .execute(args, { + sessionID: input.sessionID, + abort: controller.signal, + agent: input.agent!, + messageID: info.id, + extra: { bypassCwdCheck: true, ...extra }, + messages: [], + metadata: () => Effect.void, + ask: () => Effect.void, + }) + .pipe(Effect.onInterrupt(() => Effect.sync(() => controller.abort()))) + } + + if (part.mime === "text/plain") { + let offset: number | undefined + let limit: number | undefined + const range = { start: url.searchParams.get("start"), end: url.searchParams.get("end") } + if (range.start != null) { + const filePathURI = part.url.split("?")[0] + let start = parseInt(range.start) + let end = range.end ? 
parseInt(range.end) : undefined + if (start === end) { + const symbols = yield* lsp + .documentSymbol(filePathURI) + .pipe(Effect.catch(() => Effect.succeed([]))) + for (const symbol of symbols) { + let r: LSP.Range | undefined + if ("range" in symbol) r = symbol.range + else if ("location" in symbol) r = symbol.location.range + if (r?.start?.line && r?.start?.line === start) { + start = r.start.line + end = r?.end?.line ?? start + break + } + } + } + offset = Math.max(start, 1) + if (end) limit = end - (offset - 1) + } + const args = { filePath: filepath, offset, limit } + const pieces: Draft[] = [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Called the Read tool with the following input: ${JSON.stringify(args)}`, + }, + ] + const exit = yield* provider.getModel(info.model.providerID, info.model.modelID).pipe( + Effect.flatMap((mdl) => execRead(args, { model: mdl })), + Effect.exit, + ) + if (Exit.isSuccess(exit)) { + const result = exit.value + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: result.output, + }) + if (result.attachments?.length) { + pieces.push( + ...result.attachments.map((a) => ({ + ...a, + synthetic: true, + filename: a.filename ?? part.filename, + messageID: info.id, + sessionID: input.sessionID, + })), + ) + } else { + pieces.push({ ...part, messageID: info.id, sessionID: input.sessionID }) + } + } else { + const error = Cause.squash(exit.cause) + log.error("failed to read file", { error }) + const message = error instanceof Error ? 
error.message : String(error) + yield* bus.publish(Session.Event.Error, { + sessionID: input.sessionID, + error: new NamedError.Unknown({ message }).toObject(), + }) + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Read tool failed to read ${filepath} with the following error: ${message}`, + }) + } + return pieces + } + + if (part.mime === "application/x-directory") { + const args = { filePath: filepath } + const exit = yield* execRead(args).pipe(Effect.exit) + if (Exit.isFailure(exit)) { + const error = Cause.squash(exit.cause) + log.error("failed to read directory", { error }) + const message = error instanceof Error ? error.message : String(error) + yield* bus.publish(Session.Event.Error, { + sessionID: input.sessionID, + error: new NamedError.Unknown({ message }).toObject(), + }) + return [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Read tool failed to read ${filepath} with the following error: ${message}`, + }, + ] + } + return [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Called the Read tool with the following input: ${JSON.stringify(args)}`, + }, + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: exit.value.output, + }, + { ...part, messageID: info.id, sessionID: input.sessionID }, + ] + } + + yield* filetime.read(input.sessionID, filepath) + return [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Called the Read tool with the following input: {"filePath":"${filepath}"}`, + }, + { + id: part.id, + messageID: info.id, + sessionID: input.sessionID, + type: "file", + url: + `data:${part.mime};base64,` + + Buffer.from(yield* fsys.readFile(filepath).pipe(Effect.catch(Effect.die))).toString("base64"), + mime: part.mime, + filename: part.filename!, + source: part.source, + }, + ] } } - - if 
(part.type === "agent") { - const perm = Permission.evaluate("task", part.name, ag.permission) - const hint = perm.action === "deny" ? " . Invoked by user; guaranteed to exist." : "" - return [ - { ...part, messageID: info.id, sessionID: input.sessionID }, - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: - " Use the above message and context to generate a prompt and call the task tool with subagent: " + - part.name + - hint, - }, - ] - } - - return [{ ...part, messageID: info.id, sessionID: input.sessionID }] - }) - - const parts = yield* Effect.forEach(input.parts, resolvePart, { concurrency: "unbounded" }).pipe( - Effect.map((x) => x.flat().map(assign)), - ) - - yield* plugin.trigger( - "chat.message", - { - sessionID: input.sessionID, - agent: input.agent, - model: input.model, - messageID: input.messageID, - variant: input.variant, - }, - { message: info, parts }, - ) - - const parsed = MessageV2.Info.safeParse(info) - if (!parsed.success) { - log.error("invalid user message before save", { - sessionID: input.sessionID, - messageID: info.id, - agent: info.agent, - model: info.model, - issues: parsed.error.issues, - }) } - parts.forEach((part, index) => { - const p = MessageV2.Part.safeParse(part) - if (p.success) return - log.error("invalid user part before save", { - sessionID: input.sessionID, - messageID: info.id, - partID: part.id, - partType: part.type, - index, - issues: p.error.issues, - part, - }) - }) - yield* sessions.updateMessage(info) - for (const part of parts) yield* sessions.updatePart(part) + if (part.type === "agent") { + const perm = Permission.evaluate("task", part.name, ag.permission) + const hint = perm.action === "deny" ? " . Invoked by user; guaranteed to exist." 
: "" + return [ + { ...part, messageID: info.id, sessionID: input.sessionID }, + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: + " Use the above message and context to generate a prompt and call the task tool with subagent: " + + part.name + + hint, + }, + ] + } - return { info, parts } - }, Effect.scoped) - - const prompt: (input: PromptInput) => Effect.Effect = Effect.fn("SessionPrompt.prompt")( - function* (input: PromptInput) { - const session = yield* sessions.get(input.sessionID) - yield* revert.cleanup(session) - const message = yield* createUserMessage(input) - yield* sessions.touch(input.sessionID) - - const permissions: Permission.Ruleset = [] - for (const [t, enabled] of Object.entries(input.tools ?? {})) { - permissions.push({ permission: t, action: enabled ? "allow" : "deny", pattern: "*" }) - } - if (permissions.length > 0) { - session.permission = permissions - yield* sessions.setPermission({ sessionID: session.id, permission: permissions }) - } - - if (input.noReply === true) return message - return yield* loop({ sessionID: input.sessionID }) - }, - ) - - const lastAssistant = Effect.fnUntraced(function* (sessionID: SessionID) { - const match = yield* sessions.findMessage(sessionID, (m) => m.info.role !== "user") - if (Option.isSome(match)) return match.value - const msgs = yield* sessions.messages({ sessionID, limit: 1 }) - if (msgs.length > 0) return msgs[0] - throw new Error("Impossible") + return [{ ...part, messageID: info.id, sessionID: input.sessionID }] }) - const runLoop: (sessionID: SessionID) => Effect.Effect = Effect.fn("SessionPrompt.run")( - function* (sessionID: SessionID) { - const ctx = yield* InstanceState.context - const slog = elog.with({ sessionID }) - let structured: unknown | undefined - let step = 0 - const session = yield* sessions.get(sessionID) + const parts = yield* Effect.forEach(input.parts, resolvePart, { concurrency: "unbounded" }).pipe( + Effect.map((x) => 
x.flat().map(assign)), + ) - while (true) { - yield* status.set(sessionID, { type: "busy" }) - yield* slog.info("loop", { step }) + yield* plugin.trigger( + "chat.message", + { + sessionID: input.sessionID, + agent: input.agent, + model: input.model, + messageID: input.messageID, + variant: input.variant, + }, + { message: info, parts }, + ) - let msgs = yield* MessageV2.filterCompactedEffect(sessionID) + const parsed = MessageV2.Info.safeParse(info) + if (!parsed.success) { + log.error("invalid user message before save", { + sessionID: input.sessionID, + messageID: info.id, + agent: info.agent, + model: info.model, + issues: parsed.error.issues, + }) + } + parts.forEach((part, index) => { + const p = MessageV2.Part.safeParse(part) + if (p.success) return + log.error("invalid user part before save", { + sessionID: input.sessionID, + messageID: info.id, + partID: part.id, + partType: part.type, + index, + issues: p.error.issues, + part, + }) + }) - let lastUser: MessageV2.User | undefined - let lastAssistant: MessageV2.Assistant | undefined - let lastFinished: MessageV2.Assistant | undefined - let tasks: (MessageV2.CompactionPart | MessageV2.SubtaskPart)[] = [] - for (let i = msgs.length - 1; i >= 0; i--) { - const msg = msgs[i] - if (!lastUser && msg.info.role === "user") lastUser = msg.info - if (!lastAssistant && msg.info.role === "assistant") lastAssistant = msg.info - if (!lastFinished && msg.info.role === "assistant" && msg.info.finish) lastFinished = msg.info - if (lastUser && lastFinished) break - const task = msg.parts.filter((part) => part.type === "compaction" || part.type === "subtask") - if (task && !lastFinished) tasks.push(...task) - } + yield* sessions.updateMessage(info) + for (const part of parts) yield* sessions.updatePart(part) - if (!lastUser) throw new Error("No user message found in stream. 
This should never happen.") + return { info, parts } + }, Effect.scoped) - const lastAssistantMsg = msgs.findLast( - (msg) => msg.info.role === "assistant" && msg.info.id === lastAssistant?.id, - ) - // Some providers return "stop" even when the assistant message contains tool calls. - // Keep the loop running so tool results can be sent back to the model. - // Skip provider-executed tool parts — those were fully handled within the - // provider's stream (e.g. DWS Agent Platform) and don't need a re-loop. - const hasToolCalls = - lastAssistantMsg?.parts.some((part) => part.type === "tool" && !part.metadata?.providerExecuted) ?? false + const prompt: (input: PromptInput) => Effect.Effect = Effect.fn("SessionPrompt.prompt")( + function* (input: PromptInput) { + const session = yield* sessions.get(input.sessionID) + yield* revert.cleanup(session) + const message = yield* createUserMessage(input) + yield* sessions.touch(input.sessionID) - if ( - lastAssistant?.finish && - !["tool-calls"].includes(lastAssistant.finish) && - !hasToolCalls && - lastUser.id < lastAssistant.id - ) { - yield* slog.info("exiting loop") - break - } + const permissions: Permission.Ruleset = [] + for (const [t, enabled] of Object.entries(input.tools ?? {})) { + permissions.push({ permission: t, action: enabled ? 
"allow" : "deny", pattern: "*" }) + } + if (permissions.length > 0) { + session.permission = permissions + yield* sessions.setPermission({ sessionID: session.id, permission: permissions }) + } - step++ - if (step === 1) - yield* title({ - session, - modelID: lastUser.model.modelID, - providerID: lastUser.model.providerID, - history: msgs, - }).pipe(Effect.ignore, Effect.forkIn(scope)) + if (input.noReply === true) return message + return yield* loop({ sessionID: input.sessionID }) + }, + ) - const model = yield* getModel(lastUser.model.providerID, lastUser.model.modelID, sessionID) - const task = tasks.pop() + const lastAssistant = Effect.fnUntraced(function* (sessionID: SessionID) { + const match = yield* sessions.findMessage(sessionID, (m) => m.info.role !== "user") + if (Option.isSome(match)) return match.value + const msgs = yield* sessions.messages({ sessionID, limit: 1 }) + if (msgs.length > 0) return msgs[0] + throw new Error("Impossible") + }) - if (task?.type === "subtask") { - yield* handleSubtask({ task, model, lastUser, sessionID, session, msgs }) - continue - } + const runLoop: (sessionID: SessionID) => Effect.Effect = Effect.fn("SessionPrompt.run")( + function* (sessionID: SessionID) { + const ctx = yield* InstanceState.context + const slog = elog.with({ sessionID }) + let structured: unknown | undefined + let step = 0 + const session = yield* sessions.get(sessionID) - if (task?.type === "compaction") { - const result = yield* compaction.process({ - messages: msgs, - parentID: lastUser.id, - sessionID, - auto: task.auto, - overflow: task.overflow, - }) - if (result === "stop") break - continue - } + while (true) { + yield* status.set(sessionID, { type: "busy" }) + yield* slog.info("loop", { step }) - if ( - lastFinished && - lastFinished.summary !== true && - (yield* compaction.isOverflow({ tokens: lastFinished.tokens, model })) - ) { - yield* compaction.create({ sessionID, agent: lastUser.agent, model: lastUser.model, auto: true }) - continue - } + 
let msgs = yield* MessageV2.filterCompactedEffect(sessionID) - const agent = yield* agents.get(lastUser.agent) - if (!agent) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${lastUser.agent}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID, error: error.toObject() }) - throw error - } - const maxSteps = agent.steps ?? Infinity - const isLastStep = step >= maxSteps - msgs = yield* insertReminders({ messages: msgs, agent, session }) + let lastUser: MessageV2.User | undefined + let lastAssistant: MessageV2.Assistant | undefined + let lastFinished: MessageV2.Assistant | undefined + let tasks: (MessageV2.CompactionPart | MessageV2.SubtaskPart)[] = [] + for (let i = msgs.length - 1; i >= 0; i--) { + const msg = msgs[i] + if (!lastUser && msg.info.role === "user") lastUser = msg.info + if (!lastAssistant && msg.info.role === "assistant") lastAssistant = msg.info + if (!lastFinished && msg.info.role === "assistant" && msg.info.finish) lastFinished = msg.info + if (lastUser && lastFinished) break + const task = msg.parts.filter((part) => part.type === "compaction" || part.type === "subtask") + if (task && !lastFinished) tasks.push(...task) + } - const msg: MessageV2.Assistant = { - id: MessageID.ascending(), - parentID: lastUser.id, - role: "assistant", - mode: agent.name, - agent: agent.name, - variant: lastUser.model.variant, - path: { cwd: ctx.directory, root: ctx.worktree }, - cost: 0, - tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, - modelID: model.id, - providerID: model.providerID, - time: { created: Date.now() }, - sessionID, - } - yield* sessions.updateMessage(msg) - const handle = yield* processor.create({ - assistantMessage: msg, - sessionID, - model, - }) + if (!lastUser) throw new Error("No user message found in stream. 
This should never happen.") - const outcome: "break" | "continue" = yield* Effect.gen(function* () { - const lastUserMsg = msgs.findLast((m) => m.info.role === "user") - const bypassAgentCheck = lastUserMsg?.parts.some((p) => p.type === "agent") ?? false + const lastAssistantMsg = msgs.findLast( + (msg) => msg.info.role === "assistant" && msg.info.id === lastAssistant?.id, + ) + // Some providers return "stop" even when the assistant message contains tool calls. + // Keep the loop running so tool results can be sent back to the model. + // Skip provider-executed tool parts — those were fully handled within the + // provider's stream (e.g. DWS Agent Platform) and don't need a re-loop. + const hasToolCalls = + lastAssistantMsg?.parts.some((part) => part.type === "tool" && !part.metadata?.providerExecuted) ?? false - const tools = yield* resolveTools({ - agent, - session, - model, - tools: lastUser.tools, - processor: handle, - bypassAgentCheck, - messages: msgs, - }) + if ( + lastAssistant?.finish && + !["tool-calls"].includes(lastAssistant.finish) && + !hasToolCalls && + lastUser.id < lastAssistant.id + ) { + yield* slog.info("exiting loop") + break + } - if (lastUser.format?.type === "json_schema") { - tools["StructuredOutput"] = createStructuredOutputTool({ - schema: lastUser.format.schema, - onSuccess(output) { - structured = output - }, - }) - } + step++ + if (step === 1) + yield* title({ + session, + modelID: lastUser.model.modelID, + providerID: lastUser.model.providerID, + history: msgs, + }).pipe(Effect.ignore, Effect.forkIn(scope)) - if (step === 1) - yield* summary - .summarize({ sessionID, messageID: lastUser.id }) - .pipe(Effect.ignore, Effect.forkIn(scope)) + const model = yield* getModel(lastUser.model.providerID, lastUser.model.modelID, sessionID) + const task = tasks.pop() - if (step > 1 && lastFinished) { - for (const m of msgs) { - if (m.info.role !== "user" || m.info.id <= lastFinished.id) continue - for (const p of m.parts) { - if (p.type !== 
"text" || p.ignored || p.synthetic) continue - if (!p.text.trim()) continue - p.text = [ - "", - "The user sent the following message:", - p.text, - "", - "Please address this message and continue with your tasks.", - "", - ].join("\n") - } - } - } - - yield* plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs }) - - const [skills, env, instructions, modelMsgs] = yield* Effect.all([ - sys.skills(agent), - Effect.sync(() => sys.environment(model)), - instruction.system().pipe(Effect.orDie), - MessageV2.toModelMessagesEffect(msgs, model), - ]) - const system = [...env, ...(skills ? [skills] : []), ...instructions] - const format = lastUser.format ?? { type: "text" as const } - if (format.type === "json_schema") system.push(STRUCTURED_OUTPUT_SYSTEM_PROMPT) - const result = yield* handle.process({ - user: lastUser, - agent, - permission: session.permission, - sessionID, - parentSessionID: session.parentID, - system, - messages: [...modelMsgs, ...(isLastStep ? [{ role: "assistant" as const, content: MAX_STEPS }] : [])], - tools, - model, - toolChoice: format.type === "json_schema" ? "required" : undefined, - }) - - if (structured !== undefined) { - handle.message.structured = structured - handle.message.finish = handle.message.finish ?? 
"stop" - yield* sessions.updateMessage(handle.message) - return "break" as const - } - - const finished = handle.message.finish && !["tool-calls", "unknown"].includes(handle.message.finish) - if (finished && !handle.message.error) { - if (format.type === "json_schema") { - handle.message.error = new MessageV2.StructuredOutputError({ - message: "Model did not produce structured output", - retries: 0, - }).toObject() - yield* sessions.updateMessage(handle.message) - return "break" as const - } - } - - if (result === "stop") return "break" as const - if (result === "compact") { - yield* compaction.create({ - sessionID, - agent: lastUser.agent, - model: lastUser.model, - auto: true, - overflow: !handle.message.finish, - }) - } - return "continue" as const - }).pipe(Effect.ensuring(instruction.clear(handle.message.id))) - if (outcome === "break") break + if (task?.type === "subtask") { + yield* handleSubtask({ task, model, lastUser, sessionID, session, msgs }) continue } - yield* compaction.prune({ sessionID }).pipe(Effect.ignore, Effect.forkIn(scope)) - return yield* lastAssistant(sessionID) - }, - ) - - const loop: (input: z.infer) => Effect.Effect = Effect.fn( - "SessionPrompt.loop", - )(function* (input: z.infer) { - return yield* state.ensureRunning(input.sessionID, lastAssistant(input.sessionID), runLoop(input.sessionID)) - }) - - const shell: (input: ShellInput) => Effect.Effect = Effect.fn("SessionPrompt.shell")( - function* (input: ShellInput) { - return yield* state.startShell(input.sessionID, lastAssistant(input.sessionID), shellImpl(input)) - }, - ) - - const command = Effect.fn("SessionPrompt.command")(function* (input: CommandInput) { - yield* elog.info("command", { sessionID: input.sessionID, command: input.command, agent: input.agent }) - const cmd = yield* commands.get(input.command) - if (!cmd) { - const available = (yield* commands.list()).map((c) => c.name) - const hint = available.length ? 
` Available commands: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Command not found: "${input.command}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) - throw error - } - const agentName = cmd.agent ?? input.agent ?? (yield* agents.defaultAgent()) - - const raw = input.arguments.match(argsRegex) ?? [] - const args = raw.map((arg) => arg.replace(quoteTrimRegex, "")) - const templateCommand = yield* Effect.promise(async () => cmd.template) - - const placeholders = templateCommand.match(placeholderRegex) ?? [] - let last = 0 - for (const item of placeholders) { - const value = Number(item.slice(1)) - if (value > last) last = value - } - - const withArgs = templateCommand.replaceAll(placeholderRegex, (_, index) => { - const position = Number(index) - const argIndex = position - 1 - if (argIndex >= args.length) return "" - if (position === last) return args.slice(argIndex).join(" ") - return args[argIndex] - }) - const usesArgumentsPlaceholder = templateCommand.includes("$ARGUMENTS") - let template = withArgs.replaceAll("$ARGUMENTS", input.arguments) - - if (placeholders.length === 0 && !usesArgumentsPlaceholder && input.arguments.trim()) { - template = template + "\n\n" + input.arguments - } - - const shellMatches = ConfigMarkdown.shell(template) - if (shellMatches.length > 0) { - const sh = Shell.preferred() - const results = yield* Effect.promise(() => - Promise.all( - shellMatches.map(async ([, cmd]) => (await Process.text([cmd], { shell: sh, nothrow: true })).text), - ), - ) - let index = 0 - template = template.replace(bashRegex, () => results[index++]) - } - template = template.trim() - - const taskModel = yield* Effect.gen(function* () { - if (cmd.model) return Provider.parseModel(cmd.model) - if (cmd.agent) { - const cmdAgent = yield* agents.get(cmd.agent) - if (cmdAgent?.model) return cmdAgent.model + if (task?.type === "compaction") { + const result = yield* 
compaction.process({ + messages: msgs, + parentID: lastUser.id, + sessionID, + auto: task.auto, + overflow: task.overflow, + }) + if (result === "stop") break + continue } - if (input.model) return Provider.parseModel(input.model) - return yield* lastModel(input.sessionID) - }) - yield* getModel(taskModel.providerID, taskModel.modelID, input.sessionID) + if ( + lastFinished && + lastFinished.summary !== true && + (yield* compaction.isOverflow({ tokens: lastFinished.tokens, model })) + ) { + yield* compaction.create({ sessionID, agent: lastUser.agent, model: lastUser.model, auto: true }) + continue + } - const agent = yield* agents.get(agentName) - if (!agent) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${agentName}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) - throw error + const agent = yield* agents.get(lastUser.agent) + if (!agent) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${lastUser.agent}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID, error: error.toObject() }) + throw error + } + const maxSteps = agent.steps ?? 
Infinity + const isLastStep = step >= maxSteps + msgs = yield* insertReminders({ messages: msgs, agent, session }) + + const msg: MessageV2.Assistant = { + id: MessageID.ascending(), + parentID: lastUser.id, + role: "assistant", + mode: agent.name, + agent: agent.name, + variant: lastUser.model.variant, + path: { cwd: ctx.directory, root: ctx.worktree }, + cost: 0, + tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, + modelID: model.id, + providerID: model.providerID, + time: { created: Date.now() }, + sessionID, + } + yield* sessions.updateMessage(msg) + const handle = yield* processor.create({ + assistantMessage: msg, + sessionID, + model, + }) + + const outcome: "break" | "continue" = yield* Effect.gen(function* () { + const lastUserMsg = msgs.findLast((m) => m.info.role === "user") + const bypassAgentCheck = lastUserMsg?.parts.some((p) => p.type === "agent") ?? false + + const tools = yield* resolveTools({ + agent, + session, + model, + tools: lastUser.tools, + processor: handle, + bypassAgentCheck, + messages: msgs, + }) + + if (lastUser.format?.type === "json_schema") { + tools["StructuredOutput"] = createStructuredOutputTool({ + schema: lastUser.format.schema, + onSuccess(output) { + structured = output + }, + }) + } + + if (step === 1) + yield* summary + .summarize({ sessionID, messageID: lastUser.id }) + .pipe(Effect.ignore, Effect.forkIn(scope)) + + if (step > 1 && lastFinished) { + for (const m of msgs) { + if (m.info.role !== "user" || m.info.id <= lastFinished.id) continue + for (const p of m.parts) { + if (p.type !== "text" || p.ignored || p.synthetic) continue + if (!p.text.trim()) continue + p.text = [ + "", + "The user sent the following message:", + p.text, + "", + "Please address this message and continue with your tasks.", + "", + ].join("\n") + } + } + } + + yield* plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs }) + + const [skills, env, instructions, modelMsgs] = yield* Effect.all([ + 
sys.skills(agent), + Effect.sync(() => sys.environment(model)), + instruction.system().pipe(Effect.orDie), + MessageV2.toModelMessagesEffect(msgs, model), + ]) + const system = [...env, ...(skills ? [skills] : []), ...instructions] + const format = lastUser.format ?? { type: "text" as const } + if (format.type === "json_schema") system.push(STRUCTURED_OUTPUT_SYSTEM_PROMPT) + const result = yield* handle.process({ + user: lastUser, + agent, + permission: session.permission, + sessionID, + parentSessionID: session.parentID, + system, + messages: [...modelMsgs, ...(isLastStep ? [{ role: "assistant" as const, content: MAX_STEPS }] : [])], + tools, + model, + toolChoice: format.type === "json_schema" ? "required" : undefined, + }) + + if (structured !== undefined) { + handle.message.structured = structured + handle.message.finish = handle.message.finish ?? "stop" + yield* sessions.updateMessage(handle.message) + return "break" as const + } + + const finished = handle.message.finish && !["tool-calls", "unknown"].includes(handle.message.finish) + if (finished && !handle.message.error) { + if (format.type === "json_schema") { + handle.message.error = new MessageV2.StructuredOutputError({ + message: "Model did not produce structured output", + retries: 0, + }).toObject() + yield* sessions.updateMessage(handle.message) + return "break" as const + } + } + + if (result === "stop") return "break" as const + if (result === "compact") { + yield* compaction.create({ + sessionID, + agent: lastUser.agent, + model: lastUser.model, + auto: true, + overflow: !handle.message.finish, + }) + } + return "continue" as const + }).pipe(Effect.ensuring(instruction.clear(handle.message.id))) + if (outcome === "break") break + continue } - const templateParts = yield* resolvePromptParts(template) - const isSubtask = (agent.mode === "subagent" && cmd.subtask !== false) || cmd.subtask === true - const parts = isSubtask - ? 
[ - { - type: "subtask" as const, - agent: agent.name, - description: cmd.description ?? "", - command: input.command, - model: { providerID: taskModel.providerID, modelID: taskModel.modelID }, - prompt: templateParts.find((y) => y.type === "text")?.text ?? "", - }, - ] - : [...templateParts, ...(input.parts ?? [])] + yield* compaction.prune({ sessionID }).pipe(Effect.ignore, Effect.forkIn(scope)) + return yield* lastAssistant(sessionID) + }, + ) - const userAgent = isSubtask ? (input.agent ?? (yield* agents.defaultAgent())) : agentName - const userModel = isSubtask - ? input.model - ? Provider.parseModel(input.model) - : yield* lastModel(input.sessionID) - : taskModel + const loop: (input: z.infer) => Effect.Effect = Effect.fn( + "SessionPrompt.loop", + )(function* (input: z.infer) { + return yield* state.ensureRunning(input.sessionID, lastAssistant(input.sessionID), runLoop(input.sessionID)) + }) - yield* plugin.trigger( - "command.execute.before", - { command: input.command, sessionID: input.sessionID, arguments: input.arguments }, - { parts }, + const shell: (input: ShellInput) => Effect.Effect = Effect.fn("SessionPrompt.shell")( + function* (input: ShellInput) { + return yield* state.startShell(input.sessionID, lastAssistant(input.sessionID), shellImpl(input)) + }, + ) + + const command = Effect.fn("SessionPrompt.command")(function* (input: CommandInput) { + yield* elog.info("command", { sessionID: input.sessionID, command: input.command, agent: input.agent }) + const cmd = yield* commands.get(input.command) + if (!cmd) { + const available = (yield* commands.list()).map((c) => c.name) + const hint = available.length ? ` Available commands: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Command not found: "${input.command}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) + throw error + } + const agentName = cmd.agent ?? input.agent ?? 
(yield* agents.defaultAgent()) + + const raw = input.arguments.match(argsRegex) ?? [] + const args = raw.map((arg) => arg.replace(quoteTrimRegex, "")) + const templateCommand = yield* Effect.promise(async () => cmd.template) + + const placeholders = templateCommand.match(placeholderRegex) ?? [] + let last = 0 + for (const item of placeholders) { + const value = Number(item.slice(1)) + if (value > last) last = value + } + + const withArgs = templateCommand.replaceAll(placeholderRegex, (_, index) => { + const position = Number(index) + const argIndex = position - 1 + if (argIndex >= args.length) return "" + if (position === last) return args.slice(argIndex).join(" ") + return args[argIndex] + }) + const usesArgumentsPlaceholder = templateCommand.includes("$ARGUMENTS") + let template = withArgs.replaceAll("$ARGUMENTS", input.arguments) + + if (placeholders.length === 0 && !usesArgumentsPlaceholder && input.arguments.trim()) { + template = template + "\n\n" + input.arguments + } + + const shellMatches = ConfigMarkdown.shell(template) + if (shellMatches.length > 0) { + const sh = Shell.preferred() + const results = yield* Effect.promise(() => + Promise.all( + shellMatches.map(async ([, cmd]) => (await Process.text([cmd], { shell: sh, nothrow: true })).text), + ), ) + let index = 0 + template = template.replace(bashRegex, () => results[index++]) + } + template = template.trim() - const result = yield* prompt({ - sessionID: input.sessionID, - messageID: input.messageID, - model: userModel, - agent: userAgent, - parts, - variant: input.variant, - }) - yield* bus.publish(Command.Event.Executed, { - name: input.command, - sessionID: input.sessionID, - arguments: input.arguments, - messageID: result.info.id, - }) - return result + const taskModel = yield* Effect.gen(function* () { + if (cmd.model) return Provider.parseModel(cmd.model) + if (cmd.agent) { + const cmdAgent = yield* agents.get(cmd.agent) + if (cmdAgent?.model) return cmdAgent.model + } + if (input.model) return 
Provider.parseModel(input.model) + return yield* lastModel(input.sessionID) }) - return Service.of({ - cancel, - prompt, - loop, - shell, - command, - resolvePromptParts, - }) - }), - ) + yield* getModel(taskModel.providerID, taskModel.modelID, input.sessionID) - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(SessionRunState.defaultLayer), - Layer.provide(SessionStatus.defaultLayer), - Layer.provide(SessionCompaction.defaultLayer), - Layer.provide(SessionProcessor.defaultLayer), - Layer.provide(Command.defaultLayer), - Layer.provide(Permission.defaultLayer), - Layer.provide(MCP.defaultLayer), - Layer.provide(LSP.defaultLayer), - Layer.provide(FileTime.defaultLayer), - Layer.provide(ToolRegistry.defaultLayer), - Layer.provide(Truncate.defaultLayer), - Layer.provide(Provider.defaultLayer), - Layer.provide(Instruction.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(Plugin.defaultLayer), - Layer.provide(Session.defaultLayer), - Layer.provide(SessionRevert.defaultLayer), - Layer.provide(SessionSummary.defaultLayer), - Layer.provide( - Layer.mergeAll( - Agent.defaultLayer, - SystemPrompt.defaultLayer, - LLM.defaultLayer, - Bus.layer, - CrossSpawnSpawner.defaultLayer, - ), + const agent = yield* agents.get(agentName) + if (!agent) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${agentName}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) + throw error + } + + const templateParts = yield* resolvePromptParts(template) + const isSubtask = (agent.mode === "subagent" && cmd.subtask !== false) || cmd.subtask === true + const parts = isSubtask + ? [ + { + type: "subtask" as const, + agent: agent.name, + description: cmd.description ?? 
"", + command: input.command, + model: { providerID: taskModel.providerID, modelID: taskModel.modelID }, + prompt: templateParts.find((y) => y.type === "text")?.text ?? "", + }, + ] + : [...templateParts, ...(input.parts ?? [])] + + const userAgent = isSubtask ? (input.agent ?? (yield* agents.defaultAgent())) : agentName + const userModel = isSubtask + ? input.model + ? Provider.parseModel(input.model) + : yield* lastModel(input.sessionID) + : taskModel + + yield* plugin.trigger( + "command.execute.before", + { command: input.command, sessionID: input.sessionID, arguments: input.arguments }, + { parts }, + ) + + const result = yield* prompt({ + sessionID: input.sessionID, + messageID: input.messageID, + model: userModel, + agent: userAgent, + parts, + variant: input.variant, + }) + yield* bus.publish(Command.Event.Executed, { + name: input.command, + sessionID: input.sessionID, + arguments: input.arguments, + messageID: result.info.id, + }) + return result + }) + + return Service.of({ + cancel, + prompt, + loop, + shell, + command, + resolvePromptParts, + }) + }), +) + +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(SessionRunState.defaultLayer), + Layer.provide(SessionStatus.defaultLayer), + Layer.provide(SessionCompaction.defaultLayer), + Layer.provide(SessionProcessor.defaultLayer), + Layer.provide(Command.defaultLayer), + Layer.provide(Permission.defaultLayer), + Layer.provide(MCP.defaultLayer), + Layer.provide(LSP.defaultLayer), + Layer.provide(FileTime.defaultLayer), + Layer.provide(ToolRegistry.defaultLayer), + Layer.provide(Truncate.defaultLayer), + Layer.provide(Provider.defaultLayer), + Layer.provide(Instruction.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(Plugin.defaultLayer), + Layer.provide(Session.defaultLayer), + Layer.provide(SessionRevert.defaultLayer), + Layer.provide(SessionSummary.defaultLayer), + Layer.provide( + Layer.mergeAll( + Agent.defaultLayer, + SystemPrompt.defaultLayer, + 
LLM.defaultLayer, + Bus.layer, + CrossSpawnSpawner.defaultLayer, ), ), - ) - export const PromptInput = z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod.optional(), - model: z - .object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, + ), +) +export const PromptInput = z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod.optional(), + model: z + .object({ + providerID: ProviderID.zod, + modelID: ModelID.zod, + }) + .optional(), + agent: z.string().optional(), + noReply: z.boolean().optional(), + tools: z + .record(z.string(), z.boolean()) + .optional() + .describe( + "@deprecated tools and permissions have been merged, you can set permissions on the session itself now", + ), + format: MessageV2.Format.optional(), + system: z.string().optional(), + variant: z.string().optional(), + parts: z.array( + z.discriminatedUnion("type", [ + MessageV2.TextPart.omit({ + messageID: true, + sessionID: true, }) - .optional(), - agent: z.string().optional(), - noReply: z.boolean().optional(), - tools: z - .record(z.string(), z.boolean()) - .optional() - .describe( - "@deprecated tools and permissions have been merged, you can set permissions on the session itself now", - ), - format: MessageV2.Format.optional(), - system: z.string().optional(), - variant: z.string().optional(), - parts: z.array( - z.discriminatedUnion("type", [ - MessageV2.TextPart.omit({ - messageID: true, - sessionID: true, + .partial({ + id: true, }) - .partial({ - id: true, - }) - .meta({ - ref: "TextPartInput", - }), + .meta({ + ref: "TextPartInput", + }), + MessageV2.FilePart.omit({ + messageID: true, + sessionID: true, + }) + .partial({ + id: true, + }) + .meta({ + ref: "FilePartInput", + }), + MessageV2.AgentPart.omit({ + messageID: true, + sessionID: true, + }) + .partial({ + id: true, + }) + .meta({ + ref: "AgentPartInput", + }), + MessageV2.SubtaskPart.omit({ + messageID: true, + sessionID: true, + }) + .partial({ + id: true, + }) + .meta({ + ref: 
"SubtaskPartInput", + }), + ]), + ), +}) +export type PromptInput = z.infer + +export const LoopInput = z.object({ + sessionID: SessionID.zod, +}) + +export const ShellInput = z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod.optional(), + agent: z.string(), + model: z + .object({ + providerID: ProviderID.zod, + modelID: ModelID.zod, + }) + .optional(), + command: z.string(), +}) +export type ShellInput = z.infer + +export const CommandInput = z.object({ + messageID: MessageID.zod.optional(), + sessionID: SessionID.zod, + agent: z.string().optional(), + model: z.string().optional(), + arguments: z.string(), + command: z.string(), + variant: z.string().optional(), + parts: z + .array( + z.discriminatedUnion("type", [ MessageV2.FilePart.omit({ messageID: true, sessionID: true, - }) - .partial({ - id: true, - }) - .meta({ - ref: "FilePartInput", - }), - MessageV2.AgentPart.omit({ - messageID: true, - sessionID: true, - }) - .partial({ - id: true, - }) - .meta({ - ref: "AgentPartInput", - }), - MessageV2.SubtaskPart.omit({ - messageID: true, - sessionID: true, - }) - .partial({ - id: true, - }) - .meta({ - ref: "SubtaskPartInput", - }), + }).partial({ + id: true, + }), ]), - ), + ) + .optional(), +}) +export type CommandInput = z.infer + +/** @internal Exported for testing */ +export function createStructuredOutputTool(input: { + schema: Record + onSuccess: (output: unknown) => void +}): AITool { + // Remove $schema property if present (not needed for tool input) + const { $schema: _, ...toolSchema } = input.schema + + return tool({ + description: STRUCTURED_OUTPUT_DESCRIPTION, + inputSchema: jsonSchema(toolSchema as JSONSchema7), + async execute(args) { + // AI SDK validates args against inputSchema before calling execute() + input.onSuccess(args) + return { + output: "Structured output captured successfully.", + title: "Structured Output", + metadata: { valid: true }, + } + }, + toModelOutput({ output }) { + return { + type: "text", + value: 
output.output, + } + }, }) - export type PromptInput = z.infer - - export const LoopInput = z.object({ - sessionID: SessionID.zod, - }) - - export const ShellInput = z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod.optional(), - agent: z.string(), - model: z - .object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, - }) - .optional(), - command: z.string(), - }) - export type ShellInput = z.infer - - export const CommandInput = z.object({ - messageID: MessageID.zod.optional(), - sessionID: SessionID.zod, - agent: z.string().optional(), - model: z.string().optional(), - arguments: z.string(), - command: z.string(), - variant: z.string().optional(), - parts: z - .array( - z.discriminatedUnion("type", [ - MessageV2.FilePart.omit({ - messageID: true, - sessionID: true, - }).partial({ - id: true, - }), - ]), - ) - .optional(), - }) - export type CommandInput = z.infer - - /** @internal Exported for testing */ - export function createStructuredOutputTool(input: { - schema: Record - onSuccess: (output: unknown) => void - }): AITool { - // Remove $schema property if present (not needed for tool input) - const { $schema: _, ...toolSchema } = input.schema - - return tool({ - description: STRUCTURED_OUTPUT_DESCRIPTION, - inputSchema: jsonSchema(toolSchema as JSONSchema7), - async execute(args) { - // AI SDK validates args against inputSchema before calling execute() - input.onSuccess(args) - return { - output: "Structured output captured successfully.", - title: "Structured Output", - metadata: { valid: true }, - } - }, - toModelOutput({ output }) { - return { - type: "text", - value: output.output, - } - }, - }) - } - const bashRegex = /!`([^`]+)`/g - // Match [Image N] as single token, quoted strings, or non-space sequences - const argsRegex = /(?:\[Image\s+\d+\]|"[^"]*"|'[^']*'|[^\s"']+)/gi - const placeholderRegex = /\$(\d+)/g - const quoteTrimRegex = /^["']|["']$/g } +const bashRegex = /!`([^`]+)`/g +// Match [Image N] as single token, quoted 
strings, or non-space sequences +const argsRegex = /(?:\[Image\s+\d+\]|"[^"]*"|'[^']*'|[^\s"']+)/gi +const placeholderRegex = /\$(\d+)/g +const quoteTrimRegex = /^["']|["']$/g + +export * as SessionPrompt from "./prompt" diff --git a/packages/opencode/src/session/retry.ts b/packages/opencode/src/session/retry.ts index 6aad55f3f8..12fd4d345d 100644 --- a/packages/opencode/src/session/retry.ts +++ b/packages/opencode/src/session/retry.ts @@ -3,123 +3,123 @@ import { Cause, Clock, Duration, Effect, Schedule } from "effect" import { MessageV2 } from "./message-v2" import { iife } from "@/util/iife" -export namespace SessionRetry { - export type Err = ReturnType +export type Err = ReturnType - // This exported message is shared with the TUI upsell detector. Matching on a - // literal error string kind of sucks, but it is the simplest for now. - export const GO_UPSELL_MESSAGE = "Free usage exceeded, subscribe to Go https://opencode.ai/go" +// This exported message is shared with the TUI upsell detector. Matching on a +// literal error string kind of sucks, but it is the simplest for now. 
+export const GO_UPSELL_MESSAGE = "Free usage exceeded, subscribe to Go https://opencode.ai/go" - export const RETRY_INITIAL_DELAY = 2000 - export const RETRY_BACKOFF_FACTOR = 2 - export const RETRY_MAX_DELAY_NO_HEADERS = 30_000 // 30 seconds - export const RETRY_MAX_DELAY = 2_147_483_647 // max 32-bit signed integer for setTimeout +export const RETRY_INITIAL_DELAY = 2000 +export const RETRY_BACKOFF_FACTOR = 2 +export const RETRY_MAX_DELAY_NO_HEADERS = 30_000 // 30 seconds +export const RETRY_MAX_DELAY = 2_147_483_647 // max 32-bit signed integer for setTimeout - function cap(ms: number) { - return Math.min(ms, RETRY_MAX_DELAY) - } - - export function delay(attempt: number, error?: MessageV2.APIError) { - if (error) { - const headers = error.data.responseHeaders - if (headers) { - const retryAfterMs = headers["retry-after-ms"] - if (retryAfterMs) { - const parsedMs = Number.parseFloat(retryAfterMs) - if (!Number.isNaN(parsedMs)) { - return cap(parsedMs) - } - } - - const retryAfter = headers["retry-after"] - if (retryAfter) { - const parsedSeconds = Number.parseFloat(retryAfter) - if (!Number.isNaN(parsedSeconds)) { - // convert seconds to milliseconds - return cap(Math.ceil(parsedSeconds * 1000)) - } - // Try parsing as HTTP date format - const parsed = Date.parse(retryAfter) - Date.now() - if (!Number.isNaN(parsed) && parsed > 0) { - return cap(Math.ceil(parsed)) - } - } - - return cap(RETRY_INITIAL_DELAY * Math.pow(RETRY_BACKOFF_FACTOR, attempt - 1)) - } - } - - return cap(Math.min(RETRY_INITIAL_DELAY * Math.pow(RETRY_BACKOFF_FACTOR, attempt - 1), RETRY_MAX_DELAY_NO_HEADERS)) - } - - export function retryable(error: Err) { - // context overflow errors should not be retried - if (MessageV2.ContextOverflowError.isInstance(error)) return undefined - if (MessageV2.APIError.isInstance(error)) { - const status = error.data.statusCode - // 5xx errors are transient server failures and should always be retried, - // even when the provider SDK doesn't explicitly mark them 
as retryable. - if (!error.data.isRetryable && !(status !== undefined && status >= 500)) return undefined - if (error.data.responseBody?.includes("FreeUsageLimitError")) return GO_UPSELL_MESSAGE - return error.data.message.includes("Overloaded") ? "Provider is overloaded" : error.data.message - } - - // Check for rate limit patterns in plain text error messages - const msg = error.data?.message - if (typeof msg === "string") { - const lower = msg.toLowerCase() - if ( - lower.includes("rate increased too quickly") || - lower.includes("rate limit") || - lower.includes("too many requests") - ) { - return msg - } - } - - const json = iife(() => { - try { - if (typeof error.data?.message === "string") { - const parsed = JSON.parse(error.data.message) - return parsed - } - - return JSON.parse(error.data.message) - } catch { - return undefined - } - }) - if (!json || typeof json !== "object") return undefined - const code = typeof json.code === "string" ? json.code : "" - - if (json.type === "error" && json.error?.type === "too_many_requests") { - return "Too Many Requests" - } - if (code.includes("exhausted") || code.includes("unavailable")) { - return "Provider is overloaded" - } - if (json.type === "error" && typeof json.error?.code === "string" && json.error.code.includes("rate_limit")) { - return "Rate Limited" - } - return undefined - } - - export function policy(opts: { - parse: (error: unknown) => Err - set: (input: { attempt: number; message: string; next: number }) => Effect.Effect - }) { - return Schedule.fromStepWithMetadata( - Effect.succeed((meta: Schedule.InputMetadata) => { - const error = opts.parse(meta.input) - const message = retryable(error) - if (!message) return Cause.done(meta.attempt) - return Effect.gen(function* () { - const wait = delay(meta.attempt, MessageV2.APIError.isInstance(error) ? 
error : undefined) - const now = yield* Clock.currentTimeMillis - yield* opts.set({ attempt: meta.attempt, message, next: now + wait }) - return [meta.attempt, Duration.millis(wait)] as [number, Duration.Duration] - }) - }), - ) - } +function cap(ms: number) { + return Math.min(ms, RETRY_MAX_DELAY) } + +export function delay(attempt: number, error?: MessageV2.APIError) { + if (error) { + const headers = error.data.responseHeaders + if (headers) { + const retryAfterMs = headers["retry-after-ms"] + if (retryAfterMs) { + const parsedMs = Number.parseFloat(retryAfterMs) + if (!Number.isNaN(parsedMs)) { + return cap(parsedMs) + } + } + + const retryAfter = headers["retry-after"] + if (retryAfter) { + const parsedSeconds = Number.parseFloat(retryAfter) + if (!Number.isNaN(parsedSeconds)) { + // convert seconds to milliseconds + return cap(Math.ceil(parsedSeconds * 1000)) + } + // Try parsing as HTTP date format + const parsed = Date.parse(retryAfter) - Date.now() + if (!Number.isNaN(parsed) && parsed > 0) { + return cap(Math.ceil(parsed)) + } + } + + return cap(RETRY_INITIAL_DELAY * Math.pow(RETRY_BACKOFF_FACTOR, attempt - 1)) + } + } + + return cap(Math.min(RETRY_INITIAL_DELAY * Math.pow(RETRY_BACKOFF_FACTOR, attempt - 1), RETRY_MAX_DELAY_NO_HEADERS)) +} + +export function retryable(error: Err) { + // context overflow errors should not be retried + if (MessageV2.ContextOverflowError.isInstance(error)) return undefined + if (MessageV2.APIError.isInstance(error)) { + const status = error.data.statusCode + // 5xx errors are transient server failures and should always be retried, + // even when the provider SDK doesn't explicitly mark them as retryable. + if (!error.data.isRetryable && !(status !== undefined && status >= 500)) return undefined + if (error.data.responseBody?.includes("FreeUsageLimitError")) return GO_UPSELL_MESSAGE + return error.data.message.includes("Overloaded") ? 
"Provider is overloaded" : error.data.message + } + + // Check for rate limit patterns in plain text error messages + const msg = error.data?.message + if (typeof msg === "string") { + const lower = msg.toLowerCase() + if ( + lower.includes("rate increased too quickly") || + lower.includes("rate limit") || + lower.includes("too many requests") + ) { + return msg + } + } + + const json = iife(() => { + try { + if (typeof error.data?.message === "string") { + const parsed = JSON.parse(error.data.message) + return parsed + } + + return JSON.parse(error.data.message) + } catch { + return undefined + } + }) + if (!json || typeof json !== "object") return undefined + const code = typeof json.code === "string" ? json.code : "" + + if (json.type === "error" && json.error?.type === "too_many_requests") { + return "Too Many Requests" + } + if (code.includes("exhausted") || code.includes("unavailable")) { + return "Provider is overloaded" + } + if (json.type === "error" && typeof json.error?.code === "string" && json.error.code.includes("rate_limit")) { + return "Rate Limited" + } + return undefined +} + +export function policy(opts: { + parse: (error: unknown) => Err + set: (input: { attempt: number; message: string; next: number }) => Effect.Effect +}) { + return Schedule.fromStepWithMetadata( + Effect.succeed((meta: Schedule.InputMetadata) => { + const error = opts.parse(meta.input) + const message = retryable(error) + if (!message) return Cause.done(meta.attempt) + return Effect.gen(function* () { + const wait = delay(meta.attempt, MessageV2.APIError.isInstance(error) ? 
error : undefined) + const now = yield* Clock.currentTimeMillis + yield* opts.set({ attempt: meta.attempt, message, next: now + wait }) + return [meta.attempt, Duration.millis(wait)] as [number, Duration.Duration] + }) + }), + ) +} + +export * as SessionRetry from "./retry" diff --git a/packages/opencode/src/session/revert.ts b/packages/opencode/src/session/revert.ts index f09ccf24ad..c7e5220f12 100644 --- a/packages/opencode/src/session/revert.ts +++ b/packages/opencode/src/session/revert.ts @@ -11,151 +11,151 @@ import { SessionID, MessageID, PartID } from "./schema" import { SessionRunState } from "./run-state" import { SessionSummary } from "./summary" -export namespace SessionRevert { - const log = Log.create({ service: "session.revert" }) +const log = Log.create({ service: "session.revert" }) - export const RevertInput = z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod, - partID: PartID.zod.optional(), - }) - export type RevertInput = z.infer +export const RevertInput = z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod.optional(), +}) +export type RevertInput = z.infer - export interface Interface { - readonly revert: (input: RevertInput) => Effect.Effect - readonly unrevert: (input: { sessionID: SessionID }) => Effect.Effect - readonly cleanup: (session: Session.Info) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionRevert") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const sessions = yield* Session.Service - const snap = yield* Snapshot.Service - const storage = yield* Storage.Service - const bus = yield* Bus.Service - const summary = yield* SessionSummary.Service - const state = yield* SessionRunState.Service - - const revert = Effect.fn("SessionRevert.revert")(function* (input: RevertInput) { - yield* state.assertNotBusy(input.sessionID) - const all = yield* sessions.messages({ sessionID: input.sessionID }) - let lastUser: 
MessageV2.User | undefined - const session = yield* sessions.get(input.sessionID) - - let rev: Session.Info["revert"] - const patches: Snapshot.Patch[] = [] - for (const msg of all) { - if (msg.info.role === "user") lastUser = msg.info - const remaining = [] - for (const part of msg.parts) { - if (rev) { - if (part.type === "patch") patches.push(part) - continue - } - - if (!rev) { - if ((msg.info.id === input.messageID && !input.partID) || part.id === input.partID) { - const partID = remaining.some((item) => ["text", "tool"].includes(item.type)) ? input.partID : undefined - rev = { - messageID: !partID && lastUser ? lastUser.id : msg.info.id, - partID, - } - } - remaining.push(part) - } - } - } - - if (!rev) return session - - rev.snapshot = session.revert?.snapshot ?? (yield* snap.track()) - if (session.revert?.snapshot) yield* snap.restore(session.revert.snapshot) - yield* snap.revert(patches) - if (rev.snapshot) rev.diff = yield* snap.diff(rev.snapshot as string) - const range = all.filter((msg) => msg.info.id >= rev!.messageID) - const diffs = yield* summary.computeDiff({ messages: range }) - yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore) - yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs }) - yield* sessions.setRevert({ - sessionID: input.sessionID, - revert: rev, - summary: { - additions: diffs.reduce((sum, x) => sum + x.additions, 0), - deletions: diffs.reduce((sum, x) => sum + x.deletions, 0), - files: diffs.length, - }, - }) - return yield* sessions.get(input.sessionID) - }) - - const unrevert = Effect.fn("SessionRevert.unrevert")(function* (input: { sessionID: SessionID }) { - log.info("unreverting", input) - yield* state.assertNotBusy(input.sessionID) - const session = yield* sessions.get(input.sessionID) - if (!session.revert) return session - if (session.revert.snapshot) yield* snap.restore(session.revert!.snapshot!) 
- yield* sessions.clearRevert(input.sessionID) - return yield* sessions.get(input.sessionID) - }) - - const cleanup = Effect.fn("SessionRevert.cleanup")(function* (session: Session.Info) { - if (!session.revert) return - const sessionID = session.id - const msgs = yield* sessions.messages({ sessionID }) - const messageID = session.revert.messageID - const remove = [] as MessageV2.WithParts[] - let target: MessageV2.WithParts | undefined - for (const msg of msgs) { - if (msg.info.id < messageID) continue - if (msg.info.id > messageID) { - remove.push(msg) - continue - } - if (session.revert.partID) { - target = msg - continue - } - remove.push(msg) - } - for (const msg of remove) { - SyncEvent.run(MessageV2.Event.Removed, { - sessionID, - messageID: msg.info.id, - }) - } - if (session.revert.partID && target) { - const partID = session.revert.partID - const idx = target.parts.findIndex((part) => part.id === partID) - if (idx >= 0) { - const removeParts = target.parts.slice(idx) - target.parts = target.parts.slice(0, idx) - for (const part of removeParts) { - SyncEvent.run(MessageV2.Event.PartRemoved, { - sessionID, - messageID: target.info.id, - partID: part.id, - }) - } - } - } - yield* sessions.clearRevert(sessionID) - }) - - return Service.of({ revert, unrevert, cleanup }) - }), - ) - - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(SessionRunState.defaultLayer), - Layer.provide(Session.defaultLayer), - Layer.provide(Snapshot.defaultLayer), - Layer.provide(Storage.defaultLayer), - Layer.provide(Bus.layer), - Layer.provide(SessionSummary.defaultLayer), - ), - ) +export interface Interface { + readonly revert: (input: RevertInput) => Effect.Effect + readonly unrevert: (input: { sessionID: SessionID }) => Effect.Effect + readonly cleanup: (session: Session.Info) => Effect.Effect } + +export class Service extends Context.Service()("@opencode/SessionRevert") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { 
+ const sessions = yield* Session.Service + const snap = yield* Snapshot.Service + const storage = yield* Storage.Service + const bus = yield* Bus.Service + const summary = yield* SessionSummary.Service + const state = yield* SessionRunState.Service + + const revert = Effect.fn("SessionRevert.revert")(function* (input: RevertInput) { + yield* state.assertNotBusy(input.sessionID) + const all = yield* sessions.messages({ sessionID: input.sessionID }) + let lastUser: MessageV2.User | undefined + const session = yield* sessions.get(input.sessionID) + + let rev: Session.Info["revert"] + const patches: Snapshot.Patch[] = [] + for (const msg of all) { + if (msg.info.role === "user") lastUser = msg.info + const remaining = [] + for (const part of msg.parts) { + if (rev) { + if (part.type === "patch") patches.push(part) + continue + } + + if (!rev) { + if ((msg.info.id === input.messageID && !input.partID) || part.id === input.partID) { + const partID = remaining.some((item) => ["text", "tool"].includes(item.type)) ? input.partID : undefined + rev = { + messageID: !partID && lastUser ? lastUser.id : msg.info.id, + partID, + } + } + remaining.push(part) + } + } + } + + if (!rev) return session + + rev.snapshot = session.revert?.snapshot ?? 
(yield* snap.track()) + if (session.revert?.snapshot) yield* snap.restore(session.revert.snapshot) + yield* snap.revert(patches) + if (rev.snapshot) rev.diff = yield* snap.diff(rev.snapshot as string) + const range = all.filter((msg) => msg.info.id >= rev!.messageID) + const diffs = yield* summary.computeDiff({ messages: range }) + yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore) + yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs }) + yield* sessions.setRevert({ + sessionID: input.sessionID, + revert: rev, + summary: { + additions: diffs.reduce((sum, x) => sum + x.additions, 0), + deletions: diffs.reduce((sum, x) => sum + x.deletions, 0), + files: diffs.length, + }, + }) + return yield* sessions.get(input.sessionID) + }) + + const unrevert = Effect.fn("SessionRevert.unrevert")(function* (input: { sessionID: SessionID }) { + log.info("unreverting", input) + yield* state.assertNotBusy(input.sessionID) + const session = yield* sessions.get(input.sessionID) + if (!session.revert) return session + if (session.revert.snapshot) yield* snap.restore(session.revert!.snapshot!) 
+ yield* sessions.clearRevert(input.sessionID) + return yield* sessions.get(input.sessionID) + }) + + const cleanup = Effect.fn("SessionRevert.cleanup")(function* (session: Session.Info) { + if (!session.revert) return + const sessionID = session.id + const msgs = yield* sessions.messages({ sessionID }) + const messageID = session.revert.messageID + const remove = [] as MessageV2.WithParts[] + let target: MessageV2.WithParts | undefined + for (const msg of msgs) { + if (msg.info.id < messageID) continue + if (msg.info.id > messageID) { + remove.push(msg) + continue + } + if (session.revert.partID) { + target = msg + continue + } + remove.push(msg) + } + for (const msg of remove) { + SyncEvent.run(MessageV2.Event.Removed, { + sessionID, + messageID: msg.info.id, + }) + } + if (session.revert.partID && target) { + const partID = session.revert.partID + const idx = target.parts.findIndex((part) => part.id === partID) + if (idx >= 0) { + const removeParts = target.parts.slice(idx) + target.parts = target.parts.slice(0, idx) + for (const part of removeParts) { + SyncEvent.run(MessageV2.Event.PartRemoved, { + sessionID, + messageID: target.info.id, + partID: part.id, + }) + } + } + } + yield* sessions.clearRevert(sessionID) + }) + + return Service.of({ revert, unrevert, cleanup }) + }), +) + +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(SessionRunState.defaultLayer), + Layer.provide(Session.defaultLayer), + Layer.provide(Snapshot.defaultLayer), + Layer.provide(Storage.defaultLayer), + Layer.provide(Bus.layer), + Layer.provide(SessionSummary.defaultLayer), + ), +) + +export * as SessionRevert from "./revert" diff --git a/packages/opencode/src/session/run-state.ts b/packages/opencode/src/session/run-state.ts index a18e0b5732..7a106f8a4c 100644 --- a/packages/opencode/src/session/run-state.ts +++ b/packages/opencode/src/session/run-state.ts @@ -6,103 +6,103 @@ import { MessageV2 } from "./message-v2" import { SessionID } from "./schema" 
import { SessionStatus } from "./status" -export namespace SessionRunState { - export interface Interface { - readonly assertNotBusy: (sessionID: SessionID) => Effect.Effect - readonly cancel: (sessionID: SessionID) => Effect.Effect - readonly ensureRunning: ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - work: Effect.Effect, - ) => Effect.Effect - readonly startShell: ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - work: Effect.Effect, - ) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionRunState") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const status = yield* SessionStatus.Service - - const state = yield* InstanceState.make( - Effect.fn("SessionRunState.state")(function* () { - const scope = yield* Scope.Scope - const runners = new Map>() - yield* Effect.addFinalizer( - Effect.fnUntraced(function* () { - yield* Effect.forEach(runners.values(), (runner) => runner.cancel, { - concurrency: "unbounded", - discard: true, - }) - runners.clear() - }), - ) - return { runners, scope } - }), - ) - - const runner = Effect.fn("SessionRunState.runner")(function* ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - ) { - const data = yield* InstanceState.get(state) - const existing = data.runners.get(sessionID) - if (existing) return existing - const next = Runner.make(data.scope, { - onIdle: Effect.gen(function* () { - data.runners.delete(sessionID) - yield* status.set(sessionID, { type: "idle" }) - }), - onBusy: status.set(sessionID, { type: "busy" }), - onInterrupt, - busy: () => { - throw new Session.BusyError(sessionID) - }, - }) - data.runners.set(sessionID, next) - return next - }) - - const assertNotBusy = Effect.fn("SessionRunState.assertNotBusy")(function* (sessionID: SessionID) { - const data = yield* InstanceState.get(state) - const existing = data.runners.get(sessionID) - if (existing?.busy) throw new Session.BusyError(sessionID) - }) - - const cancel = 
Effect.fn("SessionRunState.cancel")(function* (sessionID: SessionID) { - const data = yield* InstanceState.get(state) - const existing = data.runners.get(sessionID) - if (!existing || !existing.busy) { - yield* status.set(sessionID, { type: "idle" }) - return - } - yield* existing.cancel - }) - - const ensureRunning = Effect.fn("SessionRunState.ensureRunning")(function* ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - work: Effect.Effect, - ) { - return yield* (yield* runner(sessionID, onInterrupt)).ensureRunning(work) - }) - - const startShell = Effect.fn("SessionRunState.startShell")(function* ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - work: Effect.Effect, - ) { - return yield* (yield* runner(sessionID, onInterrupt)).startShell(work) - }) - - return Service.of({ assertNotBusy, cancel, ensureRunning, startShell }) - }), - ) - - export const defaultLayer = layer.pipe(Layer.provide(SessionStatus.defaultLayer)) +export interface Interface { + readonly assertNotBusy: (sessionID: SessionID) => Effect.Effect + readonly cancel: (sessionID: SessionID) => Effect.Effect + readonly ensureRunning: ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + work: Effect.Effect, + ) => Effect.Effect + readonly startShell: ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + work: Effect.Effect, + ) => Effect.Effect } + +export class Service extends Context.Service()("@opencode/SessionRunState") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const status = yield* SessionStatus.Service + + const state = yield* InstanceState.make( + Effect.fn("SessionRunState.state")(function* () { + const scope = yield* Scope.Scope + const runners = new Map>() + yield* Effect.addFinalizer( + Effect.fnUntraced(function* () { + yield* Effect.forEach(runners.values(), (runner) => runner.cancel, { + concurrency: "unbounded", + discard: true, + }) + runners.clear() + }), + ) + return { runners, scope } + }), + ) + + const runner = 
Effect.fn("SessionRunState.runner")(function* ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + ) { + const data = yield* InstanceState.get(state) + const existing = data.runners.get(sessionID) + if (existing) return existing + const next = Runner.make(data.scope, { + onIdle: Effect.gen(function* () { + data.runners.delete(sessionID) + yield* status.set(sessionID, { type: "idle" }) + }), + onBusy: status.set(sessionID, { type: "busy" }), + onInterrupt, + busy: () => { + throw new Session.BusyError(sessionID) + }, + }) + data.runners.set(sessionID, next) + return next + }) + + const assertNotBusy = Effect.fn("SessionRunState.assertNotBusy")(function* (sessionID: SessionID) { + const data = yield* InstanceState.get(state) + const existing = data.runners.get(sessionID) + if (existing?.busy) throw new Session.BusyError(sessionID) + }) + + const cancel = Effect.fn("SessionRunState.cancel")(function* (sessionID: SessionID) { + const data = yield* InstanceState.get(state) + const existing = data.runners.get(sessionID) + if (!existing || !existing.busy) { + yield* status.set(sessionID, { type: "idle" }) + return + } + yield* existing.cancel + }) + + const ensureRunning = Effect.fn("SessionRunState.ensureRunning")(function* ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + work: Effect.Effect, + ) { + return yield* (yield* runner(sessionID, onInterrupt)).ensureRunning(work) + }) + + const startShell = Effect.fn("SessionRunState.startShell")(function* ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + work: Effect.Effect, + ) { + return yield* (yield* runner(sessionID, onInterrupt)).startShell(work) + }) + + return Service.of({ assertNotBusy, cancel, ensureRunning, startShell }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(SessionStatus.defaultLayer)) + +export * as SessionRunState from "./run-state" diff --git a/packages/opencode/src/session/status.ts b/packages/opencode/src/session/status.ts index f0d4e6cf79..7f46c70a8a 100644 --- 
a/packages/opencode/src/session/status.ts +++ b/packages/opencode/src/session/status.ts @@ -5,84 +5,84 @@ import { SessionID } from "./schema" import { Effect, Layer, Context } from "effect" import z from "zod" -export namespace SessionStatus { - export const Info = z - .union([ - z.object({ - type: z.literal("idle"), - }), - z.object({ - type: z.literal("retry"), - attempt: z.number(), - message: z.string(), - next: z.number(), - }), - z.object({ - type: z.literal("busy"), - }), - ]) - .meta({ - ref: "SessionStatus", - }) - export type Info = z.infer - - export const Event = { - Status: BusEvent.define( - "session.status", - z.object({ - sessionID: SessionID.zod, - status: Info, - }), - ), - // deprecated - Idle: BusEvent.define( - "session.idle", - z.object({ - sessionID: SessionID.zod, - }), - ), - } - - export interface Interface { - readonly get: (sessionID: SessionID) => Effect.Effect - readonly list: () => Effect.Effect> - readonly set: (sessionID: SessionID, status: Info) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionStatus") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - - const state = yield* InstanceState.make( - Effect.fn("SessionStatus.state")(() => Effect.succeed(new Map())), - ) - - const get = Effect.fn("SessionStatus.get")(function* (sessionID: SessionID) { - const data = yield* InstanceState.get(state) - return data.get(sessionID) ?? 
{ type: "idle" as const } - }) - - const list = Effect.fn("SessionStatus.list")(function* () { - return new Map(yield* InstanceState.get(state)) - }) - - const set = Effect.fn("SessionStatus.set")(function* (sessionID: SessionID, status: Info) { - const data = yield* InstanceState.get(state) - yield* bus.publish(Event.Status, { sessionID, status }) - if (status.type === "idle") { - yield* bus.publish(Event.Idle, { sessionID }) - data.delete(sessionID) - return - } - data.set(sessionID, status) - }) - - return Service.of({ get, list, set }) +export const Info = z + .union([ + z.object({ + type: z.literal("idle"), }), - ) + z.object({ + type: z.literal("retry"), + attempt: z.number(), + message: z.string(), + next: z.number(), + }), + z.object({ + type: z.literal("busy"), + }), + ]) + .meta({ + ref: "SessionStatus", + }) +export type Info = z.infer - export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) +export const Event = { + Status: BusEvent.define( + "session.status", + z.object({ + sessionID: SessionID.zod, + status: Info, + }), + ), + // deprecated + Idle: BusEvent.define( + "session.idle", + z.object({ + sessionID: SessionID.zod, + }), + ), } + +export interface Interface { + readonly get: (sessionID: SessionID) => Effect.Effect + readonly list: () => Effect.Effect> + readonly set: (sessionID: SessionID, status: Info) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SessionStatus") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + + const state = yield* InstanceState.make( + Effect.fn("SessionStatus.state")(() => Effect.succeed(new Map())), + ) + + const get = Effect.fn("SessionStatus.get")(function* (sessionID: SessionID) { + const data = yield* InstanceState.get(state) + return data.get(sessionID) ?? 
{ type: "idle" as const } + }) + + const list = Effect.fn("SessionStatus.list")(function* () { + return new Map(yield* InstanceState.get(state)) + }) + + const set = Effect.fn("SessionStatus.set")(function* (sessionID: SessionID, status: Info) { + const data = yield* InstanceState.get(state) + yield* bus.publish(Event.Status, { sessionID, status }) + if (status.type === "idle") { + yield* bus.publish(Event.Idle, { sessionID }) + data.delete(sessionID) + return + } + data.set(sessionID, status) + }) + + return Service.of({ get, list, set }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) + +export * as SessionStatus from "./status" diff --git a/packages/opencode/src/session/summary.ts b/packages/opencode/src/session/summary.ts index 9f8e70f162..2be08f3f43 100644 --- a/packages/opencode/src/session/summary.ts +++ b/packages/opencode/src/session/summary.ts @@ -7,159 +7,159 @@ import * as Session from "./session" import { MessageV2 } from "./message-v2" import { SessionID, MessageID } from "./schema" -export namespace SessionSummary { - function unquoteGitPath(input: string) { - if (!input.startsWith('"')) return input - if (!input.endsWith('"')) return input - const body = input.slice(1, -1) - const bytes: number[] = [] +function unquoteGitPath(input: string) { + if (!input.startsWith('"')) return input + if (!input.endsWith('"')) return input + const body = input.slice(1, -1) + const bytes: number[] = [] - for (let i = 0; i < body.length; i++) { - const char = body[i]! - if (char !== "\\") { - bytes.push(char.charCodeAt(0)) - continue - } - - const next = body[i + 1] - if (!next) { - bytes.push("\\".charCodeAt(0)) - continue - } - - if (next >= "0" && next <= "7") { - const chunk = body.slice(i + 1, i + 4) - const match = chunk.match(/^[0-7]{1,3}/) - if (!match) { - bytes.push(next.charCodeAt(0)) - i++ - continue - } - bytes.push(parseInt(match[0], 8)) - i += match[0].length - continue - } - - const escaped = - next === "n" - ? 
"\n" - : next === "r" - ? "\r" - : next === "t" - ? "\t" - : next === "b" - ? "\b" - : next === "f" - ? "\f" - : next === "v" - ? "\v" - : next === "\\" || next === '"' - ? next - : undefined - - bytes.push((escaped ?? next).charCodeAt(0)) - i++ + for (let i = 0; i < body.length; i++) { + const char = body[i]! + if (char !== "\\") { + bytes.push(char.charCodeAt(0)) + continue } - return Buffer.from(bytes).toString() + const next = body[i + 1] + if (!next) { + bytes.push("\\".charCodeAt(0)) + continue + } + + if (next >= "0" && next <= "7") { + const chunk = body.slice(i + 1, i + 4) + const match = chunk.match(/^[0-7]{1,3}/) + if (!match) { + bytes.push(next.charCodeAt(0)) + i++ + continue + } + bytes.push(parseInt(match[0], 8)) + i += match[0].length + continue + } + + const escaped = + next === "n" + ? "\n" + : next === "r" + ? "\r" + : next === "t" + ? "\t" + : next === "b" + ? "\b" + : next === "f" + ? "\f" + : next === "v" + ? "\v" + : next === "\\" || next === '"' + ? next + : undefined + + bytes.push((escaped ?? 
next).charCodeAt(0)) + i++ } - export interface Interface { - readonly summarize: (input: { sessionID: SessionID; messageID: MessageID }) => Effect.Effect - readonly diff: (input: { sessionID: SessionID; messageID?: MessageID }) => Effect.Effect - readonly computeDiff: (input: { messages: MessageV2.WithParts[] }) => Effect.Effect - } + return Buffer.from(bytes).toString() +} - export class Service extends Context.Service()("@opencode/SessionSummary") {} +export interface Interface { + readonly summarize: (input: { sessionID: SessionID; messageID: MessageID }) => Effect.Effect + readonly diff: (input: { sessionID: SessionID; messageID?: MessageID }) => Effect.Effect + readonly computeDiff: (input: { messages: MessageV2.WithParts[] }) => Effect.Effect +} - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const sessions = yield* Session.Service - const snapshot = yield* Snapshot.Service - const storage = yield* Storage.Service - const bus = yield* Bus.Service +export class Service extends Context.Service()("@opencode/SessionSummary") {} - const computeDiff = Effect.fn("SessionSummary.computeDiff")(function* (input: { - messages: MessageV2.WithParts[] - }) { - let from: string | undefined - let to: string | undefined - for (const item of input.messages) { - if (!from) { - for (const part of item.parts) { - if (part.type === "step-start" && part.snapshot) { - from = part.snapshot - break - } +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const sessions = yield* Session.Service + const snapshot = yield* Snapshot.Service + const storage = yield* Storage.Service + const bus = yield* Bus.Service + + const computeDiff = Effect.fn("SessionSummary.computeDiff")(function* (input: { + messages: MessageV2.WithParts[] + }) { + let from: string | undefined + let to: string | undefined + for (const item of input.messages) { + if (!from) { + for (const part of item.parts) { + if (part.type === "step-start" && part.snapshot) { + 
from = part.snapshot + break } } - for (const part of item.parts) { - if (part.type === "step-finish" && part.snapshot) to = part.snapshot - } } - if (from && to) return yield* snapshot.diffFull(from, to) - return [] + for (const part of item.parts) { + if (part.type === "step-finish" && part.snapshot) to = part.snapshot + } + } + if (from && to) return yield* snapshot.diffFull(from, to) + return [] + }) + + const summarize = Effect.fn("SessionSummary.summarize")(function* (input: { + sessionID: SessionID + messageID: MessageID + }) { + const all = yield* sessions.messages({ sessionID: input.sessionID }) + if (!all.length) return + + const diffs = yield* computeDiff({ messages: all }) + yield* sessions.setSummary({ + sessionID: input.sessionID, + summary: { + additions: diffs.reduce((sum, x) => sum + x.additions, 0), + deletions: diffs.reduce((sum, x) => sum + x.deletions, 0), + files: diffs.length, + }, }) + yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore) + yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs }) - const summarize = Effect.fn("SessionSummary.summarize")(function* (input: { - sessionID: SessionID - messageID: MessageID - }) { - const all = yield* sessions.messages({ sessionID: input.sessionID }) - if (!all.length) return + const messages = all.filter( + (m) => m.info.id === input.messageID || (m.info.role === "assistant" && m.info.parentID === input.messageID), + ) + const target = messages.find((m) => m.info.id === input.messageID) + if (!target || target.info.role !== "user") return + const msgDiffs = yield* computeDiff({ messages }) + target.info.summary = { ...target.info.summary, diffs: msgDiffs } + yield* sessions.updateMessage(target.info) + }) - const diffs = yield* computeDiff({ messages: all }) - yield* sessions.setSummary({ - sessionID: input.sessionID, - summary: { - additions: diffs.reduce((sum, x) => sum + x.additions, 0), - deletions: diffs.reduce((sum, x) => sum + 
x.deletions, 0), - files: diffs.length, - }, - }) - yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore) - yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs }) - - const messages = all.filter( - (m) => m.info.id === input.messageID || (m.info.role === "assistant" && m.info.parentID === input.messageID), - ) - const target = messages.find((m) => m.info.id === input.messageID) - if (!target || target.info.role !== "user") return - const msgDiffs = yield* computeDiff({ messages }) - target.info.summary = { ...target.info.summary, diffs: msgDiffs } - yield* sessions.updateMessage(target.info) + const diff = Effect.fn("SessionSummary.diff")(function* (input: { sessionID: SessionID; messageID?: MessageID }) { + const diffs = yield* storage + .read(["session_diff", input.sessionID]) + .pipe(Effect.catch(() => Effect.succeed([] as Snapshot.FileDiff[]))) + const next = diffs.map((item) => { + const file = unquoteGitPath(item.file) + if (file === item.file) return item + return { ...item, file } }) + const changed = next.some((item, i) => item.file !== diffs[i]?.file) + if (changed) yield* storage.write(["session_diff", input.sessionID], next).pipe(Effect.ignore) + return next + }) - const diff = Effect.fn("SessionSummary.diff")(function* (input: { sessionID: SessionID; messageID?: MessageID }) { - const diffs = yield* storage - .read(["session_diff", input.sessionID]) - .pipe(Effect.catch(() => Effect.succeed([] as Snapshot.FileDiff[]))) - const next = diffs.map((item) => { - const file = unquoteGitPath(item.file) - if (file === item.file) return item - return { ...item, file } - }) - const changed = next.some((item, i) => item.file !== diffs[i]?.file) - if (changed) yield* storage.write(["session_diff", input.sessionID], next).pipe(Effect.ignore) - return next - }) + return Service.of({ summarize, diff, computeDiff }) + }), +) - return Service.of({ summarize, diff, computeDiff }) - }), - ) +export const 
defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(Session.defaultLayer), + Layer.provide(Snapshot.defaultLayer), + Layer.provide(Storage.defaultLayer), + Layer.provide(Bus.layer), + ), +) - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(Session.defaultLayer), - Layer.provide(Snapshot.defaultLayer), - Layer.provide(Storage.defaultLayer), - Layer.provide(Bus.layer), - ), - ) +export const DiffInput = z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod.optional(), +}) - export const DiffInput = z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod.optional(), - }) -} +export * as SessionSummary from "./summary" diff --git a/packages/opencode/src/session/system.ts b/packages/opencode/src/session/system.ts index 952ff5b04b..ec60f6eef7 100644 --- a/packages/opencode/src/session/system.ts +++ b/packages/opencode/src/session/system.ts @@ -16,69 +16,69 @@ import type { Agent } from "@/agent/agent" import { Permission } from "@/permission" import { Skill } from "@/skill" -export namespace SystemPrompt { - export function provider(model: Provider.Model) { - if (model.api.id.includes("gpt-4") || model.api.id.includes("o1") || model.api.id.includes("o3")) - return [PROMPT_BEAST] - if (model.api.id.includes("gpt")) { - if (model.api.id.includes("codex")) { - return [PROMPT_CODEX] - } - return [PROMPT_GPT] +export function provider(model: Provider.Model) { + if (model.api.id.includes("gpt-4") || model.api.id.includes("o1") || model.api.id.includes("o3")) + return [PROMPT_BEAST] + if (model.api.id.includes("gpt")) { + if (model.api.id.includes("codex")) { + return [PROMPT_CODEX] } - if (model.api.id.includes("gemini-")) return [PROMPT_GEMINI] - if (model.api.id.includes("claude")) return [PROMPT_ANTHROPIC] - if (model.api.id.toLowerCase().includes("trinity")) return [PROMPT_TRINITY] - if (model.api.id.toLowerCase().includes("kimi")) return [PROMPT_KIMI] - return [PROMPT_DEFAULT] + return [PROMPT_GPT] } - - 
export interface Interface { - readonly environment: (model: Provider.Model) => string[] - readonly skills: (agent: Agent.Info) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SystemPrompt") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const skill = yield* Skill.Service - - return Service.of({ - environment(model) { - const project = Instance.project - return [ - [ - `You are powered by the model named ${model.api.id}. The exact model ID is ${model.providerID}/${model.api.id}`, - `Here is some useful information about the environment you are running in:`, - ``, - ` Working directory: ${Instance.directory}`, - ` Workspace root folder: ${Instance.worktree}`, - ` Is directory a git repo: ${project.vcs === "git" ? "yes" : "no"}`, - ` Platform: ${process.platform}`, - ` Today's date: ${new Date().toDateString()}`, - ``, - ].join("\n"), - ] - }, - - skills: Effect.fn("SystemPrompt.skills")(function* (agent: Agent.Info) { - if (Permission.disabled(["skill"], agent.permission).has("skill")) return - - const list = yield* skill.available(agent) - - return [ - "Skills provide specialized instructions and workflows for specific tasks.", - "Use the skill tool to load a skill when a task matches its description.", - // the agents seem to ingest the information about skills a bit better if we present a more verbose - // version of them here and a less verbose version in tool description, rather than vice versa. 
- Skill.fmt(list, { verbose: true }), - ].join("\n") - }), - }) - }), - ) - - export const defaultLayer = layer.pipe(Layer.provide(Skill.defaultLayer)) + if (model.api.id.includes("gemini-")) return [PROMPT_GEMINI] + if (model.api.id.includes("claude")) return [PROMPT_ANTHROPIC] + if (model.api.id.toLowerCase().includes("trinity")) return [PROMPT_TRINITY] + if (model.api.id.toLowerCase().includes("kimi")) return [PROMPT_KIMI] + return [PROMPT_DEFAULT] } + +export interface Interface { + readonly environment: (model: Provider.Model) => string[] + readonly skills: (agent: Agent.Info) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SystemPrompt") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const skill = yield* Skill.Service + + return Service.of({ + environment(model) { + const project = Instance.project + return [ + [ + `You are powered by the model named ${model.api.id}. The exact model ID is ${model.providerID}/${model.api.id}`, + `Here is some useful information about the environment you are running in:`, + ``, + ` Working directory: ${Instance.directory}`, + ` Workspace root folder: ${Instance.worktree}`, + ` Is directory a git repo: ${project.vcs === "git" ? "yes" : "no"}`, + ` Platform: ${process.platform}`, + ` Today's date: ${new Date().toDateString()}`, + ``, + ].join("\n"), + ] + }, + + skills: Effect.fn("SystemPrompt.skills")(function* (agent: Agent.Info) { + if (Permission.disabled(["skill"], agent.permission).has("skill")) return + + const list = yield* skill.available(agent) + + return [ + "Skills provide specialized instructions and workflows for specific tasks.", + "Use the skill tool to load a skill when a task matches its description.", + // the agents seem to ingest the information about skills a bit better if we present a more verbose + // version of them here and a less verbose version in tool description, rather than vice versa. 
+ Skill.fmt(list, { verbose: true }), + ].join("\n") + }), + }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Skill.defaultLayer)) + +export * as SystemPrompt from "./system" diff --git a/packages/opencode/src/session/todo.ts b/packages/opencode/src/session/todo.ts index eec2bb3a30..5523fdc86a 100644 --- a/packages/opencode/src/session/todo.ts +++ b/packages/opencode/src/session/todo.ts @@ -6,80 +6,80 @@ import z from "zod" import { Database, eq, asc } from "../storage" import { TodoTable } from "./session.sql" -export namespace Todo { - export const Info = z - .object({ - content: z.string().describe("Brief description of the task"), - status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"), - priority: z.string().describe("Priority level of the task: high, medium, low"), - }) - .meta({ ref: "Todo" }) - export type Info = z.infer +export const Info = z + .object({ + content: z.string().describe("Brief description of the task"), + status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"), + priority: z.string().describe("Priority level of the task: high, medium, low"), + }) + .meta({ ref: "Todo" }) +export type Info = z.infer - export const Event = { - Updated: BusEvent.define( - "todo.updated", - z.object({ - sessionID: SessionID.zod, - todos: z.array(Info), - }), - ), - } - - export interface Interface { - readonly update: (input: { sessionID: SessionID; todos: Info[] }) => Effect.Effect - readonly get: (sessionID: SessionID) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionTodo") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - - const update = Effect.fn("Todo.update")(function* (input: { sessionID: SessionID; todos: Info[] }) { - yield* Effect.sync(() => - Database.transaction((db) => { - db.delete(TodoTable).where(eq(TodoTable.session_id, 
input.sessionID)).run() - if (input.todos.length === 0) return - db.insert(TodoTable) - .values( - input.todos.map((todo, position) => ({ - session_id: input.sessionID, - content: todo.content, - status: todo.status, - priority: todo.priority, - position, - })), - ) - .run() - }), - ) - yield* bus.publish(Event.Updated, input) - }) - - const get = Effect.fn("Todo.get")(function* (sessionID: SessionID) { - const rows = yield* Effect.sync(() => - Database.use((db) => - db - .select() - .from(TodoTable) - .where(eq(TodoTable.session_id, sessionID)) - .orderBy(asc(TodoTable.position)) - .all(), - ), - ) - return rows.map((row) => ({ - content: row.content, - status: row.status, - priority: row.priority, - })) - }) - - return Service.of({ update, get }) +export const Event = { + Updated: BusEvent.define( + "todo.updated", + z.object({ + sessionID: SessionID.zod, + todos: z.array(Info), }), - ) - - export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) + ), } + +export interface Interface { + readonly update: (input: { sessionID: SessionID; todos: Info[] }) => Effect.Effect + readonly get: (sessionID: SessionID) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SessionTodo") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + + const update = Effect.fn("Todo.update")(function* (input: { sessionID: SessionID; todos: Info[] }) { + yield* Effect.sync(() => + Database.transaction((db) => { + db.delete(TodoTable).where(eq(TodoTable.session_id, input.sessionID)).run() + if (input.todos.length === 0) return + db.insert(TodoTable) + .values( + input.todos.map((todo, position) => ({ + session_id: input.sessionID, + content: todo.content, + status: todo.status, + priority: todo.priority, + position, + })), + ) + .run() + }), + ) + yield* bus.publish(Event.Updated, input) + }) + + const get = Effect.fn("Todo.get")(function* (sessionID: SessionID) { + const rows = yield* 
Effect.sync(() => + Database.use((db) => + db + .select() + .from(TodoTable) + .where(eq(TodoTable.session_id, sessionID)) + .orderBy(asc(TodoTable.position)) + .all(), + ), + ) + return rows.map((row) => ({ + content: row.content, + status: row.status, + priority: row.priority, + })) + }) + + return Service.of({ update, get }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) + +export * as Todo from "./todo" From 266fb9342238a62cf9d66437c81c91ee40e8632c Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 00:50:44 +0000 Subject: [PATCH 093/120] chore: generate --- packages/opencode/src/session/message-v2.ts | 11 ++--------- packages/opencode/src/session/prompt.ts | 12 +++--------- packages/opencode/src/session/summary.ts | 4 +--- packages/opencode/src/session/todo.ts | 7 +------ 4 files changed, 7 insertions(+), 27 deletions(-) diff --git a/packages/opencode/src/session/message-v2.ts b/packages/opencode/src/session/message-v2.ts index 5e7e008401..46686947e1 100644 --- a/packages/opencode/src/session/message-v2.ts +++ b/packages/opencode/src/session/message-v2.ts @@ -544,10 +544,7 @@ const part = (row: typeof PartTable.$inferSelect) => }) as Part const older = (row: Cursor) => - or( - lt(MessageTable.time_created, row.time), - and(eq(MessageTable.time_created, row.time), lt(MessageTable.id, row.id)), - ) + or(lt(MessageTable.time_created, row.time), and(eq(MessageTable.time_created, row.time), lt(MessageTable.id, row.id))) function hydrate(rows: (typeof MessageTable.$inferSelect)[]) { const ids = rows.map((row) => row.id) @@ -930,11 +927,7 @@ export function filterCompacted(msgs: Iterable) { const completed = new Set() for (const msg of msgs) { result.push(msg) - if ( - msg.info.role === "user" && - completed.has(msg.info.id) && - msg.parts.some((part) => part.type === "compaction") - ) + if (msg.info.role === "user" && completed.has(msg.info.id) && msg.parts.some((part) => part.type === "compaction")) break if 
(msg.info.role === "assistant" && msg.info.summary && msg.info.finish && !msg.info.error) completed.add(msg.info.parentID) diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 14fdf30780..9faa618788 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -1073,9 +1073,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the let start = parseInt(range.start) let end = range.end ? parseInt(range.end) : undefined if (start === end) { - const symbols = yield* lsp - .documentSymbol(filePathURI) - .pipe(Effect.catch(() => Effect.succeed([]))) + const symbols = yield* lsp.documentSymbol(filePathURI).pipe(Effect.catch(() => Effect.succeed([]))) for (const symbol of symbols) { let r: LSP.Range | undefined if ("range" in symbol) r = symbol.range @@ -1453,9 +1451,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the } if (step === 1) - yield* summary - .summarize({ sessionID, messageID: lastUser.id }) - .pipe(Effect.ignore, Effect.forkIn(scope)) + yield* summary.summarize({ sessionID, messageID: lastUser.id }).pipe(Effect.ignore, Effect.forkIn(scope)) if (step > 1 && lastFinished) { for (const m of msgs) { @@ -1723,9 +1719,7 @@ export const PromptInput = z.object({ tools: z .record(z.string(), z.boolean()) .optional() - .describe( - "@deprecated tools and permissions have been merged, you can set permissions on the session itself now", - ), + .describe("@deprecated tools and permissions have been merged, you can set permissions on the session itself now"), format: MessageV2.Format.optional(), system: z.string().optional(), variant: z.string().optional(), diff --git a/packages/opencode/src/session/summary.ts b/packages/opencode/src/session/summary.ts index 2be08f3f43..70b3102f6e 100644 --- a/packages/opencode/src/session/summary.ts +++ b/packages/opencode/src/session/summary.ts @@ -79,9 +79,7 @@ export const layer = 
Layer.effect( const storage = yield* Storage.Service const bus = yield* Bus.Service - const computeDiff = Effect.fn("SessionSummary.computeDiff")(function* (input: { - messages: MessageV2.WithParts[] - }) { + const computeDiff = Effect.fn("SessionSummary.computeDiff")(function* (input: { messages: MessageV2.WithParts[] }) { let from: string | undefined let to: string | undefined for (const item of input.messages) { diff --git a/packages/opencode/src/session/todo.ts b/packages/opencode/src/session/todo.ts index 5523fdc86a..4840f86a3d 100644 --- a/packages/opencode/src/session/todo.ts +++ b/packages/opencode/src/session/todo.ts @@ -61,12 +61,7 @@ export const layer = Layer.effect( const get = Effect.fn("Todo.get")(function* (sessionID: SessionID) { const rows = yield* Effect.sync(() => Database.use((db) => - db - .select() - .from(TodoTable) - .where(eq(TodoTable.session_id, sessionID)) - .orderBy(asc(TodoTable.position)) - .all(), + db.select().from(TodoTable).where(eq(TodoTable.session_id, sessionID)).orderBy(asc(TodoTable.position)).all(), ), ) return rows.map((row) => ({ From d2cb1613ace83609db6a60f3065914e322f93d02 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:59:42 -0400 Subject: [PATCH 094/120] refactor: unwrap SessionEntry namespace + self-reexport (#22977) --- packages/opencode/src/v2/session-entry.ts | 618 +++++++++++----------- 1 file changed, 309 insertions(+), 309 deletions(-) diff --git a/packages/opencode/src/v2/session-entry.ts b/packages/opencode/src/v2/session-entry.ts index 490f184227..140fa47d23 100644 --- a/packages/opencode/src/v2/session-entry.ts +++ b/packages/opencode/src/v2/session-entry.ts @@ -2,317 +2,317 @@ import { Schema } from "effect" import { SessionEvent } from "./session-event" import { produce } from "immer" -export namespace SessionEntry { - export const ID = SessionEvent.ID - export type ID = Schema.Schema.Type +export const ID = SessionEvent.ID +export type ID = Schema.Schema.Type - const Base = { - id: 
SessionEvent.ID, - metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), - time: Schema.Struct({ - created: Schema.DateTimeUtc, - }), - } +const Base = { + id: SessionEvent.ID, + metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), + time: Schema.Struct({ + created: Schema.DateTimeUtc, + }), +} - export class User extends Schema.Class("Session.Entry.User")({ - ...Base, - text: SessionEvent.Prompt.fields.text, - files: SessionEvent.Prompt.fields.files, - agents: SessionEvent.Prompt.fields.agents, - type: Schema.Literal("user"), - time: Schema.Struct({ - created: Schema.DateTimeUtc, - }), - }) { - static fromEvent(event: SessionEvent.Prompt) { - return new User({ - id: event.id, - type: "user", - metadata: event.metadata, - text: event.text, - files: event.files, - agents: event.agents, - time: { created: event.timestamp }, - }) - } - } - - export class Synthetic extends Schema.Class("Session.Entry.Synthetic")({ - ...SessionEvent.Synthetic.fields, - ...Base, - type: Schema.Literal("synthetic"), - }) {} - - export class ToolStatePending extends Schema.Class("Session.Entry.ToolState.Pending")({ - status: Schema.Literal("pending"), - input: Schema.String, - }) {} - - export class ToolStateRunning extends Schema.Class("Session.Entry.ToolState.Running")({ - status: Schema.Literal("running"), - input: Schema.Record(Schema.String, Schema.Unknown), - title: Schema.String.pipe(Schema.optional), - metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), - }) {} - - export class ToolStateCompleted extends Schema.Class("Session.Entry.ToolState.Completed")({ - status: Schema.Literal("completed"), - input: Schema.Record(Schema.String, Schema.Unknown), - output: Schema.String, - title: Schema.String, - metadata: Schema.Record(Schema.String, Schema.Unknown), - attachments: SessionEvent.FileAttachment.pipe(Schema.Array, Schema.optional), - }) {} - - export class ToolStateError extends 
Schema.Class("Session.Entry.ToolState.Error")({ - status: Schema.Literal("error"), - input: Schema.Record(Schema.String, Schema.Unknown), - error: Schema.String, - metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), - }) {} - - export const ToolState = Schema.Union([ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]) - export type ToolState = Schema.Schema.Type - - export class AssistantTool extends Schema.Class("Session.Entry.Assistant.Tool")({ - type: Schema.Literal("tool"), - callID: Schema.String, - name: Schema.String, - state: ToolState, - time: Schema.Struct({ - created: Schema.DateTimeUtc, - ran: Schema.DateTimeUtc.pipe(Schema.optional), - completed: Schema.DateTimeUtc.pipe(Schema.optional), - pruned: Schema.DateTimeUtc.pipe(Schema.optional), - }), - }) {} - - export class AssistantText extends Schema.Class("Session.Entry.Assistant.Text")({ - type: Schema.Literal("text"), - text: Schema.String, - }) {} - - export class AssistantReasoning extends Schema.Class("Session.Entry.Assistant.Reasoning")({ - type: Schema.Literal("reasoning"), - text: Schema.String, - }) {} - - export const AssistantContent = Schema.Union([AssistantText, AssistantReasoning, AssistantTool]) - export type AssistantContent = Schema.Schema.Type - - export class Assistant extends Schema.Class("Session.Entry.Assistant")({ - ...Base, - type: Schema.Literal("assistant"), - content: AssistantContent.pipe(Schema.Array), - cost: Schema.Number.pipe(Schema.optional), - tokens: Schema.Struct({ - input: Schema.Number, - output: Schema.Number, - reasoning: Schema.Number, - cache: Schema.Struct({ - read: Schema.Number, - write: Schema.Number, - }), - }).pipe(Schema.optional), - error: Schema.String.pipe(Schema.optional), - time: Schema.Struct({ - created: Schema.DateTimeUtc, - completed: Schema.DateTimeUtc.pipe(Schema.optional), - }), - }) {} - - export class Compaction extends Schema.Class("Session.Entry.Compaction")({ - ...SessionEvent.Compacted.fields, 
- type: Schema.Literal("compaction"), - ...Base, - }) {} - - export const Entry = Schema.Union([User, Synthetic, Assistant, Compaction]) - - export type Entry = Schema.Schema.Type - - export type Type = Entry["type"] - - export type History = { - entries: Entry[] - pending: Entry[] - } - - export function step(old: History, event: SessionEvent.Event): History { - return produce(old, (draft) => { - const lastAssistant = draft.entries.findLast((x) => x.type === "assistant") - const pendingAssistant = lastAssistant && !lastAssistant.time.completed ? lastAssistant : undefined - - switch (event.type) { - case "prompt": { - if (pendingAssistant) { - // @ts-expect-error - draft.pending.push(User.fromEvent(event)) - break - } - // @ts-expect-error - draft.entries.push(User.fromEvent(event)) - break - } - case "step.started": { - if (pendingAssistant) pendingAssistant.time.completed = event.timestamp - draft.entries.push({ - id: event.id, - type: "assistant", - time: { - created: event.timestamp, - }, - content: [], - }) - break - } - case "text.started": { - if (!pendingAssistant) break - pendingAssistant.content.push({ - type: "text", - text: "", - }) - break - } - case "text.delta": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "text") - if (match) match.text += event.delta - break - } - case "text.ended": { - break - } - case "tool.input.started": { - if (!pendingAssistant) break - pendingAssistant.content.push({ - type: "tool", - callID: event.callID, - name: event.name, - time: { - created: event.timestamp, - }, - state: { - status: "pending", - input: "", - }, - }) - break - } - case "tool.input.delta": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "tool") - // oxlint-disable-next-line no-base-to-string -- event.delta is a Schema.String (runtime string) - if (match) match.state.input += event.delta - break - } - case "tool.input.ended": { - break - } - case 
"tool.called": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "tool") - if (match) { - match.time.ran = event.timestamp - match.state = { - status: "running", - input: event.input, - } - } - break - } - case "tool.success": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "tool") - if (match && match.state.status === "running") { - match.state = { - status: "completed", - input: match.state.input, - output: event.output ?? "", - title: event.title, - metadata: event.metadata ?? {}, - // @ts-expect-error - attachments: event.attachments ?? [], - } - } - break - } - case "tool.error": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "tool") - if (match && match.state.status === "running") { - match.state = { - status: "error", - error: event.error, - input: match.state.input, - metadata: event.metadata ?? {}, - } - } - break - } - case "reasoning.started": { - if (!pendingAssistant) break - pendingAssistant.content.push({ - type: "reasoning", - text: "", - }) - break - } - case "reasoning.delta": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "reasoning") - if (match) match.text += event.delta - break - } - case "reasoning.ended": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "reasoning") - if (match) match.text = event.text - break - } - case "step.ended": { - if (!pendingAssistant) break - pendingAssistant.time.completed = event.timestamp - pendingAssistant.cost = event.cost - pendingAssistant.tokens = event.tokens - break - } - } +export class User extends Schema.Class("Session.Entry.User")({ + ...Base, + text: SessionEvent.Prompt.fields.text, + files: SessionEvent.Prompt.fields.files, + agents: SessionEvent.Prompt.fields.agents, + type: Schema.Literal("user"), + time: Schema.Struct({ + created: 
Schema.DateTimeUtc, + }), +}) { + static fromEvent(event: SessionEvent.Prompt) { + return new User({ + id: event.id, + type: "user", + metadata: event.metadata, + text: event.text, + files: event.files, + agents: event.agents, + time: { created: event.timestamp }, }) } - - /* - export interface Interface { - readonly decode: (row: typeof SessionEntryTable.$inferSelect) => Entry - readonly fromSession: (sessionID: SessionID) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionEntry") {} - - export const layer: Layer.Layer = Layer.effect( - Service, - Effect.gen(function* () { - const decodeEntry = Schema.decodeUnknownSync(Entry) - - const decode: (typeof Service.Service)["decode"] = (row) => decodeEntry({ ...row, id: row.id, type: row.type }) - - const fromSession = Effect.fn("SessionEntry.fromSession")(function* (sessionID: SessionID) { - return Database.use((db) => - db - .select() - .from(SessionEntryTable) - .where(eq(SessionEntryTable.session_id, sessionID)) - .orderBy(SessionEntryTable.id) - .all() - .map((row) => decode(row)), - ) - }) - - return Service.of({ - decode, - fromSession, - }) - }), - ) - */ } + +export class Synthetic extends Schema.Class("Session.Entry.Synthetic")({ + ...SessionEvent.Synthetic.fields, + ...Base, + type: Schema.Literal("synthetic"), +}) {} + +export class ToolStatePending extends Schema.Class("Session.Entry.ToolState.Pending")({ + status: Schema.Literal("pending"), + input: Schema.String, +}) {} + +export class ToolStateRunning extends Schema.Class("Session.Entry.ToolState.Running")({ + status: Schema.Literal("running"), + input: Schema.Record(Schema.String, Schema.Unknown), + title: Schema.String.pipe(Schema.optional), + metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), +}) {} + +export class ToolStateCompleted extends Schema.Class("Session.Entry.ToolState.Completed")({ + status: Schema.Literal("completed"), + input: Schema.Record(Schema.String, Schema.Unknown), + 
output: Schema.String, + title: Schema.String, + metadata: Schema.Record(Schema.String, Schema.Unknown), + attachments: SessionEvent.FileAttachment.pipe(Schema.Array, Schema.optional), +}) {} + +export class ToolStateError extends Schema.Class("Session.Entry.ToolState.Error")({ + status: Schema.Literal("error"), + input: Schema.Record(Schema.String, Schema.Unknown), + error: Schema.String, + metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), +}) {} + +export const ToolState = Schema.Union([ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]) +export type ToolState = Schema.Schema.Type + +export class AssistantTool extends Schema.Class("Session.Entry.Assistant.Tool")({ + type: Schema.Literal("tool"), + callID: Schema.String, + name: Schema.String, + state: ToolState, + time: Schema.Struct({ + created: Schema.DateTimeUtc, + ran: Schema.DateTimeUtc.pipe(Schema.optional), + completed: Schema.DateTimeUtc.pipe(Schema.optional), + pruned: Schema.DateTimeUtc.pipe(Schema.optional), + }), +}) {} + +export class AssistantText extends Schema.Class("Session.Entry.Assistant.Text")({ + type: Schema.Literal("text"), + text: Schema.String, +}) {} + +export class AssistantReasoning extends Schema.Class("Session.Entry.Assistant.Reasoning")({ + type: Schema.Literal("reasoning"), + text: Schema.String, +}) {} + +export const AssistantContent = Schema.Union([AssistantText, AssistantReasoning, AssistantTool]) +export type AssistantContent = Schema.Schema.Type + +export class Assistant extends Schema.Class("Session.Entry.Assistant")({ + ...Base, + type: Schema.Literal("assistant"), + content: AssistantContent.pipe(Schema.Array), + cost: Schema.Number.pipe(Schema.optional), + tokens: Schema.Struct({ + input: Schema.Number, + output: Schema.Number, + reasoning: Schema.Number, + cache: Schema.Struct({ + read: Schema.Number, + write: Schema.Number, + }), + }).pipe(Schema.optional), + error: Schema.String.pipe(Schema.optional), + time: 
Schema.Struct({ + created: Schema.DateTimeUtc, + completed: Schema.DateTimeUtc.pipe(Schema.optional), + }), +}) {} + +export class Compaction extends Schema.Class("Session.Entry.Compaction")({ + ...SessionEvent.Compacted.fields, + type: Schema.Literal("compaction"), + ...Base, +}) {} + +export const Entry = Schema.Union([User, Synthetic, Assistant, Compaction]) + +export type Entry = Schema.Schema.Type + +export type Type = Entry["type"] + +export type History = { + entries: Entry[] + pending: Entry[] +} + +export function step(old: History, event: SessionEvent.Event): History { + return produce(old, (draft) => { + const lastAssistant = draft.entries.findLast((x) => x.type === "assistant") + const pendingAssistant = lastAssistant && !lastAssistant.time.completed ? lastAssistant : undefined + + switch (event.type) { + case "prompt": { + if (pendingAssistant) { + // @ts-expect-error + draft.pending.push(User.fromEvent(event)) + break + } + // @ts-expect-error + draft.entries.push(User.fromEvent(event)) + break + } + case "step.started": { + if (pendingAssistant) pendingAssistant.time.completed = event.timestamp + draft.entries.push({ + id: event.id, + type: "assistant", + time: { + created: event.timestamp, + }, + content: [], + }) + break + } + case "text.started": { + if (!pendingAssistant) break + pendingAssistant.content.push({ + type: "text", + text: "", + }) + break + } + case "text.delta": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "text") + if (match) match.text += event.delta + break + } + case "text.ended": { + break + } + case "tool.input.started": { + if (!pendingAssistant) break + pendingAssistant.content.push({ + type: "tool", + callID: event.callID, + name: event.name, + time: { + created: event.timestamp, + }, + state: { + status: "pending", + input: "", + }, + }) + break + } + case "tool.input.delta": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => 
x.type === "tool") + // oxlint-disable-next-line no-base-to-string -- event.delta is a Schema.String (runtime string) + if (match) match.state.input += event.delta + break + } + case "tool.input.ended": { + break + } + case "tool.called": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "tool") + if (match) { + match.time.ran = event.timestamp + match.state = { + status: "running", + input: event.input, + } + } + break + } + case "tool.success": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "tool") + if (match && match.state.status === "running") { + match.state = { + status: "completed", + input: match.state.input, + output: event.output ?? "", + title: event.title, + metadata: event.metadata ?? {}, + // @ts-expect-error + attachments: event.attachments ?? [], + } + } + break + } + case "tool.error": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "tool") + if (match && match.state.status === "running") { + match.state = { + status: "error", + error: event.error, + input: match.state.input, + metadata: event.metadata ?? 
{}, + } + } + break + } + case "reasoning.started": { + if (!pendingAssistant) break + pendingAssistant.content.push({ + type: "reasoning", + text: "", + }) + break + } + case "reasoning.delta": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "reasoning") + if (match) match.text += event.delta + break + } + case "reasoning.ended": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "reasoning") + if (match) match.text = event.text + break + } + case "step.ended": { + if (!pendingAssistant) break + pendingAssistant.time.completed = event.timestamp + pendingAssistant.cost = event.cost + pendingAssistant.tokens = event.tokens + break + } + } + }) +} + +/* +export interface Interface { + readonly decode: (row: typeof SessionEntryTable.$inferSelect) => Entry + readonly fromSession: (sessionID: SessionID) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SessionEntry") {} + +export const layer: Layer.Layer = Layer.effect( + Service, + Effect.gen(function* () { + const decodeEntry = Schema.decodeUnknownSync(Entry) + + const decode: (typeof Service.Service)["decode"] = (row) => decodeEntry({ ...row, id: row.id, type: row.type }) + + const fromSession = Effect.fn("SessionEntry.fromSession")(function* (sessionID: SessionID) { + return Database.use((db) => + db + .select() + .from(SessionEntryTable) + .where(eq(SessionEntryTable.session_id, sessionID)) + .orderBy(SessionEntryTable.id) + .all() + .map((row) => decode(row)), + ) + }) + + return Service.of({ + decode, + fromSession, + }) + }), +) +*/ + +export * as SessionEntry from "./session-entry" From 54046e0b985d8ffd5e343cadcc479570b96f8a5b Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:00:30 -0400 Subject: [PATCH 095/120] refactor: unwrap SessionV2 namespace + self-reexport (#22978) --- packages/opencode/src/v2/session.ts | 114 ++++++++++++++-------------- 1 file 
changed, 57 insertions(+), 57 deletions(-) diff --git a/packages/opencode/src/v2/session.ts b/packages/opencode/src/v2/session.ts index ce1b39031f..79a6916120 100644 --- a/packages/opencode/src/v2/session.ts +++ b/packages/opencode/src/v2/session.ts @@ -4,66 +4,66 @@ import { Struct } from "effect" import { Session } from "@/session" import { SessionID } from "@/session/schema" -export namespace SessionV2 { - export const ID = SessionID +export const ID = SessionID - export type ID = Schema.Schema.Type +export type ID = Schema.Schema.Type - export class PromptInput extends Schema.Class("Session.PromptInput")({ - ...Struct.omit(SessionEntry.User.fields, ["time", "type"]), - id: Schema.optionalKey(SessionEntry.ID), - sessionID: SessionV2.ID, - }) {} +export class PromptInput extends Schema.Class("Session.PromptInput")({ + ...Struct.omit(SessionEntry.User.fields, ["time", "type"]), + id: Schema.optionalKey(SessionEntry.ID), + sessionID: ID, +}) {} - export class CreateInput extends Schema.Class("Session.CreateInput")({ - id: Schema.optionalKey(SessionV2.ID), - }) {} +export class CreateInput extends Schema.Class("Session.CreateInput")({ + id: Schema.optionalKey(ID), +}) {} - export class Info extends Schema.Class("Session.Info")({ - id: SessionV2.ID, - model: Schema.Struct({ - id: Schema.String, - providerID: Schema.String, - modelID: Schema.String, - }).pipe(Schema.optional), - }) {} +export class Info extends Schema.Class("Session.Info")({ + id: ID, + model: Schema.Struct({ + id: Schema.String, + providerID: Schema.String, + modelID: Schema.String, + }).pipe(Schema.optional), +}) {} - export interface Interface { - fromID: (id: SessionV2.ID) => Effect.Effect - create: (input: CreateInput) => Effect.Effect - prompt: (input: PromptInput) => Effect.Effect - } - - export class Service extends Context.Service()("Session.Service") {} - - export const layer = Layer.effect(Service)( - Effect.gen(function* () { - const session = yield* Session.Service - - const create: 
Interface["create"] = Effect.fn("Session.create")(function* (_input) { - throw new Error("Not implemented") - }) - - const prompt: Interface["prompt"] = Effect.fn("Session.prompt")(function* (_input) { - throw new Error("Not implemented") - }) - - const fromID: Interface["fromID"] = Effect.fn("Session.fromID")(function* (id) { - const match = yield* session.get(id) - return fromV1(match) - }) - - return Service.of({ - create, - prompt, - fromID, - }) - }), - ) - - function fromV1(input: Session.Info): Info { - return new Info({ - id: SessionV2.ID.make(input.id), - }) - } +export interface Interface { + fromID: (id: ID) => Effect.Effect + create: (input: CreateInput) => Effect.Effect + prompt: (input: PromptInput) => Effect.Effect } + +export class Service extends Context.Service()("Session.Service") {} + +export const layer = Layer.effect(Service)( + Effect.gen(function* () { + const session = yield* Session.Service + + const create: Interface["create"] = Effect.fn("Session.create")(function* (_input) { + throw new Error("Not implemented") + }) + + const prompt: Interface["prompt"] = Effect.fn("Session.prompt")(function* (_input) { + throw new Error("Not implemented") + }) + + const fromID: Interface["fromID"] = Effect.fn("Session.fromID")(function* (id) { + const match = yield* session.get(id) + return fromV1(match) + }) + + return Service.of({ + create, + prompt, + fromID, + }) + }), +) + +function fromV1(input: Session.Info): Info { + return new Info({ + id: ID.make(input.id), + }) +} + +export * as SessionV2 from "./session" From 5022895e2b9b556275c5cd419cb32452329ada08 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:01:24 -0400 Subject: [PATCH 096/120] refactor: unwrap ExperimentalHttpApiServer namespace + self-reexport (#22979) --- .../src/server/instance/httpapi/server.ts | 202 +++++++++--------- 1 file changed, 101 insertions(+), 101 deletions(-) diff --git a/packages/opencode/src/server/instance/httpapi/server.ts 
b/packages/opencode/src/server/instance/httpapi/server.ts index 299a177f50..362d0970b9 100644 --- a/packages/opencode/src/server/instance/httpapi/server.ts +++ b/packages/opencode/src/server/instance/httpapi/server.ts @@ -25,106 +25,106 @@ const Headers = Schema.Struct({ "x-opencode-directory": Schema.optional(Schema.String), }) -export namespace ExperimentalHttpApiServer { - function decode(input: string) { - try { - return decodeURIComponent(input) - } catch { - return input - } +function decode(input: string) { + try { + return decodeURIComponent(input) + } catch { + return input } - - class Unauthorized extends Schema.TaggedErrorClass()( - "Unauthorized", - { message: Schema.String }, - { httpApiStatus: 401 }, - ) {} - - class Authorization extends HttpApiMiddleware.Service()("@opencode/ExperimentalHttpApiAuthorization", { - error: Unauthorized, - security: { - basic: HttpApiSecurity.basic, - }, - }) {} - - const normalize = HttpRouter.middleware()( - Effect.gen(function* () { - return (effect) => - Effect.gen(function* () { - const query = yield* HttpServerRequest.schemaSearchParams(Query) - if (!query.auth_token) return yield* effect - const req = yield* HttpServerRequest.HttpServerRequest - const next = req.modify({ - headers: { - ...req.headers, - authorization: `Basic ${query.auth_token}`, - }, - }) - return yield* effect.pipe(Effect.provideService(HttpServerRequest.HttpServerRequest, next)) - }) - }), - ).layer - - const auth = Layer.succeed( - Authorization, - Authorization.of({ - basic: (effect, { credential }) => - Effect.gen(function* () { - if (!Flag.OPENCODE_SERVER_PASSWORD) return yield* effect - - const user = Flag.OPENCODE_SERVER_USERNAME ?? 
"opencode" - if (credential.username !== user) { - return yield* new Unauthorized({ message: "Unauthorized" }) - } - if (Redacted.value(credential.password) !== Flag.OPENCODE_SERVER_PASSWORD) { - return yield* new Unauthorized({ message: "Unauthorized" }) - } - return yield* effect - }), - }), - ) - - const instance = HttpRouter.middleware()( - Effect.gen(function* () { - return (effect) => - Effect.gen(function* () { - const query = yield* HttpServerRequest.schemaSearchParams(Query) - const headers = yield* HttpServerRequest.schemaHeaders(Headers) - const raw = query.directory || headers["x-opencode-directory"] || process.cwd() - const workspace = query.workspace || undefined - const ctx = yield* Effect.promise(() => - Instance.provide({ - directory: Filesystem.resolve(decode(raw)), - init: () => AppRuntime.runPromise(InstanceBootstrap), - fn: () => Instance.current, - }), - ) - - const next = workspace ? effect.pipe(Effect.provideService(WorkspaceRef, workspace)) : effect - return yield* next.pipe(Effect.provideService(InstanceRef, ctx)) - }) - }), - ).layer - - const QuestionSecured = QuestionApi.middleware(Authorization) - const PermissionSecured = PermissionApi.middleware(Authorization) - const ProviderSecured = ProviderApi.middleware(Authorization) - - export const routes = Layer.mergeAll( - HttpApiBuilder.layer(QuestionSecured).pipe(Layer.provide(questionHandlers)), - HttpApiBuilder.layer(PermissionSecured).pipe(Layer.provide(permissionHandlers)), - HttpApiBuilder.layer(ProviderSecured).pipe(Layer.provide(providerHandlers)), - ).pipe( - Layer.provide(auth), - Layer.provide(normalize), - Layer.provide(instance), - Layer.provide(HttpServer.layerServices), - Layer.provideMerge(Observability.layer), - ) - - export const webHandler = lazy(() => - HttpRouter.toWebHandler(routes, { - memoMap, - }), - ) } + +class Unauthorized extends Schema.TaggedErrorClass()( + "Unauthorized", + { message: Schema.String }, + { httpApiStatus: 401 }, +) {} + +class Authorization 
extends HttpApiMiddleware.Service()("@opencode/ExperimentalHttpApiAuthorization", { + error: Unauthorized, + security: { + basic: HttpApiSecurity.basic, + }, +}) {} + +const normalize = HttpRouter.middleware()( + Effect.gen(function* () { + return (effect) => + Effect.gen(function* () { + const query = yield* HttpServerRequest.schemaSearchParams(Query) + if (!query.auth_token) return yield* effect + const req = yield* HttpServerRequest.HttpServerRequest + const next = req.modify({ + headers: { + ...req.headers, + authorization: `Basic ${query.auth_token}`, + }, + }) + return yield* effect.pipe(Effect.provideService(HttpServerRequest.HttpServerRequest, next)) + }) + }), +).layer + +const auth = Layer.succeed( + Authorization, + Authorization.of({ + basic: (effect, { credential }) => + Effect.gen(function* () { + if (!Flag.OPENCODE_SERVER_PASSWORD) return yield* effect + + const user = Flag.OPENCODE_SERVER_USERNAME ?? "opencode" + if (credential.username !== user) { + return yield* new Unauthorized({ message: "Unauthorized" }) + } + if (Redacted.value(credential.password) !== Flag.OPENCODE_SERVER_PASSWORD) { + return yield* new Unauthorized({ message: "Unauthorized" }) + } + return yield* effect + }), + }), +) + +const instance = HttpRouter.middleware()( + Effect.gen(function* () { + return (effect) => + Effect.gen(function* () { + const query = yield* HttpServerRequest.schemaSearchParams(Query) + const headers = yield* HttpServerRequest.schemaHeaders(Headers) + const raw = query.directory || headers["x-opencode-directory"] || process.cwd() + const workspace = query.workspace || undefined + const ctx = yield* Effect.promise(() => + Instance.provide({ + directory: Filesystem.resolve(decode(raw)), + init: () => AppRuntime.runPromise(InstanceBootstrap), + fn: () => Instance.current, + }), + ) + + const next = workspace ? 
effect.pipe(Effect.provideService(WorkspaceRef, workspace)) : effect + return yield* next.pipe(Effect.provideService(InstanceRef, ctx)) + }) + }), +).layer + +const QuestionSecured = QuestionApi.middleware(Authorization) +const PermissionSecured = PermissionApi.middleware(Authorization) +const ProviderSecured = ProviderApi.middleware(Authorization) + +export const routes = Layer.mergeAll( + HttpApiBuilder.layer(QuestionSecured).pipe(Layer.provide(questionHandlers)), + HttpApiBuilder.layer(PermissionSecured).pipe(Layer.provide(permissionHandlers)), + HttpApiBuilder.layer(ProviderSecured).pipe(Layer.provide(providerHandlers)), +).pipe( + Layer.provide(auth), + Layer.provide(normalize), + Layer.provide(instance), + Layer.provide(HttpServer.layerServices), + Layer.provideMerge(Observability.layer), +) + +export const webHandler = lazy(() => + HttpRouter.toWebHandler(routes, { + memoMap, + }), +) + +export * as ExperimentalHttpApiServer from "./server" From 94878d76f8a32c36909647a0d9e1f6e383f60908 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:02:07 -0400 Subject: [PATCH 097/120] refactor: unwrap TuiPluginRuntime namespace + self-reexport (#22980) --- .../src/cli/cmd/tui/plugin/runtime.ts | 194 +++++++++--------- 1 file changed, 97 insertions(+), 97 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index e1b2eca1dd..e4a0e59eb1 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -918,113 +918,113 @@ async function installPluginBySpec( } } -export namespace TuiPluginRuntime { - let dir = "" - let loaded: Promise | undefined - let runtime: RuntimeState | undefined - export const Slot = View +let dir = "" +let loaded: Promise | undefined +let runtime: RuntimeState | undefined +export const Slot = View - export async function init(input: { api: HostPluginApi; config: TuiConfig.Info }) { - const cwd = 
process.cwd() - if (loaded) { - if (dir !== cwd) { - throw new Error(`TuiPluginRuntime.init() called with a different working directory. expected=${dir} got=${cwd}`) - } - return loaded +export async function init(input: { api: HostPluginApi; config: TuiConfig.Info }) { + const cwd = process.cwd() + if (loaded) { + if (dir !== cwd) { + throw new Error(`TuiPluginRuntime.init() called with a different working directory. expected=${dir} got=${cwd}`) } - - dir = cwd - loaded = load(input) return loaded } - export function list() { - if (!runtime) return [] - return listPluginStatus(runtime) - } + dir = cwd + loaded = load(input) + return loaded +} - export async function activatePlugin(id: string) { - return activatePluginById(runtime, id, true) - } +export function list() { + if (!runtime) return [] + return listPluginStatus(runtime) +} - export async function deactivatePlugin(id: string) { - return deactivatePluginById(runtime, id, true) - } +export async function activatePlugin(id: string) { + return activatePluginById(runtime, id, true) +} - export async function addPlugin(spec: string) { - return addPluginBySpec(runtime, spec) - } +export async function deactivatePlugin(id: string) { + return deactivatePluginById(runtime, id, true) +} - export async function installPlugin(spec: string, options?: { global?: boolean }) { - return installPluginBySpec(runtime, spec, options?.global) - } +export async function addPlugin(spec: string) { + return addPluginBySpec(runtime, spec) +} - export async function dispose() { - const task = loaded - loaded = undefined - dir = "" - if (task) await task - const state = runtime - runtime = undefined - if (!state) return - const queue = [...state.plugins].reverse() - for (const plugin of queue) { - await deactivatePluginEntry(state, plugin, false) - } - } +export async function installPlugin(spec: string, options?: { global?: boolean }) { + return installPluginBySpec(runtime, spec, options?.global) +} - async function load(input: { 
api: Api; config: TuiConfig.Info }) { - const { api, config } = input - const cwd = process.cwd() - const slots = setupSlots(api) - const next: RuntimeState = { - directory: cwd, - api, - slots, - plugins: [], - plugins_by_id: new Map(), - pending: new Map(), - } - runtime = next - try { - await Instance.provide({ - directory: cwd, - fn: async () => { - const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? []) - if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { - log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) - } - - for (const item of INTERNAL_TUI_PLUGINS) { - log.info("loading internal tui plugin", { id: item.id }) - const entry = loadInternalPlugin(item) - const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) - addPluginEntry(next, { - id: entry.id, - load: entry, - meta, - themes: {}, - plugin: entry.module.tui, - enabled: true, - }) - } - - const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) - await addExternalPluginEntries(next, ready) - - applyInitialPluginEnabledState(next, config) - for (const plugin of next.plugins) { - if (!plugin.enabled) continue - // Keep plugin execution sequential for deterministic side effects: - // command registration order affects keybind/command precedence, - // route registration is last-wins when ids collide, - // and hook chains rely on stable plugin ordering. 
- await activatePluginEntry(next, plugin, false) - } - }, - }) - } catch (error) { - fail("failed to load tui plugins", { directory: cwd, error }) - } +export async function dispose() { + const task = loaded + loaded = undefined + dir = "" + if (task) await task + const state = runtime + runtime = undefined + if (!state) return + const queue = [...state.plugins].reverse() + for (const plugin of queue) { + await deactivatePluginEntry(state, plugin, false) } } + +async function load(input: { api: Api; config: TuiConfig.Info }) { + const { api, config } = input + const cwd = process.cwd() + const slots = setupSlots(api) + const next: RuntimeState = { + directory: cwd, + api, + slots, + plugins: [], + plugins_by_id: new Map(), + pending: new Map(), + } + runtime = next + try { + await Instance.provide({ + directory: cwd, + fn: async () => { + const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? []) + if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { + log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) + } + + for (const item of INTERNAL_TUI_PLUGINS) { + log.info("loading internal tui plugin", { id: item.id }) + const entry = loadInternalPlugin(item) + const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) + addPluginEntry(next, { + id: entry.id, + load: entry, + meta, + themes: {}, + plugin: entry.module.tui, + enabled: true, + }) + } + + const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) + await addExternalPluginEntries(next, ready) + + applyInitialPluginEnabledState(next, config) + for (const plugin of next.plugins) { + if (!plugin.enabled) continue + // Keep plugin execution sequential for deterministic side effects: + // command registration order affects keybind/command precedence, + // route registration is last-wins when ids collide, + // and hook chains rely on stable plugin ordering. 
+ await activatePluginEntry(next, plugin, false) + } + }, + }) + } catch (error) { + fail("failed to load tui plugins", { directory: cwd, error }) + } +} + +export * as TuiPluginRuntime from "./runtime" From c59df636cc3d9b203e2b84dcefecba15eda5b457 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:02:09 -0400 Subject: [PATCH 098/120] chore: delete empty v2/session-common + collapse patch barrel (#22981) --- packages/opencode/src/patch/index.ts | 681 ++++++++++++++++++++- packages/opencode/src/patch/patch.ts | 678 -------------------- packages/opencode/src/v2/session-common.ts | 1 - 3 files changed, 680 insertions(+), 680 deletions(-) delete mode 100644 packages/opencode/src/patch/patch.ts delete mode 100644 packages/opencode/src/v2/session-common.ts diff --git a/packages/opencode/src/patch/index.ts b/packages/opencode/src/patch/index.ts index cec24614d8..19e1d7555b 100644 --- a/packages/opencode/src/patch/index.ts +++ b/packages/opencode/src/patch/index.ts @@ -1 +1,680 @@ -export * as Patch from "./patch" +import z from "zod" +import * as path from "path" +import * as fs from "fs/promises" +import { readFileSync } from "fs" +import { Log } from "../util" + +const log = Log.create({ service: "patch" }) + +// Schema definitions +export const PatchSchema = z.object({ + patchText: z.string().describe("The full patch text that describes all changes to be made"), +}) + +export type PatchParams = z.infer + +// Core types matching the Rust implementation +export interface ApplyPatchArgs { + patch: string + hunks: Hunk[] + workdir?: string +} + +export type Hunk = + | { type: "add"; path: string; contents: string } + | { type: "delete"; path: string } + | { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] } + +export interface UpdateFileChunk { + old_lines: string[] + new_lines: string[] + change_context?: string + is_end_of_file?: boolean +} + +export interface ApplyPatchAction { + changes: Map + patch: string + cwd: string +} 
+ +export type ApplyPatchFileChange = + | { type: "add"; content: string } + | { type: "delete"; content: string } + | { type: "update"; unified_diff: string; move_path?: string; new_content: string } + +export interface AffectedPaths { + added: string[] + modified: string[] + deleted: string[] +} + +export enum ApplyPatchError { + ParseError = "ParseError", + IoError = "IoError", + ComputeReplacements = "ComputeReplacements", + ImplicitInvocation = "ImplicitInvocation", +} + +export enum MaybeApplyPatch { + Body = "Body", + ShellParseError = "ShellParseError", + PatchParseError = "PatchParseError", + NotApplyPatch = "NotApplyPatch", +} + +export enum MaybeApplyPatchVerified { + Body = "Body", + ShellParseError = "ShellParseError", + CorrectnessError = "CorrectnessError", + NotApplyPatch = "NotApplyPatch", +} + +// Parser implementation +function parsePatchHeader( + lines: string[], + startIdx: number, +): { filePath: string; movePath?: string; nextIdx: number } | null { + const line = lines[startIdx] + + if (line.startsWith("*** Add File:")) { + const filePath = line.slice("*** Add File:".length).trim() + return filePath ? { filePath, nextIdx: startIdx + 1 } : null + } + + if (line.startsWith("*** Delete File:")) { + const filePath = line.slice("*** Delete File:".length).trim() + return filePath ? { filePath, nextIdx: startIdx + 1 } : null + } + + if (line.startsWith("*** Update File:")) { + const filePath = line.slice("*** Update File:".length).trim() + let movePath: string | undefined + let nextIdx = startIdx + 1 + + // Check for move directive + if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) { + movePath = lines[nextIdx].slice("*** Move to:".length).trim() + nextIdx++ + } + + return filePath ? 
{ filePath, movePath, nextIdx } : null + } + + return null +} + +function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } { + const chunks: UpdateFileChunk[] = [] + let i = startIdx + + while (i < lines.length && !lines[i].startsWith("***")) { + if (lines[i].startsWith("@@")) { + // Parse context line + const contextLine = lines[i].substring(2).trim() + i++ + + const oldLines: string[] = [] + const newLines: string[] = [] + let isEndOfFile = false + + // Parse change lines + while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) { + const changeLine = lines[i] + + if (changeLine === "*** End of File") { + isEndOfFile = true + i++ + break + } + + if (changeLine.startsWith(" ")) { + // Keep line - appears in both old and new + const content = changeLine.substring(1) + oldLines.push(content) + newLines.push(content) + } else if (changeLine.startsWith("-")) { + // Remove line - only in old + oldLines.push(changeLine.substring(1)) + } else if (changeLine.startsWith("+")) { + // Add line - only in new + newLines.push(changeLine.substring(1)) + } + + i++ + } + + chunks.push({ + old_lines: oldLines, + new_lines: newLines, + change_context: contextLine || undefined, + is_end_of_file: isEndOfFile || undefined, + }) + } else { + i++ + } + } + + return { chunks, nextIdx: i } +} + +function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } { + let content = "" + let i = startIdx + + while (i < lines.length && !lines[i].startsWith("***")) { + if (lines[i].startsWith("+")) { + content += lines[i].substring(1) + "\n" + } + i++ + } + + // Remove trailing newline + if (content.endsWith("\n")) { + content = content.slice(0, -1) + } + + return { content, nextIdx: i } +} + +function stripHeredoc(input: string): string { + // Match heredoc patterns like: cat <<'EOF'\n...\nEOF or < line.trim() === beginMarker) + const endIdx = lines.findIndex((line) => 
line.trim() === endMarker) + + if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) { + throw new Error("Invalid patch format: missing Begin/End markers") + } + + // Parse content between markers + i = beginIdx + 1 + + while (i < endIdx) { + const header = parsePatchHeader(lines, i) + if (!header) { + i++ + continue + } + + if (lines[i].startsWith("*** Add File:")) { + const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx) + hunks.push({ + type: "add", + path: header.filePath, + contents: content, + }) + i = nextIdx + } else if (lines[i].startsWith("*** Delete File:")) { + hunks.push({ + type: "delete", + path: header.filePath, + }) + i = header.nextIdx + } else if (lines[i].startsWith("*** Update File:")) { + const { chunks, nextIdx } = parseUpdateFileChunks(lines, header.nextIdx) + hunks.push({ + type: "update", + path: header.filePath, + move_path: header.movePath, + chunks, + }) + i = nextIdx + } else { + i++ + } + } + + return { hunks } +} + +// Apply patch functionality +export function maybeParseApplyPatch( + argv: string[], +): + | { type: MaybeApplyPatch.Body; args: ApplyPatchArgs } + | { type: MaybeApplyPatch.PatchParseError; error: Error } + | { type: MaybeApplyPatch.NotApplyPatch } { + const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"] + + // Direct invocation: apply_patch + if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) { + try { + const { hunks } = parsePatch(argv[1]) + return { + type: MaybeApplyPatch.Body, + args: { + patch: argv[1], + hunks, + }, + } + } catch (error) { + return { + type: MaybeApplyPatch.PatchParseError, + error: error as Error, + } + } + } + + // Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...' 
+ if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") { + // Simple extraction - in real implementation would need proper bash parsing + const script = argv[2] + const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/) + + if (heredocMatch) { + const patchContent = heredocMatch[2] + try { + const { hunks } = parsePatch(patchContent) + return { + type: MaybeApplyPatch.Body, + args: { + patch: patchContent, + hunks, + }, + } + } catch (error) { + return { + type: MaybeApplyPatch.PatchParseError, + error: error as Error, + } + } + } + } + + return { type: MaybeApplyPatch.NotApplyPatch } +} + +// File content manipulation +interface ApplyPatchFileUpdate { + unified_diff: string + content: string +} + +export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate { + // Read original file content + let originalContent: string + try { + originalContent = readFileSync(filePath, "utf-8") + } catch (error) { + throw new Error(`Failed to read file ${filePath}: ${error}`, { cause: error }) + } + + let originalLines = originalContent.split("\n") + + // Drop trailing empty element for consistent line counting + if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") { + originalLines.pop() + } + + const replacements = computeReplacements(originalLines, filePath, chunks) + let newLines = applyReplacements(originalLines, replacements) + + // Ensure trailing newline + if (newLines.length === 0 || newLines[newLines.length - 1] !== "") { + newLines.push("") + } + + const newContent = newLines.join("\n") + + // Generate unified diff + const unifiedDiff = generateUnifiedDiff(originalContent, newContent) + + return { + unified_diff: unifiedDiff, + content: newContent, + } +} + +function computeReplacements( + originalLines: string[], + filePath: string, + chunks: UpdateFileChunk[], +): Array<[number, number, string[]]> { + const replacements: Array<[number, number, 
string[]]> = [] + let lineIndex = 0 + + for (const chunk of chunks) { + // Handle context-based seeking + if (chunk.change_context) { + const contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex) + if (contextIdx === -1) { + throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`) + } + lineIndex = contextIdx + 1 + } + + // Handle pure addition (no old lines) + if (chunk.old_lines.length === 0) { + const insertionIdx = + originalLines.length > 0 && originalLines[originalLines.length - 1] === "" + ? originalLines.length - 1 + : originalLines.length + replacements.push([insertionIdx, 0, chunk.new_lines]) + continue + } + + // Try to match old lines in the file + let pattern = chunk.old_lines + let newSlice = chunk.new_lines + let found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) + + // Retry without trailing empty line if not found + if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") { + pattern = pattern.slice(0, -1) + if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") { + newSlice = newSlice.slice(0, -1) + } + found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) + } + + if (found !== -1) { + replacements.push([found, pattern.length, newSlice]) + lineIndex = found + pattern.length + } else { + throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`) + } + } + + // Sort replacements by index to apply in order + replacements.sort((a, b) => a[0] - b[0]) + + return replacements +} + +function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] { + // Apply replacements in reverse order to avoid index shifting + const result = [...lines] + + for (let i = replacements.length - 1; i >= 0; i--) { + const [startIdx, oldLen, newSegment] = replacements[i] + + // Remove old lines + result.splice(startIdx, oldLen) + + // Insert new lines + for (let j = 0; j < 
newSegment.length; j++) { + result.splice(startIdx + j, 0, newSegment[j]) + } + } + + return result +} + +// Normalize Unicode punctuation to ASCII equivalents (like Rust's normalize_unicode) +function normalizeUnicode(str: string): string { + return str + .replace(/[\u2018\u2019\u201A\u201B]/g, "'") // single quotes + .replace(/[\u201C\u201D\u201E\u201F]/g, '"') // double quotes + .replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, "-") // dashes + .replace(/\u2026/g, "...") // ellipsis + .replace(/\u00A0/g, " ") // non-breaking space +} + +type Comparator = (a: string, b: string) => boolean + +function tryMatch(lines: string[], pattern: string[], startIndex: number, compare: Comparator, eof: boolean): number { + // If EOF anchor, try matching from end of file first + if (eof) { + const fromEnd = lines.length - pattern.length + if (fromEnd >= startIndex) { + let matches = true + for (let j = 0; j < pattern.length; j++) { + if (!compare(lines[fromEnd + j], pattern[j])) { + matches = false + break + } + } + if (matches) return fromEnd + } + } + + // Forward search from startIndex + for (let i = startIndex; i <= lines.length - pattern.length; i++) { + let matches = true + for (let j = 0; j < pattern.length; j++) { + if (!compare(lines[i + j], pattern[j])) { + matches = false + break + } + } + if (matches) return i + } + + return -1 +} + +function seekSequence(lines: string[], pattern: string[], startIndex: number, eof = false): number { + if (pattern.length === 0) return -1 + + // Pass 1: exact match + const exact = tryMatch(lines, pattern, startIndex, (a, b) => a === b, eof) + if (exact !== -1) return exact + + // Pass 2: rstrip (trim trailing whitespace) + const rstrip = tryMatch(lines, pattern, startIndex, (a, b) => a.trimEnd() === b.trimEnd(), eof) + if (rstrip !== -1) return rstrip + + // Pass 3: trim (both ends) + const trim = tryMatch(lines, pattern, startIndex, (a, b) => a.trim() === b.trim(), eof) + if (trim !== -1) return trim + + // Pass 4: normalized 
(Unicode punctuation to ASCII) + const normalized = tryMatch( + lines, + pattern, + startIndex, + (a, b) => normalizeUnicode(a.trim()) === normalizeUnicode(b.trim()), + eof, + ) + return normalized +} + +function generateUnifiedDiff(oldContent: string, newContent: string): string { + const oldLines = oldContent.split("\n") + const newLines = newContent.split("\n") + + // Simple diff generation - in a real implementation you'd use a proper diff algorithm + let diff = "@@ -1 +1 @@\n" + + // Find changes (simplified approach) + const maxLen = Math.max(oldLines.length, newLines.length) + let hasChanges = false + + for (let i = 0; i < maxLen; i++) { + const oldLine = oldLines[i] || "" + const newLine = newLines[i] || "" + + if (oldLine !== newLine) { + if (oldLine) diff += `-${oldLine}\n` + if (newLine) diff += `+${newLine}\n` + hasChanges = true + } else if (oldLine) { + diff += ` ${oldLine}\n` + } + } + + return hasChanges ? diff : "" +} + +// Apply hunks to filesystem +export async function applyHunksToFiles(hunks: Hunk[]): Promise { + if (hunks.length === 0) { + throw new Error("No files were modified.") + } + + const added: string[] = [] + const modified: string[] = [] + const deleted: string[] = [] + + for (const hunk of hunks) { + switch (hunk.type) { + case "add": + // Create parent directories + const addDir = path.dirname(hunk.path) + if (addDir !== "." && addDir !== "/") { + await fs.mkdir(addDir, { recursive: true }) + } + + await fs.writeFile(hunk.path, hunk.contents, "utf-8") + added.push(hunk.path) + log.info(`Added file: ${hunk.path}`) + break + + case "delete": + await fs.unlink(hunk.path) + deleted.push(hunk.path) + log.info(`Deleted file: ${hunk.path}`) + break + + case "update": + const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks) + + if (hunk.move_path) { + // Handle file move + const moveDir = path.dirname(hunk.move_path) + if (moveDir !== "." 
&& moveDir !== "/") { + await fs.mkdir(moveDir, { recursive: true }) + } + + await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8") + await fs.unlink(hunk.path) + modified.push(hunk.move_path) + log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`) + } else { + // Regular update + await fs.writeFile(hunk.path, fileUpdate.content, "utf-8") + modified.push(hunk.path) + log.info(`Updated file: ${hunk.path}`) + } + break + } + } + + return { added, modified, deleted } +} + +// Main patch application function +export async function applyPatch(patchText: string): Promise { + const { hunks } = parsePatch(patchText) + return applyHunksToFiles(hunks) +} + +// Async version of maybeParseApplyPatchVerified +export async function maybeParseApplyPatchVerified( + argv: string[], + cwd: string, +): Promise< + | { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction } + | { type: MaybeApplyPatchVerified.CorrectnessError; error: Error } + | { type: MaybeApplyPatchVerified.NotApplyPatch } +> { + // Detect implicit patch invocation (raw patch without apply_patch command) + if (argv.length === 1) { + try { + parsePatch(argv[0]) + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: new Error(ApplyPatchError.ImplicitInvocation), + } + } catch { + // Not a patch, continue + } + } + + const result = maybeParseApplyPatch(argv) + + switch (result.type) { + case MaybeApplyPatch.Body: + const { args } = result + const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd + const changes = new Map() + + for (const hunk of args.hunks) { + const resolvedPath = path.resolve( + effectiveCwd, + hunk.type === "update" && hunk.move_path ? 
hunk.move_path : hunk.path, + ) + + switch (hunk.type) { + case "add": + changes.set(resolvedPath, { + type: "add", + content: hunk.contents, + }) + break + + case "delete": + // For delete, we need to read the current content + const deletePath = path.resolve(effectiveCwd, hunk.path) + try { + const content = await fs.readFile(deletePath, "utf-8") + changes.set(resolvedPath, { + type: "delete", + content, + }) + } catch { + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: new Error(`Failed to read file for deletion: ${deletePath}`), + } + } + break + + case "update": + const updatePath = path.resolve(effectiveCwd, hunk.path) + try { + const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks) + changes.set(resolvedPath, { + type: "update", + unified_diff: fileUpdate.unified_diff, + move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined, + new_content: fileUpdate.content, + }) + } catch (error) { + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: error as Error, + } + } + break + } + } + + return { + type: MaybeApplyPatchVerified.Body, + action: { + changes, + patch: args.patch, + cwd: effectiveCwd, + }, + } + + case MaybeApplyPatch.PatchParseError: + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: result.error, + } + + case MaybeApplyPatch.NotApplyPatch: + return { type: MaybeApplyPatchVerified.NotApplyPatch } + } +} + +export * as Patch from "." 
diff --git a/packages/opencode/src/patch/patch.ts b/packages/opencode/src/patch/patch.ts deleted file mode 100644 index 1dc99b4da9..0000000000 --- a/packages/opencode/src/patch/patch.ts +++ /dev/null @@ -1,678 +0,0 @@ -import z from "zod" -import * as path from "path" -import * as fs from "fs/promises" -import { readFileSync } from "fs" -import { Log } from "../util" - -const log = Log.create({ service: "patch" }) - -// Schema definitions -export const PatchSchema = z.object({ - patchText: z.string().describe("The full patch text that describes all changes to be made"), -}) - -export type PatchParams = z.infer - -// Core types matching the Rust implementation -export interface ApplyPatchArgs { - patch: string - hunks: Hunk[] - workdir?: string -} - -export type Hunk = - | { type: "add"; path: string; contents: string } - | { type: "delete"; path: string } - | { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] } - -export interface UpdateFileChunk { - old_lines: string[] - new_lines: string[] - change_context?: string - is_end_of_file?: boolean -} - -export interface ApplyPatchAction { - changes: Map - patch: string - cwd: string -} - -export type ApplyPatchFileChange = - | { type: "add"; content: string } - | { type: "delete"; content: string } - | { type: "update"; unified_diff: string; move_path?: string; new_content: string } - -export interface AffectedPaths { - added: string[] - modified: string[] - deleted: string[] -} - -export enum ApplyPatchError { - ParseError = "ParseError", - IoError = "IoError", - ComputeReplacements = "ComputeReplacements", - ImplicitInvocation = "ImplicitInvocation", -} - -export enum MaybeApplyPatch { - Body = "Body", - ShellParseError = "ShellParseError", - PatchParseError = "PatchParseError", - NotApplyPatch = "NotApplyPatch", -} - -export enum MaybeApplyPatchVerified { - Body = "Body", - ShellParseError = "ShellParseError", - CorrectnessError = "CorrectnessError", - NotApplyPatch = "NotApplyPatch", -} - 
-// Parser implementation -function parsePatchHeader( - lines: string[], - startIdx: number, -): { filePath: string; movePath?: string; nextIdx: number } | null { - const line = lines[startIdx] - - if (line.startsWith("*** Add File:")) { - const filePath = line.slice("*** Add File:".length).trim() - return filePath ? { filePath, nextIdx: startIdx + 1 } : null - } - - if (line.startsWith("*** Delete File:")) { - const filePath = line.slice("*** Delete File:".length).trim() - return filePath ? { filePath, nextIdx: startIdx + 1 } : null - } - - if (line.startsWith("*** Update File:")) { - const filePath = line.slice("*** Update File:".length).trim() - let movePath: string | undefined - let nextIdx = startIdx + 1 - - // Check for move directive - if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) { - movePath = lines[nextIdx].slice("*** Move to:".length).trim() - nextIdx++ - } - - return filePath ? { filePath, movePath, nextIdx } : null - } - - return null -} - -function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } { - const chunks: UpdateFileChunk[] = [] - let i = startIdx - - while (i < lines.length && !lines[i].startsWith("***")) { - if (lines[i].startsWith("@@")) { - // Parse context line - const contextLine = lines[i].substring(2).trim() - i++ - - const oldLines: string[] = [] - const newLines: string[] = [] - let isEndOfFile = false - - // Parse change lines - while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) { - const changeLine = lines[i] - - if (changeLine === "*** End of File") { - isEndOfFile = true - i++ - break - } - - if (changeLine.startsWith(" ")) { - // Keep line - appears in both old and new - const content = changeLine.substring(1) - oldLines.push(content) - newLines.push(content) - } else if (changeLine.startsWith("-")) { - // Remove line - only in old - oldLines.push(changeLine.substring(1)) - } else if (changeLine.startsWith("+")) 
{ - // Add line - only in new - newLines.push(changeLine.substring(1)) - } - - i++ - } - - chunks.push({ - old_lines: oldLines, - new_lines: newLines, - change_context: contextLine || undefined, - is_end_of_file: isEndOfFile || undefined, - }) - } else { - i++ - } - } - - return { chunks, nextIdx: i } -} - -function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } { - let content = "" - let i = startIdx - - while (i < lines.length && !lines[i].startsWith("***")) { - if (lines[i].startsWith("+")) { - content += lines[i].substring(1) + "\n" - } - i++ - } - - // Remove trailing newline - if (content.endsWith("\n")) { - content = content.slice(0, -1) - } - - return { content, nextIdx: i } -} - -function stripHeredoc(input: string): string { - // Match heredoc patterns like: cat <<'EOF'\n...\nEOF or < line.trim() === beginMarker) - const endIdx = lines.findIndex((line) => line.trim() === endMarker) - - if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) { - throw new Error("Invalid patch format: missing Begin/End markers") - } - - // Parse content between markers - i = beginIdx + 1 - - while (i < endIdx) { - const header = parsePatchHeader(lines, i) - if (!header) { - i++ - continue - } - - if (lines[i].startsWith("*** Add File:")) { - const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx) - hunks.push({ - type: "add", - path: header.filePath, - contents: content, - }) - i = nextIdx - } else if (lines[i].startsWith("*** Delete File:")) { - hunks.push({ - type: "delete", - path: header.filePath, - }) - i = header.nextIdx - } else if (lines[i].startsWith("*** Update File:")) { - const { chunks, nextIdx } = parseUpdateFileChunks(lines, header.nextIdx) - hunks.push({ - type: "update", - path: header.filePath, - move_path: header.movePath, - chunks, - }) - i = nextIdx - } else { - i++ - } - } - - return { hunks } -} - -// Apply patch functionality -export function maybeParseApplyPatch( - argv: string[], -): 
- | { type: MaybeApplyPatch.Body; args: ApplyPatchArgs } - | { type: MaybeApplyPatch.PatchParseError; error: Error } - | { type: MaybeApplyPatch.NotApplyPatch } { - const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"] - - // Direct invocation: apply_patch - if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) { - try { - const { hunks } = parsePatch(argv[1]) - return { - type: MaybeApplyPatch.Body, - args: { - patch: argv[1], - hunks, - }, - } - } catch (error) { - return { - type: MaybeApplyPatch.PatchParseError, - error: error as Error, - } - } - } - - // Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...' - if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") { - // Simple extraction - in real implementation would need proper bash parsing - const script = argv[2] - const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/) - - if (heredocMatch) { - const patchContent = heredocMatch[2] - try { - const { hunks } = parsePatch(patchContent) - return { - type: MaybeApplyPatch.Body, - args: { - patch: patchContent, - hunks, - }, - } - } catch (error) { - return { - type: MaybeApplyPatch.PatchParseError, - error: error as Error, - } - } - } - } - - return { type: MaybeApplyPatch.NotApplyPatch } -} - -// File content manipulation -interface ApplyPatchFileUpdate { - unified_diff: string - content: string -} - -export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate { - // Read original file content - let originalContent: string - try { - originalContent = readFileSync(filePath, "utf-8") - } catch (error) { - throw new Error(`Failed to read file ${filePath}: ${error}`, { cause: error }) - } - - let originalLines = originalContent.split("\n") - - // Drop trailing empty element for consistent line counting - if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") { - originalLines.pop() - } - - const replacements = 
computeReplacements(originalLines, filePath, chunks) - let newLines = applyReplacements(originalLines, replacements) - - // Ensure trailing newline - if (newLines.length === 0 || newLines[newLines.length - 1] !== "") { - newLines.push("") - } - - const newContent = newLines.join("\n") - - // Generate unified diff - const unifiedDiff = generateUnifiedDiff(originalContent, newContent) - - return { - unified_diff: unifiedDiff, - content: newContent, - } -} - -function computeReplacements( - originalLines: string[], - filePath: string, - chunks: UpdateFileChunk[], -): Array<[number, number, string[]]> { - const replacements: Array<[number, number, string[]]> = [] - let lineIndex = 0 - - for (const chunk of chunks) { - // Handle context-based seeking - if (chunk.change_context) { - const contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex) - if (contextIdx === -1) { - throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`) - } - lineIndex = contextIdx + 1 - } - - // Handle pure addition (no old lines) - if (chunk.old_lines.length === 0) { - const insertionIdx = - originalLines.length > 0 && originalLines[originalLines.length - 1] === "" - ? 
originalLines.length - 1 - : originalLines.length - replacements.push([insertionIdx, 0, chunk.new_lines]) - continue - } - - // Try to match old lines in the file - let pattern = chunk.old_lines - let newSlice = chunk.new_lines - let found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) - - // Retry without trailing empty line if not found - if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") { - pattern = pattern.slice(0, -1) - if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") { - newSlice = newSlice.slice(0, -1) - } - found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) - } - - if (found !== -1) { - replacements.push([found, pattern.length, newSlice]) - lineIndex = found + pattern.length - } else { - throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`) - } - } - - // Sort replacements by index to apply in order - replacements.sort((a, b) => a[0] - b[0]) - - return replacements -} - -function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] { - // Apply replacements in reverse order to avoid index shifting - const result = [...lines] - - for (let i = replacements.length - 1; i >= 0; i--) { - const [startIdx, oldLen, newSegment] = replacements[i] - - // Remove old lines - result.splice(startIdx, oldLen) - - // Insert new lines - for (let j = 0; j < newSegment.length; j++) { - result.splice(startIdx + j, 0, newSegment[j]) - } - } - - return result -} - -// Normalize Unicode punctuation to ASCII equivalents (like Rust's normalize_unicode) -function normalizeUnicode(str: string): string { - return str - .replace(/[\u2018\u2019\u201A\u201B]/g, "'") // single quotes - .replace(/[\u201C\u201D\u201E\u201F]/g, '"') // double quotes - .replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, "-") // dashes - .replace(/\u2026/g, "...") // ellipsis - .replace(/\u00A0/g, " ") // non-breaking space -} - -type 
Comparator = (a: string, b: string) => boolean - -function tryMatch(lines: string[], pattern: string[], startIndex: number, compare: Comparator, eof: boolean): number { - // If EOF anchor, try matching from end of file first - if (eof) { - const fromEnd = lines.length - pattern.length - if (fromEnd >= startIndex) { - let matches = true - for (let j = 0; j < pattern.length; j++) { - if (!compare(lines[fromEnd + j], pattern[j])) { - matches = false - break - } - } - if (matches) return fromEnd - } - } - - // Forward search from startIndex - for (let i = startIndex; i <= lines.length - pattern.length; i++) { - let matches = true - for (let j = 0; j < pattern.length; j++) { - if (!compare(lines[i + j], pattern[j])) { - matches = false - break - } - } - if (matches) return i - } - - return -1 -} - -function seekSequence(lines: string[], pattern: string[], startIndex: number, eof = false): number { - if (pattern.length === 0) return -1 - - // Pass 1: exact match - const exact = tryMatch(lines, pattern, startIndex, (a, b) => a === b, eof) - if (exact !== -1) return exact - - // Pass 2: rstrip (trim trailing whitespace) - const rstrip = tryMatch(lines, pattern, startIndex, (a, b) => a.trimEnd() === b.trimEnd(), eof) - if (rstrip !== -1) return rstrip - - // Pass 3: trim (both ends) - const trim = tryMatch(lines, pattern, startIndex, (a, b) => a.trim() === b.trim(), eof) - if (trim !== -1) return trim - - // Pass 4: normalized (Unicode punctuation to ASCII) - const normalized = tryMatch( - lines, - pattern, - startIndex, - (a, b) => normalizeUnicode(a.trim()) === normalizeUnicode(b.trim()), - eof, - ) - return normalized -} - -function generateUnifiedDiff(oldContent: string, newContent: string): string { - const oldLines = oldContent.split("\n") - const newLines = newContent.split("\n") - - // Simple diff generation - in a real implementation you'd use a proper diff algorithm - let diff = "@@ -1 +1 @@\n" - - // Find changes (simplified approach) - const maxLen = 
Math.max(oldLines.length, newLines.length) - let hasChanges = false - - for (let i = 0; i < maxLen; i++) { - const oldLine = oldLines[i] || "" - const newLine = newLines[i] || "" - - if (oldLine !== newLine) { - if (oldLine) diff += `-${oldLine}\n` - if (newLine) diff += `+${newLine}\n` - hasChanges = true - } else if (oldLine) { - diff += ` ${oldLine}\n` - } - } - - return hasChanges ? diff : "" -} - -// Apply hunks to filesystem -export async function applyHunksToFiles(hunks: Hunk[]): Promise { - if (hunks.length === 0) { - throw new Error("No files were modified.") - } - - const added: string[] = [] - const modified: string[] = [] - const deleted: string[] = [] - - for (const hunk of hunks) { - switch (hunk.type) { - case "add": - // Create parent directories - const addDir = path.dirname(hunk.path) - if (addDir !== "." && addDir !== "/") { - await fs.mkdir(addDir, { recursive: true }) - } - - await fs.writeFile(hunk.path, hunk.contents, "utf-8") - added.push(hunk.path) - log.info(`Added file: ${hunk.path}`) - break - - case "delete": - await fs.unlink(hunk.path) - deleted.push(hunk.path) - log.info(`Deleted file: ${hunk.path}`) - break - - case "update": - const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks) - - if (hunk.move_path) { - // Handle file move - const moveDir = path.dirname(hunk.move_path) - if (moveDir !== "." 
&& moveDir !== "/") { - await fs.mkdir(moveDir, { recursive: true }) - } - - await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8") - await fs.unlink(hunk.path) - modified.push(hunk.move_path) - log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`) - } else { - // Regular update - await fs.writeFile(hunk.path, fileUpdate.content, "utf-8") - modified.push(hunk.path) - log.info(`Updated file: ${hunk.path}`) - } - break - } - } - - return { added, modified, deleted } -} - -// Main patch application function -export async function applyPatch(patchText: string): Promise { - const { hunks } = parsePatch(patchText) - return applyHunksToFiles(hunks) -} - -// Async version of maybeParseApplyPatchVerified -export async function maybeParseApplyPatchVerified( - argv: string[], - cwd: string, -): Promise< - | { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction } - | { type: MaybeApplyPatchVerified.CorrectnessError; error: Error } - | { type: MaybeApplyPatchVerified.NotApplyPatch } -> { - // Detect implicit patch invocation (raw patch without apply_patch command) - if (argv.length === 1) { - try { - parsePatch(argv[0]) - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: new Error(ApplyPatchError.ImplicitInvocation), - } - } catch { - // Not a patch, continue - } - } - - const result = maybeParseApplyPatch(argv) - - switch (result.type) { - case MaybeApplyPatch.Body: - const { args } = result - const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd - const changes = new Map() - - for (const hunk of args.hunks) { - const resolvedPath = path.resolve( - effectiveCwd, - hunk.type === "update" && hunk.move_path ? 
hunk.move_path : hunk.path, - ) - - switch (hunk.type) { - case "add": - changes.set(resolvedPath, { - type: "add", - content: hunk.contents, - }) - break - - case "delete": - // For delete, we need to read the current content - const deletePath = path.resolve(effectiveCwd, hunk.path) - try { - const content = await fs.readFile(deletePath, "utf-8") - changes.set(resolvedPath, { - type: "delete", - content, - }) - } catch { - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: new Error(`Failed to read file for deletion: ${deletePath}`), - } - } - break - - case "update": - const updatePath = path.resolve(effectiveCwd, hunk.path) - try { - const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks) - changes.set(resolvedPath, { - type: "update", - unified_diff: fileUpdate.unified_diff, - move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined, - new_content: fileUpdate.content, - }) - } catch (error) { - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: error as Error, - } - } - break - } - } - - return { - type: MaybeApplyPatchVerified.Body, - action: { - changes, - patch: args.patch, - cwd: effectiveCwd, - }, - } - - case MaybeApplyPatch.PatchParseError: - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: result.error, - } - - case MaybeApplyPatch.NotApplyPatch: - return { type: MaybeApplyPatchVerified.NotApplyPatch } - } -} diff --git a/packages/opencode/src/v2/session-common.ts b/packages/opencode/src/v2/session-common.ts deleted file mode 100644 index 556bd79b61..0000000000 --- a/packages/opencode/src/v2/session-common.ts +++ /dev/null @@ -1 +0,0 @@ -export namespace SessionCommon {} From 8afb625bab10c44e5b0437af4550f020f332cdf5 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:19:01 -0400 Subject: [PATCH 099/120] refactor: extract Diagnostic namespace into lsp/diagnostic.ts + self-reexport (#22983) --- packages/opencode/src/lsp/diagnostic.ts | 29 
+++++++++++++++++++++++++ packages/opencode/src/lsp/lsp.ts | 28 +----------------------- 2 files changed, 30 insertions(+), 27 deletions(-) create mode 100644 packages/opencode/src/lsp/diagnostic.ts diff --git a/packages/opencode/src/lsp/diagnostic.ts b/packages/opencode/src/lsp/diagnostic.ts new file mode 100644 index 0000000000..4bc085e788 --- /dev/null +++ b/packages/opencode/src/lsp/diagnostic.ts @@ -0,0 +1,29 @@ +import * as LSPClient from "./client" + +const MAX_PER_FILE = 20 + +export function pretty(diagnostic: LSPClient.Diagnostic) { + const severityMap = { + 1: "ERROR", + 2: "WARN", + 3: "INFO", + 4: "HINT", + } + + const severity = severityMap[diagnostic.severity || 1] + const line = diagnostic.range.start.line + 1 + const col = diagnostic.range.start.character + 1 + + return `${severity} [${line}:${col}] ${diagnostic.message}` +} + +export function report(file: string, issues: LSPClient.Diagnostic[]) { + const errors = issues.filter((item) => item.severity === 1) + if (errors.length === 0) return "" + const limited = errors.slice(0, MAX_PER_FILE) + const more = errors.length - MAX_PER_FILE + const suffix = more > 0 ? `\n... 
and ${more} more` : "" + return `\n${limited.map(pretty).join("\n")}${suffix}\n` +} + +export * as Diagnostic from "./diagnostic" diff --git a/packages/opencode/src/lsp/lsp.ts b/packages/opencode/src/lsp/lsp.ts index d895e73256..97af8209bb 100644 --- a/packages/opencode/src/lsp/lsp.ts +++ b/packages/opencode/src/lsp/lsp.ts @@ -505,30 +505,4 @@ export const layer = Layer.effect( export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer)) -export namespace Diagnostic { - const MAX_PER_FILE = 20 - - export function pretty(diagnostic: LSPClient.Diagnostic) { - const severityMap = { - 1: "ERROR", - 2: "WARN", - 3: "INFO", - 4: "HINT", - } - - const severity = severityMap[diagnostic.severity || 1] - const line = diagnostic.range.start.line + 1 - const col = diagnostic.range.start.character + 1 - - return `${severity} [${line}:${col}] ${diagnostic.message}` - } - - export function report(file: string, issues: LSPClient.Diagnostic[]) { - const errors = issues.filter((item) => item.severity === 1) - if (errors.length === 0) return "" - const limited = errors.slice(0, MAX_PER_FILE) - const more = errors.length - MAX_PER_FILE - const suffix = more > 0 ? `\n... 
and ${more} more` : "" - return `\n${limited.map(pretty).join("\n")}${suffix}\n` - } -} +export * as Diagnostic from "./diagnostic" From 6405e3a7b12d49ade2f251360d221999836ccc02 Mon Sep 17 00:00:00 2001 From: Dax Date: Thu, 16 Apr 2026 21:32:36 -0400 Subject: [PATCH 100/120] tui: stabilize session dialog ordering (#22987) --- .../src/cli/cmd/tui/component/dialog-session-list.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx index 75c79dcdd8..60ef6087ba 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx @@ -113,7 +113,11 @@ export function DialogSessionList() { const today = new Date().toDateString() return sessions() .filter((x) => x.parentID === undefined) - .toSorted((a, b) => b.time.updated - a.time.updated) + .toSorted((a, b) => { + const updatedDay = new Date(b.time.updated).setHours(0, 0, 0, 0) - new Date(a.time.updated).setHours(0, 0, 0, 0) + if (updatedDay !== 0) return updatedDay + return b.time.created - a.time.created + }) .map((x) => { const workspace = x.workspaceID ? 
project.workspace.get(x.workspaceID) : undefined From 326471a25c50cb83d33e6b327bc88faf38a4db11 Mon Sep 17 00:00:00 2001 From: Dax Date: Thu, 16 Apr 2026 21:35:26 -0400 Subject: [PATCH 101/120] refactor: split config lsp and formatter schemas (#22986) --- AGENTS.md | 1 + packages/opencode/AGENTS.md | 4 ++ packages/opencode/src/config/config.ts | 55 ++--------------------- packages/opencode/src/config/formatter.ts | 13 ++++++ packages/opencode/src/config/index.ts | 2 + packages/opencode/src/config/lsp.ts | 39 ++++++++++++++++ 6 files changed, 63 insertions(+), 51 deletions(-) create mode 100644 packages/opencode/src/config/formatter.ts create mode 100644 packages/opencode/src/config/lsp.ts diff --git a/AGENTS.md b/AGENTS.md index a7895c831f..44d08ae955 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -14,6 +14,7 @@ - Use Bun APIs when possible, like `Bun.file()` - Rely on type inference when possible; avoid explicit type annotations or interfaces unless necessary for exports or clarity - Prefer functional array methods (flatMap, filter, map) over for loops; use type guards on filter to maintain type inference downstream +- In `src/config`, follow the existing self-export pattern at the top of the file (for example `export * as ConfigAgent from "./agent"`) when adding a new config module. Reduce total variable count by inlining when a value is only used once. diff --git a/packages/opencode/AGENTS.md b/packages/opencode/AGENTS.md index f0f32fdd16..761b9b5c5e 100644 --- a/packages/opencode/AGENTS.md +++ b/packages/opencode/AGENTS.md @@ -23,6 +23,10 @@ See `specs/effect/migration.md` for the compact pattern reference and examples. - Use `Effect.callback` for callback-based APIs. - Prefer `DateTime.nowAsDate` over `new Date(yield* Clock.currentTimeMillis)` when you need a `Date`. +## Module conventions + +- In `src/config`, follow the existing self-export pattern at the top of the file (for example `export * as ConfigAgent from "./agent"`) when adding a new config module. 
+ ## Schemas and errors - Use `Schema.Class` for multi-field data. diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index adccb6353b..2edc455df3 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -12,7 +12,6 @@ import { Auth } from "../auth" import { Env } from "../env" import { applyEdits, modify } from "jsonc-parser" import { Instance, type InstanceContext } from "../project/instance" -import * as LSPServer from "../lsp/server" import { InstallationLocal, InstallationVersion } from "@/installation/version" import { existsSync } from "fs" import { GlobalBus } from "@/bus/global" @@ -37,6 +36,8 @@ import { ConfigPermission } from "./permission" import { ConfigProvider } from "./provider" import { ConfigSkills } from "./skills" import { ConfigPaths } from "./paths" +import { ConfigFormatter } from "./formatter" +import { ConfigLSP } from "./lsp" const log = Log.create({ service: "config" }) @@ -186,56 +187,8 @@ export const Info = z ) .optional() .describe("MCP (Model Context Protocol) server configurations"), - formatter: z - .union([ - z.literal(false), - z.record( - z.string(), - z.object({ - disabled: z.boolean().optional(), - command: z.array(z.string()).optional(), - environment: z.record(z.string(), z.string()).optional(), - extensions: z.array(z.string()).optional(), - }), - ), - ]) - .optional(), - lsp: z - .union([ - z.literal(false), - z.record( - z.string(), - z.union([ - z.object({ - disabled: z.literal(true), - }), - z.object({ - command: z.array(z.string()), - extensions: z.array(z.string()).optional(), - disabled: z.boolean().optional(), - env: z.record(z.string(), z.string()).optional(), - initialization: z.record(z.string(), z.any()).optional(), - }), - ]), - ), - ]) - .optional() - .refine( - (data) => { - if (!data) return true - if (typeof data === "boolean") return true - const serverIds = new Set(Object.values(LSPServer).map((s) => s.id)) - - return 
Object.entries(data).every(([id, config]) => { - if (config.disabled) return true - if (serverIds.has(id)) return true - return Boolean(config.extensions) - }) - }, - { - error: "For custom LSP servers, 'extensions' array is required.", - }, - ), + formatter: ConfigFormatter.Info.optional(), + lsp: ConfigLSP.Info.optional(), instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"), layout: Layout.optional().describe("@deprecated Always uses stretch layout."), permission: ConfigPermission.Info.optional(), diff --git a/packages/opencode/src/config/formatter.ts b/packages/opencode/src/config/formatter.ts new file mode 100644 index 0000000000..7ac56214c9 --- /dev/null +++ b/packages/opencode/src/config/formatter.ts @@ -0,0 +1,13 @@ +export * as ConfigFormatter from "./formatter" + +import z from "zod" + +export const Entry = z.object({ + disabled: z.boolean().optional(), + command: z.array(z.string()).optional(), + environment: z.record(z.string(), z.string()).optional(), + extensions: z.array(z.string()).optional(), +}) + +export const Info = z.union([z.literal(false), z.record(z.string(), Entry)]) +export type Info = z.infer diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index c4a1c608b1..a05c29d25c 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -2,6 +2,8 @@ export * as Config from "./config" export * as ConfigAgent from "./agent" export * as ConfigCommand from "./command" export * as ConfigError from "./error" +export * as ConfigFormatter from "./formatter" +export * as ConfigLSP from "./lsp" export * as ConfigVariable from "./variable" export { ConfigManaged } from "./managed" export * as ConfigMarkdown from "./markdown" diff --git a/packages/opencode/src/config/lsp.ts b/packages/opencode/src/config/lsp.ts new file mode 100644 index 0000000000..afb83908b9 --- /dev/null +++ b/packages/opencode/src/config/lsp.ts @@ -0,0 
+1,39 @@ +export * as ConfigLSP from "./lsp" + +import z from "zod" +import * as LSPServer from "../lsp/server" + +export const Disabled = z.object({ + disabled: z.literal(true), +}) + +export const Entry = z.union([ + Disabled, + z.object({ + command: z.array(z.string()), + extensions: z.array(z.string()).optional(), + disabled: z.boolean().optional(), + env: z.record(z.string(), z.string()).optional(), + initialization: z.record(z.string(), z.any()).optional(), + }), +]) + +export const Info = z + .union([z.literal(false), z.record(z.string(), Entry)]) + .refine( + (data) => { + if (typeof data === "boolean") return true + const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) + + return Object.entries(data).every(([id, config]) => { + if (config.disabled) return true + if (serverIds.has(id)) return true + return Boolean(config.extensions) + }) + }, + { + error: "For custom LSP servers, 'extensions' array is required.", + }, + ) + +export type Info = z.infer From f13778215ae8927e8bb500f421f835566eb9a017 Mon Sep 17 00:00:00 2001 From: Dax Date: Thu, 16 Apr 2026 21:35:47 -0400 Subject: [PATCH 102/120] perf: speed up skill directory discovery (#22990) --- packages/opencode/src/skill/index.ts | 60 +++++++++++++++++++--------- 1 file changed, 42 insertions(+), 18 deletions(-) diff --git a/packages/opencode/src/skill/index.ts b/packages/opencode/src/skill/index.ts index b139b39e6e..dd5cc4e5d5 100644 --- a/packages/opencode/src/skill/index.ts +++ b/packages/opencode/src/skill/index.ts @@ -54,6 +54,16 @@ type State = { dirs: Set } +type DiscoveryState = { + matches: string[] + dirs: string[] +} + +type ScanState = { + matches: Set + dirs: Set +} + export interface Interface { readonly get: (name: string) => Effect.Effect readonly all: () => Effect.Effect @@ -102,8 +112,7 @@ const add = Effect.fnUntraced(function* (state: State, match: string, bus: Bus.I }) const scan = Effect.fnUntraced(function* ( - state: State, - bus: Bus.Interface, + state: 
ScanState, root: string, pattern: string, opts?: { dot?: boolean; scope?: string }, @@ -126,26 +135,26 @@ const scan = Effect.fnUntraced(function* ( }), ) - yield* Effect.forEach(matches, (match) => add(state, match, bus), { - concurrency: "unbounded", - discard: true, - }) + for (const match of matches) { + state.matches.add(match) + state.dirs.add(path.dirname(match)) + } }) -const loadSkills = Effect.fnUntraced(function* ( - state: State, +const discoverSkills = Effect.fnUntraced(function* ( config: Config.Interface, discovery: Discovery.Interface, - bus: Bus.Interface, fsys: AppFileSystem.Interface, directory: string, worktree: string, ) { + const state: ScanState = { matches: new Set(), dirs: new Set() } + if (!Flag.OPENCODE_DISABLE_EXTERNAL_SKILLS) { for (const dir of EXTERNAL_DIRS) { const root = path.join(Global.Path.home, dir) if (!(yield* fsys.isDir(root))) continue - yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "global" }) + yield* scan(state, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "global" }) } const upDirs = yield* fsys @@ -153,13 +162,13 @@ const loadSkills = Effect.fnUntraced(function* ( .pipe(Effect.catch(() => Effect.succeed([] as string[]))) for (const root of upDirs) { - yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "project" }) + yield* scan(state, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "project" }) } } const configDirs = yield* config.directories() for (const dir of configDirs) { - yield* scan(state, bus, dir, OPENCODE_SKILL_PATTERN) + yield* scan(state, dir, OPENCODE_SKILL_PATTERN) } const cfg = yield* config.get() @@ -171,17 +180,28 @@ const loadSkills = Effect.fnUntraced(function* ( continue } - yield* scan(state, bus, dir, SKILL_PATTERN) + yield* scan(state, dir, SKILL_PATTERN) } for (const url of cfg.skills?.urls ?? 
[]) { const pulledDirs = yield* discovery.pull(url) for (const dir of pulledDirs) { - state.dirs.add(dir) - yield* scan(state, bus, dir, SKILL_PATTERN) + yield* scan(state, dir, SKILL_PATTERN) } } + return { + matches: Array.from(state.matches), + dirs: Array.from(state.dirs), + } +}) + +const loadSkills = Effect.fnUntraced(function* (state: State, discovered: DiscoveryState, bus: Bus.Interface) { + yield* Effect.forEach(discovered.matches, (match) => add(state, match, bus), { + concurrency: "unbounded", + discard: true, + }) + log.info("init", { count: Object.keys(state.skills).length }) }) @@ -194,10 +214,15 @@ export const layer = Layer.effect( const config = yield* Config.Service const bus = yield* Bus.Service const fsys = yield* AppFileSystem.Service + const discovered = yield* InstanceState.make( + Effect.fn("Skill.discovery")(function* (ctx) { + return yield* discoverSkills(config, discovery, fsys, ctx.directory, ctx.worktree) + }), + ) const state = yield* InstanceState.make( Effect.fn("Skill.state")(function* (ctx) { const s: State = { skills: {}, dirs: new Set() } - yield* loadSkills(s, config, discovery, bus, fsys, ctx.directory, ctx.worktree) + yield* loadSkills(s, yield* InstanceState.get(discovered), bus) return s }), ) @@ -213,8 +238,7 @@ export const layer = Layer.effect( }) const dirs = Effect.fn("Skill.dirs")(function* () { - const s = yield* InstanceState.get(state) - return Array.from(s.dirs) + return (yield* InstanceState.get(discovered)).dirs }) const available = Effect.fn("Skill.available")(function* (agent?: Agent.Info) { From 5b9fa322551b77ddf2ab004c5491a82d37081b22 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 01:36:45 +0000 Subject: [PATCH 103/120] chore: generate --- packages/opencode/src/config/lsp.ts | 30 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/packages/opencode/src/config/lsp.ts b/packages/opencode/src/config/lsp.ts index afb83908b9..233f7e523c 100644 
--- a/packages/opencode/src/config/lsp.ts +++ b/packages/opencode/src/config/lsp.ts @@ -18,22 +18,20 @@ export const Entry = z.union([ }), ]) -export const Info = z - .union([z.literal(false), z.record(z.string(), Entry)]) - .refine( - (data) => { - if (typeof data === "boolean") return true - const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) +export const Info = z.union([z.literal(false), z.record(z.string(), Entry)]).refine( + (data) => { + if (typeof data === "boolean") return true + const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) - return Object.entries(data).every(([id, config]) => { - if (config.disabled) return true - if (serverIds.has(id)) return true - return Boolean(config.extensions) - }) - }, - { - error: "For custom LSP servers, 'extensions' array is required.", - }, - ) + return Object.entries(data).every(([id, config]) => { + if (config.disabled) return true + if (serverIds.has(id)) return true + return Boolean(config.extensions) + }) + }, + { + error: "For custom LSP servers, 'extensions' array is required.", + }, +) export type Info = z.infer From 9c87a144e879dd9b76c90cb1415e63005aac2843 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:43:57 -0400 Subject: [PATCH 104/120] refactor: normalize AccountRepo to canonical Effect service pattern (#22991) --- packages/opencode/src/account/account.ts | 4 +- packages/opencode/src/account/repo.ts | 264 +++++++++--------- packages/opencode/test/account/repo.test.ts | 78 +++--- .../opencode/test/account/service.test.ts | 20 +- .../opencode/test/share/share-next.test.ts | 2 +- 5 files changed, 184 insertions(+), 184 deletions(-) diff --git a/packages/opencode/src/account/account.ts b/packages/opencode/src/account/account.ts index 657c61b1e5..23981fd852 100644 --- a/packages/opencode/src/account/account.ts +++ b/packages/opencode/src/account/account.ts @@ -181,10 +181,10 @@ export interface Interface { export class Service extends 
Context.Service()("@opencode/Account") {} -export const layer: Layer.Layer = Layer.effect( +export const layer: Layer.Layer = Layer.effect( Service, Effect.gen(function* () { - const repo = yield* AccountRepo + const repo = yield* AccountRepo.Service const http = yield* HttpClient.HttpClient const httpRead = withTransientReadRetry(http) const httpOk = HttpClient.filterStatusOk(http) diff --git a/packages/opencode/src/account/repo.ts b/packages/opencode/src/account/repo.ts index 5d8a8e33f6..450db1bd74 100644 --- a/packages/opencode/src/account/repo.ts +++ b/packages/opencode/src/account/repo.ts @@ -13,154 +13,154 @@ type DbTransactionCallback = Parameters>[0] const ACCOUNT_STATE_ID = 1 -export namespace AccountRepo { - export interface Service { - readonly active: () => Effect.Effect, AccountRepoError> - readonly list: () => Effect.Effect - readonly remove: (accountID: AccountID) => Effect.Effect - readonly use: (accountID: AccountID, orgID: Option.Option) => Effect.Effect - readonly getRow: (accountID: AccountID) => Effect.Effect, AccountRepoError> - readonly persistToken: (input: { - accountID: AccountID - accessToken: AccessToken - refreshToken: RefreshToken - expiry: Option.Option - }) => Effect.Effect - readonly persistAccount: (input: { - id: AccountID - email: string - url: string - accessToken: AccessToken - refreshToken: RefreshToken - expiry: number - orgID: Option.Option - }) => Effect.Effect - } +export interface Interface { + readonly active: () => Effect.Effect, AccountRepoError> + readonly list: () => Effect.Effect + readonly remove: (accountID: AccountID) => Effect.Effect + readonly use: (accountID: AccountID, orgID: Option.Option) => Effect.Effect + readonly getRow: (accountID: AccountID) => Effect.Effect, AccountRepoError> + readonly persistToken: (input: { + accountID: AccountID + accessToken: AccessToken + refreshToken: RefreshToken + expiry: Option.Option + }) => Effect.Effect + readonly persistAccount: (input: { + id: AccountID + email: string 
+ url: string + accessToken: AccessToken + refreshToken: RefreshToken + expiry: number + orgID: Option.Option + }) => Effect.Effect } -export class AccountRepo extends Context.Service()("@opencode/AccountRepo") { - static readonly layer: Layer.Layer = Layer.effect( - AccountRepo, - Effect.gen(function* () { - const decode = Schema.decodeUnknownSync(Info) +export class Service extends Context.Service()("@opencode/AccountRepo") {} - const query = (f: DbTransactionCallback) => - Effect.try({ - try: () => Database.use(f), - catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), +export const layer: Layer.Layer = Layer.effect( + Service, + Effect.gen(function* () { + const decode = Schema.decodeUnknownSync(Info) + + const query = (f: DbTransactionCallback) => + Effect.try({ + try: () => Database.use(f), + catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), + }) + + const tx = (f: DbTransactionCallback) => + Effect.try({ + try: () => Database.transaction(f), + catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), + }) + + const current = (db: DbClient) => { + const state = db.select().from(AccountStateTable).where(eq(AccountStateTable.id, ACCOUNT_STATE_ID)).get() + if (!state?.active_account_id) return + const account = db.select().from(AccountTable).where(eq(AccountTable.id, state.active_account_id)).get() + if (!account) return + return { ...account, active_org_id: state.active_org_id ?? 
null } + } + + const state = (db: DbClient, accountID: AccountID, orgID: Option.Option) => { + const id = Option.getOrNull(orgID) + return db + .insert(AccountStateTable) + .values({ id: ACCOUNT_STATE_ID, active_account_id: accountID, active_org_id: id }) + .onConflictDoUpdate({ + target: AccountStateTable.id, + set: { active_account_id: accountID, active_org_id: id }, }) + .run() + } - const tx = (f: DbTransactionCallback) => - Effect.try({ - try: () => Database.transaction(f), - catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), - }) + const active = Effect.fn("AccountRepo.active")(() => + query((db) => current(db)).pipe(Effect.map((row) => (row ? Option.some(decode(row)) : Option.none()))), + ) - const current = (db: DbClient) => { - const state = db.select().from(AccountStateTable).where(eq(AccountStateTable.id, ACCOUNT_STATE_ID)).get() - if (!state?.active_account_id) return - const account = db.select().from(AccountTable).where(eq(AccountTable.id, state.active_account_id)).get() - if (!account) return - return { ...account, active_org_id: state.active_org_id ?? 
null } - } + const list = Effect.fn("AccountRepo.list")(() => + query((db) => + db + .select() + .from(AccountTable) + .all() + .map((row: AccountRow) => decode({ ...row, active_org_id: null })), + ), + ) - const state = (db: DbClient, accountID: AccountID, orgID: Option.Option) => { - const id = Option.getOrNull(orgID) - return db - .insert(AccountStateTable) - .values({ id: ACCOUNT_STATE_ID, active_account_id: accountID, active_org_id: id }) - .onConflictDoUpdate({ - target: AccountStateTable.id, - set: { active_account_id: accountID, active_org_id: id }, - }) + const remove = Effect.fn("AccountRepo.remove")((accountID: AccountID) => + tx((db) => { + db.update(AccountStateTable) + .set({ active_account_id: null, active_org_id: null }) + .where(eq(AccountStateTable.active_account_id, accountID)) .run() - } + db.delete(AccountTable).where(eq(AccountTable.id, accountID)).run() + }).pipe(Effect.asVoid), + ) - const active = Effect.fn("AccountRepo.active")(() => - query((db) => current(db)).pipe(Effect.map((row) => (row ? 
Option.some(decode(row)) : Option.none()))), - ) + const use = Effect.fn("AccountRepo.use")((accountID: AccountID, orgID: Option.Option) => + query((db) => state(db, accountID, orgID)).pipe(Effect.asVoid), + ) - const list = Effect.fn("AccountRepo.list")(() => - query((db) => - db - .select() - .from(AccountTable) - .all() - .map((row: AccountRow) => decode({ ...row, active_org_id: null })), - ), - ) + const getRow = Effect.fn("AccountRepo.getRow")((accountID: AccountID) => + query((db) => db.select().from(AccountTable).where(eq(AccountTable.id, accountID)).get()).pipe( + Effect.map(Option.fromNullishOr), + ), + ) - const remove = Effect.fn("AccountRepo.remove")((accountID: AccountID) => - tx((db) => { - db.update(AccountStateTable) - .set({ active_account_id: null, active_org_id: null }) - .where(eq(AccountStateTable.active_account_id, accountID)) - .run() - db.delete(AccountTable).where(eq(AccountTable.id, accountID)).run() - }).pipe(Effect.asVoid), - ) + const persistToken = Effect.fn("AccountRepo.persistToken")((input) => + query((db) => + db + .update(AccountTable) + .set({ + access_token: input.accessToken, + refresh_token: input.refreshToken, + token_expiry: Option.getOrNull(input.expiry), + }) + .where(eq(AccountTable.id, input.accountID)) + .run(), + ).pipe(Effect.asVoid), + ) - const use = Effect.fn("AccountRepo.use")((accountID: AccountID, orgID: Option.Option) => - query((db) => state(db, accountID, orgID)).pipe(Effect.asVoid), - ) + const persistAccount = Effect.fn("AccountRepo.persistAccount")((input) => + tx((db) => { + const url = normalizeServerUrl(input.url) - const getRow = Effect.fn("AccountRepo.getRow")((accountID: AccountID) => - query((db) => db.select().from(AccountTable).where(eq(AccountTable.id, accountID)).get()).pipe( - Effect.map(Option.fromNullishOr), - ), - ) - - const persistToken = Effect.fn("AccountRepo.persistToken")((input) => - query((db) => - db - .update(AccountTable) - .set({ - access_token: input.accessToken, - 
refresh_token: input.refreshToken, - token_expiry: Option.getOrNull(input.expiry), - }) - .where(eq(AccountTable.id, input.accountID)) - .run(), - ).pipe(Effect.asVoid), - ) - - const persistAccount = Effect.fn("AccountRepo.persistAccount")((input) => - tx((db) => { - const url = normalizeServerUrl(input.url) - - db.insert(AccountTable) - .values({ - id: input.id, + db.insert(AccountTable) + .values({ + id: input.id, + email: input.email, + url, + access_token: input.accessToken, + refresh_token: input.refreshToken, + token_expiry: input.expiry, + }) + .onConflictDoUpdate({ + target: AccountTable.id, + set: { email: input.email, url, access_token: input.accessToken, refresh_token: input.refreshToken, token_expiry: input.expiry, - }) - .onConflictDoUpdate({ - target: AccountTable.id, - set: { - email: input.email, - url, - access_token: input.accessToken, - refresh_token: input.refreshToken, - token_expiry: input.expiry, - }, - }) - .run() - void state(db, input.id, input.orgID) - }).pipe(Effect.asVoid), - ) + }, + }) + .run() + void state(db, input.id, input.orgID) + }).pipe(Effect.asVoid), + ) - return AccountRepo.of({ - active, - list, - remove, - use, - getRow, - persistToken, - persistAccount, - }) - }), - ) -} + return Service.of({ + active, + list, + remove, + use, + getRow, + persistToken, + persistAccount, + }) + }), +) + +export * as AccountRepo from "./repo" diff --git a/packages/opencode/test/account/repo.test.ts b/packages/opencode/test/account/repo.test.ts index 93d0481521..8e59b85b31 100644 --- a/packages/opencode/test/account/repo.test.ts +++ b/packages/opencode/test/account/repo.test.ts @@ -18,14 +18,14 @@ const it = testEffect(Layer.merge(AccountRepo.layer, truncate)) it.live("list returns empty when no accounts exist", () => Effect.gen(function* () { - const accounts = yield* AccountRepo.use((r) => r.list()) + const accounts = yield* AccountRepo.Service.use((r) => r.list()) expect(accounts).toEqual([]) }), ) it.live("active returns none when no 
accounts exist", () => Effect.gen(function* () { - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.isNone(active)).toBe(true) }), ) @@ -33,7 +33,7 @@ it.live("active returns none when no accounts exist", () => it.live("persistAccount inserts and getRow retrieves", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -45,13 +45,13 @@ it.live("persistAccount inserts and getRow retrieves", () => }), ) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) expect(Option.isSome(row)).toBe(true) const value = Option.getOrThrow(row) expect(value.id).toBe(AccountID.make("user-1")) expect(value.email).toBe("test@example.com") - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active).active_org_id).toBe(OrgID.make("org-1")) }), ) @@ -60,7 +60,7 @@ it.live("persistAccount normalizes trailing slashes in stored server URLs", () = Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -72,9 +72,9 @@ it.live("persistAccount normalizes trailing slashes in stored server URLs", () = }), ) - const row = yield* AccountRepo.use((r) => r.getRow(id)) - const active = yield* AccountRepo.use((r) => r.active()) - const list = yield* AccountRepo.use((r) => r.list()) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) + const active = yield* AccountRepo.Service.use((r) => r.active()) + const list = yield* AccountRepo.Service.use((r) => r.list()) expect(Option.getOrThrow(row).url).toBe("https://control.example.com") 
expect(Option.getOrThrow(active).url).toBe("https://control.example.com") @@ -87,7 +87,7 @@ it.live("persistAccount sets the active account and org", () => const id1 = AccountID.make("user-1") const id2 = AccountID.make("user-2") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id1, email: "first@example.com", @@ -99,7 +99,7 @@ it.live("persistAccount sets the active account and org", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id2, email: "second@example.com", @@ -112,7 +112,7 @@ it.live("persistAccount sets the active account and org", () => ) // Last persisted account is active with its org - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.isSome(active)).toBe(true) expect(Option.getOrThrow(active).id).toBe(AccountID.make("user-2")) expect(Option.getOrThrow(active).active_org_id).toBe(OrgID.make("org-2")) @@ -124,7 +124,7 @@ it.live("list returns all accounts", () => const id1 = AccountID.make("user-1") const id2 = AccountID.make("user-2") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id1, email: "a@example.com", @@ -136,7 +136,7 @@ it.live("list returns all accounts", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id2, email: "b@example.com", @@ -148,7 +148,7 @@ it.live("list returns all accounts", () => }), ) - const accounts = yield* AccountRepo.use((r) => r.list()) + const accounts = yield* AccountRepo.Service.use((r) => r.list()) expect(accounts.length).toBe(2) expect(accounts.map((a) => a.email).sort()).toEqual(["a@example.com", "b@example.com"]) }), @@ -158,7 +158,7 @@ it.live("remove deletes an account", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => 
r.persistAccount({ id, email: "test@example.com", @@ -170,9 +170,9 @@ it.live("remove deletes an account", () => }), ) - yield* AccountRepo.use((r) => r.remove(id)) + yield* AccountRepo.Service.use((r) => r.remove(id)) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) expect(Option.isNone(row)).toBe(true) }), ) @@ -182,7 +182,7 @@ it.live("use stores the selected org and marks the account active", () => const id1 = AccountID.make("user-1") const id2 = AccountID.make("user-2") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id1, email: "first@example.com", @@ -194,7 +194,7 @@ it.live("use stores the selected org and marks the account active", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id2, email: "second@example.com", @@ -206,13 +206,13 @@ it.live("use stores the selected org and marks the account active", () => }), ) - yield* AccountRepo.use((r) => r.use(id1, Option.some(OrgID.make("org-99")))) - const active1 = yield* AccountRepo.use((r) => r.active()) + yield* AccountRepo.Service.use((r) => r.use(id1, Option.some(OrgID.make("org-99")))) + const active1 = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active1).id).toBe(id1) expect(Option.getOrThrow(active1).active_org_id).toBe(OrgID.make("org-99")) - yield* AccountRepo.use((r) => r.use(id1, Option.none())) - const active2 = yield* AccountRepo.use((r) => r.active()) + yield* AccountRepo.Service.use((r) => r.use(id1, Option.none())) + const active2 = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active2).active_org_id).toBeNull() }), ) @@ -221,7 +221,7 @@ it.live("persistToken updates token fields", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: 
"test@example.com", @@ -234,7 +234,7 @@ it.live("persistToken updates token fields", () => ) const expiry = Date.now() + 7200_000 - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistToken({ accountID: id, accessToken: AccessToken.make("new_token"), @@ -243,7 +243,7 @@ it.live("persistToken updates token fields", () => }), ) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("new_token")) expect(value.refresh_token).toBe(RefreshToken.make("new_refresh")) @@ -255,7 +255,7 @@ it.live("persistToken with no expiry sets token_expiry to null", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -267,7 +267,7 @@ it.live("persistToken with no expiry sets token_expiry to null", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistToken({ accountID: id, accessToken: AccessToken.make("new_token"), @@ -276,7 +276,7 @@ it.live("persistToken with no expiry sets token_expiry to null", () => }), ) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) expect(Option.getOrThrow(row).token_expiry).toBeNull() }), ) @@ -285,7 +285,7 @@ it.live("persistAccount upserts on conflict", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -297,7 +297,7 @@ it.live("persistAccount upserts on conflict", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -309,14 +309,14 @@ it.live("persistAccount upserts on conflict", () => }), ) - const 
accounts = yield* AccountRepo.use((r) => r.list()) + const accounts = yield* AccountRepo.Service.use((r) => r.list()) expect(accounts.length).toBe(1) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("at_v2")) - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active).active_org_id).toBe(OrgID.make("org-2")) }), ) @@ -325,7 +325,7 @@ it.live("remove clears active state when deleting the active account", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -337,16 +337,16 @@ it.live("remove clears active state when deleting the active account", () => }), ) - yield* AccountRepo.use((r) => r.remove(id)) + yield* AccountRepo.Service.use((r) => r.remove(id)) - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.isNone(active)).toBe(true) }), ) it.live("getRow returns none for nonexistent account", () => Effect.gen(function* () { - const row = yield* AccountRepo.use((r) => r.getRow(AccountID.make("nope"))) + const row = yield* AccountRepo.Service.use((r) => r.getRow(AccountID.make("nope"))) expect(Option.isNone(row)).toBe(true) }), ) diff --git a/packages/opencode/test/account/service.test.ts b/packages/opencode/test/account/service.test.ts index 053fd2a0ed..f0daab3a15 100644 --- a/packages/opencode/test/account/service.test.ts +++ b/packages/opencode/test/account/service.test.ts @@ -122,7 +122,7 @@ it.live("login maps transport failures to account transport errors", () => it.live("orgsByAccount groups orgs per account", () => Effect.gen(function* () { - yield* AccountRepo.use((r) => + yield* 
AccountRepo.Service.use((r) => r.persistAccount({ id: AccountID.make("user-1"), email: "one@example.com", @@ -134,7 +134,7 @@ it.live("orgsByAccount groups orgs per account", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: AccountID.make("user-2"), email: "two@example.com", @@ -177,7 +177,7 @@ it.live("token refresh persists the new token", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "user@example.com", @@ -206,7 +206,7 @@ it.live("token refresh persists the new token", () => expect(Option.getOrThrow(token)).toBeDefined() expect(String(Option.getOrThrow(token))).toBe("at_new") - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("at_new")) expect(value.refresh_token).toBe(RefreshToken.make("rt_new")) @@ -218,7 +218,7 @@ it.live("token refreshes before expiry when inside the eager refresh window", () Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "user@example.com", @@ -251,7 +251,7 @@ it.live("token refreshes before expiry when inside the eager refresh window", () expect(String(Option.getOrThrow(token))).toBe("at_new") expect(refreshCalls).toBe(1) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("at_new")) expect(value.refresh_token).toBe(RefreshToken.make("rt_new")) @@ -262,7 +262,7 @@ it.live("concurrent config and token requests coalesce token refresh", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + 
yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "user@example.com", @@ -315,7 +315,7 @@ it.live("concurrent config and token requests coalesce token refresh", () => expect(String(Option.getOrThrow(token))).toBe("at_new") expect(refreshCalls).toBe(1) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("at_new")) expect(value.refresh_token).toBe(RefreshToken.make("rt_new")) @@ -326,7 +326,7 @@ it.live("config sends the selected org header", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "user@example.com", @@ -388,7 +388,7 @@ it.live("poll stores the account and first org on success", () => expect(res.email).toBe("user@example.com") } - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active)).toEqual( expect.objectContaining({ id: "user-1", diff --git a/packages/opencode/test/share/share-next.test.ts b/packages/opencode/test/share/share-next.test.ts index 2359f06a31..930c4062f6 100644 --- a/packages/opencode/test/share/share-next.test.ts +++ b/packages/opencode/test/share/share-next.test.ts @@ -72,7 +72,7 @@ const share = (id: SessionID) => Database.use((db) => db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, id)).get()) const seed = (url: string, org?: string) => - AccountRepo.use((repo) => + AccountRepo.Service.use((repo) => repo.persistAccount({ id: AccountID.make("account-1"), email: "user@example.com", From 4f8986aa48cbab66ca6e72272c3c7d27ffc8e0eb Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:51:02 -0400 Subject: [PATCH 105/120] refactor: unwrap Question namespace + fix script to emit "." 
for index.ts (#22992) --- .../script/unwrap-and-self-reexport.ts | 11 +- packages/opencode/src/question/index.ts | 384 +++++++++--------- 2 files changed, 200 insertions(+), 195 deletions(-) diff --git a/packages/opencode/script/unwrap-and-self-reexport.ts b/packages/opencode/script/unwrap-and-self-reexport.ts index 5ae703182e..09256f3a51 100644 --- a/packages/opencode/script/unwrap-and-self-reexport.ts +++ b/packages/opencode/script/unwrap-and-self-reexport.ts @@ -207,10 +207,15 @@ const rewrittenBody = dedented.map(rewriteLine) // Assemble the new file. Collapse multiple trailing blank lines so the // self-reexport sits cleanly at the end. +// +// When the file is itself `index.ts`, prefer `"."` over `"./index"` — both are +// valid but `"."` matches the existing convention in the codebase (e.g. +// pty/index.ts, file/index.ts, etc.) and avoids referencing "index" literally. const basename = path.basename(absPath, ".ts") +const reexportSource = basename === "index" ? "." : `./${basename}` const assembled = [...before, ...rewrittenBody, ...after].join("\n") const trimmed = assembled.replace(/\s+$/g, "") -const output = `${trimmed}\n\nexport * as ${nsName} from "./${basename}"\n` +const output = `${trimmed}\n\nexport * as ${nsName} from "${reexportSource}"\n` if (dryRun) { console.log(`--- dry run: ${path.relative(process.cwd(), absPath)} ---`) @@ -218,7 +223,7 @@ if (dryRun) { console.log(`body lines: ${body.length}`) console.log(`declared names: ${Array.from(declaredNames).join(", ") || "(none)"}`) console.log(`self-refs rewr: ${rewriteCount}`) - console.log(`self-reexport: export * as ${nsName} from "./${basename}"`) + console.log(`self-reexport: export * as ${nsName} from "${reexportSource}"`) console.log(`output preview (last 10 lines):`) const outputLines = output.split("\n") for (const l of outputLines.slice(Math.max(0, outputLines.length - 10))) { @@ -231,7 +236,7 @@ fs.writeFileSync(absPath, output) console.log(`unwrapped ${path.relative(process.cwd(), 
absPath)} → ${nsName}`) console.log(` body lines: ${body.length}`) console.log(` self-refs rewr: ${rewriteCount}`) -console.log(` self-reexport: export * as ${nsName} from "./${basename}"`) +console.log(` self-reexport: export * as ${nsName} from "${reexportSource}"`) console.log("") console.log("Next: verify with") console.log(" bunx --bun tsgo --noEmit") diff --git a/packages/opencode/src/question/index.ts b/packages/opencode/src/question/index.ts index 627d04564d..3b377c9827 100644 --- a/packages/opencode/src/question/index.ts +++ b/packages/opencode/src/question/index.ts @@ -8,222 +8,222 @@ import { Log } from "@/util" import { withStatics } from "@/util/schema" import { QuestionID } from "./schema" -export namespace Question { - const log = Log.create({ service: "question" }) +const log = Log.create({ service: "question" }) - // Schemas +// Schemas - export class Option extends Schema.Class