From 6ce481e95b7c5da814efcb379b8578552514e43e Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 10:09:14 -0400 Subject: [PATCH 001/201] move useful scripts to script folder --- packages/opencode/{ => script}/time.ts | 2 ++ packages/opencode/{ => script}/trace-imports.ts | 0 2 files changed, 2 insertions(+) rename packages/opencode/{ => script}/time.ts (88%) rename packages/opencode/{ => script}/trace-imports.ts (100%) diff --git a/packages/opencode/time.ts b/packages/opencode/script/time.ts similarity index 88% rename from packages/opencode/time.ts rename to packages/opencode/script/time.ts index c00936db26..0db795ed0a 100755 --- a/packages/opencode/time.ts +++ b/packages/opencode/script/time.ts @@ -1,3 +1,5 @@ +#!/usr/bin/env bun + import path from "path" const toDynamicallyImport = path.join(process.cwd(), process.argv[2]) await import(toDynamicallyImport) diff --git a/packages/opencode/trace-imports.ts b/packages/opencode/script/trace-imports.ts similarity index 100% rename from packages/opencode/trace-imports.ts rename to packages/opencode/script/trace-imports.ts From 8ab17f5ce0bb3e74a49aa661fc1beef8314b8e04 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 11:18:44 -0400 Subject: [PATCH 002/201] tui: fix path comparison in theme installer to handle different path formats --- packages/opencode/src/cli/cmd/tui/plugin/runtime.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index af37ffbd76..cdb778d146 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -194,7 +194,8 @@ function createThemeInstaller( } return } - if (prev?.dest === dest && prev.mtime === mtime && prev.size === size) return + if (path.normalize(prev?.dest ?? 
"") === path.normalize(dest) && prev.mtime === mtime && prev.size === size) + return } const text = await Filesystem.readText(src).catch((error) => { From 2b1696f1d174fde75f57abd9f498b53bf00e0d68 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 11:28:19 -0400 Subject: [PATCH 003/201] Revert "tui: fix path comparison in theme installer to handle different path formats" This reverts commit 8ab17f5ce0bb3e74a49aa661fc1beef8314b8e04. --- packages/opencode/src/cli/cmd/tui/plugin/runtime.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index cdb778d146..af37ffbd76 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -194,8 +194,7 @@ function createThemeInstaller( } return } - if (path.normalize(prev?.dest ?? "") === path.normalize(dest) && prev.mtime === mtime && prev.size === size) - return + if (prev?.dest === dest && prev.mtime === mtime && prev.size === size) return } const text = await Filesystem.readText(src).catch((error) => { From a200f6fb8b5e02aaf50a4f3e6c1a377f66e1c582 Mon Sep 17 00:00:00 2001 From: Frank Date: Thu, 16 Apr 2026 11:32:54 -0400 Subject: [PATCH 004/201] zen: opus 4.7 --- packages/web/src/content/docs/ar/zen.mdx | 2 ++ packages/web/src/content/docs/bs/zen.mdx | 2 ++ packages/web/src/content/docs/da/zen.mdx | 2 ++ packages/web/src/content/docs/de/zen.mdx | 2 ++ packages/web/src/content/docs/es/zen.mdx | 2 ++ packages/web/src/content/docs/fr/zen.mdx | 2 ++ packages/web/src/content/docs/it/zen.mdx | 2 ++ packages/web/src/content/docs/ja/zen.mdx | 2 ++ packages/web/src/content/docs/ko/zen.mdx | 2 ++ packages/web/src/content/docs/nb/zen.mdx | 2 ++ packages/web/src/content/docs/pl/zen.mdx | 2 ++ packages/web/src/content/docs/pt-br/zen.mdx | 2 ++ packages/web/src/content/docs/ru/zen.mdx | 2 ++ packages/web/src/content/docs/th/zen.mdx | 2 ++ 
packages/web/src/content/docs/tr/zen.mdx | 2 ++ packages/web/src/content/docs/zen.mdx | 2 ++ packages/web/src/content/docs/zh-cn/zen.mdx | 2 ++ packages/web/src/content/docs/zh-tw/zen.mdx | 2 ++ 18 files changed, 36 insertions(+) diff --git a/packages/web/src/content/docs/ar/zen.mdx b/packages/web/src/content/docs/ar/zen.mdx index 7a3931c85c..5d056c9b50 100644 --- a/packages/web/src/content/docs/ar/zen.mdx +++ b/packages/web/src/content/docs/ar/zen.mdx @@ -74,6 +74,7 @@ OpenCode Zen هي بوابة AI تتيح لك الوصول إلى هذه الن | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -128,6 +129,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/bs/zen.mdx b/packages/web/src/content/docs/bs/zen.mdx index 76a25e5f65..5f46290649 100644 --- a/packages/web/src/content/docs/bs/zen.mdx +++ b/packages/web/src/content/docs/bs/zen.mdx @@ -79,6 +79,7 @@ Našim modelima možete pristupiti i preko sljedećih API endpointa. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Podržavamo pay-as-you-go model. Ispod su cijene **po 1M tokena**. | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/da/zen.mdx b/packages/web/src/content/docs/da/zen.mdx index 146fa02803..456b98ef1a 100644 --- a/packages/web/src/content/docs/da/zen.mdx +++ b/packages/web/src/content/docs/da/zen.mdx @@ -79,6 +79,7 @@ Du kan også få adgang til vores modeller gennem følgende API-endpoints. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Vi understøtter en pay-as-you-go-model. Nedenfor er priserne **pr. 1M tokens**. | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/de/zen.mdx b/packages/web/src/content/docs/de/zen.mdx index bc9438672b..238047500b 100644 --- a/packages/web/src/content/docs/de/zen.mdx +++ b/packages/web/src/content/docs/de/zen.mdx @@ -70,6 +70,7 @@ Du kannst auch über die folgenden API-Endpunkte auf unsere Modelle zugreifen. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ Wir unterstützen ein Pay-as-you-go-Modell. Unten findest du die Preise **pro 1M | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/es/zen.mdx b/packages/web/src/content/docs/es/zen.mdx index 6fc0fb13e1..ed8f8b334f 100644 --- a/packages/web/src/content/docs/es/zen.mdx +++ b/packages/web/src/content/docs/es/zen.mdx @@ -79,6 +79,7 @@ También puedes acceder a nuestros modelos a través de los siguientes endpoints | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | 
claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Admitimos un modelo de pago por uso. A continuación se muestran los precios **p | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/fr/zen.mdx b/packages/web/src/content/docs/fr/zen.mdx index a71328a72e..e9fb209d21 100644 --- a/packages/web/src/content/docs/fr/zen.mdx +++ b/packages/web/src/content/docs/fr/zen.mdx @@ -70,6 +70,7 @@ Vous pouvez également accéder à nos modèles via les points de terminaison AP | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ Nous prenons en charge un modèle de paiement à l'utilisation. 
Vous trouverez c | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/it/zen.mdx b/packages/web/src/content/docs/it/zen.mdx index cdc2c9a45b..338fc289c8 100644 --- a/packages/web/src/content/docs/it/zen.mdx +++ b/packages/web/src/content/docs/it/zen.mdx @@ -79,6 +79,7 @@ Puoi anche accedere ai nostri modelli tramite i seguenti endpoint API. | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Supportiamo un modello pay-as-you-go. Qui sotto trovi i prezzi **per 1M token**. 
| GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/ja/zen.mdx b/packages/web/src/content/docs/ja/zen.mdx index b8da1308c1..970b883940 100644 --- a/packages/web/src/content/docs/ja/zen.mdx +++ b/packages/web/src/content/docs/ja/zen.mdx @@ -70,6 +70,7 @@ OpenCode Zen は、OpenCode のほかのプロバイダーと同じように動 | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/ko/zen.mdx b/packages/web/src/content/docs/ko/zen.mdx index af74b71afd..10abc4ba60 100644 --- a/packages/web/src/content/docs/ko/zen.mdx +++ b/packages/web/src/content/docs/ko/zen.mdx @@ -70,6 +70,7 @@ OpenCode Zen은 OpenCode의 다른 provider와 똑같이 작동합니다. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/nb/zen.mdx b/packages/web/src/content/docs/nb/zen.mdx index a216be1064..605dadee59 100644 --- a/packages/web/src/content/docs/nb/zen.mdx +++ b/packages/web/src/content/docs/nb/zen.mdx @@ -79,6 +79,7 @@ Du kan også få tilgang til modellene våre gjennom følgende API-endepunkter. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Vi støtter en pay-as-you-go-modell. Nedenfor er prisene **per 1M tokens**. | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/pl/zen.mdx b/packages/web/src/content/docs/pl/zen.mdx index ffbdb66fa9..aaefc179b3 100644 --- a/packages/web/src/content/docs/pl/zen.mdx +++ b/packages/web/src/content/docs/pl/zen.mdx @@ -79,6 +79,7 @@ Możesz też uzyskać dostęp do naszych modeli przez poniższe endpointy API. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ Obsługujemy model pay-as-you-go. Poniżej znajdują się ceny **za 1M tokenów* | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/pt-br/zen.mdx b/packages/web/src/content/docs/pt-br/zen.mdx index d911e441f4..a44c4de600 100644 --- a/packages/web/src/content/docs/pt-br/zen.mdx +++ b/packages/web/src/content/docs/pt-br/zen.mdx @@ -70,6 +70,7 @@ Você também pode acessar nossos modelos pelos seguintes endpoints de API. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ Oferecemos um modelo pay-as-you-go. Abaixo estão os preços **por 1M tokens**. | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/ru/zen.mdx b/packages/web/src/content/docs/ru/zen.mdx index 8a92bf502e..555ca68524 100644 --- a/packages/web/src/content/docs/ru/zen.mdx +++ b/packages/web/src/content/docs/ru/zen.mdx @@ -79,6 +79,7 @@ OpenCode Zen работает как любой другой провайдер | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | 
`https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -135,6 +136,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/th/zen.mdx b/packages/web/src/content/docs/th/zen.mdx index 2c82c1e07d..0e5ddcbfc7 100644 --- a/packages/web/src/content/docs/th/zen.mdx +++ b/packages/web/src/content/docs/th/zen.mdx @@ -72,6 +72,7 @@ OpenCode Zen ทำงานเหมือน provider อื่น ๆ ใน | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -126,6 +127,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/tr/zen.mdx b/packages/web/src/content/docs/tr/zen.mdx index 30aa2bb9d7..16f0fb0dd4 100644 --- a/packages/web/src/content/docs/tr/zen.mdx +++ 
b/packages/web/src/content/docs/tr/zen.mdx @@ -70,6 +70,7 @@ Modellerimize aşağıdaki API uç noktaları aracılığıyla da erişebilirsin | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ Kullandıkça öde modelini destekliyoruz. Aşağıda **1M token başına** fiya | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/zen.mdx b/packages/web/src/content/docs/zen.mdx index cf634a9c98..f4c4f51106 100644 --- a/packages/web/src/content/docs/zen.mdx +++ b/packages/web/src/content/docs/zen.mdx @@ -79,6 +79,7 @@ You can also access our models through the following API endpoints. 
| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -130,6 +131,7 @@ We support a pay-as-you-go model. Below are the prices **per 1M tokens**. | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3.6 Plus | $0.50 | $3.00 | $0.05 | $0.625 | | Qwen3.5 Plus | $0.20 | $1.20 | $0.02 | $0.25 | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/zh-cn/zen.mdx b/packages/web/src/content/docs/zh-cn/zen.mdx index cd281a4b0c..22cffb9019 100644 --- a/packages/web/src/content/docs/zh-cn/zen.mdx +++ b/packages/web/src/content/docs/zh-cn/zen.mdx @@ -70,6 +70,7 @@ OpenCode Zen 的工作方式与 OpenCode 中的任何其他提供商相同。 | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 
| `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -124,6 +125,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | diff --git a/packages/web/src/content/docs/zh-tw/zen.mdx b/packages/web/src/content/docs/zh-tw/zen.mdx index bdc3c07db6..75b97c82b6 100644 --- a/packages/web/src/content/docs/zh-tw/zen.mdx +++ b/packages/web/src/content/docs/zh-tw/zen.mdx @@ -74,6 +74,7 @@ OpenCode Zen 的運作方式和 OpenCode 中的其他供應商一樣。 | GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | | GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` | +| Claude Opus 4.7 | claude-opus-4-7 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | | Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` | @@ -129,6 +130,7 @@ https://opencode.ai/zen/v1/models | GLM 5 | $1.00 | $3.20 | $0.20 | - | | Kimi K2.5 | $0.60 | $3.00 | $0.10 | - | | Qwen3 Coder 480B | $0.45 | $1.50 | - | - | +| Claude Opus 4.7 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.6 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 | | Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 | From cc7acd90ab2fda54f06ff687a46d7364e479dc32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Thu, 16 Apr 2026 17:43:15 +0200 Subject: [PATCH 005/201] fix(nix): add 
shared package to bun install filters (#22665) --- nix/node_modules.nix | 1 + packages/shared/package.json | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/nix/node_modules.nix b/nix/node_modules.nix index e10e85d2fe..ba97405df9 100644 --- a/nix/node_modules.nix +++ b/nix/node_modules.nix @@ -55,6 +55,7 @@ stdenvNoCC.mkDerivation { --filter './packages/opencode' \ --filter './packages/desktop' \ --filter './packages/app' \ + --filter './packages/shared' \ --frozen-lockfile \ --ignore-scripts \ --no-progress diff --git a/packages/shared/package.json b/packages/shared/package.json index ac2d8f2097..bdfca12a93 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -17,10 +17,10 @@ }, "imports": {}, "devDependencies": { + "@tsconfig/bun": "catalog:", "@types/semver": "catalog:", "@types/bun": "catalog:", - "@types/npmcli__arborist": "6.3.3", - "@tsconfig/bun": "catalog:" + "@types/npmcli__arborist": "6.3.3" }, "dependencies": { "@effect/platform-node": "catalog:", From 378c05f202b0fda6561451a93639712a11400972 Mon Sep 17 00:00:00 2001 From: Graham Campbell Date: Thu, 16 Apr 2026 16:57:36 +0100 Subject: [PATCH 006/201] feat: Add support for claude opus 4.7 xhigh adaptive reasoning effort (#22833) Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> --- packages/opencode/src/provider/transform.ts | 21 +++-- .../opencode/test/provider/transform.test.ts | 92 +++++++++++++++++++ 2 files changed, 107 insertions(+), 6 deletions(-) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index c940b31c8c..92862b0ca6 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -389,12 +389,21 @@ export function topK(model: Provider.Model) { const WIDELY_SUPPORTED_EFFORTS = ["low", "medium", "high"] const OPENAI_EFFORTS = ["none", "minimal", ...WIDELY_SUPPORTED_EFFORTS, "xhigh"] +function 
anthropicAdaptiveEfforts(apiId: string): string[] | null { + if (["opus-4-7", "opus-4.7"].some((v) => apiId.includes(v))) { + return ["low", "medium", "high", "xhigh", "max"] + } + if (["opus-4-6", "opus-4.6", "sonnet-4-6", "sonnet-4.6"].some((v) => apiId.includes(v))) { + return ["low", "medium", "high", "max"] + } + return null +} + export function variants(model: Provider.Model): Record> { if (!model.capabilities.reasoning) return {} const id = model.id.toLowerCase() - const isAnthropicAdaptive = ["opus-4-6", "opus-4.6", "sonnet-4-6", "sonnet-4.6"].some((v) => model.api.id.includes(v)) - const adaptiveEfforts = ["low", "medium", "high", "max"] + const adaptiveEfforts = anthropicAdaptiveEfforts(model.api.id) if ( id.includes("deepseek") || id.includes("minimax") || @@ -429,7 +438,7 @@ export function variants(model: Provider.Model): Record [ effort, @@ -578,7 +587,7 @@ export function variants(model: Provider.Model): Record [ effort, @@ -609,7 +618,7 @@ export function variants(model: Provider.Model): Record [ effort, @@ -716,7 +725,7 @@ export function variants(model: Provider.Model): Record [ effort, diff --git a/packages/opencode/test/provider/transform.test.ts b/packages/opencode/test/provider/transform.test.ts index 0666d0f641..d53ce38b16 100644 --- a/packages/opencode/test/provider/transform.test.ts +++ b/packages/opencode/test/provider/transform.test.ts @@ -2246,6 +2246,46 @@ describe("ProviderTransform.variants", () => { }) }) + test("anthropic opus 4.7 models return adaptive thinking options with xhigh", () => { + const model = createMockModel({ + id: "anthropic/claude-opus-4-7", + providerID: "gateway", + api: { + id: "anthropic/claude-opus-4-7", + url: "https://gateway.ai", + npm: "@ai-sdk/gateway", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh", "max"]) + expect(result.xhigh).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "xhigh", + }) + 
expect(result.max).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "max", + }) + }) + + test("anthropic opus 4.7 dot-format models return adaptive thinking options with xhigh", () => { + const model = createMockModel({ + id: "anthropic/claude-opus-4-7", + providerID: "gateway", + api: { + id: "anthropic/claude-opus-4.7", + url: "https://gateway.ai", + npm: "@ai-sdk/gateway", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh", "max"]) + }) + test("anthropic models return anthropic thinking options", () => { const model = createMockModel({ id: "anthropic/claude-sonnet-4", @@ -2654,6 +2694,32 @@ describe("ProviderTransform.variants", () => { }) }) + test("opus 4.7 returns adaptive thinking options with xhigh", () => { + const model = createMockModel({ + id: "anthropic/claude-opus-4-7", + providerID: "anthropic", + api: { + id: "claude-opus-4-7", + url: "https://api.anthropic.com", + npm: "@ai-sdk/anthropic", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh", "max"]) + expect(result.xhigh).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "xhigh", + }) + expect(result.max).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "max", + }) + }) + test("returns high and max with thinking config", () => { const model = createMockModel({ id: "anthropic/claude-4", @@ -2702,6 +2768,32 @@ describe("ProviderTransform.variants", () => { }) }) + test("anthropic opus 4.7 returns adaptive reasoning options with xhigh", () => { + const model = createMockModel({ + id: "bedrock/anthropic-claude-opus-4-7", + providerID: "bedrock", + api: { + id: "anthropic.claude-opus-4-7", + url: "https://bedrock.amazonaws.com", + npm: "@ai-sdk/amazon-bedrock", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh", "max"]) + 
expect(result.xhigh).toEqual({ + reasoningConfig: { + type: "adaptive", + maxReasoningEffort: "xhigh", + }, + }) + expect(result.max).toEqual({ + reasoningConfig: { + type: "adaptive", + maxReasoningEffort: "max", + }, + }) + }) + test("returns WIDELY_SUPPORTED_EFFORTS with reasoningConfig", () => { const model = createMockModel({ id: "bedrock/llama-4", From 8c0205a84ab225e6901eff92e6a589e8fc88b679 Mon Sep 17 00:00:00 2001 From: Nacai <111849193+B67687@users.noreply.github.com> Date: Fri, 17 Apr 2026 00:01:35 +0800 Subject: [PATCH 007/201] fix: align stale bot message with actual 60-day threshold (#22842) Co-authored-by: opencode-agent[bot] Co-authored-by: rekram1-node --- script/github/close-issues.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/script/github/close-issues.ts b/script/github/close-issues.ts index 7b38bf6758..e8f0573ebb 100755 --- a/script/github/close-issues.ts +++ b/script/github/close-issues.ts @@ -2,8 +2,7 @@ const repo = "anomalyco/opencode" const days = 60 -const msg = - "To stay organized issues are automatically closed after 90 days of no activity. If the issue is still relevant please open a new one." +const msg = `To stay organized issues are automatically closed after ${days} days of no activity. 
If the issue is still relevant please open a new one.` const token = process.env.GITHUB_TOKEN if (!token) { From 305460b25fc673f707a238f180d93e58d80f1ee9 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 12:15:44 -0400 Subject: [PATCH 008/201] fix: add a few more tests for sync and session restore (#22837) --- packages/opencode/src/server/instance/sync.ts | 1 + packages/opencode/test/sync/index.test.ts | 48 +++ .../test/workspace/workspace-restore.test.ts | 280 ++++++++++++++++++ 3 files changed, 329 insertions(+) create mode 100644 packages/opencode/test/workspace/workspace-restore.test.ts diff --git a/packages/opencode/src/server/instance/sync.ts b/packages/opencode/src/server/instance/sync.ts index 633e77f10e..ac43b638eb 100644 --- a/packages/opencode/src/server/instance/sync.ts +++ b/packages/opencode/src/server/instance/sync.ts @@ -53,6 +53,7 @@ export const SyncRoutes = lazy(() => const body = c.req.valid("json") const events = body.events const source = events[0].aggregateID + log.info("sync replay requested", { sessionID: source, events: events.length, diff --git a/packages/opencode/test/sync/index.test.ts b/packages/opencode/test/sync/index.test.ts index 2ba716cac0..36429c3d84 100644 --- a/packages/opencode/test/sync/index.test.ts +++ b/packages/opencode/test/sync/index.test.ts @@ -187,5 +187,53 @@ describe("SyncEvent", () => { ).toThrow(/Unknown event type/) }), ) + + test( + "replayAll accepts later chunks after the first batch", + withInstance(() => { + const { Created } = setup() + const id = Identifier.descending("message") + + const one = SyncEvent.replayAll([ + { + id: "evt_1", + type: SyncEvent.versionedType(Created.type, Created.version), + seq: 0, + aggregateID: id, + data: { id, name: "first" }, + }, + { + id: "evt_2", + type: SyncEvent.versionedType(Created.type, Created.version), + seq: 1, + aggregateID: id, + data: { id, name: "second" }, + }, + ]) + + const two = SyncEvent.replayAll([ + { + id: "evt_3", + type: 
SyncEvent.versionedType(Created.type, Created.version), + seq: 2, + aggregateID: id, + data: { id, name: "third" }, + }, + { + id: "evt_4", + type: SyncEvent.versionedType(Created.type, Created.version), + seq: 3, + aggregateID: id, + data: { id, name: "fourth" }, + }, + ]) + + expect(one).toBe(id) + expect(two).toBe(id) + + const rows = Database.use((db) => db.select().from(EventTable).all()) + expect(rows.map((row) => row.seq)).toEqual([0, 1, 2, 3]) + }), + ) }) }) diff --git a/packages/opencode/test/workspace/workspace-restore.test.ts b/packages/opencode/test/workspace/workspace-restore.test.ts new file mode 100644 index 0000000000..ee9ad059f8 --- /dev/null +++ b/packages/opencode/test/workspace/workspace-restore.test.ts @@ -0,0 +1,280 @@ +import { afterEach, beforeEach, describe, expect, mock, spyOn, test } from "bun:test" +import fs from "node:fs/promises" +import path from "node:path" +import { GlobalBus } from "../../src/bus/global" +import { registerAdaptor } from "../../src/control-plane/adaptors" +import type { WorkspaceAdaptor } from "../../src/control-plane/types" +import { Workspace } from "../../src/control-plane/workspace" +import { AppRuntime } from "../../src/effect/app-runtime" +import { Flag } from "../../src/flag/flag" +import { ModelID, ProviderID } from "../../src/provider/schema" +import { Instance } from "../../src/project/instance" +import { Session as SessionNs } from "../../src/session" +import { MessageV2 } from "../../src/session/message-v2" +import { MessageID, PartID, type SessionID } from "../../src/session/schema" +import { Database, asc, eq } from "../../src/storage" +import { SyncEvent } from "../../src/sync" +import { EventTable } from "../../src/sync/event.sql" +import { Log } from "../../src/util" +import { resetDatabase } from "../fixture/db" +import { tmpdir } from "../fixture/fixture" + +void Log.init({ print: false }) + +const original = Flag.OPENCODE_EXPERIMENTAL_WORKSPACES + +beforeEach(() => { + Database.close() + // 
@ts-expect-error test override + Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = true +}) + +afterEach(async () => { + mock.restore() + await Instance.disposeAll() + // @ts-expect-error test override + Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = original + await resetDatabase() +}) + +function create(input?: SessionNs.CreateInput) { + return AppRuntime.runPromise(SessionNs.Service.use((svc) => svc.create(input))) +} + +function get(id: SessionID) { + return AppRuntime.runPromise(SessionNs.Service.use((svc) => svc.get(id))) +} + +function updateMessage(msg: T) { + return AppRuntime.runPromise(SessionNs.Service.use((svc) => svc.updateMessage(msg))) +} + +function updatePart(part: T) { + return AppRuntime.runPromise(SessionNs.Service.use((svc) => svc.updatePart(part))) +} + +async function user(sessionID: SessionID, text: string) { + const msg = await updateMessage({ + id: MessageID.ascending(), + role: "user", + sessionID, + agent: "build", + model: { providerID: ProviderID.make("test"), modelID: ModelID.make("test") }, + time: { created: Date.now() }, + }) + await updatePart({ + id: PartID.ascending(), + sessionID, + messageID: msg.id, + type: "text", + text, + }) +} + +function remote(dir: string, url: string): WorkspaceAdaptor { + return { + name: "remote", + description: "remote", + configure(info) { + return { + ...info, + directory: dir, + } + }, + async create() { + await fs.mkdir(dir, { recursive: true }) + }, + async remove() {}, + target() { + return { + type: "remote" as const, + url, + } + }, + } +} + +function local(dir: string): WorkspaceAdaptor { + return { + name: "local", + description: "local", + configure(info) { + return { + ...info, + directory: dir, + } + }, + async create() { + await fs.mkdir(dir, { recursive: true }) + }, + async remove() {}, + target() { + return { + type: "local" as const, + directory: dir, + } + }, + } +} + +function eventStreamResponse() { + return new Response(new ReadableStream({ start() {} }), { + status: 200, + headers: { + 
"content-type": "text/event-stream", + }, + }) +} + +describe("Workspace.sessionRestore", () => { + test("replays session events in batches of 10 and emits progress", async () => { + await using tmp = await tmpdir({ git: true }) + const dir = path.join(tmp.path, ".restore") + const seen: any[] = [] + const posts: Array<{ + path: string + body: { directory: string; events: Array<{ seq: number; aggregateID: string }> } + }> = [] + const on = (evt: any) => seen.push(evt) + GlobalBus.on("event", on) + + const raw = globalThis.fetch + spyOn(globalThis, "fetch").mockImplementation( + Object.assign( + async (input: URL | RequestInfo, init?: BunFetchRequestInit | RequestInit) => { + const url = new URL(typeof input === "string" || input instanceof URL ? input : input.url) + if (url.pathname !== "/base/sync/replay") { + return eventStreamResponse() + } + const body = JSON.parse(String(init?.body)) + posts.push({ + path: url.pathname, + body, + }) + return Response.json({ sessionID: body.events[0].aggregateID }) + }, + { + preconnect: raw.preconnect?.bind(raw), + }, + ) as typeof globalThis.fetch, + ) + + try { + const setup = await Instance.provide({ + directory: tmp.path, + fn: async () => { + registerAdaptor(Instance.project.id, "worktree", remote(dir, "https://workspace.test/base")) + const space = await Workspace.create({ + type: "worktree", + branch: null, + extra: null, + projectID: Instance.project.id, + }) + const session = await create({}) + for (let i = 0; i < 6; i++) { + await user(session.id, `msg ${i}`) + } + const rows = Database.use((db) => + db + .select({ seq: EventTable.seq }) + .from(EventTable) + .where(eq(EventTable.aggregate_id, session.id)) + .orderBy(asc(EventTable.seq)) + .all(), + ) + const result = await Workspace.sessionRestore({ + workspaceID: space.id, + sessionID: session.id, + }) + return { space, session, rows, result } + }, + }) + + expect(setup.rows).toHaveLength(13) + expect(setup.result).toEqual({ total: 2 }) + 
expect(posts).toHaveLength(2) + expect(posts[0]?.path).toBe("/base/sync/replay") + expect(posts[1]?.path).toBe("/base/sync/replay") + expect(posts[0]?.body.directory).toBe(dir) + expect(posts[1]?.body.directory).toBe(dir) + expect(posts[0]?.body.events).toHaveLength(10) + expect(posts[1]?.body.events).toHaveLength(4) + expect(posts.flatMap((item) => item.body.events.map((event) => event.seq))).toEqual([ + ...setup.rows.map((row) => row.seq), + setup.rows.at(-1)!.seq + 1, + ]) + expect(posts[1]?.body.events.at(-1)).toMatchObject({ + aggregateID: setup.session.id, + seq: setup.rows.at(-1)!.seq + 1, + type: SyncEvent.versionedType(SessionNs.Event.Updated.type, SessionNs.Event.Updated.version), + data: { + sessionID: setup.session.id, + info: { + workspaceID: setup.space.id, + }, + }, + }) + + const restore = seen.filter( + (evt) => evt.workspace === setup.space.id && evt.payload.type === Workspace.Event.Restore.type, + ) + expect(restore.map((evt) => evt.payload.properties.step)).toEqual([0, 1, 2]) + expect(restore.map((evt) => evt.payload.properties.total)).toEqual([2, 2, 2]) + expect(restore.map((evt) => evt.payload.properties.sessionID)).toEqual([ + setup.session.id, + setup.session.id, + setup.session.id, + ]) + } finally { + GlobalBus.off("event", on) + } + }) + + test("replays locally without posting to a server", async () => { + await using tmp = await tmpdir({ git: true }) + const dir = path.join(tmp.path, ".restore-local") + const seen: any[] = [] + const on = (evt: any) => seen.push(evt) + GlobalBus.on("event", on) + + const fetch = spyOn(globalThis, "fetch") + const replayAll = spyOn(SyncEvent, "replayAll") + + try { + const setup = await Instance.provide({ + directory: tmp.path, + fn: async () => { + registerAdaptor(Instance.project.id, "local-restore", local(dir)) + const space = await Workspace.create({ + type: "local-restore", + branch: null, + extra: null, + projectID: Instance.project.id, + }) + const session = await create({}) + for (let i = 0; i < 
6; i++) { + await user(session.id, `msg ${i}`) + } + const result = await Workspace.sessionRestore({ + workspaceID: space.id, + sessionID: session.id, + }) + const updated = await get(session.id) + return { space, session, result, updated } + }, + }) + + expect(setup.result).toEqual({ total: 2 }) + expect(fetch).not.toHaveBeenCalled() + expect(replayAll).toHaveBeenCalledTimes(2) + expect(setup.updated.workspaceID).toBe(setup.space.id) + + const restore = seen.filter( + (evt) => evt.workspace === setup.space.id && evt.payload.type === Workspace.Event.Restore.type, + ) + expect(restore.map((evt) => evt.payload.properties.step)).toEqual([0, 1, 2]) + } finally { + GlobalBus.off("event", on) + } + }) +}) From 06afd332913e1ad4b067a0f1a1c906ca8376bc45 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 12:24:40 -0400 Subject: [PATCH 009/201] refactor(tui): improve workspace management (#22691) --- .../dialog-session-delete-failed.tsx | 101 +++++++++++++ .../cmd/tui/component/dialog-session-list.tsx | 97 ++++++++++++- .../tui/component/dialog-workspace-create.tsx | 136 +++++++++++++++++- .../cli/cmd/tui/component/prompt/index.tsx | 4 +- .../opencode/src/cli/cmd/tui/context/sync.tsx | 17 ++- .../src/control-plane/workspace-context.ts | 6 +- packages/opencode/src/effect/bridge.ts | 3 +- packages/opencode/src/effect/instance-ref.ts | 3 +- packages/opencode/src/session/session.ts | 3 +- .../test/cli/tui/sync-provider.test.tsx | 16 +-- 10 files changed, 349 insertions(+), 37 deletions(-) create mode 100644 packages/opencode/src/cli/cmd/tui/component/dialog-session-delete-failed.tsx diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-session-delete-failed.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-session-delete-failed.tsx new file mode 100644 index 0000000000..4a22a0c492 --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-session-delete-failed.tsx @@ -0,0 +1,101 @@ +import { TextAttributes } from "@opentui/core" +import 
{ useTheme } from "../context/theme" +import { useDialog } from "../ui/dialog" +import { createStore } from "solid-js/store" +import { For } from "solid-js" +import { useKeyboard } from "@opentui/solid" + +export function DialogSessionDeleteFailed(props: { + session: string + workspace: string + onDelete?: () => boolean | void | Promise + onRestore?: () => boolean | void | Promise + onDone?: () => void +}) { + const dialog = useDialog() + const { theme } = useTheme() + const [store, setStore] = createStore({ + active: "delete" as "delete" | "restore", + }) + + const options = [ + { + id: "delete" as const, + title: "Delete workspace", + description: "Delete the workspace and all sessions attached to it.", + run: props.onDelete, + }, + { + id: "restore" as const, + title: "Restore to new workspace", + description: "Try to restore this session into a new workspace.", + run: props.onRestore, + }, + ] + + async function confirm() { + const result = await options.find((item) => item.id === store.active)?.run?.() + if (result === false) return + props.onDone?.() + if (!props.onDone) dialog.clear() + } + + useKeyboard((evt) => { + if (evt.name === "return") { + void confirm() + } + if (evt.name === "left" || evt.name === "up") { + setStore("active", "delete") + } + if (evt.name === "right" || evt.name === "down") { + setStore("active", "restore") + } + }) + + return ( + + + + Failed to Delete Session + + dialog.clear()}> + esc + + + + {`The session "${props.session}" could not be deleted because the workspace "${props.workspace}" is not available.`} + + + Choose how you want to recover this broken workspace session. 
+ + + + {(item) => ( + { + setStore("active", item.id) + void confirm() + }} + > + + {item.title} + + + {item.description} + + + )} + + + + ) +} diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx index f58b73c9a7..75c79dcdd8 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx @@ -13,8 +13,10 @@ import { DialogSessionRename } from "./dialog-session-rename" import { Keybind } from "@/util" import { createDebouncedSignal } from "../util/signal" import { useToast } from "../ui/toast" -import { DialogWorkspaceCreate, openWorkspaceSession } from "./dialog-workspace-create" +import { DialogWorkspaceCreate, openWorkspaceSession, restoreWorkspaceSession } from "./dialog-workspace-create" import { Spinner } from "./spinner" +import { errorMessage } from "@/util/error" +import { DialogSessionDeleteFailed } from "./dialog-session-delete-failed" type WorkspaceStatus = "connected" | "connecting" | "disconnected" | "error" @@ -30,7 +32,7 @@ export function DialogSessionList() { const [toDelete, setToDelete] = createSignal() const [search, setSearch] = createDebouncedSignal("", 150) - const [searchResults] = createResource(search, async (query) => { + const [searchResults, { refetch }] = createResource(search, async (query) => { if (!query) return undefined const result = await sdk.client.session.list({ search: query, limit: 30 }) return result.data ?? [] @@ -56,6 +58,57 @@ export function DialogSessionList() { )) } + function recover(session: NonNullable[number]>) { + const workspace = project.workspace.get(session.workspaceID!) + const list = () => dialog.replace(() => ) + dialog.replace(() => ( + { + const current = currentSessionID() + const info = current ? 
sync.data.session.find((item) => item.id === current) : undefined + const result = await sdk.client.experimental.workspace.remove({ id: session.workspaceID! }) + if (result.error) { + toast.show({ + variant: "error", + title: "Failed to delete workspace", + message: errorMessage(result.error), + }) + return false + } + await project.workspace.sync() + await sync.session.refresh() + if (search()) await refetch() + if (info?.workspaceID === session.workspaceID) { + route.navigate({ type: "home" }) + } + return true + }} + onRestore={() => { + dialog.replace(() => ( + + restoreWorkspaceSession({ + dialog, + sdk, + sync, + project, + toast, + workspaceID, + sessionID: session.id, + done: list, + }) + } + /> + )) + return false + }} + /> + )) + } + const options = createMemo(() => { const today = new Date().toDateString() return sessions() @@ -145,9 +198,43 @@ export function DialogSessionList() { title: "delete", onTrigger: async (option) => { if (toDelete() === option.value) { - void sdk.client.session.delete({ - sessionID: option.value, - }) + const session = sessions().find((item) => item.id === option.value) + const status = session?.workspaceID ? 
project.workspace.status(session.workspaceID) : undefined + + try { + const result = await sdk.client.session.delete({ + sessionID: option.value, + }) + if (result.error) { + if (session?.workspaceID) { + recover(session) + } else { + toast.show({ + variant: "error", + title: "Failed to delete session", + message: errorMessage(result.error), + }) + } + setToDelete(undefined) + return + } + } catch (err) { + if (session?.workspaceID) { + recover(session) + } else { + toast.show({ + variant: "error", + title: "Failed to delete session", + message: errorMessage(err), + }) + } + setToDelete(undefined) + return + } + if (status && status !== "connected") { + await sync.session.refresh() + } + if (search()) await refetch() setToDelete(undefined) return } diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx index 447a1c3258..ca504d864d 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx @@ -6,6 +6,8 @@ import { useSync } from "@tui/context/sync" import { useProject } from "@tui/context/project" import { createMemo, createSignal, onMount } from "solid-js" import { setTimeout as sleep } from "node:timers/promises" +import { errorData, errorMessage } from "@/util/error" +import * as Log from "@/util/log" import { useSDK } from "../context/sdk" import { useToast } from "../ui/toast" @@ -15,6 +17,8 @@ type Adaptor = { description: string } +const log = Log.Default.clone().tag("service", "tui-workspace") + function scoped(sdk: ReturnType, sync: ReturnType, workspaceID: string) { return createOpencodeClient({ baseUrl: sdk.url, @@ -33,8 +37,20 @@ export async function openWorkspaceSession(input: { workspaceID: string }) { const client = scoped(input.sdk, input.sync, input.workspaceID) + log.info("workspace session create requested", { + workspaceID: 
input.workspaceID, + }) + + console.log("opening!") while (true) { - const result = await client.session.create({ workspaceID: input.workspaceID }).catch(() => undefined) + console.log("creating") + const result = await client.session.create({ workspace: input.workspaceID }).catch((err) => { + log.error("workspace session create request failed", { + workspaceID: input.workspaceID, + error: errorData(err), + }) + return undefined + }) if (!result) { input.toast.show({ message: "Failed to create workspace session", @@ -42,26 +58,113 @@ export async function openWorkspaceSession(input: { }) return } - if (result.response.status >= 500 && result.response.status < 600) { + log.info("workspace session create response", { + workspaceID: input.workspaceID, + status: result.response?.status, + sessionID: result.data?.id, + }) + if (result.response?.status && result.response.status >= 500 && result.response.status < 600) { + log.warn("workspace session create retrying after server error", { + workspaceID: input.workspaceID, + status: result.response.status, + }) await sleep(1000) continue } if (!result.data) { + log.error("workspace session create returned no data", { + workspaceID: input.workspaceID, + status: result.response?.status, + }) input.toast.show({ message: "Failed to create workspace session", variant: "error", }) return } + input.route.navigate({ type: "session", sessionID: result.data.id, }) + log.info("workspace session create complete", { + workspaceID: input.workspaceID, + sessionID: result.data.id, + }) input.dialog.clear() return } } +export async function restoreWorkspaceSession(input: { + dialog: ReturnType + sdk: ReturnType + sync: ReturnType + project: ReturnType + toast: ReturnType + workspaceID: string + sessionID: string + done?: () => void +}) { + log.info("session restore requested", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + }) + const result = await input.sdk.client.experimental.workspace + .sessionRestore({ id: 
input.workspaceID, sessionID: input.sessionID }) + .catch((err) => { + log.error("session restore request failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + error: errorData(err), + }) + return undefined + }) + if (!result?.data) { + log.error("session restore failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + status: result?.response?.status, + error: result?.error ? errorData(result.error) : undefined, + }) + input.toast.show({ + message: `Failed to restore session: ${errorMessage(result?.error ?? "no response")}`, + variant: "error", + }) + return + } + + log.info("session restore response", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + status: result.response?.status, + total: result.data.total, + }) + + await Promise.all([input.project.workspace.sync(), input.sync.session.refresh()]).catch((err) => { + log.error("session restore refresh failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + error: errorData(err), + }) + throw err + }) + + log.info("session restore complete", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + total: result.data.total, + }) + + input.toast.show({ + message: "Session restored into the new workspace", + variant: "success", + }) + input.done?.() + if (input.done) return + input.dialog.clear() +} + export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) => Promise | void }) { const dialog = useDialog() const sync = useSync() @@ -123,18 +226,43 @@ export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) = const create = async (type: string) => { if (creating()) return setCreating(type) + log.info("workspace create requested", { + type, + }) + + const result = await sdk.client.experimental.workspace.create({ type, branch: null }).catch((err) => { + log.error("workspace create request failed", { + type, + error: errorData(err), + }) + return undefined + }) - const result = await 
sdk.client.experimental.workspace.create({ type, branch: null }).catch(() => undefined) const workspace = result?.data if (!workspace) { setCreating(undefined) + log.error("workspace create failed", { + type, + status: result?.response.status, + error: result?.error ? errorData(result.error) : undefined, + }) toast.show({ - message: "Failed to create workspace", + message: `Failed to create workspace: ${errorMessage(result?.error ?? "no response")}`, variant: "error", }) return } + log.info("workspace create response", { + type, + workspaceID: workspace.id, + status: result.response?.status, + }) + await project.workspace.sync() + log.info("workspace create synced", { + type, + workspaceID: workspace.id, + }) await props.onSelect(workspace.id) setCreating(undefined) } diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index b4ab82729f..e64a16eb8a 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -617,9 +617,7 @@ export function Prompt(props: PromptProps) { let sessionID = props.sessionID if (sessionID == null) { - const res = await sdk.client.session.create({ - workspaceID: props.workspaceID, - }) + const res = await sdk.client.session.create({ workspace: props.workspaceID }) if (res.error) { console.log("Creating a session failed:", res.error) diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 46227e28aa..38b4457445 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -474,6 +474,13 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ if (match.found) return store.session[match.index] return undefined }, + async refresh() { + const start = Date.now() - 30 * 24 * 60 * 60 * 1000 + const list = await sdk.client.session + 
.list({ start }) + .then((x) => (x.data ?? []).toSorted((a, b) => a.id.localeCompare(b.id))) + setStore("session", reconcile(list)) + }, status(sessionID: string) { const session = result.session.get(sessionID) if (!session) return "idle" @@ -485,13 +492,13 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return last.time.completed ? "idle" : "working" }, async sync(sessionID: string) { + console.log('YO', sessionID, fullSyncedSessions.has(sessionID)) if (fullSyncedSessions.has(sessionID)) return - const workspace = project.workspace.current() const [session, messages, todo, diff] = await Promise.all([ - sdk.client.session.get({ sessionID, workspace }, { throwOnError: true }), - sdk.client.session.messages({ sessionID, limit: 100, workspace }), - sdk.client.session.todo({ sessionID, workspace }), - sdk.client.session.diff({ sessionID, workspace }), + sdk.client.session.get({ sessionID }, { throwOnError: true }), + sdk.client.session.messages({ sessionID, limit: 100 }), + sdk.client.session.todo({ sessionID }), + sdk.client.session.diff({ sessionID }), ]) setStore( produce((draft) => { diff --git a/packages/opencode/src/control-plane/workspace-context.ts b/packages/opencode/src/control-plane/workspace-context.ts index 565472a24f..3d4fa5baef 100644 --- a/packages/opencode/src/control-plane/workspace-context.ts +++ b/packages/opencode/src/control-plane/workspace-context.ts @@ -2,17 +2,17 @@ import { LocalContext } from "../util" import type { WorkspaceID } from "../control-plane/schema" export interface WorkspaceContext { - workspaceID: string + workspaceID: WorkspaceID } const context = LocalContext.create("instance") export const WorkspaceContext = { async provide(input: { workspaceID: WorkspaceID; fn: () => R }): Promise { - return context.provide({ workspaceID: input.workspaceID as string }, () => input.fn()) + return context.provide({ workspaceID: input.workspaceID }, () => input.fn()) }, - restore(workspaceID: string, fn: () => 
R): R { + restore(workspaceID: WorkspaceID, fn: () => R): R { return context.provide({ workspaceID }, fn) }, diff --git a/packages/opencode/src/effect/bridge.ts b/packages/opencode/src/effect/bridge.ts index d79fc74f47..03e5aefd23 100644 --- a/packages/opencode/src/effect/bridge.ts +++ b/packages/opencode/src/effect/bridge.ts @@ -1,6 +1,7 @@ import { Effect, Fiber } from "effect" import { WorkspaceContext } from "@/control-plane/workspace-context" import { Instance, type InstanceContext } from "@/project/instance" +import type { WorkspaceID } from "@/control-plane/schema" import { LocalContext } from "@/util" import { InstanceRef, WorkspaceRef } from "./instance-ref" import { attachWith } from "./run-service" @@ -10,7 +11,7 @@ export interface Shape { readonly fork: (effect: Effect.Effect) => Fiber.Fiber } -function restore(instance: InstanceContext | undefined, workspace: string | undefined, fn: () => R): R { +function restore(instance: InstanceContext | undefined, workspace: WorkspaceID | undefined, fn: () => R): R { if (instance && workspace !== undefined) { return WorkspaceContext.restore(workspace, () => Instance.restore(instance, fn)) } diff --git a/packages/opencode/src/effect/instance-ref.ts b/packages/opencode/src/effect/instance-ref.ts index 301316c771..effc560c58 100644 --- a/packages/opencode/src/effect/instance-ref.ts +++ b/packages/opencode/src/effect/instance-ref.ts @@ -1,10 +1,11 @@ import { Context } from "effect" import type { InstanceContext } from "@/project/instance" +import type { WorkspaceID } from "@/control-plane/schema" export const InstanceRef = Context.Reference("~opencode/InstanceRef", { defaultValue: () => undefined, }) -export const WorkspaceRef = Context.Reference("~opencode/WorkspaceRef", { +export const WorkspaceRef = Context.Reference("~opencode/WorkspaceRef", { defaultValue: () => undefined, }) diff --git a/packages/opencode/src/session/session.ts b/packages/opencode/src/session/session.ts index e288aec73a..8c5fc29e4a 100644 --- 
a/packages/opencode/src/session/session.ts +++ b/packages/opencode/src/session/session.ts @@ -519,12 +519,13 @@ export const layer: Layer.Layer = workspaceID?: WorkspaceID }) { const directory = yield* InstanceState.directory + const workspace = yield* InstanceState.workspaceID return yield* createNext({ parentID: input?.parentID, directory, title: input?.title, permission: input?.permission, - workspaceID: input?.workspaceID, + workspaceID: workspace, }) }) diff --git a/packages/opencode/test/cli/tui/sync-provider.test.tsx b/packages/opencode/test/cli/tui/sync-provider.test.tsx index 3ef126ef4c..e75e186199 100644 --- a/packages/opencode/test/cli/tui/sync-provider.test.tsx +++ b/packages/opencode/test/cli/tui/sync-provider.test.tsx @@ -264,27 +264,15 @@ describe("SyncProvider", () => { log.length = 0 await sync.session.sync("ses_1") + expect(log.filter((item) => item.path === "/session/ses_1")).toHaveLength(1) - expect(log.filter((item) => item.path === "/session/ses_1" && item.workspace === "ws_a")).toHaveLength(1) - expect(sync.data.todo.ses_1[0]?.content).toBe("todo-ws_a") - expect(sync.data.message.ses_1[0]?.id).toBe("msg_1") - expect(sync.data.part.msg_1[0]).toMatchObject({ type: "text", text: "part-ws_a" }) - expect(sync.data.session_diff.ses_1[0]?.file).toBe("ws_a.ts") - - log.length = 0 project.workspace.set("ws_b") await waitBoot(log, "ws_b") expect(project.workspace.current()).toBe("ws_b") log.length = 0 await sync.session.sync("ses_1") - await wait(() => log.some((item) => item.path === "/session/ses_1" && item.workspace === "ws_b")) - - expect(log.filter((item) => item.path === "/session/ses_1" && item.workspace === "ws_b")).toHaveLength(1) - expect(sync.data.todo.ses_1[0]?.content).toBe("todo-ws_b") - expect(sync.data.message.ses_1[0]?.id).toBe("msg_1") - expect(sync.data.part.msg_1[0]).toMatchObject({ type: "text", text: "part-ws_b" }) - expect(sync.data.session_diff.ses_1[0]?.file).toBe("ws_b.ts") + expect(log.filter((item) => item.path === 
"/session/ses_1")).toHaveLength(1) } finally { app.renderer.destroy() } From d82bc3a421c04e3abbade123344dc40d81e03395 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 16:26:12 +0000 Subject: [PATCH 010/201] chore: generate --- packages/opencode/src/cli/cmd/tui/context/sync.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 38b4457445..10b70d50ac 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -492,7 +492,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return last.time.completed ? "idle" : "working" }, async sync(sessionID: string) { - console.log('YO', sessionID, fullSyncedSessions.has(sessionID)) + console.log("YO", sessionID, fullSyncedSessions.has(sessionID)) if (fullSyncedSessions.has(sessionID)) return const [session, messages, todo, diff] = await Promise.all([ sdk.client.session.get({ sessionID }, { throwOnError: true }), From b28956f0dbc22d786fab24b2a34fd07fba6d27ec Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 12:35:37 -0400 Subject: [PATCH 011/201] fix(core): better global sync event structure (#22858) --- packages/opencode/src/control-plane/workspace.ts | 3 +-- packages/opencode/src/sync/sync-event.ts | 12 ++++-------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index b43fe848ba..d870eb6360 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -460,8 +460,7 @@ export namespace Workspace { if (!("payload" in evt)) return if (evt.payload.type === "sync") { - // This name -> type is temporary - SyncEvent.replay({ ...evt.payload, type: evt.payload.name } as SyncEvent.SerializedEvent) + 
SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) } GlobalBus.emit("event", { diff --git a/packages/opencode/src/sync/sync-event.ts b/packages/opencode/src/sync/sync-event.ts index db487ddd24..94c889d917 100644 --- a/packages/opencode/src/sync/sync-event.ts +++ b/packages/opencode/src/sync/sync-event.ts @@ -155,8 +155,10 @@ function process(def: Def, event: Event, options: { workspace: WorkspaceContext.workspaceID, payload: { type: "sync", - name: versionedType(def.type, def.version), - ...event, + syncEvent: { + type: versionedType(def.type, def.version), + ...event, + }, }, }) } @@ -164,12 +166,6 @@ function process(def: Def, event: Event, options: { }) } -// TODO: -// -// * Support applying multiple events at one time. One transaction, -// and it validets all the sequence ids -// * when loading events from db, apply zod validation to ensure shape - export function replay(event: SerializedEvent, options?: { publish: boolean }) { const def = registry.get(event.type) if (!def) { From bfffc3c2c6349d9199dd1a73260612b5ec2da88d Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 12:19:43 -0400 Subject: [PATCH 012/201] tui: ensure TUI plugins load with proper project context when multiple directories are open Fixes potential plugin resolution issues when switching between projects by wrapping plugin loading in Instance.provide(). This ensures each plugin resolves dependencies relative to its correct project directory instead of inheriting context from whatever instance happened to be active. Also reorganizes config loading code into focused modules (command.ts, managed.ts, plugin.ts) to make the codebase easier to maintain and test. 
--- .../src/cli/cmd/tui/plugin/runtime.ts | 70 +++--- packages/opencode/src/config/command.ts | 76 ++++++ packages/opencode/src/config/config.ts | 233 +++--------------- packages/opencode/src/config/index.ts | 2 + packages/opencode/src/config/managed.ts | 71 ++++++ packages/opencode/src/config/paths.ts | 5 +- packages/opencode/test/config/config.test.ts | 29 ++- packages/shared/src/npm.ts | 43 ++-- 8 files changed, 262 insertions(+), 267 deletions(-) create mode 100644 packages/opencode/src/config/command.ts create mode 100644 packages/opencode/src/config/managed.ts diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index af37ffbd76..ac1c0fc3b8 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -16,6 +16,7 @@ import { TuiConfig } from "@/cli/cmd/tui/config/tui" import { Log } from "@/util" import { errorData, errorMessage } from "@/util/error" import { isRecord } from "@/util/record" +import { Instance } from "@/project/instance" import { readPackageThemes, readPluginId, @@ -789,7 +790,13 @@ async function addPluginBySpec(state: RuntimeState | undefined, raw: string) { state.pending.delete(spec) return true } - const ready = await resolveExternalPlugins([cfg], () => TuiConfig.waitForDependencies()) + const ready = await Instance.provide({ + directory: state.directory, + fn: () => resolveExternalPlugins([cfg], () => TuiConfig.waitForDependencies()), + }).catch((error) => { + fail("failed to add tui plugin", { path: next, error }) + return [] as PluginLoad[] + }) if (!ready.length) { return false } @@ -980,37 +987,42 @@ export namespace TuiPluginRuntime { } runtime = next try { - const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? 
[]) - if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { - log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) - } + await Instance.provide({ + directory: cwd, + fn: async () => { + const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? []) + if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { + log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) + } - for (const item of INTERNAL_TUI_PLUGINS) { - log.info("loading internal tui plugin", { id: item.id }) - const entry = loadInternalPlugin(item) - const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) - addPluginEntry(next, { - id: entry.id, - load: entry, - meta, - themes: {}, - plugin: entry.module.tui, - enabled: true, - }) - } + for (const item of INTERNAL_TUI_PLUGINS) { + log.info("loading internal tui plugin", { id: item.id }) + const entry = loadInternalPlugin(item) + const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) + addPluginEntry(next, { + id: entry.id, + load: entry, + meta, + themes: {}, + plugin: entry.module.tui, + enabled: true, + }) + } - const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) - await addExternalPluginEntries(next, ready) + const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) + await addExternalPluginEntries(next, ready) - applyInitialPluginEnabledState(next, config) - for (const plugin of next.plugins) { - if (!plugin.enabled) continue - // Keep plugin execution sequential for deterministic side effects: - // command registration order affects keybind/command precedence, - // route registration is last-wins when ids collide, - // and hook chains rely on stable plugin ordering. 
- await activatePluginEntry(next, plugin, false) - } + applyInitialPluginEnabledState(next, config) + for (const plugin of next.plugins) { + if (!plugin.enabled) continue + // Keep plugin execution sequential for deterministic side effects: + // command registration order affects keybind/command precedence, + // route registration is last-wins when ids collide, + // and hook chains rely on stable plugin ordering. + await activatePluginEntry(next, plugin, false) + } + }, + }) } catch (error) { fail("failed to load tui plugins", { directory: cwd, error }) } diff --git a/packages/opencode/src/config/command.ts b/packages/opencode/src/config/command.ts new file mode 100644 index 0000000000..4b2d58f3ff --- /dev/null +++ b/packages/opencode/src/config/command.ts @@ -0,0 +1,76 @@ +import { Log } from "../util" +import path from "path" +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" +import { Glob } from "@opencode-ai/shared/util/glob" +import { Bus } from "@/bus" +import * as ConfigMarkdown from "./markdown" +import { InvalidError } from "./paths" + +const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) + +const log = Log.create({ service: "config" }) + +function rel(item: string, patterns: string[]) { + const normalizedItem = item.replaceAll("\\", "/") + for (const pattern of patterns) { + const index = normalizedItem.indexOf(pattern) + if (index === -1) continue + return normalizedItem.slice(index + pattern.length) + } +} + +function trim(file: string) { + const ext = path.extname(file) + return ext.length ? 
file.slice(0, -ext.length) : file +} + +export namespace ConfigCommand { + export const Info = z.object({ + template: z.string(), + description: z.string().optional(), + agent: z.string().optional(), + model: ModelId.optional(), + subtask: z.boolean().optional(), + }) + + export type Info = z.infer + + export async function load(dir: string) { + const result: Record = {} + for (const item of await Glob.scan("{command,commands}/**/*.md", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + const md = await ConfigMarkdown.parse(item).catch(async (err) => { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? err.data.message + : `Failed to parse command ${item}` + const { Session } = await import("@/session") + void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load command", { command: item, err }) + return undefined + }) + if (!md) continue + + const patterns = ["/.opencode/command/", "/.opencode/commands/", "/command/", "/commands/"] + const file = rel(item, patterns) ?? 
path.basename(item) + const name = trim(file) + + const config = { + name, + ...md.data, + template: md.content.trim(), + } + const parsed = Info.safeParse(config) + if (parsed.success) { + result[config.name] = parsed.data + continue + } + throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) + } + return result + } +} diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 97e7a662d0..3922357f2e 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -2,9 +2,8 @@ import { Log } from "../util" import path from "path" import { pathToFileURL } from "url" import os from "os" -import { Process } from "../util" import z from "zod" -import { mergeDeep, pipe, unique } from "remeda" +import { mergeDeep, pipe } from "remeda" import { Global } from "../global" import fsNode from "fs/promises" import { NamedError } from "@opencode-ai/shared/util/error" @@ -35,10 +34,11 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { InstanceState } from "@/effect" import { Context, Duration, Effect, Exit, Fiber, Layer, Option } from "effect" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" - -import { isPathPluginSpec, parsePluginSpecifier, resolvePathPluginTarget } from "@/plugin/shared" import { InstanceRef } from "@/effect/instance-ref" import { Npm } from "@opencode-ai/shared/npm" +import { ConfigPlugin } from "./plugin" +import { ConfigManaged } from "./managed" +import { ConfigCommand } from "./command" const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) const PluginOptions = z.record(z.string(), z.unknown()) @@ -55,78 +55,6 @@ export type PluginOrigin = { const log = Log.create({ service: "config" }) -// Managed settings directory for enterprise deployments (highest priority, admin-controlled) -// These settings override all user and project settings -function 
systemManagedConfigDir(): string { - switch (process.platform) { - case "darwin": - return "/Library/Application Support/opencode" - case "win32": - return path.join(process.env.ProgramData || "C:\\ProgramData", "opencode") - default: - return "/etc/opencode" - } -} - -export function managedConfigDir() { - return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir() -} - -const managedDir = managedConfigDir() - -const MANAGED_PLIST_DOMAIN = "ai.opencode.managed" - -// Keys injected by macOS/MDM into the managed plist that are not OpenCode config -const PLIST_META = new Set([ - "PayloadDisplayName", - "PayloadIdentifier", - "PayloadType", - "PayloadUUID", - "PayloadVersion", - "_manualProfile", -]) - -/** - * Parse raw JSON (from plutil conversion of a managed plist) into OpenCode config. - * Strips MDM metadata keys before parsing through the config schema. - * Pure function — no OS interaction, safe to unit test directly. - */ -export function parseManagedPlist(json: string, source: string): Info { - const raw = JSON.parse(json) - for (const key of Object.keys(raw)) { - if (PLIST_META.has(key)) delete raw[key] - } - return parseConfig(JSON.stringify(raw), source) -} - -/** - * Read macOS managed preferences deployed via .mobileconfig / MDM (Jamf, Kandji, etc). - * MDM-installed profiles write to /Library/Managed Preferences/ which is only writable by root. - * User-scoped plists are checked first, then machine-scoped. 
- */ -async function readManagedPreferences(): Promise { - if (process.platform !== "darwin") return {} - - const domain = MANAGED_PLIST_DOMAIN - const user = os.userInfo().username - const paths = [ - path.join("/Library/Managed Preferences", user, `${domain}.plist`), - path.join("/Library/Managed Preferences", `${domain}.plist`), - ] - - for (const plist of paths) { - if (!existsSync(plist)) continue - log.info("reading macOS managed preferences", { path: plist }) - const result = await Process.run(["plutil", "-convert", "json", "-o", "-", plist], { nothrow: true }) - if (result.code !== 0) { - log.warn("failed to convert managed preferences plist", { path: plist }) - continue - } - return parseManagedPlist(result.stdout.toString(), `mobileconfig:${plist}`) - } - return {} -} - // Custom merge function that concatenates array fields instead of replacing them function mergeConfigConcatArrays(target: Info, source: Info): Info { const merged = mergeDeep(target, source) @@ -154,44 +82,6 @@ function trim(file: string) { return ext.length ? file.slice(0, -ext.length) : file } -async function loadCommand(dir: string) { - const result: Record = {} - for (const item of await Glob.scan("{command,commands}/**/*.md", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - const md = await ConfigMarkdown.parse(item).catch(async (err) => { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse command ${item}` - const { Session } = await import("@/session") - void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load command", { command: item, err }) - return undefined - }) - if (!md) continue - - const patterns = ["/.opencode/command/", "/.opencode/commands/", "/command/", "/commands/"] - const file = rel(item, patterns) ?? 
path.basename(item) - const name = trim(file) - - const config = { - name, - ...md.data, - template: md.content.trim(), - } - const parsed = Command.safeParse(config) - if (parsed.success) { - result[config.name] = parsed.data - continue - } - throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) - } - return result -} - async function loadAgent(dir: string) { const result: Record = {} @@ -267,60 +157,6 @@ async function loadMode(dir: string) { return result } -async function loadPlugin(dir: string) { - const plugins: PluginSpec[] = [] - - for (const item of await Glob.scan("{plugin,plugins}/*.{ts,js}", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - plugins.push(pathToFileURL(item).href) - } - return plugins -} - -export function pluginSpecifier(plugin: PluginSpec): string { - return Array.isArray(plugin) ? plugin[0] : plugin -} - -export function pluginOptions(plugin: PluginSpec): PluginOptions | undefined { - return Array.isArray(plugin) ? plugin[1] : undefined -} - -export async function resolvePluginSpec(plugin: PluginSpec, configFilepath: string): Promise { - const spec = pluginSpecifier(plugin) - if (!isPathPluginSpec(spec)) return plugin - - const base = path.dirname(configFilepath) - const file = (() => { - if (spec.startsWith("file://")) return spec - if (path.isAbsolute(spec) || /^[A-Za-z]:[\\/]/.test(spec)) return pathToFileURL(spec).href - return pathToFileURL(path.resolve(base, spec)).href - })() - - const resolved = await resolvePathPluginTarget(file).catch(() => file) - - if (Array.isArray(plugin)) return [resolved, plugin[1]] - return resolved -} - -export function deduplicatePluginOrigins(plugins: PluginOrigin[]): PluginOrigin[] { - const seen = new Set() - const list: PluginOrigin[] = [] - - for (const plugin of plugins.toReversed()) { - const spec = pluginSpecifier(plugin.spec) - const name = spec.startsWith("file://") ? 
spec : parsePluginSpecifier(spec).pkg - if (seen.has(name)) continue - seen.add(name) - list.push(plugin) - } - - return list.toReversed() -} - export const McpLocal = z .object({ type: z.literal("local").describe("Type of MCP server connection"), @@ -453,15 +289,6 @@ export const Permission = z }) export type Permission = z.infer -export const Command = z.object({ - template: z.string(), - description: z.string().optional(), - agent: z.string().optional(), - model: ModelId.optional(), - subtask: z.boolean().optional(), -}) -export type Command = z.infer - export const Skills = z.object({ paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), urls: z @@ -854,7 +681,7 @@ export const Info = z logLevel: Log.Level.optional().describe("Log level"), server: Server.optional().describe("Server configuration for opencode serve and web commands"), command: z - .record(z.string(), Command) + .record(z.string(), ConfigCommand.Info) .optional() .describe("Command configuration, see https://opencode.ai/docs/commands"), skills: Skills.optional().describe("Additional skill folder paths"), @@ -1095,7 +922,7 @@ function writable(info: Info) { return next } -function parseConfig(text: string, filepath: string): Info { +export function parseConfig(text: string, filepath: string): Info { const errors: JsoncParseError[] = [] const data = parseJsonc(text, errors, { allowTrailingComma: true }) if (errors.length) { @@ -1193,7 +1020,7 @@ export const layer = Layer.effect( if (data.plugin && isFile) { const list = data.plugin for (let i = 0; i < list.length; i++) { - list[i] = yield* Effect.promise(() => resolvePluginSpec(list[i], options.path)) + list[i] = yield* Effect.promise(() => ConfigPlugin.resolvePluginSpec(list[i], options.path)) } } return data @@ -1253,7 +1080,7 @@ export const layer = Layer.effect( return yield* cachedGlobal }) - const setupConfigDir = Effect.fnUntraced(function* (dir: string) { + const ensureGitignore = 
Effect.fn("Config.ensureGitignore")(function* (dir: string) { const gitignore = path.join(dir, ".gitignore") const hasIgnore = yield* fs.existsSafe(gitignore) if (!hasIgnore) { @@ -1262,9 +1089,6 @@ export const layer = Layer.effect( ["node_modules", "package.json", "package-lock.json", "bun.lock", ".gitignore"].join("\n"), ) } - yield* npmSvc.install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], - }) }) const loadInstanceState = Effect.fn("Config.loadInstanceState")(function* (ctx: InstanceContext) { @@ -1284,7 +1108,7 @@ export const layer = Layer.effect( const track = Effect.fnUntraced(function* (source: string, list: PluginSpec[] | undefined, kind?: PluginScope) { if (!list?.length) return const hit = kind ?? (yield* scope(source)) - const plugins = deduplicatePluginOrigins([ + const plugins = ConfigPlugin.deduplicatePluginOrigins([ ...(result.plugin_origins ?? []), ...list.map((spec) => ({ spec, source, scope: hit })), ]) @@ -1347,7 +1171,7 @@ export const layer = Layer.effect( const deps: Fiber.Fiber[] = [] - for (const dir of unique(directories)) { + for (const dir of directories) { if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) { for (const file of ["opencode.json", "opencode.jsonc"]) { const source = path.join(dir, file) @@ -1359,24 +1183,30 @@ export const layer = Layer.effect( } } - const dep = yield* setupConfigDir(dir).pipe( - Effect.exit, - Effect.tap((exit) => - Exit.isFailure(exit) - ? Effect.sync(() => { - log.warn("background dependency install failed", { dir, error: String(exit.cause) }) - }) - : Effect.void, - ), - Effect.asVoid, - Effect.forkScoped, - ) + yield* ensureGitignore(dir).pipe(Effect.forkScoped) + + const dep = yield* npmSvc + .install(dir, { + add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], + }) + .pipe( + Effect.exit, + Effect.tap((exit) => + Exit.isFailure(exit) + ? 
Effect.sync(() => { + log.warn("background dependency install failed", { dir, error: String(exit.cause) }) + }) + : Effect.void, + ), + Effect.asVoid, + Effect.forkDetach, + ) deps.push(dep) - result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => loadCommand(dir))) + result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) result.agent = mergeDeep(result.agent, yield* Effect.promise(() => loadAgent(dir))) result.agent = mergeDeep(result.agent, yield* Effect.promise(() => loadMode(dir))) - const list = yield* Effect.promise(() => loadPlugin(dir)) + const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) yield* track(dir, list) } @@ -1429,6 +1259,7 @@ export const layer = Layer.effect( ) } + const managedDir = ConfigManaged.managedConfigDir() if (existsSync(managedDir)) { for (const file of ["opencode.json", "opencode.jsonc"]) { const source = path.join(managedDir, file) @@ -1437,7 +1268,7 @@ export const layer = Layer.effect( } // macOS managed preferences (.mobileconfig deployed via MDM) override everything - result = mergeConfigConcatArrays(result, yield* Effect.promise(() => readManagedPreferences())) + result = mergeConfigConcatArrays(result, yield* Effect.promise(() => ConfigManaged.readManagedPreferences())) for (const [name, mode] of Object.entries(result.mode ?? {})) { result.agent = mergeDeep(result.agent ?? 
{}, { diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index fbcca1aa9a..8380d370d8 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -1,3 +1,5 @@ export * as Config from "./config" +export * as ConfigCommand from "./command" +export { ConfigManaged } from "./managed" export * as ConfigMarkdown from "./markdown" export * as ConfigPaths from "./paths" diff --git a/packages/opencode/src/config/managed.ts b/packages/opencode/src/config/managed.ts new file mode 100644 index 0000000000..61c535185f --- /dev/null +++ b/packages/opencode/src/config/managed.ts @@ -0,0 +1,71 @@ +import { existsSync } from "fs" +import os from "os" +import path from "path" +import { type Info, parseConfig } from "./config" +import { Log, Process } from "../util" + +const log = Log.create({ service: "config" }) + +const MANAGED_PLIST_DOMAIN = "ai.opencode.managed" + +// Keys injected by macOS/MDM into the managed plist that are not OpenCode config +const PLIST_META = new Set([ + "PayloadDisplayName", + "PayloadIdentifier", + "PayloadType", + "PayloadUUID", + "PayloadVersion", + "_manualProfile", +]) + +function systemManagedConfigDir(): string { + switch (process.platform) { + case "darwin": + return "/Library/Application Support/opencode" + case "win32": + return path.join(process.env.ProgramData || "C:\\ProgramData", "opencode") + default: + return "/etc/opencode" + } +} + +function managedConfigDir() { + return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir() +} + +function parseManagedPlist(json: string, source: string): Info { + const raw = JSON.parse(json) + for (const key of Object.keys(raw)) { + if (PLIST_META.has(key)) delete raw[key] + } + return parseConfig(JSON.stringify(raw), source) +} + +async function readManagedPreferences(): Promise { + if (process.platform !== "darwin") return {} + + const user = os.userInfo().username + const paths = [ + 
path.join("/Library/Managed Preferences", user, `${MANAGED_PLIST_DOMAIN}.plist`), + path.join("/Library/Managed Preferences", `${MANAGED_PLIST_DOMAIN}.plist`), + ] + + for (const plist of paths) { + if (!existsSync(plist)) continue + log.info("reading macOS managed preferences", { path: plist }) + const result = await Process.run(["plutil", "-convert", "json", "-o", "-", plist], { nothrow: true }) + if (result.code !== 0) { + log.warn("failed to convert managed preferences plist", { path: plist }) + continue + } + return parseManagedPlist(result.stdout.toString(), `mobileconfig:${plist}`) + } + + return {} +} + +export const ConfigManaged = { + managedConfigDir, + parseManagedPlist, + readManagedPreferences, +} diff --git a/packages/opencode/src/config/paths.ts b/packages/opencode/src/config/paths.ts index eeb9d62d3f..fabd3fd5f8 100644 --- a/packages/opencode/src/config/paths.ts +++ b/packages/opencode/src/config/paths.ts @@ -6,13 +6,14 @@ import { NamedError } from "@opencode-ai/shared/util/error" import { Filesystem } from "@/util" import { Flag } from "@/flag/flag" import { Global } from "@/global" +import { unique } from "remeda" export async function projectFiles(name: string, directory: string, worktree?: string) { return Filesystem.findUp([`${name}.json`, `${name}.jsonc`], directory, worktree, { rootFirst: true }) } export async function directories(directory: string, worktree?: string) { - return [ + return unique([ Global.Path.config, ...(!Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? await Array.fromAsync( @@ -31,7 +32,7 @@ export async function directories(directory: string, worktree?: string) { }), )), ...(Flag.OPENCODE_CONFIG_DIR ? 
[Flag.OPENCODE_CONFIG_DIR] : []), - ] + ]) } export function fileInDirectory(dir: string, name: string) { diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index 1f36312447..303fa8ba08 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -1,7 +1,7 @@ -import { test, expect, describe, mock, afterEach, beforeEach, spyOn } from "bun:test" -import { Deferred, Effect, Fiber, Layer, Option } from "effect" +import { test, expect, describe, mock, afterEach, beforeEach } from "bun:test" +import { Effect, Layer, Option } from "effect" import { NodeFileSystem, NodePath } from "@effect/platform-node" -import { Config } from "../../src/config" +import { Config, ConfigManaged } from "../../src/config" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" import { Instance } from "../../src/project/instance" @@ -10,7 +10,7 @@ import { AccessToken, Account, AccountID, OrgID } from "../../src/account" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { Env } from "../../src/env" import { provideTmpdirInstance } from "../fixture/fixture" -import { tmpdir, tmpdirScoped } from "../fixture/fixture" +import { tmpdir } from "../fixture/fixture" import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner" import { testEffect } from "../lib/effect" @@ -24,7 +24,6 @@ import { pathToFileURL } from "url" import { Global } from "../../src/global" import { ProjectID } from "../../src/project/schema" import { Filesystem } from "../../src/util" -import * as Network from "../../src/util/network" import { ConfigPlugin } from "@/config/plugin" import { Npm } from "@opencode-ai/shared/npm" @@ -1860,14 +1859,14 @@ describe("resolvePluginSpec", () => { }) const file = path.join(tmp.path, "opencode.json") - const hit = await Config.resolvePluginSpec("./plugin", file) - expect(Config.pluginSpecifier(hit)).toBe(pathToFileURL(path.join(tmp.path, 
"plugin", "index.ts")).href) + const hit = await ConfigPlugin.resolvePluginSpec("./plugin", file) + expect(ConfigPlugin.pluginSpecifier(hit)).toBe(pathToFileURL(path.join(tmp.path, "plugin", "index.ts")).href) }) }) describe("deduplicatePluginOrigins", () => { const dedupe = (plugins: Config.PluginSpec[]) => - Config.deduplicatePluginOrigins( + ConfigPlugin.deduplicatePluginOrigins( plugins.map((spec) => ({ spec, source: "", @@ -1937,8 +1936,8 @@ describe("deduplicatePluginOrigins", () => { const config = await load() const plugins = config.plugin ?? [] - expect(plugins.some((p) => Config.pluginSpecifier(p) === "my-plugin@1.0.0")).toBe(true) - expect(plugins.some((p) => Config.pluginSpecifier(p).startsWith("file://"))).toBe(true) + expect(plugins.some((p) => ConfigPlugin.pluginSpecifier(p) === "my-plugin@1.0.0")).toBe(true) + expect(plugins.some((p) => ConfigPlugin.pluginSpecifier(p).startsWith("file://"))).toBe(true) }, }) }) @@ -2209,7 +2208,7 @@ describe("OPENCODE_CONFIG_CONTENT token substitution", () => { // parseManagedPlist unit tests — pure function, no OS interaction test("parseManagedPlist strips MDM metadata keys", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( JSON.stringify({ PayloadDisplayName: "OpenCode Managed", PayloadIdentifier: "ai.opencode.managed.test", @@ -2231,7 +2230,7 @@ test("parseManagedPlist strips MDM metadata keys", async () => { }) test("parseManagedPlist parses server settings", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( JSON.stringify({ $schema: "https://opencode.ai/config.json", server: { hostname: "127.0.0.1", mdns: false }, @@ -2245,7 +2244,7 @@ test("parseManagedPlist parses server settings", async () => { }) test("parseManagedPlist parses permission rules", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( 
JSON.stringify({ $schema: "https://opencode.ai/config.json", permission: { @@ -2269,7 +2268,7 @@ test("parseManagedPlist parses permission rules", async () => { }) test("parseManagedPlist parses enabled_providers", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( JSON.stringify({ $schema: "https://opencode.ai/config.json", enabled_providers: ["anthropic", "google"], @@ -2280,7 +2279,7 @@ test("parseManagedPlist parses enabled_providers", async () => { }) test("parseManagedPlist handles empty config", async () => { - const config = await Config.parseManagedPlist( + const config = await ConfigManaged.parseManagedPlist( JSON.stringify({ $schema: "https://opencode.ai/config.json" }), "test:mobileconfig", ) diff --git a/packages/shared/src/npm.ts b/packages/shared/src/npm.ts index 955cafa190..e4f42227de 100644 --- a/packages/shared/src/npm.ts +++ b/packages/shared/src/npm.ts @@ -142,7 +142,7 @@ export namespace Npm { yield* flock.acquire(`npm-install:${dir}`) - const reify = Effect.fnUntraced(function* () { + const reify = Effect.fn("Npm.reify")(function* () { const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) const arb = new Arborist({ path: dir, @@ -176,28 +176,31 @@ export namespace Npm { const pkgAny = pkg as any const lockAny = lock as any - const declared = new Set([ - ...Object.keys(pkgAny?.dependencies || {}), - ...Object.keys(pkgAny?.devDependencies || {}), - ...Object.keys(pkgAny?.peerDependencies || {}), - ...Object.keys(pkgAny?.optionalDependencies || {}), - ...(input?.add || []), - ]) + yield* Effect.gen(function* () { + const declared = new Set([ + ...Object.keys(pkgAny?.dependencies || {}), + ...Object.keys(pkgAny?.devDependencies || {}), + ...Object.keys(pkgAny?.peerDependencies || {}), + ...Object.keys(pkgAny?.optionalDependencies || {}), + ...(input?.add || []), + ]) - const root = lockAny?.packages?.[""] || {} - const locked = new Set([ - 
...Object.keys(root?.dependencies || {}), - ...Object.keys(root?.devDependencies || {}), - ...Object.keys(root?.peerDependencies || {}), - ...Object.keys(root?.optionalDependencies || {}), - ]) + const root = lockAny?.packages?.[""] || {} + const locked = new Set([ + ...Object.keys(root?.dependencies || {}), + ...Object.keys(root?.devDependencies || {}), + ...Object.keys(root?.peerDependencies || {}), + ...Object.keys(root?.optionalDependencies || {}), + ]) - for (const name of declared) { - if (!locked.has(name)) { - yield* reify() - return + for (const name of declared) { + if (!locked.has(name)) { + yield* reify() + return + } } - } + }).pipe(Effect.withSpan("Npm.checkDirty")) + return }, Effect.scoped) const which = Effect.fn("Npm.which")(function* (pkg: string) { From 33bb847a1dfb5e79b4815813739671a40afa0e51 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 12:40:16 -0400 Subject: [PATCH 013/201] config: refactor --- packages/opencode/src/acp/agent.ts | 3 +- packages/opencode/src/cli/cmd/mcp.ts | 9 +- packages/opencode/src/config/agent.ts | 171 ++++++++ packages/opencode/src/config/command.ts | 102 ++--- packages/opencode/src/config/config.ts | 401 ++---------------- packages/opencode/src/config/entry-name.ts | 16 + packages/opencode/src/config/index.ts | 4 + packages/opencode/src/config/mcp.ts | 70 +++ packages/opencode/src/config/model-id.ts | 3 + packages/opencode/src/config/permission.ts | 68 +++ packages/opencode/src/mcp/mcp.ts | 25 +- .../opencode/src/permission/permission.ts | 4 +- packages/opencode/src/server/instance/mcp.ts | 3 +- packages/opencode/test/config/config.test.ts | 5 +- 14 files changed, 449 insertions(+), 435 deletions(-) create mode 100644 packages/opencode/src/config/agent.ts create mode 100644 packages/opencode/src/config/entry-name.ts create mode 100644 packages/opencode/src/config/mcp.ts create mode 100644 packages/opencode/src/config/model-id.ts create mode 100644 packages/opencode/src/config/permission.ts diff --git 
a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts index 53bc7ed5fb..9388c87f12 100644 --- a/packages/opencode/src/acp/agent.ts +++ b/packages/opencode/src/acp/agent.ts @@ -44,6 +44,7 @@ import { AppRuntime } from "@/effect/app-runtime" import { Installation } from "@/installation" import { MessageV2 } from "@/session/message-v2" import { Config } from "@/config" +import { ConfigMCP } from "@/config/mcp" import { Todo } from "@/session/todo" import { z } from "zod" import { LoadAPIKeyError } from "ai" @@ -1213,7 +1214,7 @@ export namespace ACP { description: "compact the session", }) - const mcpServers: Record = {} + const mcpServers: Record = {} for (const server of params.mcpServers) { if ("type" in server) { mcpServers[server.name] = { diff --git a/packages/opencode/src/cli/cmd/mcp.ts b/packages/opencode/src/cli/cmd/mcp.ts index dc6d5e8896..a5751ce836 100644 --- a/packages/opencode/src/cli/cmd/mcp.ts +++ b/packages/opencode/src/cli/cmd/mcp.ts @@ -8,6 +8,7 @@ import { MCP } from "../../mcp" import { McpAuth } from "../../mcp/auth" import { McpOAuthProvider } from "../../mcp/oauth-provider" import { Config } from "../../config" +import { ConfigMCP } from "../../config/mcp" import { Instance } from "../../project/instance" import { Installation } from "../../installation" import { InstallationVersion } from "../../installation/version" @@ -43,7 +44,7 @@ function getAuthStatusText(status: MCP.AuthStatus): string { type McpEntry = NonNullable[string] -type McpConfigured = Config.Mcp +type McpConfigured = ConfigMCP.Info function isMcpConfigured(config: McpEntry): config is McpConfigured { return typeof config === "object" && config !== null && "type" in config } @@ -426,7 +427,7 @@ async function resolveConfigPath(baseDir: string, global = false) { return candidates[0] } -async function addMcpToConfig(name: string, mcpConfig: Config.Mcp, configPath: string) { +async function addMcpToConfig(name: string, mcpConfig: ConfigMCP.Info, configPath: 
string) { let text = "{}" if (await Filesystem.exists(configPath)) { text = await Filesystem.readText(configPath) @@ -514,7 +515,7 @@ export const McpAddCommand = cmd({ }) if (prompts.isCancel(command)) throw new UI.CancelledError() - const mcpConfig: Config.Mcp = { + const mcpConfig: ConfigMCP.Info = { type: "local", command: command.split(" "), } @@ -544,7 +545,7 @@ export const McpAddCommand = cmd({ }) if (prompts.isCancel(useOAuth)) throw new UI.CancelledError() - let mcpConfig: Config.Mcp + let mcpConfig: ConfigMCP.Info if (useOAuth) { const hasClientId = await prompts.confirm({ diff --git a/packages/opencode/src/config/agent.ts b/packages/opencode/src/config/agent.ts new file mode 100644 index 0000000000..3819368e82 --- /dev/null +++ b/packages/opencode/src/config/agent.ts @@ -0,0 +1,171 @@ +export * as ConfigAgent from "./agent" + +import { Log } from "../util" +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" +import { Glob } from "@opencode-ai/shared/util/glob" +import { Bus } from "@/bus" +import { configEntryNameFromPath } from "./entry-name" +import * as ConfigMarkdown from "./markdown" +import { ConfigModelID } from "./model-id" +import { InvalidError } from "./paths" +import { ConfigPermission } from "./permission" + +const log = Log.create({ service: "config" }) + +export const Info = z + .object({ + model: ConfigModelID.optional(), + variant: z + .string() + .optional() + .describe("Default model variant for this agent (applies only when using the agent's configured model)."), + temperature: z.number().optional(), + top_p: z.number().optional(), + prompt: z.string().optional(), + tools: z.record(z.string(), z.boolean()).optional().describe("@deprecated Use 'permission' field instead"), + disable: z.boolean().optional(), + description: z.string().optional().describe("Description of when to use the agent"), + mode: z.enum(["subagent", "primary", "all"]).optional(), + hidden: z + .boolean() + .optional() + 
.describe("Hide this subagent from the @ autocomplete menu (default: false, only applies to mode: subagent)"), + options: z.record(z.string(), z.any()).optional(), + color: z + .union([ + z.string().regex(/^#[0-9a-fA-F]{6}$/, "Invalid hex color format"), + z.enum(["primary", "secondary", "accent", "success", "warning", "error", "info"]), + ]) + .optional() + .describe("Hex color code (e.g., #FF5733) or theme color (e.g., primary)"), + steps: z + .number() + .int() + .positive() + .optional() + .describe("Maximum number of agentic iterations before forcing text-only response"), + maxSteps: z.number().int().positive().optional().describe("@deprecated Use 'steps' field instead."), + permission: ConfigPermission.Info.optional(), + }) + .catchall(z.any()) + .transform((agent, _ctx) => { + const knownKeys = new Set([ + "name", + "model", + "variant", + "prompt", + "description", + "temperature", + "top_p", + "mode", + "hidden", + "color", + "steps", + "maxSteps", + "options", + "permission", + "disable", + "tools", + ]) + + const options: Record = { ...agent.options } + for (const [key, value] of Object.entries(agent)) { + if (!knownKeys.has(key)) options[key] = value + } + + const permission: ConfigPermission.Info = {} + for (const [tool, enabled] of Object.entries(agent.tools ?? {})) { + const action = enabled ? "allow" : "deny" + if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { + permission.edit = action + continue + } + permission[tool] = action + } + Object.assign(permission, agent.permission) + + const steps = agent.steps ?? 
agent.maxSteps + + return { ...agent, options, permission, steps } as typeof agent & { + options?: Record + permission?: ConfigPermission.Info + steps?: number + } + }) + .meta({ + ref: "AgentConfig", + }) +export type Info = z.infer + +export async function load(dir: string) { + const result: Record = {} + for (const item of await Glob.scan("{agent,agents}/**/*.md", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + const md = await ConfigMarkdown.parse(item).catch(async (err) => { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? err.data.message + : `Failed to parse agent ${item}` + const { Session } = await import("@/session") + void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load agent", { agent: item, err }) + return undefined + }) + if (!md) continue + + const patterns = ["/.opencode/agent/", "/.opencode/agents/", "/agent/", "/agents/"] + const name = configEntryNameFromPath(item, patterns) + + const config = { + name, + ...md.data, + prompt: md.content.trim(), + } + const parsed = Info.safeParse(config) + if (parsed.success) { + result[config.name] = parsed.data + continue + } + throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) + } + return result +} + +export async function loadMode(dir: string) { + const result: Record = {} + for (const item of await Glob.scan("{mode,modes}/*.md", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + const md = await ConfigMarkdown.parse(item).catch(async (err) => { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? 
err.data.message + : `Failed to parse mode ${item}` + const { Session } = await import("@/session") + void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load mode", { mode: item, err }) + return undefined + }) + if (!md) continue + + const config = { + name: configEntryNameFromPath(item, []), + ...md.data, + prompt: md.content.trim(), + } + const parsed = Info.safeParse(config) + if (parsed.success) { + result[config.name] = { + ...parsed.data, + mode: "primary" as const, + } + } + } + return result +} diff --git a/packages/opencode/src/config/command.ts b/packages/opencode/src/config/command.ts index 4b2d58f3ff..5606bdd4c7 100644 --- a/packages/opencode/src/config/command.ts +++ b/packages/opencode/src/config/command.ts @@ -1,76 +1,60 @@ +export * as ConfigCommand from "./command" + import { Log } from "../util" -import path from "path" import z from "zod" import { NamedError } from "@opencode-ai/shared/util/error" import { Glob } from "@opencode-ai/shared/util/glob" import { Bus } from "@/bus" +import { configEntryNameFromPath } from "./entry-name" import * as ConfigMarkdown from "./markdown" +import { ConfigModelID } from "./model-id" import { InvalidError } from "./paths" -const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) - const log = Log.create({ service: "config" }) -function rel(item: string, patterns: string[]) { - const normalizedItem = item.replaceAll("\\", "/") - for (const pattern of patterns) { - const index = normalizedItem.indexOf(pattern) - if (index === -1) continue - return normalizedItem.slice(index + pattern.length) - } -} +export const Info = z.object({ + template: z.string(), + description: z.string().optional(), + agent: z.string().optional(), + model: ConfigModelID.optional(), + subtask: z.boolean().optional(), +}) -function trim(file: string) { - const ext = path.extname(file) - return ext.length ? 
file.slice(0, -ext.length) : file -} +export type Info = z.infer -export namespace ConfigCommand { - export const Info = z.object({ - template: z.string(), - description: z.string().optional(), - agent: z.string().optional(), - model: ModelId.optional(), - subtask: z.boolean().optional(), - }) +export async function load(dir: string) { + const result: Record = {} + for (const item of await Glob.scan("{command,commands}/**/*.md", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + const md = await ConfigMarkdown.parse(item).catch(async (err) => { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? err.data.message + : `Failed to parse command ${item}` + const { Session } = await import("@/session") + void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load command", { command: item, err }) + return undefined + }) + if (!md) continue - export type Info = z.infer + const patterns = ["/.opencode/command/", "/.opencode/commands/", "/command/", "/commands/"] + const name = configEntryNameFromPath(item, patterns) - export async function load(dir: string) { - const result: Record = {} - for (const item of await Glob.scan("{command,commands}/**/*.md", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - const md = await ConfigMarkdown.parse(item).catch(async (err) => { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse command ${item}` - const { Session } = await import("@/session") - void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load command", { command: item, err }) - return undefined - }) - if (!md) continue - - const patterns = ["/.opencode/command/", "/.opencode/commands/", "/command/", "/commands/"] - const file = rel(item, patterns) ?? 
path.basename(item) - const name = trim(file) - - const config = { - name, - ...md.data, - template: md.content.trim(), - } - const parsed = Info.safeParse(config) - if (parsed.success) { - result[config.name] = parsed.data - continue - } - throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) + const config = { + name, + ...md.data, + template: md.content.trim(), } - return result + const parsed = Info.safeParse(config) + if (parsed.success) { + result[config.name] = parsed.data + continue + } + throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) } + return result } diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 3922357f2e..92d66cf2bb 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -20,12 +20,9 @@ import { import { Instance, type InstanceContext } from "../project/instance" import * as LSPServer from "../lsp/server" import { InstallationLocal, InstallationVersion } from "@/installation/version" -import * as ConfigMarkdown from "./markdown" import { existsSync } from "fs" -import { Bus } from "@/bus" import { GlobalBus } from "@/bus/global" import { Event } from "../server/event" -import { Glob } from "@opencode-ai/shared/util/glob" import { Account } from "@/account" import { isRecord } from "@/util/record" import * as ConfigPaths from "./paths" @@ -36,22 +33,13 @@ import { Context, Duration, Effect, Exit, Fiber, Layer, Option } from "effect" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" import { InstanceRef } from "@/effect/instance-ref" import { Npm } from "@opencode-ai/shared/npm" +import { ConfigAgent } from "./agent" +import { ConfigMCP } from "./mcp" +import { ConfigModelID } from "./model-id" import { ConfigPlugin } from "./plugin" import { ConfigManaged } from "./managed" import { ConfigCommand } from "./command" - -const ModelId = z.string().meta({ 
$ref: "https://models.dev/model-schema.json#/$defs/Model" }) -const PluginOptions = z.record(z.string(), z.unknown()) -export const PluginSpec = z.union([z.string(), z.tuple([z.string(), PluginOptions])]) - -export type PluginOptions = z.infer -export type PluginSpec = z.infer -export type PluginScope = "global" | "local" -export type PluginOrigin = { - spec: PluginSpec - source: string - scope: PluginScope -} +import { ConfigPermission } from "./permission" const log = Log.create({ service: "config" }) @@ -64,231 +52,6 @@ function mergeConfigConcatArrays(target: Info, source: Info): Info { return merged } -export type InstallInput = { - waitTick?: (input: { dir: string; attempt: number; delay: number; waited: number }) => void | Promise -} - -function rel(item: string, patterns: string[]) { - const normalizedItem = item.replaceAll("\\", "/") - for (const pattern of patterns) { - const index = normalizedItem.indexOf(pattern) - if (index === -1) continue - return normalizedItem.slice(index + pattern.length) - } -} - -function trim(file: string) { - const ext = path.extname(file) - return ext.length ? file.slice(0, -ext.length) : file -} - -async function loadAgent(dir: string) { - const result: Record = {} - - for (const item of await Glob.scan("{agent,agents}/**/*.md", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - const md = await ConfigMarkdown.parse(item).catch(async (err) => { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse agent ${item}` - const { Session } = await import("@/session") - void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load agent", { agent: item, err }) - return undefined - }) - if (!md) continue - - const patterns = ["/.opencode/agent/", "/.opencode/agents/", "/agent/", "/agents/"] - const file = rel(item, patterns) ?? 
path.basename(item) - const agentName = trim(file) - - const config = { - name: agentName, - ...md.data, - prompt: md.content.trim(), - } - const parsed = Agent.safeParse(config) - if (parsed.success) { - result[config.name] = parsed.data - continue - } - throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error }) - } - return result -} - -async function loadMode(dir: string) { - const result: Record = {} - for (const item of await Glob.scan("{mode,modes}/*.md", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - const md = await ConfigMarkdown.parse(item).catch(async (err) => { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse mode ${item}` - const { Session } = await import("@/session") - void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load mode", { mode: item, err }) - return undefined - }) - if (!md) continue - - const config = { - name: path.basename(item, ".md"), - ...md.data, - prompt: md.content.trim(), - } - const parsed = Agent.safeParse(config) - if (parsed.success) { - result[config.name] = { - ...parsed.data, - mode: "primary" as const, - } - continue - } - } - return result -} - -export const McpLocal = z - .object({ - type: z.literal("local").describe("Type of MCP server connection"), - command: z.string().array().describe("Command and arguments to run the MCP server"), - environment: z - .record(z.string(), z.string()) - .optional() - .describe("Environment variables to set when running the MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpLocalConfig", - }) - -export const McpOAuth = z - .object({ - clientId: z - .string() - .optional() - .describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."), - clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), - scope: z.string().optional().describe("OAuth scopes to request during authorization"), - redirectUri: z - .string() - .optional() - .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), - }) - .strict() - .meta({ - ref: "McpOAuthConfig", - }) -export type McpOAuth = z.infer - -export const McpRemote = z - .object({ - type: z.literal("remote").describe("Type of MCP server connection"), - url: z.string().describe("URL of the remote MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), - oauth: z - .union([McpOAuth, z.literal(false)]) - .optional() - .describe("OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection."), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpRemoteConfig", - }) - -export const Mcp = z.discriminatedUnion("type", [McpLocal, McpRemote]) -export type Mcp = z.infer - -export const PermissionAction = z.enum(["ask", "allow", "deny"]).meta({ - ref: "PermissionActionConfig", -}) -export type PermissionAction = z.infer - -export const PermissionObject = z.record(z.string(), PermissionAction).meta({ - ref: "PermissionObjectConfig", -}) -export type PermissionObject = z.infer - -export const PermissionRule = z.union([PermissionAction, PermissionObject]).meta({ - ref: "PermissionRuleConfig", -}) -export type PermissionRule = z.infer - -// Capture original key order before zod reorders, then rebuild in original order -const permissionPreprocess = (val: unknown) => { - if (typeof val === "object" && val !== null && !Array.isArray(val)) { - return { __originalKeys: Object.keys(val), ...val } - } - return val -} - -const permissionTransform = (x: unknown): Record => { - if (typeof x === "string") return { "*": x as PermissionAction } - const obj = x as { __originalKeys?: string[] } & Record - const { __originalKeys, ...rest } = obj - if (!__originalKeys) return rest as Record - const result: Record = {} - for (const key of __originalKeys) { - if (key in rest) result[key] = rest[key] as PermissionRule - } - return result -} - -export const Permission = z - .preprocess( - permissionPreprocess, - z - .object({ - __originalKeys: z.string().array().optional(), - read: PermissionRule.optional(), - edit: PermissionRule.optional(), - glob: PermissionRule.optional(), - grep: PermissionRule.optional(), - list: PermissionRule.optional(), - bash: PermissionRule.optional(), - task: PermissionRule.optional(), - external_directory: PermissionRule.optional(), - todowrite: PermissionAction.optional(), - question: PermissionAction.optional(), - webfetch: PermissionAction.optional(), - websearch: PermissionAction.optional(), - codesearch: 
PermissionAction.optional(), - lsp: PermissionRule.optional(), - doom_loop: PermissionAction.optional(), - skill: PermissionRule.optional(), - }) - .catchall(PermissionRule) - .or(PermissionAction), - ) - .transform(permissionTransform) - .meta({ - ref: "PermissionConfig", - }) -export type Permission = z.infer - export const Skills = z.object({ paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), urls: z @@ -298,95 +61,6 @@ export const Skills = z.object({ }) export type Skills = z.infer -export const Agent = z - .object({ - model: ModelId.optional(), - variant: z - .string() - .optional() - .describe("Default model variant for this agent (applies only when using the agent's configured model)."), - temperature: z.number().optional(), - top_p: z.number().optional(), - prompt: z.string().optional(), - tools: z.record(z.string(), z.boolean()).optional().describe("@deprecated Use 'permission' field instead"), - disable: z.boolean().optional(), - description: z.string().optional().describe("Description of when to use the agent"), - mode: z.enum(["subagent", "primary", "all"]).optional(), - hidden: z - .boolean() - .optional() - .describe("Hide this subagent from the @ autocomplete menu (default: false, only applies to mode: subagent)"), - options: z.record(z.string(), z.any()).optional(), - color: z - .union([ - z.string().regex(/^#[0-9a-fA-F]{6}$/, "Invalid hex color format"), - z.enum(["primary", "secondary", "accent", "success", "warning", "error", "info"]), - ]) - .optional() - .describe("Hex color code (e.g., #FF5733) or theme color (e.g., primary)"), - steps: z - .number() - .int() - .positive() - .optional() - .describe("Maximum number of agentic iterations before forcing text-only response"), - maxSteps: z.number().int().positive().optional().describe("@deprecated Use 'steps' field instead."), - permission: Permission.optional(), - }) - .catchall(z.any()) - .transform((agent, _ctx) => { - const knownKeys = new Set([ - "name", - 
"model", - "variant", - "prompt", - "description", - "temperature", - "top_p", - "mode", - "hidden", - "color", - "steps", - "maxSteps", - "options", - "permission", - "disable", - "tools", - ]) - - // Extract unknown properties into options - const options: Record = { ...agent.options } - for (const [key, value] of Object.entries(agent)) { - if (!knownKeys.has(key)) options[key] = value - } - - // Convert legacy tools config to permissions - const permission: Permission = {} - for (const [tool, enabled] of Object.entries(agent.tools ?? {})) { - const action = enabled ? "allow" : "deny" - // write, edit, patch, multiedit all map to edit permission - if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { - permission.edit = action - } else { - permission[tool] = action - } - } - Object.assign(permission, agent.permission) - - // Convert legacy maxSteps to steps - const steps = agent.steps ?? agent.maxSteps - - return { ...agent, options, permission, steps } as typeof agent & { - options?: Record - permission?: Permission - steps?: number - } - }) - .meta({ - ref: "AgentConfig", - }) -export type Agent = z.infer - export const Keybinds = z .object({ leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"), @@ -696,7 +370,7 @@ export const Info = z .describe( "Enable or disable snapshot tracking. When false, filesystem snapshots are not recorded and undoing or reverting will not undo/redo file changes. Defaults to true.", ), - plugin: PluginSpec.array().optional(), + plugin: ConfigPlugin.Spec.array().optional(), share: z .enum(["manual", "auto", "disabled"]) .optional() @@ -718,8 +392,8 @@ export const Info = z .array(z.string()) .optional() .describe("When set, ONLY these providers will be enabled. 
All other providers will be ignored"), - model: ModelId.describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(), - small_model: ModelId.describe( + model: ConfigModelID.describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(), + small_model: ConfigModelID.describe( "Small model to use for tasks like title generation in the format of provider/model", ).optional(), default_agent: z @@ -731,26 +405,26 @@ export const Info = z username: z.string().optional().describe("Custom username to display in conversations instead of system username"), mode: z .object({ - build: Agent.optional(), - plan: Agent.optional(), + build: ConfigAgent.Info.optional(), + plan: ConfigAgent.Info.optional(), }) - .catchall(Agent) + .catchall(ConfigAgent.Info) .optional() .describe("@deprecated Use `agent` field instead."), agent: z .object({ // primary - plan: Agent.optional(), - build: Agent.optional(), + plan: ConfigAgent.Info.optional(), + build: ConfigAgent.Info.optional(), // subagent - general: Agent.optional(), - explore: Agent.optional(), + general: ConfigAgent.Info.optional(), + explore: ConfigAgent.Info.optional(), // specialized - title: Agent.optional(), - summary: Agent.optional(), - compaction: Agent.optional(), + title: ConfigAgent.Info.optional(), + summary: ConfigAgent.Info.optional(), + compaction: ConfigAgent.Info.optional(), }) - .catchall(Agent) + .catchall(ConfigAgent.Info) .optional() .describe("Agent configuration, see https://opencode.ai/docs/agents"), provider: z.record(z.string(), Provider).optional().describe("Custom provider configurations and model overrides"), @@ -758,7 +432,7 @@ export const Info = z .record( z.string(), z.union([ - Mcp, + ConfigMCP.Info, z .object({ enabled: z.boolean(), @@ -820,7 +494,7 @@ export const Info = z ), instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"), layout: Layout.optional().describe("@deprecated Always 
uses stretch layout."), - permission: Permission.optional(), + permission: ConfigPermission.Info.optional(), tools: z.record(z.string(), z.boolean()).optional(), enterprise: z .object({ @@ -867,7 +541,7 @@ export const Info = z }) export type Info = z.output & { - plugin_origins?: PluginOrigin[] + plugin_origins?: ConfigPlugin.Origin[] } type State = { @@ -1084,10 +758,17 @@ export const layer = Layer.effect( const gitignore = path.join(dir, ".gitignore") const hasIgnore = yield* fs.existsSafe(gitignore) if (!hasIgnore) { - yield* fs.writeFileString( - gitignore, - ["node_modules", "package.json", "package-lock.json", "bun.lock", ".gitignore"].join("\n"), - ) + yield* fs + .writeFileString( + gitignore, + ["node_modules", "package.json", "package-lock.json", "bun.lock", ".gitignore"].join("\n"), + ) + .pipe( + Effect.catchIf( + (e) => e.reason._tag === "PermissionDenied", + () => Effect.void, + ), + ) } }) @@ -1105,7 +786,11 @@ export const layer = Layer.effect( return "global" }) - const track = Effect.fnUntraced(function* (source: string, list: PluginSpec[] | undefined, kind?: PluginScope) { + const track = Effect.fnUntraced(function* ( + source: string, + list: ConfigPlugin.Spec[] | undefined, + kind?: ConfigPlugin.Scope, + ) { if (!list?.length) return const hit = kind ?? 
(yield* scope(source)) const plugins = ConfigPlugin.deduplicatePluginOrigins([ @@ -1116,7 +801,7 @@ export const layer = Layer.effect( result.plugin_origins = plugins }) - const merge = (source: string, next: Info, kind?: PluginScope) => { + const merge = (source: string, next: Info, kind?: ConfigPlugin.Scope) => { result = mergeConfigConcatArrays(result, next) return track(source, next.plugin, kind) } @@ -1183,7 +868,7 @@ export const layer = Layer.effect( } } - yield* ensureGitignore(dir).pipe(Effect.forkScoped) + yield* ensureGitignore(dir).pipe(Effect.orDie) const dep = yield* npmSvc .install(dir, { @@ -1204,8 +889,8 @@ export const layer = Layer.effect( deps.push(dep) result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) - result.agent = mergeDeep(result.agent, yield* Effect.promise(() => loadAgent(dir))) - result.agent = mergeDeep(result.agent, yield* Effect.promise(() => loadMode(dir))) + result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.load(dir))) + result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.loadMode(dir))) const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) yield* track(dir, list) } @@ -1284,9 +969,9 @@ export const layer = Layer.effect( } if (result.tools) { - const perms: Record = {} + const perms: Record = {} for (const [tool, enabled] of Object.entries(result.tools)) { - const action: PermissionAction = enabled ? "allow" : "deny" + const action: ConfigPermission.Action = enabled ? 
"allow" : "deny" if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { perms.edit = action continue diff --git a/packages/opencode/src/config/entry-name.ts b/packages/opencode/src/config/entry-name.ts new file mode 100644 index 0000000000..a553152c97 --- /dev/null +++ b/packages/opencode/src/config/entry-name.ts @@ -0,0 +1,16 @@ +import path from "path" + +function sliceAfterMatch(filePath: string, searchRoots: string[]) { + const normalizedPath = filePath.replaceAll("\\", "/") + for (const searchRoot of searchRoots) { + const index = normalizedPath.indexOf(searchRoot) + if (index === -1) continue + return normalizedPath.slice(index + searchRoot.length) + } +} + +export function configEntryNameFromPath(filePath: string, searchRoots: string[]) { + const candidate = sliceAfterMatch(filePath, searchRoots) ?? path.basename(filePath) + const ext = path.extname(candidate) + return ext.length ? candidate.slice(0, -ext.length) : candidate +} diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index 8380d370d8..f1af71867d 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -1,5 +1,9 @@ export * as Config from "./config" +export * as ConfigAgent from "./agent" export * as ConfigCommand from "./command" export { ConfigManaged } from "./managed" export * as ConfigMarkdown from "./markdown" +export * as ConfigMCP from "./mcp" +export { ConfigModelID } from "./model-id" +export * as ConfigPermission from "./permission" export * as ConfigPaths from "./paths" diff --git a/packages/opencode/src/config/mcp.ts b/packages/opencode/src/config/mcp.ts new file mode 100644 index 0000000000..fb8f8caa41 --- /dev/null +++ b/packages/opencode/src/config/mcp.ts @@ -0,0 +1,70 @@ +import z from "zod" + +export namespace ConfigMCP { + export const Local = z + .object({ + type: z.literal("local").describe("Type of MCP server connection"), + command: 
z.string().array().describe("Command and arguments to run the MCP server"), + environment: z + .record(z.string(), z.string()) + .optional() + .describe("Environment variables to set when running the MCP server"), + enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), + timeout: z + .number() + .int() + .positive() + .optional() + .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), + }) + .strict() + .meta({ + ref: "McpLocalConfig", + }) + + export const OAuth = z + .object({ + clientId: z + .string() + .optional() + .describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."), + clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), + scope: z.string().optional().describe("OAuth scopes to request during authorization"), + redirectUri: z + .string() + .optional() + .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), + }) + .strict() + .meta({ + ref: "McpOAuthConfig", + }) + export type OAuth = z.infer + + export const Remote = z + .object({ + type: z.literal("remote").describe("Type of MCP server connection"), + url: z.string().describe("URL of the remote MCP server"), + enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), + headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), + oauth: z + .union([OAuth, z.literal(false)]) + .optional() + .describe( + "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", + ), + timeout: z + .number() + .int() + .positive() + .optional() + .describe("Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified."), + }) + .strict() + .meta({ + ref: "McpRemoteConfig", + }) + + export const Info = z.discriminatedUnion("type", [Local, Remote]) + export type Info = z.infer +} diff --git a/packages/opencode/src/config/model-id.ts b/packages/opencode/src/config/model-id.ts new file mode 100644 index 0000000000..909e9aa929 --- /dev/null +++ b/packages/opencode/src/config/model-id.ts @@ -0,0 +1,3 @@ +import z from "zod" + +export const ConfigModelID = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) diff --git a/packages/opencode/src/config/permission.ts b/packages/opencode/src/config/permission.ts new file mode 100644 index 0000000000..af01f6f2a3 --- /dev/null +++ b/packages/opencode/src/config/permission.ts @@ -0,0 +1,68 @@ +export * as ConfigPermission from "./permission" +import z from "zod" + +const permissionPreprocess = (val: unknown) => { + if (typeof val === "object" && val !== null && !Array.isArray(val)) { + return { __originalKeys: globalThis.Object.keys(val), ...val } + } + return val +} + +export const Action = z.enum(["ask", "allow", "deny"]).meta({ + ref: "PermissionActionConfig", +}) +export type Action = z.infer + +export const Object = z.record(z.string(), Action).meta({ + ref: "PermissionObjectConfig", +}) +export type Object = z.infer + +export const Rule = z.union([Action, Object]).meta({ + ref: "PermissionRuleConfig", +}) +export type Rule = z.infer + +const transform = (x: unknown): Record => { + if (typeof x === "string") return { "*": x as Action } + const obj = x as { __originalKeys?: string[] } & Record + const { __originalKeys, ...rest } = obj + if (!__originalKeys) return rest as Record + const result: Record = {} + for (const key of __originalKeys) { + if (key in rest) result[key] = rest[key] as Rule + } + return result +} + +export const Info = z + .preprocess( + permissionPreprocess, + z + .object({ + __originalKeys: z.string().array().optional(), + read: Rule.optional(), 
+ edit: Rule.optional(), + glob: Rule.optional(), + grep: Rule.optional(), + list: Rule.optional(), + bash: Rule.optional(), + task: Rule.optional(), + external_directory: Rule.optional(), + todowrite: Action.optional(), + question: Action.optional(), + webfetch: Action.optional(), + websearch: Action.optional(), + codesearch: Action.optional(), + lsp: Rule.optional(), + doom_loop: Action.optional(), + skill: Rule.optional(), + }) + .catchall(Rule) + .or(Action), + ) + .transform(transform) + .meta({ + ref: "PermissionConfig", + }) +export type Info = z.infer diff --git a/packages/opencode/src/mcp/mcp.ts b/packages/opencode/src/mcp/mcp.ts index 1f1022538f..6666e0854f 100644 --- a/packages/opencode/src/mcp/mcp.ts +++ b/packages/opencode/src/mcp/mcp.ts @@ -10,6 +10,7 @@ import { ToolListChangedNotificationSchema, } from "@modelcontextprotocol/sdk/types.js" import { Config } from "../config" +import { ConfigMCP } from "../config/mcp" import { Log } from "../util" import { NamedError } from "@opencode-ai/shared/util/error" import z from "zod/v4" @@ -123,7 +124,7 @@ type PromptInfo = Awaited>["prompts"][numbe type ResourceInfo = Awaited>["resources"][number] type McpEntry = NonNullable[string] -function isMcpConfigured(entry: McpEntry): entry is Config.Mcp { +function isMcpConfigured(entry: McpEntry): entry is ConfigMCP.Info { return typeof entry === "object" && entry !== null && "type" in entry } @@ -224,7 +225,7 @@ export interface Interface { readonly tools: () => Effect.Effect> readonly prompts: () => Effect.Effect> readonly resources: () => Effect.Effect> - readonly add: (name: string, mcp: Config.Mcp) => Effect.Effect<{ status: Record | Status }> + readonly add: (name: string, mcp: ConfigMCP.Info) => Effect.Effect<{ status: Record | Status }> readonly connect: (name: string) => Effect.Effect readonly disconnect: (name: string) => Effect.Effect readonly getPrompt: ( @@ -276,7 +277,10 @@ export const layer = Layer.effect( const DISABLED_RESULT: CreateResult = { 
status: { status: "disabled" } } - const connectRemote = Effect.fn("MCP.connectRemote")(function* (key: string, mcp: Config.Mcp & { type: "remote" }) { + const connectRemote = Effect.fn("MCP.connectRemote")(function* ( + key: string, + mcp: ConfigMCP.Info & { type: "remote" }, + ) { const oauthDisabled = mcp.oauth === false const oauthConfig = typeof mcp.oauth === "object" ? mcp.oauth : undefined let authProvider: McpOAuthProvider | undefined @@ -382,7 +386,10 @@ export const layer = Layer.effect( } }) - const connectLocal = Effect.fn("MCP.connectLocal")(function* (key: string, mcp: Config.Mcp & { type: "local" }) { + const connectLocal = Effect.fn("MCP.connectLocal")(function* ( + key: string, + mcp: ConfigMCP.Info & { type: "local" }, + ) { const [cmd, ...args] = mcp.command const cwd = Instance.directory const transport = new StdioClientTransport({ @@ -414,7 +421,7 @@ export const layer = Layer.effect( ) }) - const create = Effect.fn("MCP.create")(function* (key: string, mcp: Config.Mcp) { + const create = Effect.fn("MCP.create")(function* (key: string, mcp: ConfigMCP.Info) { if (mcp.enabled === false) { log.info("mcp server disabled", { key }) return DISABLED_RESULT @@ -424,8 +431,8 @@ export const layer = Layer.effect( const { client: mcpClient, status } = mcp.type === "remote" - ? yield* connectRemote(key, mcp as Config.Mcp & { type: "remote" }) - : yield* connectLocal(key, mcp as Config.Mcp & { type: "local" }) + ? 
yield* connectRemote(key, mcp as ConfigMCP.Info & { type: "remote" }) + : yield* connectLocal(key, mcp as ConfigMCP.Info & { type: "local" }) if (!mcpClient) { return { status } satisfies CreateResult @@ -588,7 +595,7 @@ export const layer = Layer.effect( return s.clients }) - const createAndStore = Effect.fn("MCP.createAndStore")(function* (name: string, mcp: Config.Mcp) { + const createAndStore = Effect.fn("MCP.createAndStore")(function* (name: string, mcp: ConfigMCP.Info) { const s = yield* InstanceState.get(state) const result = yield* create(name, mcp) @@ -602,7 +609,7 @@ export const layer = Layer.effect( return yield* storeClient(s, name, result.mcpClient, result.defs!, mcp.timeout) }) - const add = Effect.fn("MCP.add")(function* (name: string, mcp: Config.Mcp) { + const add = Effect.fn("MCP.add")(function* (name: string, mcp: ConfigMCP.Info) { yield* createAndStore(name, mcp) const s = yield* InstanceState.get(state) return { status: s.status } diff --git a/packages/opencode/src/permission/permission.ts b/packages/opencode/src/permission/permission.ts index fe7fb85455..44dac3b1db 100644 --- a/packages/opencode/src/permission/permission.ts +++ b/packages/opencode/src/permission/permission.ts @@ -1,6 +1,6 @@ import { Bus } from "@/bus" import { BusEvent } from "@/bus/bus-event" -import { Config } from "@/config" +import { ConfigPermission } from "@/config/permission" import { InstanceState } from "@/effect" import { ProjectID } from "@/project/schema" import { MessageID, SessionID } from "@/session/schema" @@ -289,7 +289,7 @@ function expand(pattern: string): string { return pattern } -export function fromConfig(permission: Config.Permission) { +export function fromConfig(permission: ConfigPermission.Info) { const ruleset: Ruleset = [] for (const [key, value] of Object.entries(permission)) { if (typeof value === "string") { diff --git a/packages/opencode/src/server/instance/mcp.ts b/packages/opencode/src/server/instance/mcp.ts index 695008fc4e..f6e6f1eddb 
100644 --- a/packages/opencode/src/server/instance/mcp.ts +++ b/packages/opencode/src/server/instance/mcp.ts @@ -3,6 +3,7 @@ import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" import { MCP } from "../../mcp" import { Config } from "../../config" +import { ConfigMCP } from "../../config/mcp" import { AppRuntime } from "../../effect/app-runtime" import { errors } from "../error" import { lazy } from "../../util/lazy" @@ -53,7 +54,7 @@ export const McpRoutes = lazy(() => "json", z.object({ name: z.string(), - config: Config.Mcp, + config: ConfigMCP.Info, }), ), async (c) => { diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index 303fa8ba08..21d6e3e93d 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -845,6 +845,9 @@ test("installs dependencies in writable OPENCODE_CONFIG_DIR", async () => { }, }) + // TODO: this is a hack to wait for backgruounded gitignore + await new Promise((resolve) => setTimeout(resolve, 1000)) + expect(await Filesystem.exists(path.join(tmp.extra, ".gitignore"))).toBe(true) expect(await Filesystem.readText(path.join(tmp.extra, ".gitignore"))).toContain("package-lock.json") } finally { @@ -1865,7 +1868,7 @@ describe("resolvePluginSpec", () => { }) describe("deduplicatePluginOrigins", () => { - const dedupe = (plugins: Config.PluginSpec[]) => + const dedupe = (plugins: ConfigPlugin.Spec[]) => ConfigPlugin.deduplicatePluginOrigins( plugins.map((spec) => ({ spec, From 9bf2dfea353135874e2ba5d284e6eb0cd1b9e35d Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 12:47:09 -0400 Subject: [PATCH 014/201] core: refactor config schemas into separate modules for better maintainability --- packages/opencode/src/config/config.ts | 288 +-------------------- packages/opencode/src/config/index.ts | 2 + packages/opencode/src/config/keybinds.ts | 316 +++++++++++------------ 
packages/opencode/src/config/provider.ts | 120 +++++++++ packages/opencode/src/config/skills.ts | 13 + 5 files changed, 296 insertions(+), 443 deletions(-) create mode 100644 packages/opencode/src/config/provider.ts create mode 100644 packages/opencode/src/config/skills.ts diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 92d66cf2bb..7df5dbe2ff 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -40,6 +40,8 @@ import { ConfigPlugin } from "./plugin" import { ConfigManaged } from "./managed" import { ConfigCommand } from "./command" import { ConfigPermission } from "./permission" +import { ConfigProvider } from "./provider" +import { ConfigSkills } from "./skills" const log = Log.create({ service: "config" }) @@ -52,168 +54,6 @@ function mergeConfigConcatArrays(target: Info, source: Info): Info { return merged } -export const Skills = z.object({ - paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), - urls: z - .array(z.string()) - .optional() - .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), -}) -export type Skills = z.infer - -export const Keybinds = z - .object({ - leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"), - app_exit: z.string().optional().default("ctrl+c,ctrl+d,q").describe("Exit the application"), - editor_open: z.string().optional().default("e").describe("Open external editor"), - theme_list: z.string().optional().default("t").describe("List available themes"), - sidebar_toggle: z.string().optional().default("b").describe("Toggle sidebar"), - scrollbar_toggle: z.string().optional().default("none").describe("Toggle session scrollbar"), - username_toggle: z.string().optional().default("none").describe("Toggle username visibility"), - status_view: z.string().optional().default("s").describe("View status"), - session_export: 
z.string().optional().default("x").describe("Export session to editor"), - session_new: z.string().optional().default("n").describe("Create a new session"), - session_list: z.string().optional().default("l").describe("List all sessions"), - session_timeline: z.string().optional().default("g").describe("Show session timeline"), - session_fork: z.string().optional().default("none").describe("Fork session from message"), - session_rename: z.string().optional().default("ctrl+r").describe("Rename session"), - session_delete: z.string().optional().default("ctrl+d").describe("Delete session"), - stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"), - model_provider_list: z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"), - model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"), - session_share: z.string().optional().default("none").describe("Share current session"), - session_unshare: z.string().optional().default("none").describe("Unshare current session"), - session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"), - session_compact: z.string().optional().default("c").describe("Compact the session"), - messages_page_up: z.string().optional().default("pageup,ctrl+alt+b").describe("Scroll messages up by one page"), - messages_page_down: z - .string() - .optional() - .default("pagedown,ctrl+alt+f") - .describe("Scroll messages down by one page"), - messages_line_up: z.string().optional().default("ctrl+alt+y").describe("Scroll messages up by one line"), - messages_line_down: z.string().optional().default("ctrl+alt+e").describe("Scroll messages down by one line"), - messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"), - messages_half_page_down: z.string().optional().default("ctrl+alt+d").describe("Scroll messages down by half page"), - messages_first: 
z.string().optional().default("ctrl+g,home").describe("Navigate to first message"), - messages_last: z.string().optional().default("ctrl+alt+g,end").describe("Navigate to last message"), - messages_next: z.string().optional().default("none").describe("Navigate to next message"), - messages_previous: z.string().optional().default("none").describe("Navigate to previous message"), - messages_last_user: z.string().optional().default("none").describe("Navigate to last user message"), - messages_copy: z.string().optional().default("y").describe("Copy message"), - messages_undo: z.string().optional().default("u").describe("Undo message"), - messages_redo: z.string().optional().default("r").describe("Redo message"), - messages_toggle_conceal: z - .string() - .optional() - .default("h") - .describe("Toggle code block concealment in messages"), - tool_details: z.string().optional().default("none").describe("Toggle tool details visibility"), - model_list: z.string().optional().default("m").describe("List available models"), - model_cycle_recent: z.string().optional().default("f2").describe("Next recently used model"), - model_cycle_recent_reverse: z.string().optional().default("shift+f2").describe("Previous recently used model"), - model_cycle_favorite: z.string().optional().default("none").describe("Next favorite model"), - model_cycle_favorite_reverse: z.string().optional().default("none").describe("Previous favorite model"), - command_list: z.string().optional().default("ctrl+p").describe("List available commands"), - agent_list: z.string().optional().default("a").describe("List agents"), - agent_cycle: z.string().optional().default("tab").describe("Next agent"), - agent_cycle_reverse: z.string().optional().default("shift+tab").describe("Previous agent"), - variant_cycle: z.string().optional().default("ctrl+t").describe("Cycle model variants"), - variant_list: z.string().optional().default("none").describe("List model variants"), - input_clear: 
z.string().optional().default("ctrl+c").describe("Clear input field"), - input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"), - input_submit: z.string().optional().default("return").describe("Submit input"), - input_newline: z - .string() - .optional() - .default("shift+return,ctrl+return,alt+return,ctrl+j") - .describe("Insert newline in input"), - input_move_left: z.string().optional().default("left,ctrl+b").describe("Move cursor left in input"), - input_move_right: z.string().optional().default("right,ctrl+f").describe("Move cursor right in input"), - input_move_up: z.string().optional().default("up").describe("Move cursor up in input"), - input_move_down: z.string().optional().default("down").describe("Move cursor down in input"), - input_select_left: z.string().optional().default("shift+left").describe("Select left in input"), - input_select_right: z.string().optional().default("shift+right").describe("Select right in input"), - input_select_up: z.string().optional().default("shift+up").describe("Select up in input"), - input_select_down: z.string().optional().default("shift+down").describe("Select down in input"), - input_line_home: z.string().optional().default("ctrl+a").describe("Move to start of line in input"), - input_line_end: z.string().optional().default("ctrl+e").describe("Move to end of line in input"), - input_select_line_home: z.string().optional().default("ctrl+shift+a").describe("Select to start of line in input"), - input_select_line_end: z.string().optional().default("ctrl+shift+e").describe("Select to end of line in input"), - input_visual_line_home: z.string().optional().default("alt+a").describe("Move to start of visual line in input"), - input_visual_line_end: z.string().optional().default("alt+e").describe("Move to end of visual line in input"), - input_select_visual_line_home: z - .string() - .optional() - .default("alt+shift+a") - .describe("Select to start of visual line in input"), - 
input_select_visual_line_end: z - .string() - .optional() - .default("alt+shift+e") - .describe("Select to end of visual line in input"), - input_buffer_home: z.string().optional().default("home").describe("Move to start of buffer in input"), - input_buffer_end: z.string().optional().default("end").describe("Move to end of buffer in input"), - input_select_buffer_home: z - .string() - .optional() - .default("shift+home") - .describe("Select to start of buffer in input"), - input_select_buffer_end: z.string().optional().default("shift+end").describe("Select to end of buffer in input"), - input_delete_line: z.string().optional().default("ctrl+shift+d").describe("Delete line in input"), - input_delete_to_line_end: z.string().optional().default("ctrl+k").describe("Delete to end of line in input"), - input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"), - input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"), - input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"), - input_undo: z.string().optional().default("ctrl+-,super+z").describe("Undo in input"), - input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"), - input_word_forward: z - .string() - .optional() - .default("alt+f,alt+right,ctrl+right") - .describe("Move word forward in input"), - input_word_backward: z - .string() - .optional() - .default("alt+b,alt+left,ctrl+left") - .describe("Move word backward in input"), - input_select_word_forward: z - .string() - .optional() - .default("alt+shift+f,alt+shift+right") - .describe("Select word forward in input"), - input_select_word_backward: z - .string() - .optional() - .default("alt+shift+b,alt+shift+left") - .describe("Select word backward in input"), - input_delete_word_forward: z - .string() - .optional() - .default("alt+d,alt+delete,ctrl+delete") - 
.describe("Delete word forward in input"), - input_delete_word_backward: z - .string() - .optional() - .default("ctrl+w,ctrl+backspace,alt+backspace") - .describe("Delete word backward in input"), - history_previous: z.string().optional().default("up").describe("Previous history item"), - history_next: z.string().optional().default("down").describe("Next history item"), - session_child_first: z.string().optional().default("down").describe("Go to first child session"), - session_child_cycle: z.string().optional().default("right").describe("Go to next child session"), - session_child_cycle_reverse: z.string().optional().default("left").describe("Go to previous child session"), - session_parent: z.string().optional().default("up").describe("Go to parent session"), - terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"), - terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"), - tips_toggle: z.string().optional().default("h").describe("Toggle tips on home screen"), - plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"), - display_thinking: z.string().optional().default("none").describe("Toggle thinking blocks visibility"), - }) - .strict() - .meta({ - ref: "KeybindsConfig", - }) - export const Server = z .object({ port: z.number().int().positive().optional().describe("Port to listen on"), @@ -232,123 +72,6 @@ export const Layout = z.enum(["auto", "stretch"]).meta({ }) export type Layout = z.infer -export const Model = z - .object({ - id: z.string(), - name: z.string(), - family: z.string().optional(), - release_date: z.string(), - attachment: z.boolean(), - reasoning: z.boolean(), - temperature: z.boolean(), - tool_call: z.boolean(), - interleaved: z - .union([ - z.literal(true), - z - .object({ - field: z.enum(["reasoning_content", "reasoning_details"]), - }) - .strict(), - ]) - .optional(), - cost: z - .object({ - input: z.number(), - output: z.number(), 
- cache_read: z.number().optional(), - cache_write: z.number().optional(), - context_over_200k: z - .object({ - input: z.number(), - output: z.number(), - cache_read: z.number().optional(), - cache_write: z.number().optional(), - }) - .optional(), - }) - .optional(), - limit: z.object({ - context: z.number(), - input: z.number().optional(), - output: z.number(), - }), - modalities: z - .object({ - input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - }) - .optional(), - experimental: z.boolean().optional(), - status: z.enum(["alpha", "beta", "deprecated"]).optional(), - provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(), - options: z.record(z.string(), z.any()), - headers: z.record(z.string(), z.string()).optional(), - variants: z - .record( - z.string(), - z - .object({ - disabled: z.boolean().optional().describe("Disable this variant for the model"), - }) - .catchall(z.any()), - ) - .optional() - .describe("Variant-specific configuration"), - }) - .partial() - -export const Provider = z - .object({ - api: z.string().optional(), - name: z.string(), - env: z.array(z.string()), - id: z.string(), - npm: z.string().optional(), - whitelist: z.array(z.string()).optional(), - blacklist: z.array(z.string()).optional(), - options: z - .object({ - apiKey: z.string().optional(), - baseURL: z.string().optional(), - enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"), - setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"), - timeout: z - .union([ - z - .number() - .int() - .positive() - .describe( - "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). 
Set to false to disable timeout.", - ), - z.literal(false).describe("Disable timeout for this provider entirely."), - ]) - .optional() - .describe( - "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", - ), - chunkTimeout: z - .number() - .int() - .positive() - .optional() - .describe( - "Timeout in milliseconds between streamed SSE chunks for this provider. If no chunk arrives within this window, the request is aborted.", - ), - }) - .catchall(z.any()) - .optional(), - models: z.record(z.string(), Model).optional(), - }) - .partial() - .strict() - .meta({ - ref: "ProviderConfig", - }) - -export type Provider = z.infer - export const Info = z .object({ $schema: z.string().optional().describe("JSON schema reference for configuration validation"), @@ -358,7 +81,7 @@ export const Info = z .record(z.string(), ConfigCommand.Info) .optional() .describe("Command configuration, see https://opencode.ai/docs/commands"), - skills: Skills.optional().describe("Additional skill folder paths"), + skills: ConfigSkills.Info.optional().describe("Additional skill folder paths"), watcher: z .object({ ignore: z.array(z.string()).optional(), @@ -427,7 +150,10 @@ export const Info = z .catchall(ConfigAgent.Info) .optional() .describe("Agent configuration, see https://opencode.ai/docs/agents"), - provider: z.record(z.string(), Provider).optional().describe("Custom provider configurations and model overrides"), + provider: z + .record(z.string(), ConfigProvider.Info) + .optional() + .describe("Custom provider configurations and model overrides"), mcp: z .record( z.string(), diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index f1af71867d..37665d8c67 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -7,3 +7,5 @@ export * as ConfigMCP from "./mcp" export { ConfigModelID } from "./model-id" export * as ConfigPermission from 
"./permission" export * as ConfigPaths from "./paths" +export * as ConfigProvider from "./provider" +export * as ConfigSkills from "./skills" diff --git a/packages/opencode/src/config/keybinds.ts b/packages/opencode/src/config/keybinds.ts index 9b8d9e2834..cb146b7cae 100644 --- a/packages/opencode/src/config/keybinds.ts +++ b/packages/opencode/src/config/keybinds.ts @@ -1,164 +1,156 @@ +export * as ConfigKeybinds from "./keybinds" + import z from "zod" -export namespace ConfigKeybinds { - export const Keybinds = z - .object({ - leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"), - app_exit: z.string().optional().default("ctrl+c,ctrl+d,q").describe("Exit the application"), - editor_open: z.string().optional().default("e").describe("Open external editor"), - theme_list: z.string().optional().default("t").describe("List available themes"), - sidebar_toggle: z.string().optional().default("b").describe("Toggle sidebar"), - scrollbar_toggle: z.string().optional().default("none").describe("Toggle session scrollbar"), - username_toggle: z.string().optional().default("none").describe("Toggle username visibility"), - status_view: z.string().optional().default("s").describe("View status"), - session_export: z.string().optional().default("x").describe("Export session to editor"), - session_new: z.string().optional().default("n").describe("Create a new session"), - session_list: z.string().optional().default("l").describe("List all sessions"), - session_timeline: z.string().optional().default("g").describe("Show session timeline"), - session_fork: z.string().optional().default("none").describe("Fork session from message"), - session_rename: z.string().optional().default("ctrl+r").describe("Rename session"), - session_delete: z.string().optional().default("ctrl+d").describe("Delete session"), - stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"), - model_provider_list: 
z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"), - model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"), - session_share: z.string().optional().default("none").describe("Share current session"), - session_unshare: z.string().optional().default("none").describe("Unshare current session"), - session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"), - session_compact: z.string().optional().default("c").describe("Compact the session"), - messages_page_up: z.string().optional().default("pageup,ctrl+alt+b").describe("Scroll messages up by one page"), - messages_page_down: z - .string() - .optional() - .default("pagedown,ctrl+alt+f") - .describe("Scroll messages down by one page"), - messages_line_up: z.string().optional().default("ctrl+alt+y").describe("Scroll messages up by one line"), - messages_line_down: z.string().optional().default("ctrl+alt+e").describe("Scroll messages down by one line"), - messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"), - messages_half_page_down: z - .string() - .optional() - .default("ctrl+alt+d") - .describe("Scroll messages down by half page"), - messages_first: z.string().optional().default("ctrl+g,home").describe("Navigate to first message"), - messages_last: z.string().optional().default("ctrl+alt+g,end").describe("Navigate to last message"), - messages_next: z.string().optional().default("none").describe("Navigate to next message"), - messages_previous: z.string().optional().default("none").describe("Navigate to previous message"), - messages_last_user: z.string().optional().default("none").describe("Navigate to last user message"), - messages_copy: z.string().optional().default("y").describe("Copy message"), - messages_undo: z.string().optional().default("u").describe("Undo message"), - messages_redo: 
z.string().optional().default("r").describe("Redo message"), - messages_toggle_conceal: z - .string() - .optional() - .default("h") - .describe("Toggle code block concealment in messages"), - tool_details: z.string().optional().default("none").describe("Toggle tool details visibility"), - model_list: z.string().optional().default("m").describe("List available models"), - model_cycle_recent: z.string().optional().default("f2").describe("Next recently used model"), - model_cycle_recent_reverse: z.string().optional().default("shift+f2").describe("Previous recently used model"), - model_cycle_favorite: z.string().optional().default("none").describe("Next favorite model"), - model_cycle_favorite_reverse: z.string().optional().default("none").describe("Previous favorite model"), - command_list: z.string().optional().default("ctrl+p").describe("List available commands"), - agent_list: z.string().optional().default("a").describe("List agents"), - agent_cycle: z.string().optional().default("tab").describe("Next agent"), - agent_cycle_reverse: z.string().optional().default("shift+tab").describe("Previous agent"), - variant_cycle: z.string().optional().default("ctrl+t").describe("Cycle model variants"), - variant_list: z.string().optional().default("none").describe("List model variants"), - input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"), - input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"), - input_submit: z.string().optional().default("return").describe("Submit input"), - input_newline: z - .string() - .optional() - .default("shift+return,ctrl+return,alt+return,ctrl+j") - .describe("Insert newline in input"), - input_move_left: z.string().optional().default("left,ctrl+b").describe("Move cursor left in input"), - input_move_right: z.string().optional().default("right,ctrl+f").describe("Move cursor right in input"), - input_move_up: z.string().optional().default("up").describe("Move cursor up in input"), - 
input_move_down: z.string().optional().default("down").describe("Move cursor down in input"), - input_select_left: z.string().optional().default("shift+left").describe("Select left in input"), - input_select_right: z.string().optional().default("shift+right").describe("Select right in input"), - input_select_up: z.string().optional().default("shift+up").describe("Select up in input"), - input_select_down: z.string().optional().default("shift+down").describe("Select down in input"), - input_line_home: z.string().optional().default("ctrl+a").describe("Move to start of line in input"), - input_line_end: z.string().optional().default("ctrl+e").describe("Move to end of line in input"), - input_select_line_home: z - .string() - .optional() - .default("ctrl+shift+a") - .describe("Select to start of line in input"), - input_select_line_end: z.string().optional().default("ctrl+shift+e").describe("Select to end of line in input"), - input_visual_line_home: z.string().optional().default("alt+a").describe("Move to start of visual line in input"), - input_visual_line_end: z.string().optional().default("alt+e").describe("Move to end of visual line in input"), - input_select_visual_line_home: z - .string() - .optional() - .default("alt+shift+a") - .describe("Select to start of visual line in input"), - input_select_visual_line_end: z - .string() - .optional() - .default("alt+shift+e") - .describe("Select to end of visual line in input"), - input_buffer_home: z.string().optional().default("home").describe("Move to start of buffer in input"), - input_buffer_end: z.string().optional().default("end").describe("Move to end of buffer in input"), - input_select_buffer_home: z - .string() - .optional() - .default("shift+home") - .describe("Select to start of buffer in input"), - input_select_buffer_end: z.string().optional().default("shift+end").describe("Select to end of buffer in input"), - input_delete_line: z.string().optional().default("ctrl+shift+d").describe("Delete line in 
input"), - input_delete_to_line_end: z.string().optional().default("ctrl+k").describe("Delete to end of line in input"), - input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"), - input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"), - input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"), - input_undo: z.string().optional().default("ctrl+-,super+z").describe("Undo in input"), - input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"), - input_word_forward: z - .string() - .optional() - .default("alt+f,alt+right,ctrl+right") - .describe("Move word forward in input"), - input_word_backward: z - .string() - .optional() - .default("alt+b,alt+left,ctrl+left") - .describe("Move word backward in input"), - input_select_word_forward: z - .string() - .optional() - .default("alt+shift+f,alt+shift+right") - .describe("Select word forward in input"), - input_select_word_backward: z - .string() - .optional() - .default("alt+shift+b,alt+shift+left") - .describe("Select word backward in input"), - input_delete_word_forward: z - .string() - .optional() - .default("alt+d,alt+delete,ctrl+delete") - .describe("Delete word forward in input"), - input_delete_word_backward: z - .string() - .optional() - .default("ctrl+w,ctrl+backspace,alt+backspace") - .describe("Delete word backward in input"), - history_previous: z.string().optional().default("up").describe("Previous history item"), - history_next: z.string().optional().default("down").describe("Next history item"), - session_child_first: z.string().optional().default("down").describe("Go to first child session"), - session_child_cycle: z.string().optional().default("right").describe("Go to next child session"), - session_child_cycle_reverse: z.string().optional().default("left").describe("Go to previous child session"), - 
session_parent: z.string().optional().default("up").describe("Go to parent session"), - terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"), - terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"), - tips_toggle: z.string().optional().default("h").describe("Toggle tips on home screen"), - plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"), - display_thinking: z.string().optional().default("none").describe("Toggle thinking blocks visibility"), - }) - .strict() - .meta({ - ref: "KeybindsConfig", - }) -} +export const Keybinds = z + .object({ + leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"), + app_exit: z.string().optional().default("ctrl+c,ctrl+d,q").describe("Exit the application"), + editor_open: z.string().optional().default("e").describe("Open external editor"), + theme_list: z.string().optional().default("t").describe("List available themes"), + sidebar_toggle: z.string().optional().default("b").describe("Toggle sidebar"), + scrollbar_toggle: z.string().optional().default("none").describe("Toggle session scrollbar"), + username_toggle: z.string().optional().default("none").describe("Toggle username visibility"), + status_view: z.string().optional().default("s").describe("View status"), + session_export: z.string().optional().default("x").describe("Export session to editor"), + session_new: z.string().optional().default("n").describe("Create a new session"), + session_list: z.string().optional().default("l").describe("List all sessions"), + session_timeline: z.string().optional().default("g").describe("Show session timeline"), + session_fork: z.string().optional().default("none").describe("Fork session from message"), + session_rename: z.string().optional().default("ctrl+r").describe("Rename session"), + session_delete: z.string().optional().default("ctrl+d").describe("Delete session"), + 
stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"), + model_provider_list: z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"), + model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"), + session_share: z.string().optional().default("none").describe("Share current session"), + session_unshare: z.string().optional().default("none").describe("Unshare current session"), + session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"), + session_compact: z.string().optional().default("c").describe("Compact the session"), + messages_page_up: z.string().optional().default("pageup,ctrl+alt+b").describe("Scroll messages up by one page"), + messages_page_down: z + .string() + .optional() + .default("pagedown,ctrl+alt+f") + .describe("Scroll messages down by one page"), + messages_line_up: z.string().optional().default("ctrl+alt+y").describe("Scroll messages up by one line"), + messages_line_down: z.string().optional().default("ctrl+alt+e").describe("Scroll messages down by one line"), + messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"), + messages_half_page_down: z.string().optional().default("ctrl+alt+d").describe("Scroll messages down by half page"), + messages_first: z.string().optional().default("ctrl+g,home").describe("Navigate to first message"), + messages_last: z.string().optional().default("ctrl+alt+g,end").describe("Navigate to last message"), + messages_next: z.string().optional().default("none").describe("Navigate to next message"), + messages_previous: z.string().optional().default("none").describe("Navigate to previous message"), + messages_last_user: z.string().optional().default("none").describe("Navigate to last user message"), + messages_copy: z.string().optional().default("y").describe("Copy message"), + messages_undo: 
z.string().optional().default("u").describe("Undo message"), + messages_redo: z.string().optional().default("r").describe("Redo message"), + messages_toggle_conceal: z + .string() + .optional() + .default("h") + .describe("Toggle code block concealment in messages"), + tool_details: z.string().optional().default("none").describe("Toggle tool details visibility"), + model_list: z.string().optional().default("m").describe("List available models"), + model_cycle_recent: z.string().optional().default("f2").describe("Next recently used model"), + model_cycle_recent_reverse: z.string().optional().default("shift+f2").describe("Previous recently used model"), + model_cycle_favorite: z.string().optional().default("none").describe("Next favorite model"), + model_cycle_favorite_reverse: z.string().optional().default("none").describe("Previous favorite model"), + command_list: z.string().optional().default("ctrl+p").describe("List available commands"), + agent_list: z.string().optional().default("a").describe("List agents"), + agent_cycle: z.string().optional().default("tab").describe("Next agent"), + agent_cycle_reverse: z.string().optional().default("shift+tab").describe("Previous agent"), + variant_cycle: z.string().optional().default("ctrl+t").describe("Cycle model variants"), + variant_list: z.string().optional().default("none").describe("List model variants"), + input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"), + input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"), + input_submit: z.string().optional().default("return").describe("Submit input"), + input_newline: z + .string() + .optional() + .default("shift+return,ctrl+return,alt+return,ctrl+j") + .describe("Insert newline in input"), + input_move_left: z.string().optional().default("left,ctrl+b").describe("Move cursor left in input"), + input_move_right: z.string().optional().default("right,ctrl+f").describe("Move cursor right in input"), + 
input_move_up: z.string().optional().default("up").describe("Move cursor up in input"), + input_move_down: z.string().optional().default("down").describe("Move cursor down in input"), + input_select_left: z.string().optional().default("shift+left").describe("Select left in input"), + input_select_right: z.string().optional().default("shift+right").describe("Select right in input"), + input_select_up: z.string().optional().default("shift+up").describe("Select up in input"), + input_select_down: z.string().optional().default("shift+down").describe("Select down in input"), + input_line_home: z.string().optional().default("ctrl+a").describe("Move to start of line in input"), + input_line_end: z.string().optional().default("ctrl+e").describe("Move to end of line in input"), + input_select_line_home: z.string().optional().default("ctrl+shift+a").describe("Select to start of line in input"), + input_select_line_end: z.string().optional().default("ctrl+shift+e").describe("Select to end of line in input"), + input_visual_line_home: z.string().optional().default("alt+a").describe("Move to start of visual line in input"), + input_visual_line_end: z.string().optional().default("alt+e").describe("Move to end of visual line in input"), + input_select_visual_line_home: z + .string() + .optional() + .default("alt+shift+a") + .describe("Select to start of visual line in input"), + input_select_visual_line_end: z + .string() + .optional() + .default("alt+shift+e") + .describe("Select to end of visual line in input"), + input_buffer_home: z.string().optional().default("home").describe("Move to start of buffer in input"), + input_buffer_end: z.string().optional().default("end").describe("Move to end of buffer in input"), + input_select_buffer_home: z + .string() + .optional() + .default("shift+home") + .describe("Select to start of buffer in input"), + input_select_buffer_end: z.string().optional().default("shift+end").describe("Select to end of buffer in input"), + input_delete_line: 
z.string().optional().default("ctrl+shift+d").describe("Delete line in input"), + input_delete_to_line_end: z.string().optional().default("ctrl+k").describe("Delete to end of line in input"), + input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"), + input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"), + input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"), + input_undo: z.string().optional().default("ctrl+-,super+z").describe("Undo in input"), + input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"), + input_word_forward: z + .string() + .optional() + .default("alt+f,alt+right,ctrl+right") + .describe("Move word forward in input"), + input_word_backward: z + .string() + .optional() + .default("alt+b,alt+left,ctrl+left") + .describe("Move word backward in input"), + input_select_word_forward: z + .string() + .optional() + .default("alt+shift+f,alt+shift+right") + .describe("Select word forward in input"), + input_select_word_backward: z + .string() + .optional() + .default("alt+shift+b,alt+shift+left") + .describe("Select word backward in input"), + input_delete_word_forward: z + .string() + .optional() + .default("alt+d,alt+delete,ctrl+delete") + .describe("Delete word forward in input"), + input_delete_word_backward: z + .string() + .optional() + .default("ctrl+w,ctrl+backspace,alt+backspace") + .describe("Delete word backward in input"), + history_previous: z.string().optional().default("up").describe("Previous history item"), + history_next: z.string().optional().default("down").describe("Next history item"), + session_child_first: z.string().optional().default("down").describe("Go to first child session"), + session_child_cycle: z.string().optional().default("right").describe("Go to next child session"), + session_child_cycle_reverse: 
z.string().optional().default("left").describe("Go to previous child session"), + session_parent: z.string().optional().default("up").describe("Go to parent session"), + terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"), + terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"), + tips_toggle: z.string().optional().default("h").describe("Toggle tips on home screen"), + plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"), + display_thinking: z.string().optional().default("none").describe("Toggle thinking blocks visibility"), + }) + .strict() + .meta({ + ref: "KeybindsConfig", + }) diff --git a/packages/opencode/src/config/provider.ts b/packages/opencode/src/config/provider.ts new file mode 100644 index 0000000000..09efedf497 --- /dev/null +++ b/packages/opencode/src/config/provider.ts @@ -0,0 +1,120 @@ +import z from "zod" + +export namespace ConfigProvider { + export const Model = z + .object({ + id: z.string(), + name: z.string(), + family: z.string().optional(), + release_date: z.string(), + attachment: z.boolean(), + reasoning: z.boolean(), + temperature: z.boolean(), + tool_call: z.boolean(), + interleaved: z + .union([ + z.literal(true), + z + .object({ + field: z.enum(["reasoning_content", "reasoning_details"]), + }) + .strict(), + ]) + .optional(), + cost: z + .object({ + input: z.number(), + output: z.number(), + cache_read: z.number().optional(), + cache_write: z.number().optional(), + context_over_200k: z + .object({ + input: z.number(), + output: z.number(), + cache_read: z.number().optional(), + cache_write: z.number().optional(), + }) + .optional(), + }) + .optional(), + limit: z.object({ + context: z.number(), + input: z.number().optional(), + output: z.number(), + }), + modalities: z + .object({ + input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), + output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), + 
}) + .optional(), + experimental: z.boolean().optional(), + status: z.enum(["alpha", "beta", "deprecated"]).optional(), + provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(), + options: z.record(z.string(), z.any()), + headers: z.record(z.string(), z.string()).optional(), + variants: z + .record( + z.string(), + z + .object({ + disabled: z.boolean().optional().describe("Disable this variant for the model"), + }) + .catchall(z.any()), + ) + .optional() + .describe("Variant-specific configuration"), + }) + .partial() + + export const Info = z + .object({ + api: z.string().optional(), + name: z.string(), + env: z.array(z.string()), + id: z.string(), + npm: z.string().optional(), + whitelist: z.array(z.string()).optional(), + blacklist: z.array(z.string()).optional(), + options: z + .object({ + apiKey: z.string().optional(), + baseURL: z.string().optional(), + enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"), + setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"), + timeout: z + .union([ + z + .number() + .int() + .positive() + .describe( + "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", + ), + z.literal(false).describe("Disable timeout for this provider entirely."), + ]) + .optional() + .describe( + "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", + ), + chunkTimeout: z + .number() + .int() + .positive() + .optional() + .describe( + "Timeout in milliseconds between streamed SSE chunks for this provider. 
If no chunk arrives within this window, the request is aborted.", + ), + }) + .catchall(z.any()) + .optional(), + models: z.record(z.string(), Model).optional(), + }) + .partial() + .strict() + .meta({ + ref: "ProviderConfig", + }) + + export type Info = z.infer +} diff --git a/packages/opencode/src/config/skills.ts b/packages/opencode/src/config/skills.ts new file mode 100644 index 0000000000..bdc63f5d6a --- /dev/null +++ b/packages/opencode/src/config/skills.ts @@ -0,0 +1,13 @@ +import z from "zod" + +export namespace ConfigSkills { + export const Info = z.object({ + paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), + urls: z + .array(z.string()) + .optional() + .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), + }) + + export type Info = z.infer +} From 8b1f0e2d90c03fc5de6077f868af1548485cc466 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 12:55:40 -0400 Subject: [PATCH 015/201] core: add documentation comments to plugin configuration merge logic Adds explanatory comments to config.ts and plugin.ts clarifying: - How plugin specs are stored and normalized during config loading - Why plugin_origins tracks provenance for location-sensitive decisions - Why path-like specs are resolved early to prevent reinterpretation during merges - How plugin deduplication works while keeping origin metadata for writes and diagnostics --- packages/opencode/src/config/config.ts | 23 ++++++++++++++++++----- packages/opencode/src/config/plugin.ts | 11 ++++++++++- 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 7df5dbe2ff..ed3be88082 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -93,6 +93,7 @@ export const Info = z .describe( "Enable or disable snapshot tracking. 
When false, filesystem snapshots are not recorded and undoing or reverting will not undo/redo file changes. Defaults to true.", ), + // User-facing plugin config is stored as Specs; provenance gets attached later while configs are merged. plugin: ConfigPlugin.Spec.array().optional(), share: z .enum(["manual", "auto", "disabled"]) @@ -267,6 +268,8 @@ export const Info = z }) export type Info = z.output & { + // plugin_origins is derived state, not a persisted config field. It keeps each winning plugin spec together + // with the file and scope it came from so later runtime code can make location-sensitive decisions. plugin_origins?: ConfigPlugin.Origin[] } @@ -420,6 +423,8 @@ export const layer = Layer.effect( if (data.plugin && isFile) { const list = data.plugin for (let i = 0; i < list.length; i++) { + // Normalize path-like plugin specs while we still know which config file declared them. + // This prevents `./plugin.ts` from being reinterpreted relative to some later merge location. list[i] = yield* Effect.promise(() => ConfigPlugin.resolvePluginSpec(list[i], options.path)) } } @@ -505,20 +510,26 @@ export const layer = Layer.effect( const consoleManagedProviders = new Set() let activeOrgName: string | undefined - const scope = Effect.fnUntraced(function* (source: string) { + const pluginScopeForSource = Effect.fnUntraced(function* (source: string) { if (source.startsWith("http://") || source.startsWith("https://")) return "global" if (source === "OPENCODE_CONFIG_CONTENT") return "local" if (yield* InstanceRef.use((ctx) => Effect.succeed(Instance.containsPath(source, ctx)))) return "local" return "global" }) - const track = Effect.fnUntraced(function* ( + const mergePluginOrigins = Effect.fnUntraced(function* ( source: string, + // mergePluginOrigins receives raw Specs from one config source, before provenance for this merge step + // is attached. 
list: ConfigPlugin.Spec[] | undefined, + // Scope can be inferred from the source path, but some callers already know whether the config should + // behave as global or local and can pass that explicitly. kind?: ConfigPlugin.Scope, ) { if (!list?.length) return - const hit = kind ?? (yield* scope(source)) + const hit = kind ?? (yield* pluginScopeForSource(source)) + // Merge newly seen plugin origins with previously collected ones, then dedupe by plugin identity while + // keeping the winning source/scope metadata for downstream installs, writes, and diagnostics. const plugins = ConfigPlugin.deduplicatePluginOrigins([ ...(result.plugin_origins ?? []), ...list.map((spec) => ({ spec, source, scope: hit })), @@ -529,7 +540,7 @@ export const layer = Layer.effect( const merge = (source: string, next: Info, kind?: ConfigPlugin.Scope) => { result = mergeConfigConcatArrays(result, next) - return track(source, next.plugin, kind) + return mergePluginOrigins(source, next.plugin, kind) } for (const [key, value] of Object.entries(auth)) { @@ -617,8 +628,10 @@ export const layer = Layer.effect( result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.load(dir))) result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.loadMode(dir))) + // Auto-discovered plugins under `.opencode/plugin(s)` are already local files, so ConfigPlugin.load + // returns normalized Specs and we only need to attach origin metadata here. 
const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) - yield* track(dir, list) + yield* mergePluginOrigins(dir, list) } if (process.env.OPENCODE_CONFIG_CONTENT) { diff --git a/packages/opencode/src/config/plugin.ts b/packages/opencode/src/config/plugin.ts index d13a9d5adc..3a10c0a715 100644 --- a/packages/opencode/src/config/plugin.ts +++ b/packages/opencode/src/config/plugin.ts @@ -8,11 +8,16 @@ export namespace ConfigPlugin { const Options = z.record(z.string(), z.unknown()) export type Options = z.infer + // Spec is the user-config value: either just a plugin identifier, or the identifier plus inline options. + // It answers "what should we load?" but says nothing about where that value came from. export const Spec = z.union([z.string(), z.tuple([z.string(), Options])]) export type Spec = z.infer export type Scope = "global" | "local" + // Origin keeps the original config provenance attached to a spec. + // After multiple config files are merged, callers still need to know which file declared the plugin + // and whether it should behave like a global or project-local plugin. export type Origin = { spec: Spec source: string @@ -33,7 +38,7 @@ export namespace ConfigPlugin { return plugins } - export function pluginSpecifier(plugin: ConfigPlugin.Spec): string { + export function pluginSpecifier(plugin: Spec): string { return Array.isArray(plugin) ? plugin[0] : plugin } @@ -41,6 +46,8 @@ export namespace ConfigPlugin { return Array.isArray(plugin) ? plugin[1] : undefined } + // Path-like specs are resolved relative to the config file that declared them so merges later on do not + // accidentally reinterpret `./plugin.ts` relative to some other directory. 
export async function resolvePluginSpec(plugin: Spec, configFilepath: string): Promise { const spec = pluginSpecifier(plugin) if (!isPathPluginSpec(spec)) return plugin @@ -58,6 +65,8 @@ export namespace ConfigPlugin { return resolved } + // Dedupe on the load identity (package name for npm specs, exact file URL for local specs), but keep the + // full Origin so downstream code still knows which config file won and where follow-up writes should go. export function deduplicatePluginOrigins(plugins: Origin[]): Origin[] { const seen = new Set() const list: Origin[] = [] From c5deeee8c7b2e5b3927d28958d2ceb9ebddeb256 Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 12:19:01 -0500 Subject: [PATCH 016/201] fix: ensure azure has store = true by default (#22764) --- packages/opencode/src/provider/transform.ts | 4 ++++ .../opencode/test/provider/transform.test.ts | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 92862b0ca6..a294c568d7 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -793,6 +793,10 @@ export function options(input: { result["store"] = false } + if (input.model.api.npm === "@ai-sdk/azure") { + result["store"] = true + } + if (input.model.api.npm === "@openrouter/ai-sdk-provider") { result["usage"] = { include: true, diff --git a/packages/opencode/test/provider/transform.test.ts b/packages/opencode/test/provider/transform.test.ts index d53ce38b16..f92b448cf3 100644 --- a/packages/opencode/test/provider/transform.test.ts +++ b/packages/opencode/test/provider/transform.test.ts @@ -100,6 +100,24 @@ describe("ProviderTransform.options - setCacheKey", () => { }) expect(result.store).toBe(false) }) + + test("should set store=true for azure provider by default", () => { + const azureModel = { + ...mockModel, + providerID: 
"azure", + api: { + id: "gpt-4", + url: "https://azure.com", + npm: "@ai-sdk/azure", + }, + } + const result = ProviderTransform.options({ + model: azureModel, + sessionID, + providerOptions: {}, + }) + expect(result.store).toBe(true) + }) }) describe("ProviderTransform.options - zai/zhipuai thinking", () => { From 03e20e6ac125b6a792c567451c2f904c6ed6941c Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 13:29:00 -0400 Subject: [PATCH 017/201] core: modularize config parsing to improve maintainability Extract error handling, parsing logic, and variable substitution into dedicated modules. This reduces duplication between tui.json and opencode.json parsing and makes the config system easier to extend for future config formats. --- .../opencode/src/cli/cmd/tui/config/tui.ts | 45 +++--- packages/opencode/src/config/agent.ts | 2 +- packages/opencode/src/config/command.ts | 2 +- packages/opencode/src/config/config.ts | 146 +++++++----------- packages/opencode/src/config/error.ts | 21 +++ packages/opencode/src/config/index.ts | 3 + packages/opencode/src/config/managed.ts | 16 +- packages/opencode/src/config/parse.ts | 80 ++++++++++ packages/opencode/src/config/paths.ts | 123 +-------------- packages/opencode/src/config/variable.ts | 84 ++++++++++ packages/opencode/test/config/config.test.ts | 86 ++++++----- 11 files changed, 335 insertions(+), 273 deletions(-) create mode 100644 packages/opencode/src/config/error.ts create mode 100644 packages/opencode/src/config/parse.ts create mode 100644 packages/opencode/src/config/variable.ts diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index 6f2c161fb5..e8eb9ff5d3 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -1,6 +1,7 @@ import z from "zod" import { mergeDeep, unique } from "remeda" import { Context, Effect, Fiber, Layer } from "effect" +import { ConfigParse } from "@/config/parse" 
import * as ConfigPaths from "@/config/paths" import { migrateTuiConfig } from "./tui-migrate" import { TuiInfo } from "./tui-schema" @@ -68,6 +69,14 @@ export namespace TuiConfig { } } + async function resolvePlugins(config: Info, configFilepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) + } + return config + } + async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { const data = await loadFile(file) acc.result = mergeDeep(acc.result, data) @@ -183,26 +192,22 @@ export namespace TuiConfig { } async function load(text: string, configFilepath: string): Promise { - const raw = await ConfigPaths.parseText(text, configFilepath, "empty") - if (!isRecord(raw)) return {} + return ConfigParse.load(Info, text, { + type: "path", + path: configFilepath, + missing: "empty", + normalize: (data) => { + if (!isRecord(data)) return {} - // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json - // (mirroring the old opencode.json shape) still get their settings applied. - const normalized = normalize(raw) - - const parsed = Info.safeParse(normalized) - if (!parsed.success) { - log.warn("invalid tui config", { path: configFilepath, issues: parsed.error.issues }) - return {} - } - - const data = parsed.data - if (data.plugin) { - for (let i = 0; i < data.plugin.length; i++) { - data.plugin[i] = await ConfigPlugin.resolvePluginSpec(data.plugin[i], configFilepath) - } - } - - return data + // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json + // (mirroring the old opencode.json shape) still get their settings applied. 
+ return normalize(data) + }, + }) + .then((data) => resolvePlugins(data, configFilepath)) + .catch((error) => { + log.warn("invalid tui config", { path: configFilepath, error }) + return {} + }) } } diff --git a/packages/opencode/src/config/agent.ts b/packages/opencode/src/config/agent.ts index 3819368e82..f754f009d4 100644 --- a/packages/opencode/src/config/agent.ts +++ b/packages/opencode/src/config/agent.ts @@ -6,9 +6,9 @@ import { NamedError } from "@opencode-ai/shared/util/error" import { Glob } from "@opencode-ai/shared/util/glob" import { Bus } from "@/bus" import { configEntryNameFromPath } from "./entry-name" +import { InvalidError } from "./error" import * as ConfigMarkdown from "./markdown" import { ConfigModelID } from "./model-id" -import { InvalidError } from "./paths" import { ConfigPermission } from "./permission" const log = Log.create({ service: "config" }) diff --git a/packages/opencode/src/config/command.ts b/packages/opencode/src/config/command.ts index 5606bdd4c7..9799250567 100644 --- a/packages/opencode/src/config/command.ts +++ b/packages/opencode/src/config/command.ts @@ -6,9 +6,9 @@ import { NamedError } from "@opencode-ai/shared/util/error" import { Glob } from "@opencode-ai/shared/util/glob" import { Bus } from "@/bus" import { configEntryNameFromPath } from "./entry-name" +import { InvalidError } from "./error" import * as ConfigMarkdown from "./markdown" import { ConfigModelID } from "./model-id" -import { InvalidError } from "./paths" const log = Log.create({ service: "config" }) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index ed3be88082..6b6d74ed82 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -10,13 +10,7 @@ import { NamedError } from "@opencode-ai/shared/util/error" import { Flag } from "../flag/flag" import { Auth } from "../auth" import { Env } from "../env" -import { - type ParseError as JsoncParseError, - applyEdits, - 
modify, - parse as parseJsonc, - printParseErrorCode, -} from "jsonc-parser" +import { applyEdits, modify } from "jsonc-parser" import { Instance, type InstanceContext } from "../project/instance" import * as LSPServer from "../lsp/server" import { InstallationLocal, InstallationVersion } from "@/installation/version" @@ -25,6 +19,7 @@ import { GlobalBus } from "@/bus/global" import { Event } from "../server/event" import { Account } from "@/account" import { isRecord } from "@/util/record" +import { InvalidError, JsonError } from "./error" import * as ConfigPaths from "./paths" import type { ConsoleState } from "./console-state" import { AppFileSystem } from "@opencode-ai/shared/filesystem" @@ -39,6 +34,7 @@ import { ConfigModelID } from "./model-id" import { ConfigPlugin } from "./plugin" import { ConfigManaged } from "./managed" import { ConfigCommand } from "./command" +import { ConfigParse } from "./parse" import { ConfigPermission } from "./permission" import { ConfigProvider } from "./provider" import { ConfigSkills } from "./skills" @@ -54,6 +50,28 @@ function mergeConfigConcatArrays(target: Info, source: Info): Info { return merged } +function normalizeLoadedConfig(data: unknown, source: string) { + if (!isRecord(data)) return data + const copy = { ...data } + const hadLegacy = "theme" in copy || "keybinds" in copy || "tui" in copy + if (!hadLegacy) return copy + delete copy.theme + delete copy.keybinds + delete copy.tui + log.warn("tui keys in opencode config are deprecated; move them to tui.json", { path: source }) + return copy +} + +async function resolveLoadedPlugins(config: T, filepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + // Normalize path-like plugin specs while we still know which config file declared them. + // This prevents `./plugin.ts` from being reinterpreted relative to some later merge location. 
+ config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], filepath) + } + return config +} + export const Server = z .object({ port: z.number().int().positive().optional().describe("Port to listen on"), @@ -325,42 +343,6 @@ function writable(info: Info) { return next } -export function parseConfig(text: string, filepath: string): Info { - const errors: JsoncParseError[] = [] - const data = parseJsonc(text, errors, { allowTrailingComma: true }) - if (errors.length) { - const lines = text.split("\n") - const errorDetails = errors - .map((e) => { - const beforeOffset = text.substring(0, e.offset).split("\n") - const line = beforeOffset.length - const column = beforeOffset[beforeOffset.length - 1].length + 1 - const problemLine = lines[line - 1] - - const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}` - if (!problemLine) return error - - return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^` - }) - .join("\n") - - throw new JsonError({ - path: filepath, - message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${errorDetails}\n--- End ---`, - }) - } - - const parsed = Info.safeParse(data) - if (parsed.success) return parsed.data - - throw new InvalidError({ - path: filepath, - issues: parsed.error.issues, - }) -} - -export const { JsonError, InvalidError } = ConfigPaths - export const ConfigDirectoryTypoError = NamedError.create( "ConfigDirectoryTypoError", z.object({ @@ -393,48 +375,31 @@ export const layer = Layer.effect( text: string, options: { path: string } | { dir: string; source: string }, ) { - const original = text - const source = "path" in options ? options.path : options.source - const isFile = "path" in options - const data = yield* Effect.promise(() => - ConfigPaths.parseText(text, "path" in options ? 
options.path : { source: options.source, dir: options.dir }), - ) - - const normalized = (() => { - if (!data || typeof data !== "object" || Array.isArray(data)) return data - const copy = { ...(data as Record) } - const hadLegacy = "theme" in copy || "keybinds" in copy || "tui" in copy - if (!hadLegacy) return copy - delete copy.theme - delete copy.keybinds - delete copy.tui - log.warn("tui keys in opencode config are deprecated; move them to tui.json", { path: source }) - return copy - })() - - const parsed = Info.safeParse(normalized) - if (parsed.success) { - if (!parsed.data.$schema && isFile) { - parsed.data.$schema = "https://opencode.ai/config.json" - const updated = original.replace(/^\s*\{/, '{\n "$schema": "https://opencode.ai/config.json",') - yield* fs.writeFileString(options.path, updated).pipe(Effect.catch(() => Effect.void)) - } - const data = parsed.data - if (data.plugin && isFile) { - const list = data.plugin - for (let i = 0; i < list.length; i++) { - // Normalize path-like plugin specs while we still know which config file declared them. - // This prevents `./plugin.ts` from being reinterpreted relative to some later merge location. 
- list[i] = yield* Effect.promise(() => ConfigPlugin.resolvePluginSpec(list[i], options.path)) - } - } - return data + if (!("path" in options)) { + return yield* Effect.promise(() => + ConfigParse.load(Info, text, { + type: "virtual", + dir: options.dir, + source: options.source, + normalize: normalizeLoadedConfig, + }), + ) } - throw new InvalidError({ - path: source, - issues: parsed.error.issues, - }) + const data = yield* Effect.promise(() => + ConfigParse.load(Info, text, { + type: "path", + path: options.path, + normalize: normalizeLoadedConfig, + }), + ) + yield* Effect.promise(() => resolveLoadedPlugins(data, options.path)) + if (!data.$schema) { + data.$schema = "https://opencode.ai/config.json" + const updated = text.replace(/^\s*\{/, '{\n "$schema": "https://opencode.ai/config.json",') + yield* fs.writeFileString(options.path, updated).pipe(Effect.catch(() => Effect.void)) + } + return data }) const loadFile = Effect.fnUntraced(function* (filepath: string) { @@ -692,7 +657,16 @@ export const layer = Layer.effect( } // macOS managed preferences (.mobileconfig deployed via MDM) override everything - result = mergeConfigConcatArrays(result, yield* Effect.promise(() => ConfigManaged.readManagedPreferences())) + const managed = yield* Effect.promise(() => ConfigManaged.readManagedPreferences()) + if (managed) { + result = mergeConfigConcatArrays( + result, + yield* loadConfig(managed.text, { + dir: path.dirname(managed.source), + source: managed.source, + }), + ) + } for (const [name, mode] of Object.entries(result.mode ?? {})) { result.agent = mergeDeep(result.agent ?? 
{}, { @@ -803,13 +777,13 @@ export const layer = Layer.effect( let next: Info if (!file.endsWith(".jsonc")) { - const existing = parseConfig(before, file) + const existing = ConfigParse.parse(Info, before, file) const merged = mergeDeep(writable(existing), input) yield* fs.writeFileString(file, JSON.stringify(merged, null, 2)).pipe(Effect.orDie) next = merged } else { const updated = patchJsonc(before, input) - next = parseConfig(updated, file) + next = ConfigParse.parse(Info, updated, file) yield* fs.writeFileString(file, updated).pipe(Effect.orDie) } diff --git a/packages/opencode/src/config/error.ts b/packages/opencode/src/config/error.ts new file mode 100644 index 0000000000..06f549fd85 --- /dev/null +++ b/packages/opencode/src/config/error.ts @@ -0,0 +1,21 @@ +export * as ConfigError from "./error" + +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" + +export const JsonError = NamedError.create( + "ConfigJsonError", + z.object({ + path: z.string(), + message: z.string().optional(), + }), +) + +export const InvalidError = NamedError.create( + "ConfigInvalidError", + z.object({ + path: z.string(), + issues: z.custom().optional(), + message: z.string().optional(), + }), +) diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index 37665d8c67..c4a1c608b1 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -1,10 +1,13 @@ export * as Config from "./config" export * as ConfigAgent from "./agent" export * as ConfigCommand from "./command" +export * as ConfigError from "./error" +export * as ConfigVariable from "./variable" export { ConfigManaged } from "./managed" export * as ConfigMarkdown from "./markdown" export * as ConfigMCP from "./mcp" export { ConfigModelID } from "./model-id" +export * as ConfigParse from "./parse" export * as ConfigPermission from "./permission" export * as ConfigPaths from "./paths" export * as ConfigProvider from "./provider" 
diff --git a/packages/opencode/src/config/managed.ts b/packages/opencode/src/config/managed.ts index 61c535185f..19b048ffce 100644 --- a/packages/opencode/src/config/managed.ts +++ b/packages/opencode/src/config/managed.ts @@ -1,7 +1,6 @@ import { existsSync } from "fs" import os from "os" import path from "path" -import { type Info, parseConfig } from "./config" import { Log, Process } from "../util" const log = Log.create({ service: "config" }) @@ -33,16 +32,16 @@ function managedConfigDir() { return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir() } -function parseManagedPlist(json: string, source: string): Info { +function parseManagedPlist(json: string): string { const raw = JSON.parse(json) for (const key of Object.keys(raw)) { if (PLIST_META.has(key)) delete raw[key] } - return parseConfig(JSON.stringify(raw), source) + return JSON.stringify(raw) } -async function readManagedPreferences(): Promise { - if (process.platform !== "darwin") return {} +async function readManagedPreferences() { + if (process.platform !== "darwin") return const user = os.userInfo().username const paths = [ @@ -58,10 +57,13 @@ async function readManagedPreferences(): Promise { log.warn("failed to convert managed preferences plist", { path: plist }) continue } - return parseManagedPlist(result.stdout.toString(), `mobileconfig:${plist}`) + return { + source: `mobileconfig:${plist}`, + text: parseManagedPlist(result.stdout.toString()), + } } - return {} + return } export const ConfigManaged = { diff --git a/packages/opencode/src/config/parse.ts b/packages/opencode/src/config/parse.ts new file mode 100644 index 0000000000..65cc483859 --- /dev/null +++ b/packages/opencode/src/config/parse.ts @@ -0,0 +1,80 @@ +export * as ConfigParse from "./parse" + +import { type ParseError as JsoncParseError, parse as parseJsonc, printParseErrorCode } from "jsonc-parser" +import z from "zod" +import { ConfigVariable } from "./variable" +import { InvalidError, JsonError } from 
"./error" + +type Schema = z.ZodType +type VariableMode = "error" | "empty" + +export type LoadOptions = + | { + type: "path" + path: string + missing?: VariableMode + normalize?: (data: unknown, source: string) => unknown + } + | { + type: "virtual" + dir: string + source: string + missing?: VariableMode + normalize?: (data: unknown, source: string) => unknown + } + +function issues(text: string, errors: JsoncParseError[]) { + const lines = text.split("\n") + return errors + .map((e) => { + const beforeOffset = text.substring(0, e.offset).split("\n") + const line = beforeOffset.length + const column = beforeOffset[beforeOffset.length - 1].length + 1 + const problemLine = lines[line - 1] + + const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}` + if (!problemLine) return error + + return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^` + }) + .join("\n") +} + +export function parse(schema: Schema, text: string, filepath: string): T { + const errors: JsoncParseError[] = [] + const data = parseJsonc(text, errors, { allowTrailingComma: true }) + if (errors.length) { + throw new JsonError({ + path: filepath, + message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${issues(text, errors)}\n--- End ---`, + }) + } + + const parsed = schema.safeParse(data) + if (parsed.success) return parsed.data + + throw new InvalidError({ + path: filepath, + issues: parsed.error.issues, + }) +} + +export async function load(schema: Schema, text: string, options: LoadOptions): Promise { + const source = options.type === "path" ? options.path : options.source + const expanded = await ConfigVariable.substitute( + text, + options.type === "path" ? { type: "path", path: options.path } : options, + options.missing, + ) + const data = parse(z.unknown(), expanded, source) + const normalized = options.normalize ? 
options.normalize(data, source) : data + const parsed = schema.safeParse(normalized) + if (!parsed.success) { + throw new InvalidError({ + path: source, + issues: parsed.error.issues, + }) + } + + return parsed.data +} diff --git a/packages/opencode/src/config/paths.ts b/packages/opencode/src/config/paths.ts index fabd3fd5f8..faf585d9b2 100644 --- a/packages/opencode/src/config/paths.ts +++ b/packages/opencode/src/config/paths.ts @@ -1,12 +1,9 @@ import path from "path" -import os from "os" -import z from "zod" -import { type ParseError as JsoncParseError, parse as parseJsonc, printParseErrorCode } from "jsonc-parser" -import { NamedError } from "@opencode-ai/shared/util/error" import { Filesystem } from "@/util" import { Flag } from "@/flag/flag" import { Global } from "@/global" import { unique } from "remeda" +import { JsonError } from "./error" export async function projectFiles(name: string, directory: string, worktree?: string) { return Filesystem.findUp([`${name}.json`, `${name}.jsonc`], directory, worktree, { rootFirst: true }) @@ -39,23 +36,6 @@ export function fileInDirectory(dir: string, name: string) { return [path.join(dir, `${name}.json`), path.join(dir, `${name}.jsonc`)] } -export const JsonError = NamedError.create( - "ConfigJsonError", - z.object({ - path: z.string(), - message: z.string().optional(), - }), -) - -export const InvalidError = NamedError.create( - "ConfigInvalidError", - z.object({ - path: z.string(), - issues: z.custom().optional(), - message: z.string().optional(), - }), -) - /** Read a config file, returning undefined for missing files and throwing JsonError for other failures. 
*/ export async function readFile(filepath: string) { return Filesystem.readText(filepath).catch((err: NodeJS.ErrnoException) => { @@ -63,104 +43,3 @@ export async function readFile(filepath: string) { throw new JsonError({ path: filepath }, { cause: err }) }) } - -type ParseSource = string | { source: string; dir: string } - -function source(input: ParseSource) { - return typeof input === "string" ? input : input.source -} - -function dir(input: ParseSource) { - return typeof input === "string" ? path.dirname(input) : input.dir -} - -/** Apply {env:VAR} and {file:path} substitutions to config text. */ -async function substitute(text: string, input: ParseSource, missing: "error" | "empty" = "error") { - text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => { - return process.env[varName] || "" - }) - - const fileMatches = Array.from(text.matchAll(/\{file:[^}]+\}/g)) - if (!fileMatches.length) return text - - const configDir = dir(input) - const configSource = source(input) - let out = "" - let cursor = 0 - - for (const match of fileMatches) { - const token = match[0] - const index = match.index! - out += text.slice(cursor, index) - - const lineStart = text.lastIndexOf("\n", index - 1) + 1 - const prefix = text.slice(lineStart, index).trimStart() - if (prefix.startsWith("//")) { - out += token - cursor = index + token.length - continue - } - - let filePath = token.replace(/^\{file:/, "").replace(/\}$/, "") - if (filePath.startsWith("~/")) { - filePath = path.join(os.homedir(), filePath.slice(2)) - } - - const resolvedPath = path.isAbsolute(filePath) ? 
filePath : path.resolve(configDir, filePath) - const fileContent = ( - await Filesystem.readText(resolvedPath).catch((error: NodeJS.ErrnoException) => { - if (missing === "empty") return "" - - const errMsg = `bad file reference: "${token}"` - if (error.code === "ENOENT") { - throw new InvalidError( - { - path: configSource, - message: errMsg + ` ${resolvedPath} does not exist`, - }, - { cause: error }, - ) - } - throw new InvalidError({ path: configSource, message: errMsg }, { cause: error }) - }) - ).trim() - - out += JSON.stringify(fileContent).slice(1, -1) - cursor = index + token.length - } - - out += text.slice(cursor) - return out -} - -/** Substitute and parse JSONC text, throwing JsonError on syntax errors. */ -export async function parseText(text: string, input: ParseSource, missing: "error" | "empty" = "error") { - const configSource = source(input) - text = await substitute(text, input, missing) - - const errors: JsoncParseError[] = [] - const data = parseJsonc(text, errors, { allowTrailingComma: true }) - if (errors.length) { - const lines = text.split("\n") - const errorDetails = errors - .map((e) => { - const beforeOffset = text.substring(0, e.offset).split("\n") - const line = beforeOffset.length - const column = beforeOffset[beforeOffset.length - 1].length + 1 - const problemLine = lines[line - 1] - - const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}` - if (!problemLine) return error - - return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^` - }) - .join("\n") - - throw new JsonError({ - path: configSource, - message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${errorDetails}\n--- End ---`, - }) - } - - return data -} diff --git a/packages/opencode/src/config/variable.ts b/packages/opencode/src/config/variable.ts new file mode 100644 index 0000000000..e016e33a21 --- /dev/null +++ b/packages/opencode/src/config/variable.ts @@ -0,0 +1,84 @@ +export * as ConfigVariable from "./variable" + 
+import path from "path" +import os from "os" +import { Filesystem } from "@/util" +import { InvalidError } from "./error" + +type ParseSource = + | { + type: "path" + path: string + } + | { + type: "virtual" + source: string + dir: string + } + +function source(input: ParseSource) { + return input.type === "path" ? input.path : input.source +} + +function dir(input: ParseSource) { + return input.type === "path" ? path.dirname(input.path) : input.dir +} + +/** Apply {env:VAR} and {file:path} substitutions to config text. */ +export async function substitute(text: string, input: ParseSource, missing: "error" | "empty" = "error") { + text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => { + return process.env[varName] || "" + }) + + const fileMatches = Array.from(text.matchAll(/\{file:[^}]+\}/g)) + if (!fileMatches.length) return text + + const configDir = dir(input) + const configSource = source(input) + let out = "" + let cursor = 0 + + for (const match of fileMatches) { + const token = match[0] + const index = match.index! + out += text.slice(cursor, index) + + const lineStart = text.lastIndexOf("\n", index - 1) + 1 + const prefix = text.slice(lineStart, index).trimStart() + if (prefix.startsWith("//")) { + out += token + cursor = index + token.length + continue + } + + let filePath = token.replace(/^\{file:/, "").replace(/\}$/, "") + if (filePath.startsWith("~/")) { + filePath = path.join(os.homedir(), filePath.slice(2)) + } + + const resolvedPath = path.isAbsolute(filePath) ? 
filePath : path.resolve(configDir, filePath) + const fileContent = ( + await Filesystem.readText(resolvedPath).catch((error: NodeJS.ErrnoException) => { + if (missing === "empty") return "" + + const errMsg = `bad file reference: "${token}"` + if (error.code === "ENOENT") { + throw new InvalidError( + { + path: configSource, + message: errMsg + ` ${resolvedPath} does not exist`, + }, + { cause: error }, + ) + } + throw new InvalidError({ path: configSource, message: errMsg }, { cause: error }) + }) + ).trim() + + out += JSON.stringify(fileContent).slice(1, -1) + cursor = index + token.length + } + + out += text.slice(cursor) + return out +} diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index 21d6e3e93d..c41f395e51 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -2,6 +2,7 @@ import { test, expect, describe, mock, afterEach, beforeEach } from "bun:test" import { Effect, Layer, Option } from "effect" import { NodeFileSystem, NodePath } from "@effect/platform-node" import { Config, ConfigManaged } from "../../src/config" +import { ConfigParse } from "../../src/config/parse" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" import { Instance } from "../../src/project/instance" @@ -2211,17 +2212,20 @@ describe("OPENCODE_CONFIG_CONTENT token substitution", () => { // parseManagedPlist unit tests — pure function, no OS interaction test("parseManagedPlist strips MDM metadata keys", async () => { - const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ - PayloadDisplayName: "OpenCode Managed", - PayloadIdentifier: "ai.opencode.managed.test", - PayloadType: "ai.opencode.managed", - PayloadUUID: "AAAA-BBBB-CCCC", - PayloadVersion: 1, - _manualProfile: true, - share: "disabled", - model: "mdm/model", - }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist( + JSON.stringify({ + 
PayloadDisplayName: "OpenCode Managed", + PayloadIdentifier: "ai.opencode.managed.test", + PayloadType: "ai.opencode.managed", + PayloadUUID: "AAAA-BBBB-CCCC", + PayloadVersion: 1, + _manualProfile: true, + share: "disabled", + model: "mdm/model", + }), + ), "test:mobileconfig", ) expect(config.share).toBe("disabled") @@ -2233,12 +2237,15 @@ test("parseManagedPlist strips MDM metadata keys", async () => { }) test("parseManagedPlist parses server settings", async () => { - const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ - $schema: "https://opencode.ai/config.json", - server: { hostname: "127.0.0.1", mdns: false }, - autoupdate: true, - }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist( + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + server: { hostname: "127.0.0.1", mdns: false }, + autoupdate: true, + }), + ), "test:mobileconfig", ) expect(config.server?.hostname).toBe("127.0.0.1") @@ -2247,18 +2254,21 @@ test("parseManagedPlist parses server settings", async () => { }) test("parseManagedPlist parses permission rules", async () => { - const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ - $schema: "https://opencode.ai/config.json", - permission: { - "*": "ask", - bash: { "*": "ask", "rm -rf *": "deny", "curl *": "deny" }, - grep: "allow", - glob: "allow", - webfetch: "ask", - "~/.ssh/*": "deny", - }, - }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist( + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + permission: { + "*": "ask", + bash: { "*": "ask", "rm -rf *": "deny", "curl *": "deny" }, + grep: "allow", + glob: "allow", + webfetch: "ask", + "~/.ssh/*": "deny", + }, + }), + ), "test:mobileconfig", ) expect(config.permission?.["*"]).toBe("ask") @@ -2271,19 +2281,23 @@ test("parseManagedPlist parses permission rules", async () => { }) test("parseManagedPlist parses enabled_providers", async () => { 
- const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ - $schema: "https://opencode.ai/config.json", - enabled_providers: ["anthropic", "google"], - }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist( + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + enabled_providers: ["anthropic", "google"], + }), + ), "test:mobileconfig", ) expect(config.enabled_providers).toEqual(["anthropic", "google"]) }) test("parseManagedPlist handles empty config", async () => { - const config = await ConfigManaged.parseManagedPlist( - JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + const config = ConfigParse.parse( + Config.Info, + await ConfigManaged.parseManagedPlist(JSON.stringify({ $schema: "https://opencode.ai/config.json" })), "test:mobileconfig", ) expect(config.$schema).toBe("https://opencode.ai/config.json") From cefa7f04c66fce1fb586736222a2d6b8a5609ded Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 13:32:22 -0400 Subject: [PATCH 018/201] core: reorganize ConfigPaths module export for cleaner dependency management --- packages/opencode/src/config/config.ts | 2 +- packages/opencode/src/config/paths.ts | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 6b6d74ed82..a738ebf130 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -20,7 +20,6 @@ import { Event } from "../server/event" import { Account } from "@/account" import { isRecord } from "@/util/record" import { InvalidError, JsonError } from "./error" -import * as ConfigPaths from "./paths" import type { ConsoleState } from "./console-state" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { InstanceState } from "@/effect" @@ -38,6 +37,7 @@ import { ConfigParse } from "./parse" import { ConfigPermission } from "./permission" import { 
ConfigProvider } from "./provider" import { ConfigSkills } from "./skills" +import { ConfigPaths } from "./paths" const log = Log.create({ service: "config" }) diff --git a/packages/opencode/src/config/paths.ts b/packages/opencode/src/config/paths.ts index faf585d9b2..dcf0c940f2 100644 --- a/packages/opencode/src/config/paths.ts +++ b/packages/opencode/src/config/paths.ts @@ -1,3 +1,5 @@ +export * as ConfigPaths from "./paths" + import path from "path" import { Filesystem } from "@/util" import { Flag } from "@/flag/flag" From bee5f919fc5ae915aad0a4b5138e1e7f8274b9de Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 13:32:22 -0400 Subject: [PATCH 019/201] core: reorganize ConfigPaths module export for cleaner dependency management --- packages/opencode/src/config/managed.ts | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/packages/opencode/src/config/managed.ts b/packages/opencode/src/config/managed.ts index 19b048ffce..a53fb70af3 100644 --- a/packages/opencode/src/config/managed.ts +++ b/packages/opencode/src/config/managed.ts @@ -1,7 +1,10 @@ +export * as ConfigManaged from "./managed" + import { existsSync } from "fs" import os from "os" import path from "path" import { Log, Process } from "../util" +import { warn } from "console" const log = Log.create({ service: "config" }) @@ -28,11 +31,11 @@ function systemManagedConfigDir(): string { } } -function managedConfigDir() { +export function managedConfigDir() { return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir() } -function parseManagedPlist(json: string): string { +export function parseManagedPlist(json: string): string { const raw = JSON.parse(json) for (const key of Object.keys(raw)) { if (PLIST_META.has(key)) delete raw[key] @@ -40,7 +43,7 @@ function parseManagedPlist(json: string): string { return JSON.stringify(raw) } -async function readManagedPreferences() { +export async function readManagedPreferences() { if (process.platform !== 
"darwin") return const user = os.userInfo().username @@ -65,9 +68,3 @@ async function readManagedPreferences() { return } - -export const ConfigManaged = { - managedConfigDir, - parseManagedPlist, - readManagedPreferences, -} From c60862fc9e0a3378fe7be7c5079545c0d5c8d405 Mon Sep 17 00:00:00 2001 From: Thomas Butler <58192340+trbutler4@users.noreply.github.com> Date: Thu, 16 Apr 2026 13:21:04 -0500 Subject: [PATCH 020/201] fix: add missing glob dependency (#22851) --- bun.lock | 1 + nix/hashes.json | 2 +- packages/shared/package.json | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/bun.lock b/bun.lock index 644de37f2e..3d82a64af9 100644 --- a/bun.lock +++ b/bun.lock @@ -516,6 +516,7 @@ "@effect/platform-node": "catalog:", "@npmcli/arborist": "catalog:", "effect": "catalog:", + "glob": "13.0.5", "mime-types": "3.0.2", "minimatch": "10.2.5", "semver": "catalog:", diff --git a/nix/hashes.json b/nix/hashes.json index e9795e5c37..c844031be0 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,6 +1,6 @@ { "nodeModules": { - "x86_64-linux": "sha256-NJAK+cPjwn+2ojDLyyDmBQyx2pD+rILetp7VCylgjek=", + "x86_64-linux": "sha256-b9tsgqQDXd2uM/j+rZnvkoXbXzB4iYCEasXsy9kgIl4=", "aarch64-linux": "sha256-q8NTtFQJoyM7TTvErGA6RtmUscxoZKD/mj9N6S5YhkA=", "aarch64-darwin": "sha256-/ccoSZNLef6j9j14HzpVqhKCR+czM3mhPKPH51mHO24=", "x86_64-darwin": "sha256-6Pd10sMHL/5ZoWNvGPwPn4/AIs1TKjt/3gFyrVpBaE0=" diff --git a/packages/shared/package.json b/packages/shared/package.json index bdfca12a93..4d10a30a36 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -26,6 +26,7 @@ "@effect/platform-node": "catalog:", "@npmcli/arborist": "catalog:", "effect": "catalog:", + "glob": "13.0.5", "mime-types": "3.0.2", "minimatch": "10.2.5", "semver": "catalog:", From 143817d44e1ededeccbaff3e61690dfea4fe4109 Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 13:28:20 -0500 Subject: [PATCH 021/201] chore: 
bump ai sdk deps for opus 4.7 (#22869) --- bun.lock | 30 ++++++++++++++---------------- packages/opencode/package.json | 8 ++++---- 2 files changed, 18 insertions(+), 20 deletions(-) diff --git a/bun.lock b/bun.lock index 3d82a64af9..7225c0a9a0 100644 --- a/bun.lock +++ b/bun.lock @@ -146,7 +146,7 @@ "name": "@opencode-ai/console-function", "version": "1.4.6", "dependencies": { - "@ai-sdk/anthropic": "3.0.64", + "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/openai": "3.0.48", "@ai-sdk/openai-compatible": "2.0.37", "@hono/zod-validator": "catalog:", @@ -322,15 +322,15 @@ "@actions/github": "6.0.1", "@agentclientprotocol/sdk": "0.16.1", "@ai-sdk/alibaba": "1.0.17", - "@ai-sdk/amazon-bedrock": "4.0.93", - "@ai-sdk/anthropic": "3.0.67", + "@ai-sdk/amazon-bedrock": "4.0.94", + "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/azure": "3.0.49", "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", - "@ai-sdk/gateway": "3.0.97", + "@ai-sdk/gateway": "3.0.102", "@ai-sdk/google": "3.0.63", - "@ai-sdk/google-vertex": "4.0.109", + "@ai-sdk/google-vertex": "4.0.111", "@ai-sdk/groq": "3.0.31", "@ai-sdk/mistral": "3.0.27", "@ai-sdk/openai": "3.0.53", @@ -738,9 +738,9 @@ "@ai-sdk/alibaba": ["@ai-sdk/alibaba@1.0.17", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZbE+U5bWz2JBc5DERLowx5+TKbjGBE93LqKZAWvuEn7HOSQMraxFMZuc0ST335QZJAyfBOzh7m1mPQ+y7EaaoA=="], - "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.93", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hcXDU8QDwpAzLVTuY932TQVlIij9+iaVTxc5mPGY6yb//JMAAC5hMVhg93IrxlrxWLvMgjezNgoZGwquR+SGnw=="], + "@ai-sdk/amazon-bedrock": 
["@ai-sdk/amazon-bedrock@4.0.94", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XKE7wAjXejsIfNQvn3onvGUByhGHVM6W+xlL+1DAQLmjEb+ue4sOJIRehJ96rEvTXVVHRVyA6bSXx7ayxXfn5A=="], - "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-rwLi/Rsuj2pYniQXIrvClHvXDzgM4UQHHnvHTWEF14efnlKclG/1ghpNC+adsRujAbCTr6gRsSbDE2vEqriV7g=="], + "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], "@ai-sdk/azure": ["@ai-sdk/azure@3.0.49", "", { "dependencies": { "@ai-sdk/openai": "3.0.48", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-wskgAL+OmrHG7by/iWIxEBQCEdc1mDudha/UZav46i0auzdFfsDB/k2rXZaC4/3nWSgMZkxr0W3ncyouEGX/eg=="], @@ -758,11 +758,11 @@ "@ai-sdk/fireworks": ["@ai-sdk/fireworks@2.0.46", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XRKR0zgRyegdmtK5CDUEjlyRp0Fo+XVCdoG+301U1SGtgRIAYG3ObVtgzVJBVpJdHFSLHuYeLTnNiQoUxD7+FQ=="], - "@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.97", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ERHmVGX30YKTwxObuHQzNqoOf8Nb5WwYMDBn34e3TGGVn0vLEXwMimo7uRVTbhhi4gfu9WtwYTE4x1+csZok1w=="], + 
"@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.102", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-GrwDpaYJiVafrsA1MTbZtXPcQUI67g5AXiJo7Y1F8b+w+SiYHLk3ZIn1YmpQVoVAh2bjvxjj+Vo0AvfskuGH4g=="], "@ai-sdk/google": ["@ai-sdk/google@3.0.63", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-RfOZWVMYSPu2sPRfGajrauWAZ9BSaRopSn+AszkKWQ1MFj8nhaXvCqRHB5pBQUaHTfZKagvOmMpNfa/s3gPLgQ=="], - "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.109", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/google": "3.0.63", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-QzQ+DgOoSYlkU4mK0H+iaCaW1bl5zOimH9X2E2oylcVyUtAdCuduQ959Uw1ygW3l09J2K/ceEDtK8OUPHyOA7g=="], + "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.111", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/google": "3.0.64", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-5gILpAWWI5idfal/MfoH3tlQeSnOJ9jfL8JB8m2fdc3ue/9xoXkYDpXpDL/nyJImFjMCi6eR0Fpvlo/IKEWDIg=="], "@ai-sdk/groq": ["@ai-sdk/groq@3.0.31", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XbbugpnFmXGu2TlXiq8KUJskP6/VVbuFcnFIGDzDIB/Chg6XHsNnqrTF80Zxkh0Pd3+NvbM+2Uqrtsndk6bDAg=="], @@ -5152,9 +5152,9 @@ "@ai-sdk/alibaba/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": 
"^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], - "@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="], + "@ai-sdk/amazon-bedrock/@smithy/eventstream-codec": ["@smithy/eventstream-codec@4.2.13", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@smithy/types": "^4.14.0", "@smithy/util-hex-encoding": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-vYahwBAtRaAcFbOmE9aLr12z7RiHYDSLcnogSdxfm7kKfsNa3wH+NU5r7vTeB5rKvLsWyPjVX8iH94brP7umiQ=="], - "@ai-sdk/anthropic/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], + "@ai-sdk/amazon-bedrock/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], "@ai-sdk/azure/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], @@ -5166,7 +5166,7 @@ "@ai-sdk/fireworks/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } 
}, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], - "@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="], + "@ai-sdk/google-vertex/@ai-sdk/google": ["@ai-sdk/google@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-CbR82EgGPNrj/6q0HtclwuCqe0/pDShyv3nWDP/A9DroujzWXnLMlUJVrgPOsg4b40zQCwwVs2XSKCxvt/4QaA=="], "@ai-sdk/google-vertex/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], @@ -5684,6 +5684,8 @@ "ai/@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.95", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZmUNNbZl3V42xwQzPaNUi+s8eqR2lnrxf0bvB6YbLXpLjHYv0k2Y78t12cNOfY0bxGeuVVTLyk856uLuQIuXEQ=="], + "ai-gateway-provider/@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.93", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hcXDU8QDwpAzLVTuY932TQVlIij9+iaVTxc5mPGY6yb//JMAAC5hMVhg93IrxlrxWLvMgjezNgoZGwquR+SGnw=="], + "ai-gateway-provider/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", 
"@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="], "ai-gateway-provider/@ai-sdk/google": ["@ai-sdk/google@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-uz8tIlkDgQJG9Js2Wh9JHzd4kI9+hYJqf9XXJLx60vyN5mRIqhr49iwR5zGP5Gl8odp2PeR3Gh2k+5bh3Z1HHw=="], @@ -5900,8 +5902,6 @@ "nypm/tinyexec": ["tinyexec@1.1.1", "", {}, "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg=="], - "opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.67", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-FFX4P5Fd6lcQJc2OLngZQkbbJHa0IDDZi087Edb8qRZx6h90krtM61ArbMUL8us/7ZUwojCXnyJ/wQ2Eflx2jQ=="], - "opencode/@ai-sdk/openai": ["@ai-sdk/openai@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-Wld+Rbc05KaUn08uBt06eEuwcgalcIFtIl32Yp+GxuZXUQwOb6YeAuq+C6da4ch6BurFoqEaLemJVwjBb7x+PQ=="], "opencode/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], @@ -6094,8 +6094,6 @@ "@actions/github/@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="], - "@ai-sdk/anthropic/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, 
"sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], - "@ai-sdk/azure/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], "@ai-sdk/cerebras/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 7ed33ebe09..3a98d0eb93 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -79,15 +79,15 @@ "@actions/github": "6.0.1", "@agentclientprotocol/sdk": "0.16.1", "@ai-sdk/alibaba": "1.0.17", - "@ai-sdk/amazon-bedrock": "4.0.93", - "@ai-sdk/anthropic": "3.0.67", + "@ai-sdk/amazon-bedrock": "4.0.94", + "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/azure": "3.0.49", "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", - "@ai-sdk/gateway": "3.0.97", + "@ai-sdk/gateway": "3.0.102", "@ai-sdk/google": "3.0.63", - "@ai-sdk/google-vertex": "4.0.109", + "@ai-sdk/google-vertex": "4.0.111", "@ai-sdk/groq": "3.0.31", "@ai-sdk/mistral": "3.0.27", "@ai-sdk/openai": "3.0.53", From 370770122c355f50cfc98193a25b1150e0288f31 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 18:29:57 +0000 Subject: [PATCH 022/201] chore: generate --- bun.lock | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/bun.lock b/bun.lock index 7225c0a9a0..e236f3f491 100644 --- a/bun.lock +++ b/bun.lock @@ -146,7 +146,7 @@ "name": "@opencode-ai/console-function", "version": "1.4.6", "dependencies": { - "@ai-sdk/anthropic": "3.0.70", + "@ai-sdk/anthropic": "3.0.64", "@ai-sdk/openai": "3.0.48", "@ai-sdk/openai-compatible": "2.0.37", "@hono/zod-validator": "catalog:", @@ -740,7 +740,7 @@ "@ai-sdk/amazon-bedrock": 
["@ai-sdk/amazon-bedrock@4.0.94", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XKE7wAjXejsIfNQvn3onvGUByhGHVM6W+xlL+1DAQLmjEb+ue4sOJIRehJ96rEvTXVVHRVyA6bSXx7ayxXfn5A=="], - "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-rwLi/Rsuj2pYniQXIrvClHvXDzgM4UQHHnvHTWEF14efnlKclG/1ghpNC+adsRujAbCTr6gRsSbDE2vEqriV7g=="], "@ai-sdk/azure": ["@ai-sdk/azure@3.0.49", "", { "dependencies": { "@ai-sdk/openai": "3.0.48", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-wskgAL+OmrHG7by/iWIxEBQCEdc1mDudha/UZav46i0auzdFfsDB/k2rXZaC4/3nWSgMZkxr0W3ncyouEGX/eg=="], @@ -5152,10 +5152,14 @@ "@ai-sdk/alibaba/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], + "@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + 
"@ai-sdk/amazon-bedrock/@smithy/eventstream-codec": ["@smithy/eventstream-codec@4.2.13", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@smithy/types": "^4.14.0", "@smithy/util-hex-encoding": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-vYahwBAtRaAcFbOmE9aLr12z7RiHYDSLcnogSdxfm7kKfsNa3wH+NU5r7vTeB5rKvLsWyPjVX8iH94brP7umiQ=="], "@ai-sdk/amazon-bedrock/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], + "@ai-sdk/anthropic/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], + "@ai-sdk/azure/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], "@ai-sdk/cerebras/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="], @@ -5166,6 +5170,8 @@ "@ai-sdk/fireworks/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], + "@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "@ai-sdk/google-vertex/@ai-sdk/google": ["@ai-sdk/google@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-CbR82EgGPNrj/6q0HtclwuCqe0/pDShyv3nWDP/A9DroujzWXnLMlUJVrgPOsg4b40zQCwwVs2XSKCxvt/4QaA=="], "@ai-sdk/google-vertex/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], @@ -5902,6 +5908,8 @@ "nypm/tinyexec": ["tinyexec@1.1.1", "", {}, "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg=="], + "opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "opencode/@ai-sdk/openai": ["@ai-sdk/openai@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-Wld+Rbc05KaUn08uBt06eEuwcgalcIFtIl32Yp+GxuZXUQwOb6YeAuq+C6da4ch6BurFoqEaLemJVwjBb7x+PQ=="], "opencode/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": 
"4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], @@ -6094,6 +6102,8 @@ "@actions/github/@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="], + "@ai-sdk/anthropic/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], + "@ai-sdk/azure/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], "@ai-sdk/cerebras/@ai-sdk/provider-utils/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], From 9afbdc102c2e7e449c98a08082b382cd2696715e Mon Sep 17 00:00:00 2001 From: Dax Date: Thu, 16 Apr 2026 14:45:17 -0400 Subject: [PATCH 023/201] fix(test): make plugin loader theme source path separator-safe (#22870) --- packages/opencode/test/cli/tui/plugin-loader.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/test/cli/tui/plugin-loader.test.ts b/packages/opencode/test/cli/tui/plugin-loader.test.ts index dc64fb3365..f5b04ff434 100644 --- a/packages/opencode/test/cli/tui/plugin-loader.test.ts +++ b/packages/opencode/test/cli/tui/plugin-loader.test.ts @@ -331,7 +331,7 @@ export default { const localOpts = { fn_marker: tmp.extra.fnMarker, marker: tmp.extra.localMarker, - source: tmp.extra.localDest.replace(".opencode/themes/", ""), + source: path.join(tmp.path, tmp.extra.localThemeFile), dest: tmp.extra.localDest, theme_path: `./${tmp.extra.localThemeFile}`, theme_name: 
tmp.extra.localThemeName, From bf4c1078290a5bf7e580141b17e7b37d905de311 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 15:07:02 -0400 Subject: [PATCH 024/201] fix: remove 7 unnecessary `as any` casts in opencode core (#22840) --- .../src/cli/cmd/tui/routes/session/index.tsx | 2 +- packages/opencode/src/storage/json-migration.ts | 12 ++++++------ packages/opencode/src/util/defer.ts | 6 ++---- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx index 1a64c21d00..5b4308d593 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx @@ -597,7 +597,7 @@ export function Session() { { title: conceal() ? "Disable code concealment" : "Enable code concealment", value: "session.toggle.conceal", - keybind: "messages_toggle_conceal" as any, + keybind: "messages_toggle_conceal", category: "Session", onSelect: (dialog) => { setConceal((prev) => !prev) diff --git a/packages/opencode/src/storage/json-migration.ts b/packages/opencode/src/storage/json-migration.ts index 4803d452fe..12133ce435 100644 --- a/packages/opencode/src/storage/json-migration.ts +++ b/packages/opencode/src/storage/json-migration.ts @@ -95,7 +95,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase[0], label: string) { if (values.length === 0) return 0 try { db.insert(table).values(values).onConflictDoNothing().run() @@ -152,7 +152,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase() - const projectValues = [] as any[] + const projectValues: unknown[] = [] for (let i = 0; i < projectFiles.length; i += batchSize) { const end = Math.min(i + batchSize, projectFiles.length) const batch = await read(projectFiles, i, end) @@ -186,7 +186,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase path.basename(path.dirname(file))) const 
sessionIds = new Set() - const sessionValues = [] as any[] + const sessionValues: unknown[] = [] for (let i = 0; i < sessionFiles.length; i += batchSize) { const end = Math.min(i + batchSize, sessionFiles.length) const batch = await read(sessionFiles, i, end) @@ -314,7 +314,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase | NodeSQLiteDatabase path.basename(file, ".json")) - const permValues = [] as any[] + const permValues: unknown[] = [] for (let i = 0; i < permFiles.length; i += batchSize) { const end = Math.min(i + batchSize, permFiles.length) const batch = await read(permFiles, i, end) @@ -376,7 +376,7 @@ export async function run(db: SQLiteBunDatabase | NodeSQLiteDatabase path.basename(file, ".json")) - const shareValues = [] as any[] + const shareValues: unknown[] = [] for (let i = 0; i < shareFiles.length; i += batchSize) { const end = Math.min(i + batchSize, shareFiles.length) const batch = await read(shareFiles, i, end) diff --git a/packages/opencode/src/util/defer.ts b/packages/opencode/src/util/defer.ts index d1c9edc66a..33eb4d74d2 100644 --- a/packages/opencode/src/util/defer.ts +++ b/packages/opencode/src/util/defer.ts @@ -1,6 +1,4 @@ -export function defer void | Promise>( - fn: T, -): T extends () => Promise ? 
{ [Symbol.asyncDispose]: () => Promise } : { [Symbol.dispose]: () => void } { +export function defer(fn: () => void | Promise): AsyncDisposable & Disposable { return { [Symbol.dispose]() { void fn() @@ -8,5 +6,5 @@ export function defer void | Promise>( [Symbol.asyncDispose]() { return Promise.resolve(fn()) }, - } as any + } } From 47e0e2342cbd9fd335864a403dbac9b3ec9a19af Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 14:12:43 -0500 Subject: [PATCH 025/201] tweak: set display 'summarized' by default for opus 4.7 through messages api (#22873) --- packages/opencode/src/provider/transform.ts | 3 +++ packages/opencode/test/provider/transform.test.ts | 2 ++ 2 files changed, 5 insertions(+) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index a294c568d7..66e87fb3b8 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -594,6 +594,9 @@ export function variants(model: Provider.Model): Record { expect(result.xhigh).toEqual({ thinking: { type: "adaptive", + display: "summarized", }, effort: "xhigh", }) expect(result.max).toEqual({ thinking: { type: "adaptive", + display: "summarized", }, effort: "max", }) From 7c1b30291c8f8bde6e3ca7b257259384728a1eef Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 19:19:52 +0000 Subject: [PATCH 026/201] chore: update nix node_modules hashes --- nix/hashes.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nix/hashes.json b/nix/hashes.json index c844031be0..239b72fd70 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,8 +1,8 @@ { "nodeModules": { - "x86_64-linux": "sha256-b9tsgqQDXd2uM/j+rZnvkoXbXzB4iYCEasXsy9kgIl4=", - "aarch64-linux": "sha256-q8NTtFQJoyM7TTvErGA6RtmUscxoZKD/mj9N6S5YhkA=", - "aarch64-darwin": "sha256-/ccoSZNLef6j9j14HzpVqhKCR+czM3mhPKPH51mHO24=", - "x86_64-darwin":
"sha256-6Pd10sMHL/5ZoWNvGPwPn4/AIs1TKjt/3gFyrVpBaE0=" + "x86_64-linux": "sha256-tYAb5Mo39UW1VEejYuo0jW0jzH2OyY/HrqgiZL3rmjY=", + "aarch64-linux": "sha256-3zGKV5UwokXpmY0nT1mry3IhNf2EQYLKT7ac+/trmQA=", + "aarch64-darwin": "sha256-oKXAut7eu/eW5a43OT8+aFuH1F1tuIldTs+7PUXSCv4=", + "x86_64-darwin": "sha256-Az+9X1scOEhw3aOO8laKJoZjiuz3qlLTIk1bx25P/z4=" } } From 219b473e660994ac69a0c5d753ac65da951f3bf8 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 15:24:24 -0400 Subject: [PATCH 027/201] refactor: unwrap BashArity namespace to flat exports + self-reexport (#22874) --- .../specs/effect/namespace-treeshake.md | 609 +++++------------- packages/opencode/src/permission/arity.ts | 296 ++++----- 2 files changed, 309 insertions(+), 596 deletions(-) diff --git a/packages/opencode/specs/effect/namespace-treeshake.md b/packages/opencode/specs/effect/namespace-treeshake.md index 5d1fbd07e5..ac4d3987de 100644 --- a/packages/opencode/specs/effect/namespace-treeshake.md +++ b/packages/opencode/specs/effect/namespace-treeshake.md @@ -1,499 +1,212 @@ -# Namespace → flat export migration +# Namespace → self-reexport migration -Migrate `export namespace` to the `export * as` / flat-export pattern used by -effect-smol. Primary goal: tree-shakeability. Secondary: consistency with Effect -conventions, LLM-friendliness for future migrations. - -## What changes and what doesn't - -The **consumer API stays the same**. You still write `Provider.ModelNotFoundError`, -`Config.JsonError`, `Bus.publish`, etc. The namespace ergonomics are preserved. - -What changes is **how** the namespace is constructed — the TypeScript -`export namespace` keyword is replaced by `export * as` in a barrel file. This -is a mechanical change: unwrap the namespace body into flat exports, add a -one-line barrel. Consumers that import `{ Provider }` don't notice. - -Import paths actually get **nicer**. Today most consumers import from the -explicit file (`"../provider/provider"`). 
After the migration, each module has a -barrel `index.ts`, so imports become `"../provider"` or `"@/provider"`: +Migrate every `export namespace Foo { ... }` to flat top-level exports plus a +single self-reexport line at the bottom of the same file: ```ts -// BEFORE — points at the file directly -import { Provider } from "../provider/provider" - -// AFTER — resolves to provider/index.ts, same Provider namespace -import { Provider } from "../provider" +export * as Foo from "./foo" ``` -## Why this matters right now +No barrel `index.ts` files. No cross-directory indirection. Consumers keep the +exact same `import { Foo } from "../foo/foo"` ergonomics. -The CLI binary startup time (TOI) is too slow. Profiling shows we're loading -massive dependency graphs that are never actually used at runtime — because -bundlers cannot tree-shake TypeScript `export namespace` bodies. +## Why this pattern -### The problem in one sentence - -`cli/error.ts` needs 6 lightweight `.isInstance()` checks on error classes, but -importing `{ Provider }` from `provider.ts` forces the bundler to include **all -20+ `@ai-sdk/*` packages**, `@aws-sdk/credential-providers`, -`google-auth-library`, and every other top-level import in that 1709-line file. - -### Why `export namespace` defeats tree-shaking - -TypeScript compiles `export namespace Foo { ... }` to an IIFE: - -```js -// TypeScript output -export var Provider; -(function (Provider) { - Provider.ModelNotFoundError = NamedError.create(...) - // ... 1600 more lines of assignments ... -})(Provider || (Provider = {})) -``` - -This is **opaque to static analysis**. The bundler sees one big function call -whose return value populates an object. It cannot determine which properties are -used downstream, so it keeps everything. Every `import` statement at the top of -`provider.ts` executes unconditionally — that's 20+ AI SDK packages loaded into -memory just so the CLI can check `Provider.ModelNotFoundError.isInstance(x)`. 
- -### What `export * as` does differently - -`export * as Provider from "./provider"` compiles to a static re-export. The -bundler knows the exact shape of `Provider` at compile time — it's the named -export list of `./provider.ts`. When it sees `Provider.ModelNotFoundError` used -but `Provider.layer` unused, it can trace that `ModelNotFoundError` doesn't -reference `createAnthropic` or any AI SDK import, and drop them. The namespace -object still exists at runtime — same API — but the bundler can see inside it. - -### Concrete impact - -The worst import chain in the codebase: +We tested three options against Bun, esbuild, Rollup (what Vite uses under the +hood), Bun's runtime, and Node's native TypeScript runner. ``` -src/index.ts (entry point) + heavy.ts loaded? + A. namespace B. barrel C. self-reexport +Bun bundler YES YES no +esbuild YES YES no +Rollup (Vite) YES YES no +Bun runtime YES YES no +Node --experimental-strip-types SYNTAX ERROR YES no +``` + +- **`export namespace`** compiles to an IIFE. Bundlers see one opaque function + call and can't analyze what's used. Node's native TS runner rejects the + syntax outright: `SyntaxError: TypeScript namespace declaration is not +supported in strip-only mode`. +- **Barrel `index.ts`** files (`export * as Foo from "./foo"` in a separate + file) force every re-exported sibling to evaluate when you import one name. + Siblings with side effects (top-level imports of SDKs, etc.) always load. +- **Self-reexport** keeps the file as plain ESM. Bundlers see static named + exports. The module is only pulled in when something actually imports from + it. There is no barrel hop, so no sibling contamination and no circular + import hazard. + +Bundle overhead for the self-reexport wrapper is roughly 240 bytes per module +(`Object.defineProperty` namespace proxy). At ~100 modules that's ~24KB — +negligible for a CLI binary. 
+ +## The pattern + +### Before + +```ts +// src/permission/arity.ts +export namespace BashArity { + export function prefix(tokens: string[]) { ... } +} +``` + +### After + +```ts +// src/permission/arity.ts +export function prefix(tokens: string[]) { ... } + +export * as BashArity from "./arity" +``` + +Consumers don't change at all: + +```ts +import { BashArity } from "@/permission/arity" +BashArity.prefix(...) // still works +``` + +Editors still auto-import `BashArity` like any named export, because the file +does have a named `BashArity` export at the module top level. + +### Odd but harmless + +`BashArity.BashArity.BashArity.prefix(...)` compiles and runs because the +namespace contains a re-export of itself. Nobody would write that. Not a +problem. + +## Why this is different from what we tried first + +An earlier pass used sibling barrel files (`index.ts` with `export * as ...`). +That turned out to be wrong for our constraints: + +1. The barrel file always loads all its sibling modules when you import + through it, even if you only need one. For our CLI this is exactly the + cost we're trying to avoid. +2. Barrel + sibling imports made it very easy to accidentally create circular + imports that only surface as `ReferenceError` at runtime, not at + typecheck. + +The self-reexport has none of those issues. There is no indirection. The +file and the namespace are the same unit. + +## Why this matters for startup + +The worst import chain in the codebase looks like: + +``` +src/index.ts └── FormatError from src/cli/error.ts - ├── { Provider } from provider/provider.ts (1709 lines) + ├── { Provider } from provider/provider.ts (~1700 lines) │ ├── 20+ @ai-sdk/* packages │ ├── @aws-sdk/credential-providers │ ├── google-auth-library - │ ├── gitlab-ai-provider, venice-ai-sdk-provider - │ └── fuzzysort, remeda, etc. 
- ├── { Config } from config/config.ts (1663 lines) - │ ├── jsonc-parser - │ ├── LSPServer (all server definitions) - │ └── Plugin, Auth, Env, Account, etc. - └── { MCP } from mcp/index.ts (930 lines) - ├── @modelcontextprotocol/sdk (3 transports) - └── open (browser launcher) + │ └── more + ├── { Config } from config/config.ts (~1600 lines) + └── { MCP } from mcp/mcp.ts (~900 lines) ``` -All of this gets pulled in to check `.isInstance()` on 6 error classes — code -that needs maybe 200 bytes total. This inflates the binary, increases startup -memory, and slows down initial module evaluation. - -### Why this also hurts memory - -Every module-level import is eagerly evaluated. Even with Bun's fast module -loader, evaluating 20+ AI SDK factory functions, the AWS credential chain, and -Google's auth library allocates objects, closures, and prototype chains that -persist for the lifetime of the process. Most CLI commands never use a provider -at all. - -## What effect-smol does - -effect-smol achieves tree-shakeable namespaced APIs via three structural choices. - -### 1. Each module is a separate file with flat named exports - -```ts -// Effect.ts — no namespace wrapper, just flat exports -export const gen: { ... } = internal.gen -export const fail: (error: E) => Effect = internal.fail -export const succeed: (value: A) => Effect = internal.succeed -// ... 230+ individual named exports -``` - -### 2. Barrel file uses `export * as` (not `export namespace`) - -```ts -// index.ts -export * as Effect from "./Effect.ts" -export * as Schema from "./Schema.ts" -export * as Stream from "./Stream.ts" -// ~134 modules -``` - -This creates a namespace-like API (`Effect.gen`, `Schema.parse`) but the -bundler knows the **exact shape** at compile time — it's the static export list -of that file. It can trace property accesses (`Effect.gen` → keep `gen`, -drop `timeout` if unused). With `export namespace`, the IIFE is opaque and -nothing can be dropped. - -### 3. 
`sideEffects: []` and deep imports - -```jsonc -// package.json -{ "sideEffects": [] } -``` - -Plus `"./*": "./src/*.ts"` in the exports map, enabling -`import * as Effect from "effect/Effect"` to bypass the barrel entirely. - -### 4. Errors as flat exports, not class declarations - -```ts -// Cause.ts -export const NoSuchElementErrorTypeId = core.NoSuchElementErrorTypeId -export interface NoSuchElementError extends YieldableError { ... } -export const NoSuchElementError: new(msg?: string) => NoSuchElementError = core.NoSuchElementError -export const isNoSuchElementError: (u: unknown) => u is NoSuchElementError = core.isNoSuchElementError -``` - -Each error is 4 independent exports: TypeId, interface, constructor (as const), -type guard. All individually shakeable. - -## The plan - -The core migration is **Phase 1** — convert `export namespace` to -`export * as`. Once that's done, the bundler can tree-shake individual exports -within each module. You do NOT need to break things into subfiles for -tree-shaking to work — the bundler traces which exports you actually access on -the namespace object and drops the rest, including their transitive imports. - -Splitting errors/schemas into separate files (Phase 0) is optional — it's a -lower-risk warmup step that can be done before or after the main conversion, and -it provides extra resilience against bundler edge cases. But the big win comes -from Phase 1. - -### Phase 0 (optional): Pre-split errors into subfiles - -This is a low-risk warmup that provides immediate benefit even before the full -`export * as` conversion. It's optional because Phase 1 alone is sufficient for -tree-shaking. But it's a good starting point if you want incremental progress: - -**For each namespace that defines errors** (15 files, ~30 error classes total): - -1. Create a sibling `errors.ts` file (e.g. 
`provider/errors.ts`) with the error - definitions as top-level named exports: - - ```ts - // provider/errors.ts - import z from "zod" - import { NamedError } from "@opencode-ai/shared/util/error" - import { ProviderID, ModelID } from "./schema" - - export const ModelNotFoundError = NamedError.create( - "ProviderModelNotFoundError", - z.object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, - suggestions: z.array(z.string()).optional(), - }), - ) - - export const InitError = NamedError.create("ProviderInitError", z.object({ providerID: ProviderID.zod })) - ``` - -2. In the namespace file, re-export from the errors file to maintain backward - compatibility: - - ```ts - // provider/provider.ts — inside the namespace - export { ModelNotFoundError, InitError } from "./errors" - ``` - -3. Update `cli/error.ts` (and any other light consumers) to import directly: - - ```ts - // BEFORE - import { Provider } from "../provider/provider" - Provider.ModelNotFoundError.isInstance(input) - - // AFTER - import { ModelNotFoundError as ProviderModelNotFoundError } from "../provider/errors" - ProviderModelNotFoundError.isInstance(input) - ``` - -**Files to split (Phase 0):** - -| Current file | New errors file | Errors to extract | -| ----------------------- | ------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | -| `provider/provider.ts` | `provider/errors.ts` | ModelNotFoundError, InitError | -| `provider/auth.ts` | `provider/auth-errors.ts` | OauthMissing, OauthCodeMissing, OauthCallbackFailed, ValidationFailed | -| `config/config.ts` | (already has `config/paths.ts`) | ConfigDirectoryTypoError → move to paths.ts | -| `config/markdown.ts` | `config/markdown-errors.ts` | FrontmatterError | -| `mcp/index.ts` | `mcp/errors.ts` | Failed | -| `session/message-v2.ts` | `session/message-errors.ts` | OutputLengthError, AbortedError, StructuredOutputError, AuthError, APIError, 
ContextOverflowError | -| `session/message.ts` | (shares with message-v2) | OutputLengthError, AuthError | -| `cli/ui.ts` | `cli/ui-errors.ts` | CancelledError | -| `skill/index.ts` | `skill/errors.ts` | InvalidError, NameMismatchError | -| `worktree/index.ts` | `worktree/errors.ts` | NotGitError, NameGenerationFailedError, CreateFailedError, StartCommandFailedError, RemoveFailedError, ResetFailedError | -| `storage/storage.ts` | `storage/errors.ts` | NotFoundError | -| `npm/index.ts` | `npm/errors.ts` | InstallFailedError | -| `ide/index.ts` | `ide/errors.ts` | AlreadyInstalledError, InstallFailedError | -| `lsp/client.ts` | `lsp/errors.ts` | InitializeError | - -### Phase 1: The real migration — `export namespace` → `export * as` - -This is the phase that actually fixes tree-shaking. For each module: - -1. **Unwrap** the `export namespace Foo { ... }` — remove the namespace wrapper, - keep all the members as top-level `export const` / `export function` / etc. -2. **Rename** the file if it's currently `index.ts` (e.g. `bus/index.ts` → - `bus/bus.ts`), so the barrel can take `index.ts`. -3. **Create the barrel** `index.ts` with one line: `export * as Foo from "./foo"` - -The file structure change for a module that's currently a single file: - -``` -# BEFORE -provider/ - provider.ts ← 1709-line file with `export namespace Provider { ... }` - -# AFTER -provider/ - index.ts ← NEW: `export * as Provider from "./provider"` - provider.ts ← SAME file, same name, just unwrap the namespace -``` - -And the code change is purely removing the wrapper: - -```ts -// BEFORE: provider/provider.ts -export namespace Provider { - export class Service extends Context.Service<...>()("@opencode/Provider") {} - export const layer = Layer.effect(Service, ...) - export const ModelNotFoundError = NamedError.create(...) - export function parseModel(model: string) { ... 
} -} - -// AFTER: provider/provider.ts — identical exports, no namespace keyword -export class Service extends Context.Service<...>()("@opencode/Provider") {} -export const layer = Layer.effect(Service, ...) -export const ModelNotFoundError = NamedError.create(...) -export function parseModel(model: string) { ... } -``` - -```ts -// NEW: provider/index.ts -export * as Provider from "./provider" -``` - -Consumer code barely changes — import path gets shorter: - -```ts -// BEFORE -import { Provider } from "../provider/provider" - -// AFTER — resolves to provider/index.ts, same Provider object -import { Provider } from "../provider" -``` - -All access like `Provider.ModelNotFoundError`, `Provider.Service`, -`Provider.layer` works exactly as before. The difference is invisible to -consumers but lets the bundler see inside the namespace. - -**Once this is done, you don't need to break anything into subfiles for -tree-shaking.** The bundler traces that `Provider.ModelNotFoundError` only -depends on `NamedError` + `zod` + the schema file, and drops -`Provider.layer` + all 20 AI SDK imports when they're unused. This works because -`export * as` gives the bundler a static export list it can do inner-graph -analysis on — it knows which exports reference which imports. - -**Order of conversion** (by risk / size, do small modules first): - -1. Tiny utilities: `Archive`, `Color`, `Token`, `Rpc`, `LocalContext` (~7-66 lines each) -2. Small services: `Auth`, `Env`, `BusEvent`, `SessionStatus`, `SessionRunState`, `Editor`, `Selection` (~25-91 lines) -3. Medium services: `Bus`, `Format`, `FileTime`, `FileWatcher`, `Command`, `Question`, `Permission`, `Vcs`, `Project` -4. Large services: `Config`, `Provider`, `MCP`, `Session`, `SessionProcessor`, `SessionPrompt`, `ACP` - -### Phase 2: Build configuration - -After the module structure supports tree-shaking: - -1. 
Add `"sideEffects": []` to `packages/opencode/package.json` (or
-   `"sideEffects": false`) — this is safe because our services use explicit
-   layer composition, not import-time side effects.
-2. Verify Bun's bundler respects the new structure. If Bun's tree-shaking is
-   insufficient, evaluate whether the compiled binary path needs an esbuild
-   pre-pass.
-3. Consider adding `/*#__PURE__*/` annotations to `NamedError.create(...)` calls
-   — these are factory functions that return classes, and bundlers may not know
-   they're side-effect-free without the annotation.
+All of that currently gets pulled in just to do `.isInstance()` on a handful
+of error classes. The namespace IIFE shape is the main reason bundlers cannot
+strip the unused parts. Self-reexport + flat ESM fixes it.
 
 ## Automation
 
-The transformation is scripted. From `packages/opencode`:
+From `packages/opencode`:
 
 ```bash
 bun script/unwrap-namespace.ts [--dry-run]
 ```
 
-The script uses ast-grep for accurate AST-based namespace boundary detection
-(no false matches from braces in strings/templates/comments), then:
+The script:
 
-1. Removes the `export namespace Foo {` line and its closing `}`
-2. Dedents the body by one indent level (2 spaces)
-3. If the file is `index.ts`, renames it to `.ts` and creates a new
-   `index.ts` barrel
-4. If the file is NOT `index.ts`, rewrites it in place and creates `index.ts`
-5. Prints the exact commands to find and rewrite import paths
+1. Uses ast-grep to locate the `export namespace Foo { ... }` block accurately.
+2. Removes the `export namespace Foo {` line and the matching closing `}`.
+3. Dedents the body by one indent level (2 spaces).
+4. Rewrites `Foo.Bar` self-references inside the file to just `Bar`.
+5. Appends `export * as Foo from "./<file>"` (the module's own filename) at the bottom of the file.
+6. Never creates a barrel `index.ts`.
 
-### Walkthrough: converting a module
-
-Using `Provider` as an example:
+### Typical flow for one file
 
 ```bash
-# 1. 
Preview what will change -bun script/unwrap-namespace.ts src/provider/provider.ts --dry-run +# 1. Preview +bun script/unwrap-namespace.ts src/permission/arity.ts --dry-run -# 2. Apply the transformation -bun script/unwrap-namespace.ts src/provider/provider.ts +# 2. Apply +bun script/unwrap-namespace.ts src/permission/arity.ts -# 3. Rewrite import paths (script prints the exact command) -rg -l 'from.*provider/provider' src/ | xargs sed -i '' 's|provider/provider"|provider"|g' - -# 4. Verify -bun typecheck -bun run test +# 3. Verify +cd packages/opencode +bunx --bun tsgo --noEmit +bun run --conditions=browser ./src/index.ts generate +bun run test ``` -**What changes on disk:** +### Consumer imports usually don't need to change -``` -# BEFORE -provider/ - provider.ts ← 1709 lines, `export namespace Provider { ... }` - -# AFTER -provider/ - index.ts ← NEW: `export * as Provider from "./provider"` - provider.ts ← same file, namespace unwrapped to flat exports -``` - -**What changes in consumer code:** +Most consumers already import straight from the file, e.g.: ```ts -// BEFORE -import { Provider } from "../provider/provider" - -// AFTER — shorter path, same Provider object -import { Provider } from "../provider" +import { BashArity } from "@/permission/arity" +import { Config } from "@/config/config" ``` -All property access (`Provider.Service`, `Provider.ModelNotFoundError`, etc.) -stays identical. +Because the file itself now does `export * as Foo from "./foo"`, those imports +keep working with zero edits. -### Two cases the script handles - -**Case A: file is NOT `index.ts`** (e.g. `provider/provider.ts`) - -- Rewrites the file in place (unwrap + dedent) -- Creates `provider/index.ts` as the barrel -- Import paths change: `"../provider/provider"` → `"../provider"` - -**Case B: file IS `index.ts`** (e.g. 
`bus/index.ts`) - -- Renames `index.ts` → `bus.ts` (kebab-case of namespace name) -- Creates new `index.ts` as the barrel -- **No import rewrites needed** — `"@/bus"` already resolves to `bus/index.ts` - -## Do I need to split errors/schemas into subfiles? - -**No.** Once you do the `export * as` conversion, the bundler can tree-shake -individual exports within the file. If `cli/error.ts` only accesses -`Provider.ModelNotFoundError`, the bundler traces that `ModelNotFoundError` -doesn't reference `createAnthropic` and drops the AI SDK imports. - -Splitting into subfiles (errors.ts, schema.ts) is still a fine idea for **code -organization** — smaller files are easier to read and review. But it's not -required for tree-shaking. The `export * as` conversion alone is sufficient. - -The one case where subfile splitting provides extra tree-shake value is if an -imported package has module-level side effects that the bundler can't prove are -unused. In practice this is rare — most npm packages are side-effect-free — and -adding `"sideEffects": []` to package.json handles the common cases. - -## Scope - -| Metric | Count | -| ----------------------------------------------- | --------------- | -| Files with `export namespace` | 106 | -| Total namespace declarations | 118 (12 nested) | -| Files with `NamedError.create` inside namespace | 15 | -| Total error classes to extract | ~30 | -| Files using `export * as` today | 0 | - -Phase 1 (the `export * as` conversion) is the main change. It's mechanical and -LLM-friendly but touches every import site, so it should be done module by -module with type-checking between each step. Each module is an independent PR. - -## Rules for new code - -Going forward: - -- **No new `export namespace`**. Use a file with flat named exports and - `export * as` in the barrel. -- Keep the service, layer, errors, schemas, and runtime wiring together in one - file if you want — that's fine now. 
The `export * as` barrel makes everything - individually shakeable regardless of file structure. -- If a file grows large enough that it's hard to navigate, split by concern - (errors.ts, schema.ts, etc.) for readability. Not for tree-shaking — the - bundler handles that. - -## Circular import rules - -Barrel files (`index.ts` with `export * as`) introduce circular import risks. -These cause `ReferenceError: Cannot access 'X' before initialization` at -runtime — not caught by the type checker. - -### Rule 1: Sibling files never import through their own barrel - -Files in the same directory must import directly from the source file, never -through `"."` or `"@/"`: +The only edits needed are when a consumer was importing through a previous +barrel (`"@/config"` or `"../config"` resolving to `config/index.ts`). In +that case, repoint it at the file: ```ts -// BAD — circular: index.ts re-exports both files, so A → index → B → index → A -import { Sibling } from "." +// before +import { Config } from "@/config" -// GOOD — direct, no cycle -import * as Sibling from "./sibling" +// after +import { Config } from "@/config/config" ``` -### Rule 2: Cross-directory imports must not form cycles through barrels +### Dynamic imports in tests -If `src/lsp/lsp.ts` imports `Config` from `"../config"`, and -`src/config/config.ts` imports `LSPServer` from `"../lsp"`, that's a cycle: +If a test did `const { Foo } = await import("../../src/x/y")`, the destructure +still works because of the self-reexport. No change required. -``` -lsp/lsp.ts → config/index.ts → config/config.ts → lsp/index.ts → lsp/lsp.ts 💥 -``` +## Verification checklist (per PR) -Fix by importing the specific file, breaking the cycle: - -```ts -// In config/config.ts — import directly, not through the lsp barrel -import * as LSPServer from "../lsp/server" -``` - -### Why the type checker doesn't catch this - -TypeScript resolves types lazily — it doesn't evaluate module-scope -expressions. 
The `ReferenceError` only happens at runtime when a module-scope -`const` or function call accesses a value from a circular dependency that -hasn't finished initializing. The SDK build step (`bun run --conditions=browser -./src/index.ts generate`) is the reliable way to catch these because it -evaluates all modules eagerly. - -### How to verify - -After any namespace conversion, run: +Run all of these locally before pushing: ```bash cd packages/opencode +bunx --bun tsgo --noEmit bun run --conditions=browser ./src/index.ts generate +bun run test ``` -If this completes without `ReferenceError`, the module graph is safe. +Also do a quick grep in `src/`, `test/`, and `script/` to make sure no +consumer is still importing the namespace from an old barrel path that no +longer exports it. + +The SDK build step (`bun run --conditions=browser ./src/index.ts generate`) +evaluates every module eagerly and is the most reliable way to catch circular +import regressions at runtime — the typechecker does not catch these. + +## Rules for new code + +- No new `export namespace`. +- Every module file that wants a namespace gets a self-reexport at the + bottom: + `export * as Foo from "./foo"` +- Consumers import from the file itself: + `import { Foo } from "../path/to/foo"` +- No new barrel `index.ts` files for internal code. +- If a file needs a sibling, import the sibling file directly: + `import * as Sibling from "./sibling"`, not `from "."`. + +## Scope + +There are still dozens of `export namespace` files left across the codebase. +Each one is its own small PR. Do them one at a time, verified locally, rather +than batching by directory. 
diff --git a/packages/opencode/src/permission/arity.ts b/packages/opencode/src/permission/arity.ts index 948841c8e7..cd4b0a7d58 100644 --- a/packages/opencode/src/permission/arity.ts +++ b/packages/opencode/src/permission/arity.ts @@ -1,15 +1,14 @@ -export namespace BashArity { - export function prefix(tokens: string[]) { - for (let len = tokens.length; len > 0; len--) { - const prefix = tokens.slice(0, len).join(" ") - const arity = ARITY[prefix] - if (arity !== undefined) return tokens.slice(0, arity) - } - if (tokens.length === 0) return [] - return tokens.slice(0, 1) +export function prefix(tokens: string[]) { + for (let len = tokens.length; len > 0; len--) { + const prefix = tokens.slice(0, len).join(" ") + const arity = ARITY[prefix] + if (arity !== undefined) return tokens.slice(0, arity) } + if (tokens.length === 0) return [] + return tokens.slice(0, 1) +} - /* Generated with following prompt: +/* Generated with following prompt: You are generating a dictionary of command-prefix arities for bash-style commands. This dictionary is used to identify the "human-understandable command" from an input shell command.### **RULES (follow strictly)**1. Each entry maps a **command prefix string → number**, representing how many **tokens** define the command. 2. **Flags NEVER count as tokens**. Only subcommands count. 
@@ -22,142 +21,143 @@ This dictionary is used to identify the "human-understandable command" from an i * `npm run dev` → `npm run dev` (because `npm run` has arity 3) * `python script.py` → `python script.py` (default: whole input, not in dictionary)### **Now generate the dictionary.** */ - const ARITY: Record = { - cat: 1, // cat file.txt - cd: 1, // cd /path/to/dir - chmod: 1, // chmod 755 script.sh - chown: 1, // chown user:group file.txt - cp: 1, // cp source.txt dest.txt - echo: 1, // echo "hello world" - env: 1, // env - export: 1, // export PATH=/usr/bin - grep: 1, // grep pattern file.txt - kill: 1, // kill 1234 - killall: 1, // killall process - ln: 1, // ln -s source target - ls: 1, // ls -la - mkdir: 1, // mkdir new-dir - mv: 1, // mv old.txt new.txt - ps: 1, // ps aux - pwd: 1, // pwd - rm: 1, // rm file.txt - rmdir: 1, // rmdir empty-dir - sleep: 1, // sleep 5 - source: 1, // source ~/.bashrc - tail: 1, // tail -f log.txt - touch: 1, // touch file.txt - unset: 1, // unset VAR - which: 1, // which node - aws: 3, // aws s3 ls - az: 3, // az storage blob list - bazel: 2, // bazel build - brew: 2, // brew install node - bun: 2, // bun install - "bun run": 3, // bun run dev - "bun x": 3, // bun x vite - cargo: 2, // cargo build - "cargo add": 3, // cargo add tokio - "cargo run": 3, // cargo run main - cdk: 2, // cdk deploy - cf: 2, // cf push app - cmake: 2, // cmake build - composer: 2, // composer require laravel - consul: 2, // consul members - "consul kv": 3, // consul kv get config/app - crictl: 2, // crictl ps - deno: 2, // deno run server.ts - "deno task": 3, // deno task dev - doctl: 3, // doctl kubernetes cluster list - docker: 2, // docker run nginx - "docker builder": 3, // docker builder prune - "docker compose": 3, // docker compose up - "docker container": 3, // docker container ls - "docker image": 3, // docker image prune - "docker network": 3, // docker network inspect - "docker volume": 3, // docker volume ls - eksctl: 2, // eksctl get 
clusters - "eksctl create": 3, // eksctl create cluster - firebase: 2, // firebase deploy - flyctl: 2, // flyctl deploy - gcloud: 3, // gcloud compute instances list - gh: 3, // gh pr list - git: 2, // git checkout main - "git config": 3, // git config user.name - "git remote": 3, // git remote add origin - "git stash": 3, // git stash pop - go: 2, // go build - gradle: 2, // gradle build - helm: 2, // helm install mychart - heroku: 2, // heroku logs - hugo: 2, // hugo new site blog - ip: 2, // ip link show - "ip addr": 3, // ip addr show - "ip link": 3, // ip link set eth0 up - "ip netns": 3, // ip netns exec foo bash - "ip route": 3, // ip route add default via 1.1.1.1 - kind: 2, // kind delete cluster - "kind create": 3, // kind create cluster - kubectl: 2, // kubectl get pods - "kubectl kustomize": 3, // kubectl kustomize overlays/dev - "kubectl rollout": 3, // kubectl rollout restart deploy/api - kustomize: 2, // kustomize build . - make: 2, // make build - mc: 2, // mc ls myminio - "mc admin": 3, // mc admin info myminio - minikube: 2, // minikube start - mongosh: 2, // mongosh test - mysql: 2, // mysql -u root - mvn: 2, // mvn compile - ng: 2, // ng generate component home - npm: 2, // npm install - "npm exec": 3, // npm exec vite - "npm init": 3, // npm init vue - "npm run": 3, // npm run dev - "npm view": 3, // npm view react version - nvm: 2, // nvm use 18 - nx: 2, // nx build - openssl: 2, // openssl genrsa 2048 - "openssl req": 3, // openssl req -new -key key.pem - "openssl x509": 3, // openssl x509 -in cert.pem - pip: 2, // pip install numpy - pipenv: 2, // pipenv install flask - pnpm: 2, // pnpm install - "pnpm dlx": 3, // pnpm dlx create-next-app - "pnpm exec": 3, // pnpm exec vite - "pnpm run": 3, // pnpm run dev - poetry: 2, // poetry add requests - podman: 2, // podman run alpine - "podman container": 3, // podman container ls - "podman image": 3, // podman image prune - psql: 2, // psql -d mydb - pulumi: 2, // pulumi up - "pulumi stack": 3, // 
pulumi stack output - pyenv: 2, // pyenv install 3.11 - python: 2, // python -m venv env - rake: 2, // rake db:migrate - rbenv: 2, // rbenv install 3.2.0 - "redis-cli": 2, // redis-cli ping - rustup: 2, // rustup update - serverless: 2, // serverless invoke - sfdx: 3, // sfdx force:org:list - skaffold: 2, // skaffold dev - sls: 2, // sls deploy - sst: 2, // sst deploy - swift: 2, // swift build - systemctl: 2, // systemctl restart nginx - terraform: 2, // terraform apply - "terraform workspace": 3, // terraform workspace select prod - tmux: 2, // tmux new -s dev - turbo: 2, // turbo run build - ufw: 2, // ufw allow 22 - vault: 2, // vault login - "vault auth": 3, // vault auth list - "vault kv": 3, // vault kv get secret/api - vercel: 2, // vercel deploy - volta: 2, // volta install node - wp: 2, // wp plugin install - yarn: 2, // yarn add react - "yarn dlx": 3, // yarn dlx create-react-app - "yarn run": 3, // yarn run dev - } +const ARITY: Record = { + cat: 1, // cat file.txt + cd: 1, // cd /path/to/dir + chmod: 1, // chmod 755 script.sh + chown: 1, // chown user:group file.txt + cp: 1, // cp source.txt dest.txt + echo: 1, // echo "hello world" + env: 1, // env + export: 1, // export PATH=/usr/bin + grep: 1, // grep pattern file.txt + kill: 1, // kill 1234 + killall: 1, // killall process + ln: 1, // ln -s source target + ls: 1, // ls -la + mkdir: 1, // mkdir new-dir + mv: 1, // mv old.txt new.txt + ps: 1, // ps aux + pwd: 1, // pwd + rm: 1, // rm file.txt + rmdir: 1, // rmdir empty-dir + sleep: 1, // sleep 5 + source: 1, // source ~/.bashrc + tail: 1, // tail -f log.txt + touch: 1, // touch file.txt + unset: 1, // unset VAR + which: 1, // which node + aws: 3, // aws s3 ls + az: 3, // az storage blob list + bazel: 2, // bazel build + brew: 2, // brew install node + bun: 2, // bun install + "bun run": 3, // bun run dev + "bun x": 3, // bun x vite + cargo: 2, // cargo build + "cargo add": 3, // cargo add tokio + "cargo run": 3, // cargo run main + cdk: 2, // cdk 
deploy + cf: 2, // cf push app + cmake: 2, // cmake build + composer: 2, // composer require laravel + consul: 2, // consul members + "consul kv": 3, // consul kv get config/app + crictl: 2, // crictl ps + deno: 2, // deno run server.ts + "deno task": 3, // deno task dev + doctl: 3, // doctl kubernetes cluster list + docker: 2, // docker run nginx + "docker builder": 3, // docker builder prune + "docker compose": 3, // docker compose up + "docker container": 3, // docker container ls + "docker image": 3, // docker image prune + "docker network": 3, // docker network inspect + "docker volume": 3, // docker volume ls + eksctl: 2, // eksctl get clusters + "eksctl create": 3, // eksctl create cluster + firebase: 2, // firebase deploy + flyctl: 2, // flyctl deploy + gcloud: 3, // gcloud compute instances list + gh: 3, // gh pr list + git: 2, // git checkout main + "git config": 3, // git config user.name + "git remote": 3, // git remote add origin + "git stash": 3, // git stash pop + go: 2, // go build + gradle: 2, // gradle build + helm: 2, // helm install mychart + heroku: 2, // heroku logs + hugo: 2, // hugo new site blog + ip: 2, // ip link show + "ip addr": 3, // ip addr show + "ip link": 3, // ip link set eth0 up + "ip netns": 3, // ip netns exec foo bash + "ip route": 3, // ip route add default via 1.1.1.1 + kind: 2, // kind delete cluster + "kind create": 3, // kind create cluster + kubectl: 2, // kubectl get pods + "kubectl kustomize": 3, // kubectl kustomize overlays/dev + "kubectl rollout": 3, // kubectl rollout restart deploy/api + kustomize: 2, // kustomize build . 
+ make: 2, // make build + mc: 2, // mc ls myminio + "mc admin": 3, // mc admin info myminio + minikube: 2, // minikube start + mongosh: 2, // mongosh test + mysql: 2, // mysql -u root + mvn: 2, // mvn compile + ng: 2, // ng generate component home + npm: 2, // npm install + "npm exec": 3, // npm exec vite + "npm init": 3, // npm init vue + "npm run": 3, // npm run dev + "npm view": 3, // npm view react version + nvm: 2, // nvm use 18 + nx: 2, // nx build + openssl: 2, // openssl genrsa 2048 + "openssl req": 3, // openssl req -new -key key.pem + "openssl x509": 3, // openssl x509 -in cert.pem + pip: 2, // pip install numpy + pipenv: 2, // pipenv install flask + pnpm: 2, // pnpm install + "pnpm dlx": 3, // pnpm dlx create-next-app + "pnpm exec": 3, // pnpm exec vite + "pnpm run": 3, // pnpm run dev + poetry: 2, // poetry add requests + podman: 2, // podman run alpine + "podman container": 3, // podman container ls + "podman image": 3, // podman image prune + psql: 2, // psql -d mydb + pulumi: 2, // pulumi up + "pulumi stack": 3, // pulumi stack output + pyenv: 2, // pyenv install 3.11 + python: 2, // python -m venv env + rake: 2, // rake db:migrate + rbenv: 2, // rbenv install 3.2.0 + "redis-cli": 2, // redis-cli ping + rustup: 2, // rustup update + serverless: 2, // serverless invoke + sfdx: 3, // sfdx force:org:list + skaffold: 2, // skaffold dev + sls: 2, // sls deploy + sst: 2, // sst deploy + swift: 2, // swift build + systemctl: 2, // systemctl restart nginx + terraform: 2, // terraform apply + "terraform workspace": 3, // terraform workspace select prod + tmux: 2, // tmux new -s dev + turbo: 2, // turbo run build + ufw: 2, // ufw allow 22 + vault: 2, // vault login + "vault auth": 3, // vault auth list + "vault kv": 3, // vault kv get secret/api + vercel: 2, // vercel deploy + volta: 2, // volta install node + wp: 2, // wp plugin install + yarn: 2, // yarn add react + "yarn dlx": 3, // yarn dlx create-react-app + "yarn run": 3, // yarn run dev } + +export * 
as BashArity from "./arity" From 2fe9d9447070f6967d80b0a8f74239e1969d9e1c Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 15:27:53 -0400 Subject: [PATCH 028/201] fix: remove 8 more unnecessary `as any` casts in opencode core (#22877) --- packages/opencode/src/acp/agent.ts | 2 +- packages/opencode/src/cli/cmd/providers.ts | 4 +++- packages/opencode/src/cli/cmd/tui/win32.ts | 5 +++-- packages/opencode/src/lsp/lsp.ts | 6 +++--- packages/opencode/src/mcp/mcp.ts | 2 +- .../copilot/responses/openai-responses-language-model.ts | 4 ++-- packages/opencode/src/provider/transform.ts | 2 +- packages/opencode/src/util/effect-zod.ts | 4 ++-- 8 files changed, 16 insertions(+), 13 deletions(-) diff --git a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts index 9388c87f12..5d8c723ea7 100644 --- a/packages/opencode/src/acp/agent.ts +++ b/packages/opencode/src/acp/agent.ts @@ -178,7 +178,7 @@ export namespace ACP { }) for await (const event of events.stream) { if (this.eventAbort.signal.aborted) return - const payload = (event as any)?.payload + const payload = event?.payload if (!payload) continue await this.handleEvent(payload as Event).catch((error) => { log.error("failed to handle event", { error, type: payload.type }) diff --git a/packages/opencode/src/cli/cmd/providers.ts b/packages/opencode/src/cli/cmd/providers.ts index 4bc3f0ea6c..e2eb0b65a3 100644 --- a/packages/opencode/src/cli/cmd/providers.ts +++ b/packages/opencode/src/cli/cmd/providers.ts @@ -297,7 +297,9 @@ export const ProvidersLoginCommand = cmd({ prompts.intro("Add credential") if (args.url) { const url = args.url.replace(/\/+$/, "") - const wellknown = await fetch(`${url}/.well-known/opencode`).then((x) => x.json() as any) + const wellknown = (await fetch(`${url}/.well-known/opencode`).then((x) => x.json())) as { + auth: { command: string[]; env: string } + } prompts.log.info(`Running \`${wellknown.auth.command.join(" ")}\``) const proc = 
Process.spawn(wellknown.auth.command, { stdout: "pipe", diff --git a/packages/opencode/src/cli/cmd/tui/win32.ts b/packages/opencode/src/cli/cmd/tui/win32.ts index 23e9f44857..1aaa80aecd 100644 --- a/packages/opencode/src/cli/cmd/tui/win32.ts +++ b/packages/opencode/src/cli/cmd/tui/win32.ts @@ -1,4 +1,5 @@ import { dlopen, ptr } from "bun:ffi" +import type { ReadStream } from "node:tty" const STD_INPUT_HANDLE = -10 const ENABLE_PROCESSED_INPUT = 0x0001 @@ -71,7 +72,7 @@ export function win32InstallCtrlCGuard() { if (!load()) return if (unhook) return unhook - const stdin = process.stdin as any + const stdin = process.stdin as ReadStream const original = stdin.setRawMode const handle = k32!.symbols.GetStdHandle(STD_INPUT_HANDLE) @@ -93,7 +94,7 @@ export function win32InstallCtrlCGuard() { setImmediate(enforce) } - let wrapped: ((mode: boolean) => unknown) | undefined + let wrapped: ReadStream["setRawMode"] | undefined if (typeof original === "function") { wrapped = (mode: boolean) => { diff --git a/packages/opencode/src/lsp/lsp.ts b/packages/opencode/src/lsp/lsp.ts index 2c0982eca5..d4d1e75634 100644 --- a/packages/opencode/src/lsp/lsp.ts +++ b/packages/opencode/src/lsp/lsp.ts @@ -465,12 +465,12 @@ export const layer = Layer.effect( direction: "callHierarchy/incomingCalls" | "callHierarchy/outgoingCalls", ) { const results = yield* run(input.file, async (client) => { - const items = (await client.connection - .sendRequest("textDocument/prepareCallHierarchy", { + const items = await client.connection + .sendRequest("textDocument/prepareCallHierarchy", { textDocument: { uri: pathToFileURL(input.file).href }, position: { line: input.line, character: input.character }, }) - .catch(() => [])) as any[] + .catch(() => [] as unknown[]) if (!items?.length) return [] return client.connection.sendRequest(direction, { item: items[0] }).catch(() => []) }) diff --git a/packages/opencode/src/mcp/mcp.ts b/packages/opencode/src/mcp/mcp.ts index 6666e0854f..61201ce76d 100644 --- 
a/packages/opencode/src/mcp/mcp.ts +++ b/packages/opencode/src/mcp/mcp.ts @@ -531,7 +531,7 @@ export const layer = Layer.effect( Object.values(s.clients), (client) => Effect.gen(function* () { - const pid = (client.transport as any)?.pid + const pid = client.transport instanceof StdioClientTransport ? client.transport.pid : null if (typeof pid === "number") { const pids = yield* descendants(pid) for (const dpid of pids) { diff --git a/packages/opencode/src/provider/sdk/copilot/responses/openai-responses-language-model.ts b/packages/opencode/src/provider/sdk/copilot/responses/openai-responses-language-model.ts index 92c8fd857b..250d1f6f34 100644 --- a/packages/opencode/src/provider/sdk/copilot/responses/openai-responses-language-model.ts +++ b/packages/opencode/src/provider/sdk/copilot/responses/openai-responses-language-model.ts @@ -354,7 +354,7 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 { details: "flex processing is only available for o3, o4-mini, and gpt-5 models", }) // Remove from args if not supported - delete (baseArgs as any).service_tier + baseArgs.service_tier = undefined } // Validate priority processing support @@ -366,7 +366,7 @@ export class OpenAIResponsesLanguageModel implements LanguageModelV3 { "priority processing is only available for supported models (gpt-4, gpt-5, gpt-5-mini, o3, o4-mini) and requires Enterprise access. 
gpt-5-nano is not supported", }) // Remove from args if not supported - delete (baseArgs as any).service_tier + baseArgs.service_tier = undefined } const { diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 66e87fb3b8..e527251b0f 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -193,7 +193,7 @@ function normalizeMessages( providerOptions: { ...msg.providerOptions, openaiCompatible: { - ...(msg.providerOptions as any)?.openaiCompatible, + ...msg.providerOptions?.openaiCompatible, [field]: reasoningText, }, }, diff --git a/packages/opencode/src/util/effect-zod.ts b/packages/opencode/src/util/effect-zod.ts index 553d7a0650..6e99fd4688 100644 --- a/packages/opencode/src/util/effect-zod.ts +++ b/packages/opencode/src/util/effect-zod.ts @@ -77,8 +77,8 @@ function union(ast: SchemaAST.Union): z.ZodTypeAny { if (items.length === 1) return items[0] if (items.length < 2) return fail(ast) - const discriminator = (ast as any).annotations?.discriminator - if (discriminator) { + const discriminator = ast.annotations?.discriminator + if (typeof discriminator === "string") { return z.discriminatedUnion(discriminator, items as [z.ZodObject, z.ZodObject, ...z.ZodObject[]]) } From 6c3b28db64e47895553949880f296bae74691f4a Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 14:38:39 -0500 Subject: [PATCH 029/201] fix: ensure that double pasting doesnt happen after tui perf commit was merged (#22880) --- .../opencode/src/cli/cmd/tui/component/prompt/index.tsx | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index e64a16eb8a..82c4a7222f 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ 
b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -1031,6 +1031,10 @@ export function Prompt(props: PromptProps) { return } + // Once we cross an async boundary below, the terminal may perform its + // default paste unless we suppress it first and handle insertion ourselves. + event.preventDefault() + const filepath = iife(() => { const raw = pastedContent.replace(/^['"]+|['"]+$/g, "") if (raw.startsWith("file://")) { @@ -1048,7 +1052,6 @@ export function Prompt(props: PromptProps) { const filename = path.basename(filepath) // Handle SVG as raw text content, not as base64 image if (mime === "image/svg+xml") { - event.preventDefault() const content = await Filesystem.readText(filepath).catch(() => {}) if (content) { pasteText(content, `[SVG: ${filename ?? "image"}]`) @@ -1056,7 +1059,6 @@ export function Prompt(props: PromptProps) { } } if (mime.startsWith("image/") || mime === "application/pdf") { - event.preventDefault() const content = await Filesystem.readArrayBuffer(filepath) .then((buffer) => Buffer.from(buffer).toString("base64")) .catch(() => {}) @@ -1078,11 +1080,12 @@ export function Prompt(props: PromptProps) { (lineCount >= 3 || pastedContent.length > 150) && !sync.data.config.experimental?.disable_paste_summary ) { - event.preventDefault() pasteText(pastedContent, `[Pasted ~${lineCount} lines]`) return } + input.insertText(normalizedText) + // Force layout update and render for the pasted content setTimeout(() => { // setTimeout is a workaround and needs to be addressed properly From 76275fc3ab8f39cd02ae7eed87c47679e1f4c28e Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 15:49:21 -0400 Subject: [PATCH 030/201] refactor: move Pty into pty/index.ts with self-reexport (#22881) --- .../specs/effect/namespace-treeshake.md | 54 ++- packages/opencode/src/pty/index.ts | 365 +++++++++++++++++- packages/opencode/src/pty/service.ts | 362 ----------------- 3 files changed, 413 insertions(+), 368 deletions(-) delete mode 100644 
packages/opencode/src/pty/service.ts diff --git a/packages/opencode/specs/effect/namespace-treeshake.md b/packages/opencode/specs/effect/namespace-treeshake.md index ac4d3987de..ef78c762bb 100644 --- a/packages/opencode/specs/effect/namespace-treeshake.md +++ b/packages/opencode/specs/effect/namespace-treeshake.md @@ -196,15 +196,59 @@ import regressions at runtime — the typechecker does not catch these. ## Rules for new code - No new `export namespace`. -- Every module file that wants a namespace gets a self-reexport at the +- Every module directory has a single canonical file — typically + `dir/index.ts` — with flat top-level exports and a self-reexport at the bottom: - `export * as Foo from "./foo"` -- Consumers import from the file itself: - `import { Foo } from "../path/to/foo"` -- No new barrel `index.ts` files for internal code. + `export * as Foo from "."` +- Consumers import from the directory: + `import { Foo } from "@/dir"` or `import { Foo } from "../dir"`. +- No sibling barrel files. If a directory has multiple independent + namespaces, they each get their own file (e.g. `config/config.ts`, + `config/plugin.ts`) and their own self-reexport; the `index.ts` in that + directory stays minimal or does not exist. - If a file needs a sibling, import the sibling file directly: `import * as Sibling from "./sibling"`, not `from "."`. +### Why `dir/index.ts` + `"."` is fine for us + +A single-file module (e.g. `pty/`) can live entirely in `dir/index.ts` +with `export * as Foo from "."` at the bottom. Consumers write the +short form: + +```ts +import { Pty } from "@/pty" +``` + +This works in Bun runtime, Bun build, esbuild, and Rollup. It does NOT +work under Node's `--experimental-strip-types` runner: + +``` +node --experimental-strip-types entry.ts + ERR_UNSUPPORTED_DIR_IMPORT: Directory import '/.../pty' is not supported +``` + +Node requires an explicit file or a `package.json#exports` map for ESM. 
+We don't care about that target right now because the opencode CLI is +built with Bun and the web apps are built with Vite/Rollup. If we ever +want to run raw `.ts` through Node, we'll need to either use explicit +`.ts` extensions everywhere or add per-directory `package.json` exports +maps. + +### When NOT to collapse to `index.ts` + +Some directories contain multiple independent namespaces where +`dir/index.ts` would be misleading. Examples: + +- `config/` has `Config`, `ConfigPaths`, `ConfigMarkdown`, `ConfigPlugin`, + `ConfigKeybinds`. Each lives in its own file with its own self-reexport + (`config/config.ts`, `config/plugin.ts`, etc.). Consumers import the + specific one: `import { ConfigPlugin } from "@/config/plugin"`. +- Same shape for `session/`, `server/`, etc. + +Collapsing one of those into `index.ts` would mean picking a single +"canonical" namespace for the directory, which breaks the symmetry and +hides the other files. + ## Scope There are still dozens of `export namespace` files left across the codebase. 
diff --git a/packages/opencode/src/pty/index.ts b/packages/opencode/src/pty/index.ts index 37cb4e49a8..3d00de596a 100644 --- a/packages/opencode/src/pty/index.ts +++ b/packages/opencode/src/pty/index.ts @@ -1 +1,364 @@ -export * as Pty from "./service" +import { BusEvent } from "@/bus/bus-event" +import { Bus } from "@/bus" +import { InstanceState } from "@/effect" +import { Instance } from "@/project/instance" +import type { Proc } from "#pty" +import z from "zod" +import { Log } from "../util" +import { lazy } from "@opencode-ai/shared/util/lazy" +import { Shell } from "@/shell/shell" +import { Plugin } from "@/plugin" +import { PtyID } from "./schema" +import { Effect, Layer, Context } from "effect" +import { EffectBridge } from "@/effect" + +const log = Log.create({ service: "pty" }) + +const BUFFER_LIMIT = 1024 * 1024 * 2 +const BUFFER_CHUNK = 64 * 1024 +const encoder = new TextEncoder() + +type Socket = { + readyState: number + data?: unknown + send: (data: string | Uint8Array | ArrayBuffer) => void + close: (code?: number, reason?: string) => void +} + +const sock = (ws: Socket) => (ws.data && typeof ws.data === "object" ? ws.data : ws) + +type Active = { + info: Info + process: Proc + buffer: string + bufferCursor: number + cursor: number + subscribers: Map +} + +type State = { + dir: string + sessions: Map +} + +// WebSocket control frame: 0x00 + UTF-8 JSON. 
+const meta = (cursor: number) => { + const json = JSON.stringify({ cursor }) + const bytes = encoder.encode(json) + const out = new Uint8Array(bytes.length + 1) + out[0] = 0 + out.set(bytes, 1) + return out +} + +const pty = lazy(() => import("#pty")) + +export const Info = z + .object({ + id: PtyID.zod, + title: z.string(), + command: z.string(), + args: z.array(z.string()), + cwd: z.string(), + status: z.enum(["running", "exited"]), + pid: z.number(), + }) + .meta({ ref: "Pty" }) + +export type Info = z.infer + +export const CreateInput = z.object({ + command: z.string().optional(), + args: z.array(z.string()).optional(), + cwd: z.string().optional(), + title: z.string().optional(), + env: z.record(z.string(), z.string()).optional(), +}) + +export type CreateInput = z.infer + +export const UpdateInput = z.object({ + title: z.string().optional(), + size: z + .object({ + rows: z.number(), + cols: z.number(), + }) + .optional(), +}) + +export type UpdateInput = z.infer + +export const Event = { + Created: BusEvent.define("pty.created", z.object({ info: Info })), + Updated: BusEvent.define("pty.updated", z.object({ info: Info })), + Exited: BusEvent.define("pty.exited", z.object({ id: PtyID.zod, exitCode: z.number() })), + Deleted: BusEvent.define("pty.deleted", z.object({ id: PtyID.zod })), +} + +export interface Interface { + readonly list: () => Effect.Effect + readonly get: (id: PtyID) => Effect.Effect + readonly create: (input: CreateInput) => Effect.Effect + readonly update: (id: PtyID, input: UpdateInput) => Effect.Effect + readonly remove: (id: PtyID) => Effect.Effect + readonly resize: (id: PtyID, cols: number, rows: number) => Effect.Effect + readonly write: (id: PtyID, data: string) => Effect.Effect + readonly connect: ( + id: PtyID, + ws: Socket, + cursor?: number, + ) => Effect.Effect<{ onMessage: (message: string | ArrayBuffer) => void; onClose: () => void } | undefined> +} + +export class Service extends Context.Service()("@opencode/Pty") {} + +export 
const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + const plugin = yield* Plugin.Service + function teardown(session: Active) { + try { + session.process.kill() + } catch {} + for (const [sub, ws] of session.subscribers.entries()) { + try { + if (sock(ws) === sub) ws.close() + } catch {} + } + session.subscribers.clear() + } + + const state = yield* InstanceState.make( + Effect.fn("Pty.state")(function* (ctx) { + const state = { + dir: ctx.directory, + sessions: new Map(), + } + + yield* Effect.addFinalizer(() => + Effect.sync(() => { + for (const session of state.sessions.values()) { + teardown(session) + } + state.sessions.clear() + }), + ) + + return state + }), + ) + + const remove = Effect.fn("Pty.remove")(function* (id: PtyID) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (!session) return + s.sessions.delete(id) + log.info("removing session", { id }) + teardown(session) + yield* bus.publish(Event.Deleted, { id: session.info.id }) + }) + + const list = Effect.fn("Pty.list")(function* () { + const s = yield* InstanceState.get(state) + return Array.from(s.sessions.values()).map((session) => session.info) + }) + + const get = Effect.fn("Pty.get")(function* (id: PtyID) { + const s = yield* InstanceState.get(state) + return s.sessions.get(id)?.info + }) + + const create = Effect.fn("Pty.create")(function* (input: CreateInput) { + const s = yield* InstanceState.get(state) + const bridge = yield* EffectBridge.make() + const id = PtyID.ascending() + const command = input.command || Shell.preferred() + const args = input.args || [] + if (Shell.login(command)) { + args.push("-l") + } + + const cwd = input.cwd || s.dir + const shell = yield* plugin.trigger("shell.env", { cwd }, { env: {} }) + const env = { + ...process.env, + ...input.env, + ...shell.env, + TERM: "xterm-256color", + OPENCODE_TERMINAL: "1", + } as Record + + if (process.platform === "win32") { + env.LC_ALL = 
"C.UTF-8" + env.LC_CTYPE = "C.UTF-8" + env.LANG = "C.UTF-8" + } + log.info("creating session", { id, cmd: command, args, cwd }) + + const { spawn } = yield* Effect.promise(() => pty()) + const proc = yield* Effect.sync(() => + spawn(command, args, { + name: "xterm-256color", + cwd, + env, + }), + ) + + const info = { + id, + title: input.title || `Terminal ${id.slice(-4)}`, + command, + args, + cwd, + status: "running", + pid: proc.pid, + } as const + const session: Active = { + info, + process: proc, + buffer: "", + bufferCursor: 0, + cursor: 0, + subscribers: new Map(), + } + s.sessions.set(id, session) + proc.onData( + Instance.bind((chunk) => { + session.cursor += chunk.length + + for (const [key, ws] of session.subscribers.entries()) { + if (ws.readyState !== 1) { + session.subscribers.delete(key) + continue + } + if (sock(ws) !== key) { + session.subscribers.delete(key) + continue + } + try { + ws.send(chunk) + } catch { + session.subscribers.delete(key) + } + } + + session.buffer += chunk + if (session.buffer.length <= BUFFER_LIMIT) return + const excess = session.buffer.length - BUFFER_LIMIT + session.buffer = session.buffer.slice(excess) + session.bufferCursor += excess + }), + ) + proc.onExit( + Instance.bind(({ exitCode }) => { + if (session.info.status === "exited") return + log.info("session exited", { id, exitCode }) + session.info.status = "exited" + bridge.fork(bus.publish(Event.Exited, { id, exitCode })) + bridge.fork(remove(id)) + }), + ) + yield* bus.publish(Event.Created, { info }) + return info + }) + + const update = Effect.fn("Pty.update")(function* (id: PtyID, input: UpdateInput) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (!session) return + if (input.title) { + session.info.title = input.title + } + if (input.size) { + session.process.resize(input.size.cols, input.size.rows) + } + yield* bus.publish(Event.Updated, { info: session.info }) + return session.info + }) + + const resize = 
Effect.fn("Pty.resize")(function* (id: PtyID, cols: number, rows: number) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (session && session.info.status === "running") { + session.process.resize(cols, rows) + } + }) + + const write = Effect.fn("Pty.write")(function* (id: PtyID, data: string) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (session && session.info.status === "running") { + session.process.write(data) + } + }) + + const connect = Effect.fn("Pty.connect")(function* (id: PtyID, ws: Socket, cursor?: number) { + const s = yield* InstanceState.get(state) + const session = s.sessions.get(id) + if (!session) { + ws.close() + return + } + log.info("client connected to session", { id }) + + const sub = sock(ws) + session.subscribers.delete(sub) + session.subscribers.set(sub, ws) + + const cleanup = () => { + session.subscribers.delete(sub) + } + + const start = session.bufferCursor + const end = session.cursor + const from = + cursor === -1 ? end : typeof cursor === "number" && Number.isSafeInteger(cursor) ? Math.max(0, cursor) : 0 + + const data = (() => { + if (!session.buffer) return "" + if (from >= end) return "" + const offset = Math.max(0, from - start) + if (offset >= session.buffer.length) return "" + return session.buffer.slice(offset) + })() + + if (data) { + try { + for (let i = 0; i < data.length; i += BUFFER_CHUNK) { + ws.send(data.slice(i, i + BUFFER_CHUNK)) + } + } catch { + cleanup() + ws.close() + return + } + } + + try { + ws.send(meta(end)) + } catch { + cleanup() + ws.close() + return + } + + return { + onMessage: (message: string | ArrayBuffer) => { + session.process.write(typeof message === "string" ? 
message : new TextDecoder().decode(message)) + }, + onClose: () => { + log.info("client disconnected from session", { id }) + cleanup() + }, + } + }) + + return Service.of({ list, get, create, update, remove, resize, write, connect }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Plugin.defaultLayer)) + +export * as Pty from "." diff --git a/packages/opencode/src/pty/service.ts b/packages/opencode/src/pty/service.ts deleted file mode 100644 index 0c810be88f..0000000000 --- a/packages/opencode/src/pty/service.ts +++ /dev/null @@ -1,362 +0,0 @@ -import { BusEvent } from "@/bus/bus-event" -import { Bus } from "@/bus" -import { InstanceState } from "@/effect" -import { Instance } from "@/project/instance" -import type { Proc } from "#pty" -import z from "zod" -import { Log } from "../util" -import { lazy } from "@opencode-ai/shared/util/lazy" -import { Shell } from "@/shell/shell" -import { Plugin } from "@/plugin" -import { PtyID } from "./schema" -import { Effect, Layer, Context } from "effect" -import { EffectBridge } from "@/effect" - -const log = Log.create({ service: "pty" }) - -const BUFFER_LIMIT = 1024 * 1024 * 2 -const BUFFER_CHUNK = 64 * 1024 -const encoder = new TextEncoder() - -type Socket = { - readyState: number - data?: unknown - send: (data: string | Uint8Array | ArrayBuffer) => void - close: (code?: number, reason?: string) => void -} - -const sock = (ws: Socket) => (ws.data && typeof ws.data === "object" ? ws.data : ws) - -type Active = { - info: Info - process: Proc - buffer: string - bufferCursor: number - cursor: number - subscribers: Map -} - -type State = { - dir: string - sessions: Map -} - -// WebSocket control frame: 0x00 + UTF-8 JSON. 
-const meta = (cursor: number) => { - const json = JSON.stringify({ cursor }) - const bytes = encoder.encode(json) - const out = new Uint8Array(bytes.length + 1) - out[0] = 0 - out.set(bytes, 1) - return out -} - -const pty = lazy(() => import("#pty")) - -export const Info = z - .object({ - id: PtyID.zod, - title: z.string(), - command: z.string(), - args: z.array(z.string()), - cwd: z.string(), - status: z.enum(["running", "exited"]), - pid: z.number(), - }) - .meta({ ref: "Pty" }) - -export type Info = z.infer - -export const CreateInput = z.object({ - command: z.string().optional(), - args: z.array(z.string()).optional(), - cwd: z.string().optional(), - title: z.string().optional(), - env: z.record(z.string(), z.string()).optional(), -}) - -export type CreateInput = z.infer - -export const UpdateInput = z.object({ - title: z.string().optional(), - size: z - .object({ - rows: z.number(), - cols: z.number(), - }) - .optional(), -}) - -export type UpdateInput = z.infer - -export const Event = { - Created: BusEvent.define("pty.created", z.object({ info: Info })), - Updated: BusEvent.define("pty.updated", z.object({ info: Info })), - Exited: BusEvent.define("pty.exited", z.object({ id: PtyID.zod, exitCode: z.number() })), - Deleted: BusEvent.define("pty.deleted", z.object({ id: PtyID.zod })), -} - -export interface Interface { - readonly list: () => Effect.Effect - readonly get: (id: PtyID) => Effect.Effect - readonly create: (input: CreateInput) => Effect.Effect - readonly update: (id: PtyID, input: UpdateInput) => Effect.Effect - readonly remove: (id: PtyID) => Effect.Effect - readonly resize: (id: PtyID, cols: number, rows: number) => Effect.Effect - readonly write: (id: PtyID, data: string) => Effect.Effect - readonly connect: ( - id: PtyID, - ws: Socket, - cursor?: number, - ) => Effect.Effect<{ onMessage: (message: string | ArrayBuffer) => void; onClose: () => void } | undefined> -} - -export class Service extends Context.Service()("@opencode/Pty") {} - -export 
const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const plugin = yield* Plugin.Service - function teardown(session: Active) { - try { - session.process.kill() - } catch {} - for (const [sub, ws] of session.subscribers.entries()) { - try { - if (sock(ws) === sub) ws.close() - } catch {} - } - session.subscribers.clear() - } - - const state = yield* InstanceState.make( - Effect.fn("Pty.state")(function* (ctx) { - const state = { - dir: ctx.directory, - sessions: new Map(), - } - - yield* Effect.addFinalizer(() => - Effect.sync(() => { - for (const session of state.sessions.values()) { - teardown(session) - } - state.sessions.clear() - }), - ) - - return state - }), - ) - - const remove = Effect.fn("Pty.remove")(function* (id: PtyID) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (!session) return - s.sessions.delete(id) - log.info("removing session", { id }) - teardown(session) - yield* bus.publish(Event.Deleted, { id: session.info.id }) - }) - - const list = Effect.fn("Pty.list")(function* () { - const s = yield* InstanceState.get(state) - return Array.from(s.sessions.values()).map((session) => session.info) - }) - - const get = Effect.fn("Pty.get")(function* (id: PtyID) { - const s = yield* InstanceState.get(state) - return s.sessions.get(id)?.info - }) - - const create = Effect.fn("Pty.create")(function* (input: CreateInput) { - const s = yield* InstanceState.get(state) - const bridge = yield* EffectBridge.make() - const id = PtyID.ascending() - const command = input.command || Shell.preferred() - const args = input.args || [] - if (Shell.login(command)) { - args.push("-l") - } - - const cwd = input.cwd || s.dir - const shell = yield* plugin.trigger("shell.env", { cwd }, { env: {} }) - const env = { - ...process.env, - ...input.env, - ...shell.env, - TERM: "xterm-256color", - OPENCODE_TERMINAL: "1", - } as Record - - if (process.platform === "win32") { - env.LC_ALL = 
"C.UTF-8" - env.LC_CTYPE = "C.UTF-8" - env.LANG = "C.UTF-8" - } - log.info("creating session", { id, cmd: command, args, cwd }) - - const { spawn } = yield* Effect.promise(() => pty()) - const proc = yield* Effect.sync(() => - spawn(command, args, { - name: "xterm-256color", - cwd, - env, - }), - ) - - const info = { - id, - title: input.title || `Terminal ${id.slice(-4)}`, - command, - args, - cwd, - status: "running", - pid: proc.pid, - } as const - const session: Active = { - info, - process: proc, - buffer: "", - bufferCursor: 0, - cursor: 0, - subscribers: new Map(), - } - s.sessions.set(id, session) - proc.onData( - Instance.bind((chunk) => { - session.cursor += chunk.length - - for (const [key, ws] of session.subscribers.entries()) { - if (ws.readyState !== 1) { - session.subscribers.delete(key) - continue - } - if (sock(ws) !== key) { - session.subscribers.delete(key) - continue - } - try { - ws.send(chunk) - } catch { - session.subscribers.delete(key) - } - } - - session.buffer += chunk - if (session.buffer.length <= BUFFER_LIMIT) return - const excess = session.buffer.length - BUFFER_LIMIT - session.buffer = session.buffer.slice(excess) - session.bufferCursor += excess - }), - ) - proc.onExit( - Instance.bind(({ exitCode }) => { - if (session.info.status === "exited") return - log.info("session exited", { id, exitCode }) - session.info.status = "exited" - bridge.fork(bus.publish(Event.Exited, { id, exitCode })) - bridge.fork(remove(id)) - }), - ) - yield* bus.publish(Event.Created, { info }) - return info - }) - - const update = Effect.fn("Pty.update")(function* (id: PtyID, input: UpdateInput) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (!session) return - if (input.title) { - session.info.title = input.title - } - if (input.size) { - session.process.resize(input.size.cols, input.size.rows) - } - yield* bus.publish(Event.Updated, { info: session.info }) - return session.info - }) - - const resize = 
Effect.fn("Pty.resize")(function* (id: PtyID, cols: number, rows: number) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (session && session.info.status === "running") { - session.process.resize(cols, rows) - } - }) - - const write = Effect.fn("Pty.write")(function* (id: PtyID, data: string) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (session && session.info.status === "running") { - session.process.write(data) - } - }) - - const connect = Effect.fn("Pty.connect")(function* (id: PtyID, ws: Socket, cursor?: number) { - const s = yield* InstanceState.get(state) - const session = s.sessions.get(id) - if (!session) { - ws.close() - return - } - log.info("client connected to session", { id }) - - const sub = sock(ws) - session.subscribers.delete(sub) - session.subscribers.set(sub, ws) - - const cleanup = () => { - session.subscribers.delete(sub) - } - - const start = session.bufferCursor - const end = session.cursor - const from = - cursor === -1 ? end : typeof cursor === "number" && Number.isSafeInteger(cursor) ? Math.max(0, cursor) : 0 - - const data = (() => { - if (!session.buffer) return "" - if (from >= end) return "" - const offset = Math.max(0, from - start) - if (offset >= session.buffer.length) return "" - return session.buffer.slice(offset) - })() - - if (data) { - try { - for (let i = 0; i < data.length; i += BUFFER_CHUNK) { - ws.send(data.slice(i, i + BUFFER_CHUNK)) - } - } catch { - cleanup() - ws.close() - return - } - } - - try { - ws.send(meta(end)) - } catch { - cleanup() - ws.close() - return - } - - return { - onMessage: (message: string | ArrayBuffer) => { - session.process.write(typeof message === "string" ? 
message : new TextDecoder().decode(message)) - }, - onClose: () => { - log.info("client disconnected from session", { id }) - cleanup() - }, - } - }) - - return Service.of({ list, get, create, update, remove, resize, write, connect }) - }), -) - -export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Plugin.defaultLayer)) From 5e650fd9e2c8ebaedb49af2ff771af9721782d98 Mon Sep 17 00:00:00 2001 From: Kobi Hudson Date: Thu, 16 Apr 2026 13:01:21 -0700 Subject: [PATCH 031/201] fix(opencode): drop max_tokens for OpenAI reasoning models on Cloudflare AI Gateway (#22864) Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> --- packages/opencode/src/plugin/cloudflare.ts | 11 +++ .../opencode/test/plugin/cloudflare.test.ts | 68 +++++++++++++++++++ 2 files changed, 79 insertions(+) create mode 100644 packages/opencode/test/plugin/cloudflare.test.ts diff --git a/packages/opencode/src/plugin/cloudflare.ts b/packages/opencode/src/plugin/cloudflare.ts index 2ccf5168d8..c4bf6bb8e7 100644 --- a/packages/opencode/src/plugin/cloudflare.ts +++ b/packages/opencode/src/plugin/cloudflare.ts @@ -61,5 +61,16 @@ export async function CloudflareAIGatewayAuthPlugin(_input: PluginInput): Promis }, ], }, + "chat.params": async (input, output) => { + if (input.model.providerID !== "cloudflare-ai-gateway") return + // The unified gateway routes through @ai-sdk/openai-compatible, which + // always emits max_tokens. OpenAI reasoning models (gpt-5.x, o-series) + // reject that field and require max_completion_tokens instead, and the + // compatible SDK has no way to rename it. Drop the cap so OpenAI falls + // back to the model's default output budget. 
+ if (!input.model.api.id.toLowerCase().startsWith("openai/")) return + if (!input.model.capabilities.reasoning) return + output.maxOutputTokens = undefined + }, } } diff --git a/packages/opencode/test/plugin/cloudflare.test.ts b/packages/opencode/test/plugin/cloudflare.test.ts new file mode 100644 index 0000000000..5fa4106835 --- /dev/null +++ b/packages/opencode/test/plugin/cloudflare.test.ts @@ -0,0 +1,68 @@ +import { expect, test } from "bun:test" +import { CloudflareAIGatewayAuthPlugin } from "@/plugin/cloudflare" + +const pluginInput = { + client: {} as never, + project: {} as never, + directory: "", + worktree: "", + experimental_workspace: { + register() {}, + }, + serverUrl: new URL("https://example.com"), + $: {} as never, +} + +function makeHookInput(overrides: { providerID?: string; apiId?: string; reasoning?: boolean }) { + return { + sessionID: "s", + agent: "a", + provider: {} as never, + message: {} as never, + model: { + providerID: overrides.providerID ?? "cloudflare-ai-gateway", + api: { id: overrides.apiId ?? "openai/gpt-5.2-codex", url: "", npm: "ai-gateway-provider" }, + capabilities: { + reasoning: overrides.reasoning ?? 
true, + temperature: false, + attachment: true, + toolcall: true, + input: { text: true, audio: false, image: false, video: false, pdf: false }, + output: { text: true, audio: false, image: false, video: false, pdf: false }, + interleaved: false, + }, + } as never, + } +} + +function makeHookOutput() { + return { temperature: 0, topP: 1, topK: 0, maxOutputTokens: 32_000 as number | undefined, options: {} } +} + +test("omits maxOutputTokens for openai reasoning models on cloudflare-ai-gateway", async () => { + const hooks = await CloudflareAIGatewayAuthPlugin(pluginInput) + const out = makeHookOutput() + await hooks["chat.params"]!(makeHookInput({ apiId: "openai/gpt-5.2-codex", reasoning: true }), out) + expect(out.maxOutputTokens).toBeUndefined() +}) + +test("keeps maxOutputTokens for openai non-reasoning models", async () => { + const hooks = await CloudflareAIGatewayAuthPlugin(pluginInput) + const out = makeHookOutput() + await hooks["chat.params"]!(makeHookInput({ apiId: "openai/gpt-4-turbo", reasoning: false }), out) + expect(out.maxOutputTokens).toBe(32_000) +}) + +test("keeps maxOutputTokens for non-openai reasoning models on cloudflare-ai-gateway", async () => { + const hooks = await CloudflareAIGatewayAuthPlugin(pluginInput) + const out = makeHookOutput() + await hooks["chat.params"]!(makeHookInput({ apiId: "anthropic/claude-sonnet-4-5", reasoning: true }), out) + expect(out.maxOutputTokens).toBe(32_000) +}) + +test("ignores non-cloudflare-ai-gateway providers", async () => { + const hooks = await CloudflareAIGatewayAuthPlugin(pluginInput) + const out = makeHookOutput() + await hooks["chat.params"]!(makeHookInput({ providerID: "openai", apiId: "gpt-5.2-codex", reasoning: true }), out) + expect(out.maxOutputTokens).toBe(32_000) +}) From 1c33b866ba962ed7a4c147c316ad807886a0045e Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:11:05 -0400 Subject: [PATCH 032/201] fix: remove 10 more unnecessary `as any` casts in opencode core (#22882) --- 
.../src/cli/cmd/tui/routes/session/index.tsx | 35 +++++++++++-------- packages/opencode/src/config/config.ts | 2 +- packages/opencode/src/lsp/server.ts | 8 +++-- .../src/server/instance/experimental.ts | 4 +-- packages/opencode/src/session/prompt.ts | 7 ++-- .../test/session/structured-output.test.ts | 10 ------ 6 files changed, 30 insertions(+), 36 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx index 5b4308d593..b0514bf1b1 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx @@ -44,6 +44,8 @@ import type { GrepTool } from "@/tool/grep" import type { EditTool } from "@/tool/edit" import type { ApplyPatchTool } from "@/tool/apply_patch" import type { WebFetchTool } from "@/tool/webfetch" +import type { CodeSearchTool } from "@/tool/codesearch" +import type { WebSearchTool } from "@/tool/websearch" import type { TaskTool } from "@/tool/task" import type { QuestionTool } from "@/tool/question" import type { SkillTool } from "@/tool/skill" @@ -1934,28 +1936,26 @@ function Grep(props: ToolProps) { function WebFetch(props: ToolProps) { return ( - - WebFetch {(props.input as any).url} + + WebFetch {props.input.url} ) } -function CodeSearch(props: ToolProps) { - const input = props.input as any - const metadata = props.metadata as any +function CodeSearch(props: ToolProps) { + const metadata = props.metadata as { results?: number } return ( - - Exa Code Search "{input.query}" ({metadata.results} results) + + Exa Code Search "{props.input.query}" ({metadata.results} results) ) } -function WebSearch(props: ToolProps) { - const input = props.input as any - const metadata = props.metadata as any +function WebSearch(props: ToolProps) { + const metadata = props.metadata as { numResults?: number } return ( - - Exa Web Search "{input.query}" ({metadata.numResults} results) + + Exa Web Search 
"{props.input.query}" ({metadata.numResults} results) ) } @@ -1979,7 +1979,9 @@ function Task(props: ToolProps) { ) }) - const current = createMemo(() => tools().findLast((x) => (x.state as any).title)) + const current = createMemo(() => + tools().findLast((x) => (x.state.status === "running" || x.state.status === "completed") && x.state.title), + ) const isRunning = createMemo(() => props.part.state.status === "running") @@ -1996,8 +1998,11 @@ function Task(props: ToolProps) { if (isRunning() && tools().length > 0) { // content[0] += ` · ${tools().length} toolcalls` - if (current()) content.push(`↳ ${Locale.titlecase(current()!.tool)} ${(current()!.state as any).title}`) - else content.push(`↳ ${tools().length} toolcalls`) + if (current()) { + const state = current()!.state + const title = state.status === "running" || state.status === "completed" ? state.title : undefined + content.push(`↳ ${Locale.titlecase(current()!.tool)} ${title}`) + } else content.push(`↳ ${tools().length} toolcalls`) } if (props.part.state.status === "completed") { diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index a738ebf130..3cbc539600 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -517,7 +517,7 @@ export const layer = Layer.effect( if (!response.ok) { throw new Error(`failed to fetch remote config from ${url}: ${response.status}`) } - const wellknown = (yield* Effect.promise(() => response.json())) as any + const wellknown = (yield* Effect.promise(() => response.json())) as { config?: Record } const remoteConfig = wellknown.config ?? 
{} if (!remoteConfig.$schema) remoteConfig.$schema = "https://opencode.ai/config.json" const source = `${url}/.well-known/opencode` diff --git a/packages/opencode/src/lsp/server.ts b/packages/opencode/src/lsp/server.ts index 390c5f2428..760e8eaba0 100644 --- a/packages/opencode/src/lsp/server.ts +++ b/packages/opencode/src/lsp/server.ts @@ -611,7 +611,9 @@ export const Zls: Info = { return } - const release = (await releaseResponse.json()) as any + const release = (await releaseResponse.json()) as { + assets?: { name?: string; browser_download_url?: string }[] + } const platform = process.platform const arch = process.arch @@ -646,8 +648,8 @@ export const Zls: Info = { return } - const asset = release.assets.find((a: any) => a.name === assetName) - if (!asset) { + const asset = release.assets?.find((a) => a.name === assetName) + if (!asset?.browser_download_url) { log.error(`Could not find asset ${assetName} in latest zls release`) return } diff --git a/packages/opencode/src/server/instance/experimental.ts b/packages/opencode/src/server/instance/experimental.ts index fe80173a8b..4f8887a43c 100644 --- a/packages/opencode/src/server/instance/experimental.ts +++ b/packages/opencode/src/server/instance/experimental.ts @@ -12,7 +12,6 @@ import { Config } from "../../config" import { ConsoleState } from "../../config/console-state" import { Account, AccountID, OrgID } from "../../account" import { AppRuntime } from "../../effect/app-runtime" -import { zodToJsonSchema } from "zod-to-json-schema" import { errors } from "../error" import { lazy } from "../../util/lazy" import { Effect, Option } from "effect" @@ -226,8 +225,7 @@ export const ExperimentalRoutes = lazy(() => tools.map((t) => ({ id: t.id, description: t.description, - // Handle both Zod schemas and plain JSON schemas - parameters: (t.parameters as any)?._def ? 
zodToJsonSchema(t.parameters as any) : t.parameters, + parameters: z.toJSONSchema(t.parameters), })), ) }, diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 4b8b95baa8..004ee19abe 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -10,6 +10,7 @@ import { Agent } from "../agent/agent" import { Provider } from "../provider" import { ModelID, ProviderID } from "../provider/schema" import { type Tool as AITool, tool, jsonSchema, type ToolExecutionOptions, asSchema } from "ai" +import type { JSONSchema7 } from "@ai-sdk/provider" import { SessionCompaction } from "./compaction" import { Bus } from "../bus" import { ProviderTransform } from "../provider" @@ -407,9 +408,8 @@ NOTE: At any point in time through this workflow you should feel free to ask the })) { const schema = ProviderTransform.schema(input.model, z.toJSONSchema(item.parameters)) tools[item.id] = tool({ - id: item.id as any, description: item.description, - inputSchema: jsonSchema(schema as any), + inputSchema: jsonSchema(schema), execute(args, options) { return run.promise( Effect.gen(function* () { @@ -1827,9 +1827,8 @@ NOTE: At any point in time through this workflow you should feel free to ask the const { $schema: _, ...toolSchema } = input.schema return tool({ - id: "StructuredOutput" as any, description: STRUCTURED_OUTPUT_DESCRIPTION, - inputSchema: jsonSchema(toolSchema as any), + inputSchema: jsonSchema(toolSchema as JSONSchema7), async execute(args) { // AI SDK validates args against inputSchema before calling execute() input.onSuccess(args) diff --git a/packages/opencode/test/session/structured-output.test.ts b/packages/opencode/test/session/structured-output.test.ts index db3f8cfded..2debfb76d5 100644 --- a/packages/opencode/test/session/structured-output.test.ts +++ b/packages/opencode/test/session/structured-output.test.ts @@ -157,16 +157,6 @@ describe("structured-output.AssistantMessage", () => 
{ }) describe("structured-output.createStructuredOutputTool", () => { - test("creates tool with correct id", () => { - const tool = SessionPrompt.createStructuredOutputTool({ - schema: { type: "object", properties: { name: { type: "string" } } }, - onSuccess: () => {}, - }) - - // AI SDK tool type doesn't expose id, but we set it internally - expect((tool as any).id).toBe("StructuredOutput") - }) - test("creates tool with description", () => { const tool = SessionPrompt.createStructuredOutputTool({ schema: { type: "object" }, From e0d71f124ef52f557387753ee19abe0f04f0faeb Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:12:46 -0400 Subject: [PATCH 033/201] tooling: add collapse-barrel.ts for single-namespace barrel migration (#22887) --- packages/opencode/script/collapse-barrel.ts | 161 ++++++++++++++++++++ 1 file changed, 161 insertions(+) create mode 100644 packages/opencode/script/collapse-barrel.ts diff --git a/packages/opencode/script/collapse-barrel.ts b/packages/opencode/script/collapse-barrel.ts new file mode 100644 index 0000000000..05bb11589c --- /dev/null +++ b/packages/opencode/script/collapse-barrel.ts @@ -0,0 +1,161 @@ +#!/usr/bin/env bun +/** + * Collapse a single-namespace barrel directory into a dir/index.ts module. + * + * Given a directory `src/foo/` that contains: + * + * - `index.ts` (exactly `export * as Foo from "./foo"`) + * - `foo.ts` (the real implementation) + * - zero or more sibling files + * + * this script: + * + * 1. Deletes the old `index.ts` barrel. + * 2. `git mv`s `foo.ts` → `index.ts` so the implementation IS the directory entry. + * 3. Appends `export * as Foo from "."` to the new `index.ts`. + * 4. Rewrites any same-directory sibling `*.ts` files that imported + * `./foo` (with or without the namespace name) to import `"."` instead. + * + * Consumer files outside the directory keep importing from the directory + * (`"@/foo"` / `"../foo"` / etc.) 
and continue to work, because + * `dir/index.ts` now provides the `Foo` named export directly. + * + * Usage: + * + * bun script/collapse-barrel.ts src/bus + * bun script/collapse-barrel.ts src/bus --dry-run + * + * Notes: + * + * - Only works on directories whose barrel is a single + * `export * as Name from "./file"` line. Refuses otherwise. + * - Refuses if the implementation file name already conflicts with + * `index.ts`. + * - Safe to run repeatedly: a second run on an already-collapsed dir + * will exit with a clear message. + */ + +import fs from "node:fs" +import path from "node:path" +import { spawnSync } from "node:child_process" + +const args = process.argv.slice(2) +const dryRun = args.includes("--dry-run") +const targetArg = args.find((a) => !a.startsWith("--")) + +if (!targetArg) { + console.error("Usage: bun script/collapse-barrel.ts [--dry-run]") + process.exit(1) +} + +const dir = path.resolve(targetArg) +const indexPath = path.join(dir, "index.ts") + +if (!fs.existsSync(dir) || !fs.statSync(dir).isDirectory()) { + console.error(`Not a directory: ${dir}`) + process.exit(1) +} +if (!fs.existsSync(indexPath)) { + console.error(`No index.ts in ${dir}`) + process.exit(1) +} + +// Validate barrel shape. 
+const indexContent = fs.readFileSync(indexPath, "utf-8").trim() +const match = indexContent.match(/^export\s+\*\s+as\s+(\w+)\s+from\s+["']\.\/([^"']+)["']\s*;?\s*$/) +if (!match) { + console.error(`Not a simple single-namespace barrel:\n${indexContent}`) + process.exit(1) +} +const namespaceName = match[1] +const implRel = match[2].replace(/\.ts$/, "") +const implPath = path.join(dir, `${implRel}.ts`) + +if (!fs.existsSync(implPath)) { + console.error(`Implementation file not found: ${implPath}`) + process.exit(1) +} + +if (implRel === "index") { + console.error(`Nothing to do — impl file is already index.ts`) + process.exit(0) +} + +console.log(`Collapsing ${path.relative(process.cwd(), dir)}`) +console.log(` namespace: ${namespaceName}`) +console.log(` impl file: ${implRel}.ts → index.ts`) + +// Figure out which sibling files need rewriting. +const siblings = fs + .readdirSync(dir) + .filter((f) => f.endsWith(".ts") || f.endsWith(".tsx")) + .filter((f) => f !== "index.ts" && f !== `${implRel}.ts`) + .map((f) => path.join(dir, f)) + +type SiblingEdit = { file: string; content: string } +const siblingEdits: SiblingEdit[] = [] + +for (const sibling of siblings) { + const content = fs.readFileSync(sibling, "utf-8") + // Match any import or re-export referring to "./" inside this directory. 
+ const siblingRegex = new RegExp(`(from\\s*["'])\\.\\/${implRel.replace(/[-\\^$*+?.()|[\]{}]/g, "\\$&")}(["'])`, "g") + if (!siblingRegex.test(content)) continue + const updated = content.replace(siblingRegex, `$1.$2`) + siblingEdits.push({ file: sibling, content: updated }) +} + +if (siblingEdits.length > 0) { + console.log(` sibling rewrites: ${siblingEdits.length}`) + for (const edit of siblingEdits) { + console.log(` ${path.relative(process.cwd(), edit.file)}`) + } +} else { + console.log(` sibling rewrites: none`) +} + +if (dryRun) { + console.log(`\n(dry run) would:`) + console.log(` - delete ${path.relative(process.cwd(), indexPath)}`) + console.log(` - git mv ${path.relative(process.cwd(), implPath)} ${path.relative(process.cwd(), indexPath)}`) + console.log(` - append \`export * as ${namespaceName} from "."\` to the new index.ts`) + for (const edit of siblingEdits) { + console.log(` - rewrite sibling: ${path.relative(process.cwd(), edit.file)}`) + } + process.exit(0) +} + +// Apply: remove the old barrel, git-mv the impl onto it, then rewrite content. +// We can't git-mv on top of an existing tracked file, so we remove the barrel first. +function runGit(...cmd: string[]) { + const res = spawnSync("git", cmd, { stdio: "inherit" }) + if (res.status !== 0) { + console.error(`git ${cmd.join(" ")} failed`) + process.exit(res.status ?? 1) + } +} + +// Step 1: remove the barrel +runGit("rm", "-f", indexPath) + +// Step 2: rename the impl file into index.ts +runGit("mv", implPath, indexPath) + +// Step 3: append the self-reexport to the new index.ts +const newContent = fs.readFileSync(indexPath, "utf-8") +const trimmed = newContent.endsWith("\n") ? 
newContent : newContent + "\n" +fs.writeFileSync(indexPath, `${trimmed}\nexport * as ${namespaceName} from "."\n`) +console.log(` appended: export * as ${namespaceName} from "."`) + +// Step 4: rewrite siblings +for (const edit of siblingEdits) { + fs.writeFileSync(edit.file, edit.content) +} +if (siblingEdits.length > 0) { + console.log(` rewrote ${siblingEdits.length} sibling file(s)`) +} + +console.log(`\nDone. Verify with:`) +console.log(` cd packages/opencode`) +console.log(` bunx --bun tsgo --noEmit`) +console.log(` bun run --conditions=browser ./src/index.ts generate`) +console.log(` bun run test`) From 25a9de301ad83ac7f6c8ec5ed67d81ee4d2a0221 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 16:21:47 -0400 Subject: [PATCH 034/201] core: eager load config on startup for better traces and refactor npm install for improved error reporting Config is now loaded eagerly during project bootstrap so users can see config loading in traces during startup. This helps diagnose configuration issues earlier in the initialization flow. NPM installation logic has been refactored with a unified reify function and improved InstallFailedError that includes both the packages being installed and the target directory. This provides users with complete context when package installations fail, making it easier to identify which dependency or project directory caused the issue. 
--- .../opencode/src/effect/bootstrap-runtime.ts | 2 + packages/opencode/src/project/bootstrap.ts | 4 + packages/shared/src/npm.ts | 126 ++++++++---------- 3 files changed, 60 insertions(+), 72 deletions(-) diff --git a/packages/opencode/src/effect/bootstrap-runtime.ts b/packages/opencode/src/effect/bootstrap-runtime.ts index 89cc071561..62b71e58b1 100644 --- a/packages/opencode/src/effect/bootstrap-runtime.ts +++ b/packages/opencode/src/effect/bootstrap-runtime.ts @@ -10,9 +10,11 @@ import { File } from "@/file" import { Vcs } from "@/project" import { Snapshot } from "@/snapshot" import { Bus } from "@/bus" +import { Config } from "@/config" import * as Observability from "./observability" export const BootstrapLayer = Layer.mergeAll( + Config.defaultLayer, Plugin.defaultLayer, ShareNext.defaultLayer, Format.defaultLayer, diff --git a/packages/opencode/src/project/bootstrap.ts b/packages/opencode/src/project/bootstrap.ts index e506d2feda..a7c071a9f8 100644 --- a/packages/opencode/src/project/bootstrap.ts +++ b/packages/opencode/src/project/bootstrap.ts @@ -12,9 +12,13 @@ import { Log } from "@/util" import { FileWatcher } from "@/file/watcher" import { ShareNext } from "@/share" import * as Effect from "effect/Effect" +import { Config } from "@/config" export const InstanceBootstrap = Effect.gen(function* () { Log.Default.info("bootstrapping", { directory: Instance.directory }) + // everything depends on config so eager load it for nice traces + yield* Config.Service.use((svc) => svc.get()) + // Plugin can mutate config so it has to be initialized before anything else. 
yield* Plugin.Service.use((svc) => svc.init()) yield* Effect.all( [ diff --git a/packages/shared/src/npm.ts b/packages/shared/src/npm.ts index e4f42227de..865e827b31 100644 --- a/packages/shared/src/npm.ts +++ b/packages/shared/src/npm.ts @@ -8,7 +8,8 @@ import { EffectFlock } from "./util/effect-flock" export namespace Npm { export class InstallFailedError extends Schema.TaggedErrorClass()("NpmInstallFailedError", { - pkg: Schema.String, + add: Schema.Array(Schema.String).pipe(Schema.optional), + dir: Schema.String, cause: Schema.optional(Schema.Defect), }) {} @@ -19,7 +20,10 @@ export namespace Npm { export interface Interface { readonly add: (pkg: string) => Effect.Effect - readonly install: (dir: string, input?: { add: string[] }) => Effect.Effect + readonly install: ( + dir: string, + input?: { add: string[] }, + ) => Effect.Effect readonly outdated: (pkg: string, cachedVersion: string) => Effect.Effect readonly which: (pkg: string) => Effect.Effect> } @@ -55,6 +59,37 @@ export namespace Npm { interface ArboristTree { edgesOut: Map } + + const reify = (input: { dir: string; add?: string[] }) => + Effect.gen(function* () { + const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) + const arborist = new Arborist({ + path: input.dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + return yield* Effect.tryPromise({ + try: () => + arborist.reify({ + add: input?.add || [], + save: true, + saveType: "prod", + }), + catch: (cause) => + new InstallFailedError({ + cause, + add: input?.add, + dir: input.dir, + }), + }) as Effect.Effect + }).pipe( + Effect.withSpan("Npm.reify", { + attributes: input, + }), + ) + export const layer = Layer.effect( Service, Effect.gen(function* () { @@ -91,45 +126,12 @@ export namespace Npm { }) const add = Effect.fn("Npm.add")(function* (pkg: string) { - const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) const dir = directory(pkg) yield* 
flock.acquire(`npm-install:${dir}`) - const arborist = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - - const tree = yield* Effect.tryPromise({ - try: () => arborist.loadVirtual().catch(() => undefined), - catch: () => undefined, - }).pipe(Effect.orElseSucceed(() => undefined)) as Effect.Effect - - if (tree) { - const first = tree.edgesOut.values().next().value?.to - if (first) { - return resolveEntryPoint(first.name, first.path) - } - } - - const result = yield* Effect.tryPromise({ - try: () => - arborist.reify({ - add: [pkg], - save: true, - saveType: "prod", - }), - catch: (cause) => new InstallFailedError({ pkg, cause }), - }) as Effect.Effect - - const first = result.edgesOut.values().next().value?.to - if (!first) { - return yield* new InstallFailedError({ pkg }) - } - + const tree = yield* reify({ dir, add: [pkg] }) + const first = tree.edgesOut.values().next().value?.to + if (!first) return yield* new InstallFailedError({ add: [pkg], dir }) return resolveEntryPoint(first.name, first.path) }, Effect.scoped) @@ -142,41 +144,20 @@ export namespace Npm { yield* flock.acquire(`npm-install:${dir}`) - const reify = Effect.fn("Npm.reify")(function* () { - const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) - const arb = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - yield* Effect.tryPromise({ - try: () => - arb - .reify({ - add: input?.add || [], - save: true, - saveType: "prod", - }) - .catch(() => {}), - catch: () => {}, - }).pipe(Effect.orElseSucceed(() => {})) - }) - - const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) - if (!nodeModulesExists) { - yield* reify() - return - } - - const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) - const lock = yield* afs.readJson(path.join(dir, 
"package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) - - const pkgAny = pkg as any - const lockAny = lock as any + yield* Effect.gen(function* () { + const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) + if (!nodeModulesExists) { + yield* reify({ add: input?.add, dir }) + return + } + }).pipe(Effect.withSpan("Npm.checkNodeModules")) yield* Effect.gen(function* () { + const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) + const lock = yield* afs.readJson(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) + + const pkgAny = pkg as any + const lockAny = lock as any const declared = new Set([ ...Object.keys(pkgAny?.dependencies || {}), ...Object.keys(pkgAny?.devDependencies || {}), @@ -195,11 +176,12 @@ export namespace Npm { for (const name of declared) { if (!locked.has(name)) { - yield* reify() + yield* reify({ dir, add: input?.add }) return } } }).pipe(Effect.withSpan("Npm.checkDirty")) + return }, Effect.scoped) From 26af77cd1e0b34de2bc171a665c2cc7819c15110 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 16:26:33 -0400 Subject: [PATCH 035/201] fix(core): fix detection of local installation channel (#22899) --- packages/opencode/src/installation/version.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/installation/version.ts b/packages/opencode/src/installation/version.ts index f1668d2646..25d9cd99aa 100644 --- a/packages/opencode/src/installation/version.ts +++ b/packages/opencode/src/installation/version.ts @@ -5,4 +5,4 @@ declare global { export const InstallationVersion = typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "local" export const InstallationChannel = typeof OPENCODE_CHANNEL === "string" ? 
OPENCODE_CHANNEL : "local" -export const InstallationLocal = InstallationVersion === "local" +export const InstallationLocal = InstallationChannel === "local" From 1045a43603436d2328dcdaccc9160bd945e8b765 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:29:51 -0400 Subject: [PATCH 036/201] refactor: collapse format barrel into format/index.ts (#22898) --- packages/opencode/src/format/format.ts | 192 ------------------------ packages/opencode/src/format/index.ts | 195 ++++++++++++++++++++++++- 2 files changed, 194 insertions(+), 193 deletions(-) delete mode 100644 packages/opencode/src/format/format.ts diff --git a/packages/opencode/src/format/format.ts b/packages/opencode/src/format/format.ts deleted file mode 100644 index 40855636f9..0000000000 --- a/packages/opencode/src/format/format.ts +++ /dev/null @@ -1,192 +0,0 @@ -import { Effect, Layer, Context } from "effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { InstanceState } from "@/effect" -import path from "path" -import { mergeDeep } from "remeda" -import z from "zod" -import { Config } from "../config" -import { Log } from "../util" -import * as Formatter from "./formatter" - -const log = Log.create({ service: "format" }) - -export const Status = z - .object({ - name: z.string(), - extensions: z.string().array(), - enabled: z.boolean(), - }) - .meta({ - ref: "FormatterStatus", - }) -export type Status = z.infer - -export interface Interface { - readonly init: () => Effect.Effect - readonly status: () => Effect.Effect - readonly file: (filepath: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Format") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const config = yield* Config.Service - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - - const state = yield* InstanceState.make( - 
Effect.fn("Format.state")(function* (_ctx) { - const commands: Record = {} - const formatters: Record = {} - - const cfg = yield* config.get() - - if (cfg.formatter !== false) { - for (const item of Object.values(Formatter)) { - formatters[item.name] = item - } - for (const [name, item] of Object.entries(cfg.formatter ?? {})) { - // Ruff and uv are both the same formatter, so disabling either should disable both. - if (["ruff", "uv"].includes(name) && (cfg.formatter?.ruff?.disabled || cfg.formatter?.uv?.disabled)) { - // TODO combine formatters so shared backends like Ruff/uv don't need linked disable handling here. - delete formatters.ruff - delete formatters.uv - continue - } - if (item.disabled) { - delete formatters[name] - continue - } - const info = mergeDeep(formatters[name] ?? {}, { - extensions: [], - ...item, - }) - - formatters[name] = { - ...info, - name, - enabled: async () => info.command ?? false, - } - } - } else { - log.info("all formatters are disabled") - } - - async function getCommand(item: Formatter.Info) { - let cmd = commands[item.name] - if (cmd === false || cmd === undefined) { - cmd = await item.enabled() - commands[item.name] = cmd - } - return cmd - } - - async function isEnabled(item: Formatter.Info) { - const cmd = await getCommand(item) - return cmd !== false - } - - async function getFormatter(ext: string) { - const matching = Object.values(formatters).filter((item) => item.extensions.includes(ext)) - const checks = await Promise.all( - matching.map(async (item) => { - log.info("checking", { name: item.name, ext }) - const cmd = await getCommand(item) - if (cmd) { - log.info("enabled", { name: item.name, ext }) - } - return { - item, - cmd, - } - }), - ) - return checks.filter((x) => x.cmd).map((x) => ({ item: x.item, cmd: x.cmd! 
})) - } - - function formatFile(filepath: string) { - return Effect.gen(function* () { - log.info("formatting", { file: filepath }) - const ext = path.extname(filepath) - - for (const { item, cmd } of yield* Effect.promise(() => getFormatter(ext))) { - if (cmd === false) continue - log.info("running", { command: cmd }) - const replaced = cmd.map((x) => x.replace("$FILE", filepath)) - const dir = yield* InstanceState.directory - const code = yield* spawner - .spawn( - ChildProcess.make(replaced[0]!, replaced.slice(1), { - cwd: dir, - env: item.environment, - extendEnv: true, - }), - ) - .pipe( - Effect.flatMap((handle) => handle.exitCode), - Effect.scoped, - Effect.catch(() => - Effect.sync(() => { - log.error("failed to format file", { - error: "spawn failed", - command: cmd, - ...item.environment, - file: filepath, - }) - return ChildProcessSpawner.ExitCode(1) - }), - ), - ) - if (code !== 0) { - log.error("failed", { - command: cmd, - ...item.environment, - }) - } - } - }) - } - - log.info("init") - - return { - formatters, - isEnabled, - formatFile, - } - }), - ) - - const init = Effect.fn("Format.init")(function* () { - yield* InstanceState.get(state) - }) - - const status = Effect.fn("Format.status")(function* () { - const { formatters, isEnabled } = yield* InstanceState.get(state) - const result: Status[] = [] - for (const formatter of Object.values(formatters)) { - const isOn = yield* Effect.promise(() => isEnabled(formatter)) - result.push({ - name: formatter.name, - extensions: formatter.extensions, - enabled: isOn, - }) - } - return result - }) - - const file = Effect.fn("Format.file")(function* (filepath: string) { - const { formatFile } = yield* InstanceState.get(state) - yield* formatFile(filepath) - }) - - return Service.of({ init, status, file }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Config.defaultLayer), - Layer.provide(CrossSpawnSpawner.defaultLayer), -) diff --git a/packages/opencode/src/format/index.ts 
b/packages/opencode/src/format/index.ts index 435c517ac7..d0ae59d05e 100644 --- a/packages/opencode/src/format/index.ts +++ b/packages/opencode/src/format/index.ts @@ -1 +1,194 @@ -export * as Format from "./format" +import { Effect, Layer, Context } from "effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { InstanceState } from "@/effect" +import path from "path" +import { mergeDeep } from "remeda" +import z from "zod" +import { Config } from "../config" +import { Log } from "../util" +import * as Formatter from "./formatter" + +const log = Log.create({ service: "format" }) + +export const Status = z + .object({ + name: z.string(), + extensions: z.string().array(), + enabled: z.boolean(), + }) + .meta({ + ref: "FormatterStatus", + }) +export type Status = z.infer + +export interface Interface { + readonly init: () => Effect.Effect + readonly status: () => Effect.Effect + readonly file: (filepath: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Format") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const config = yield* Config.Service + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + + const state = yield* InstanceState.make( + Effect.fn("Format.state")(function* (_ctx) { + const commands: Record = {} + const formatters: Record = {} + + const cfg = yield* config.get() + + if (cfg.formatter !== false) { + for (const item of Object.values(Formatter)) { + formatters[item.name] = item + } + for (const [name, item] of Object.entries(cfg.formatter ?? {})) { + // Ruff and uv are both the same formatter, so disabling either should disable both. + if (["ruff", "uv"].includes(name) && (cfg.formatter?.ruff?.disabled || cfg.formatter?.uv?.disabled)) { + // TODO combine formatters so shared backends like Ruff/uv don't need linked disable handling here. 
+ delete formatters.ruff + delete formatters.uv + continue + } + if (item.disabled) { + delete formatters[name] + continue + } + const info = mergeDeep(formatters[name] ?? {}, { + extensions: [], + ...item, + }) + + formatters[name] = { + ...info, + name, + enabled: async () => info.command ?? false, + } + } + } else { + log.info("all formatters are disabled") + } + + async function getCommand(item: Formatter.Info) { + let cmd = commands[item.name] + if (cmd === false || cmd === undefined) { + cmd = await item.enabled() + commands[item.name] = cmd + } + return cmd + } + + async function isEnabled(item: Formatter.Info) { + const cmd = await getCommand(item) + return cmd !== false + } + + async function getFormatter(ext: string) { + const matching = Object.values(formatters).filter((item) => item.extensions.includes(ext)) + const checks = await Promise.all( + matching.map(async (item) => { + log.info("checking", { name: item.name, ext }) + const cmd = await getCommand(item) + if (cmd) { + log.info("enabled", { name: item.name, ext }) + } + return { + item, + cmd, + } + }), + ) + return checks.filter((x) => x.cmd).map((x) => ({ item: x.item, cmd: x.cmd! 
})) + } + + function formatFile(filepath: string) { + return Effect.gen(function* () { + log.info("formatting", { file: filepath }) + const ext = path.extname(filepath) + + for (const { item, cmd } of yield* Effect.promise(() => getFormatter(ext))) { + if (cmd === false) continue + log.info("running", { command: cmd }) + const replaced = cmd.map((x) => x.replace("$FILE", filepath)) + const dir = yield* InstanceState.directory + const code = yield* spawner + .spawn( + ChildProcess.make(replaced[0]!, replaced.slice(1), { + cwd: dir, + env: item.environment, + extendEnv: true, + }), + ) + .pipe( + Effect.flatMap((handle) => handle.exitCode), + Effect.scoped, + Effect.catch(() => + Effect.sync(() => { + log.error("failed to format file", { + error: "spawn failed", + command: cmd, + ...item.environment, + file: filepath, + }) + return ChildProcessSpawner.ExitCode(1) + }), + ), + ) + if (code !== 0) { + log.error("failed", { + command: cmd, + ...item.environment, + }) + } + } + }) + } + + log.info("init") + + return { + formatters, + isEnabled, + formatFile, + } + }), + ) + + const init = Effect.fn("Format.init")(function* () { + yield* InstanceState.get(state) + }) + + const status = Effect.fn("Format.status")(function* () { + const { formatters, isEnabled } = yield* InstanceState.get(state) + const result: Status[] = [] + for (const formatter of Object.values(formatters)) { + const isOn = yield* Effect.promise(() => isEnabled(formatter)) + result.push({ + name: formatter.name, + extensions: formatter.extensions, + enabled: isOn, + }) + } + return result + }) + + const file = Effect.fn("Format.file")(function* (filepath: string) { + const { formatFile } = yield* InstanceState.get(state) + yield* formatFile(filepath) + }) + + return Service.of({ init, status, file }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Config.defaultLayer), + Layer.provide(CrossSpawnSpawner.defaultLayer), +) + +export * as Format from "." 
From 9b77430d0d3bf84fa7784a093b783fad3d7ca824 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:29:54 -0400 Subject: [PATCH 037/201] refactor: collapse env barrel into env/index.ts (#22900) --- packages/opencode/src/env/env.ts | 35 --------------------------- packages/opencode/src/env/index.ts | 38 +++++++++++++++++++++++++++++- 2 files changed, 37 insertions(+), 36 deletions(-) delete mode 100644 packages/opencode/src/env/env.ts diff --git a/packages/opencode/src/env/env.ts b/packages/opencode/src/env/env.ts deleted file mode 100644 index 618ae32684..0000000000 --- a/packages/opencode/src/env/env.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Context, Effect, Layer } from "effect" -import { InstanceState } from "@/effect" - -type State = Record - -export interface Interface { - readonly get: (key: string) => Effect.Effect - readonly all: () => Effect.Effect - readonly set: (key: string, value: string) => Effect.Effect - readonly remove: (key: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Env") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const state = yield* InstanceState.make(Effect.fn("Env.state")(() => Effect.succeed({ ...process.env }))) - - const get = Effect.fn("Env.get")((key: string) => InstanceState.use(state, (env) => env[key])) - const all = Effect.fn("Env.all")(() => InstanceState.get(state)) - const set = Effect.fn("Env.set")(function* (key: string, value: string) { - const env = yield* InstanceState.get(state) - env[key] = value - }) - const remove = Effect.fn("Env.remove")(function* (key: string) { - const env = yield* InstanceState.get(state) - delete env[key] - }) - - return Service.of({ get, all, set, remove }) - }), -) - -export const defaultLayer = layer diff --git a/packages/opencode/src/env/index.ts b/packages/opencode/src/env/index.ts index c589edbfdd..a53d96def2 100644 --- a/packages/opencode/src/env/index.ts +++ 
b/packages/opencode/src/env/index.ts @@ -1 +1,37 @@ -export * as Env from "./env" +import { Context, Effect, Layer } from "effect" +import { InstanceState } from "@/effect" + +type State = Record + +export interface Interface { + readonly get: (key: string) => Effect.Effect + readonly all: () => Effect.Effect + readonly set: (key: string, value: string) => Effect.Effect + readonly remove: (key: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Env") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const state = yield* InstanceState.make(Effect.fn("Env.state")(() => Effect.succeed({ ...process.env }))) + + const get = Effect.fn("Env.get")((key: string) => InstanceState.use(state, (env) => env[key])) + const all = Effect.fn("Env.all")(() => InstanceState.get(state)) + const set = Effect.fn("Env.set")(function* (key: string, value: string) { + const env = yield* InstanceState.get(state) + env[key] = value + }) + const remove = Effect.fn("Env.remove")(function* (key: string) { + const env = yield* InstanceState.get(state) + delete env[key] + }) + + return Service.of({ get, all, set, remove }) + }), +) + +export const defaultLayer = layer + +export * as Env from "." 
From a8d8a35cd3033602befc6648d00ed6be37aed826 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 16:30:11 -0400 Subject: [PATCH 038/201] feat(core): pass auth data to workspace (#22897) --- packages/opencode/src/auth/auth.ts | 6 ++++++ packages/opencode/src/control-plane/types.ts | 2 +- packages/opencode/src/control-plane/workspace.ts | 8 +++++++- packages/opencode/src/server/proxy.ts | 6 ------ 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/packages/opencode/src/auth/auth.ts b/packages/opencode/src/auth/auth.ts index fb9d2b1495..598178fad1 100644 --- a/packages/opencode/src/auth/auth.ts +++ b/packages/opencode/src/auth/auth.ts @@ -56,6 +56,12 @@ export const layer = Layer.effect( const decode = Schema.decodeUnknownOption(Info) const all = Effect.fn("Auth.all")(function* () { + if (process.env.OPENCODE_AUTH_CONTENT) { + try { + return JSON.parse(process.env.OPENCODE_AUTH_CONTENT) + } catch (err) {} + } + const data = (yield* fsys.readJson(file).pipe(Effect.orElseSucceed(() => ({})))) as Record return Record.filterMap(data, (value) => Result.fromOption(decode(value), () => undefined)) }) diff --git a/packages/opencode/src/control-plane/types.ts b/packages/opencode/src/control-plane/types.ts index 4e499e45ea..3961cd0e2a 100644 --- a/packages/opencode/src/control-plane/types.ts +++ b/packages/opencode/src/control-plane/types.ts @@ -28,7 +28,7 @@ export type WorkspaceAdaptor = { name: string description: string configure(info: WorkspaceInfo): WorkspaceInfo | Promise - create(info: WorkspaceInfo, from?: WorkspaceInfo): Promise + create(info: WorkspaceInfo, env: Record, from?: WorkspaceInfo): Promise remove(info: WorkspaceInfo): Promise target(info: WorkspaceInfo): Target | Promise } diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index d870eb6360..08d675b253 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ 
-5,6 +5,7 @@ import { Database, asc, eq, inArray } from "@/storage" import { Project } from "@/project" import { BusEvent } from "@/bus/bus-event" import { GlobalBus } from "@/bus/global" +import { Auth } from "@/auth" import { SyncEvent } from "@/sync" import { EventTable } from "@/sync/event.sql" import { Flag } from "@/flag/flag" @@ -112,7 +113,12 @@ export namespace Workspace { .run() }) - await adaptor.create(config) + const env = { + OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), + OPENCODE_WORKSPACE_ID: config.id, + OPENCODE_EXPERIMENTAL_WORKSPACES: "true" + } + await adaptor.create(config, env) startSync(info) diff --git a/packages/opencode/src/server/proxy.ts b/packages/opencode/src/server/proxy.ts index 5e36f2cff9..07703fdc80 100644 --- a/packages/opencode/src/server/proxy.ts +++ b/packages/opencode/src/server/proxy.ts @@ -110,12 +110,6 @@ export namespace ServerProxy { req: Request, workspaceID: WorkspaceID, ) { - console.log("proxy http request", { - method: req.method, - request: req.url, - url: String(url), - }) - if (!Workspace.isSyncing(workspaceID)) { return new Response(`broken sync connection for workspace: ${workspaceID}`, { status: 503, From 3fe906f517eb60aa20fd47c95ec3c131452e8d91 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:30:52 -0400 Subject: [PATCH 039/201] refactor: collapse command barrel into command/index.ts (#22903) --- packages/opencode/src/command/command.ts | 186 ---------------------- packages/opencode/src/command/index.ts | 189 ++++++++++++++++++++++- 2 files changed, 188 insertions(+), 187 deletions(-) delete mode 100644 packages/opencode/src/command/command.ts diff --git a/packages/opencode/src/command/command.ts b/packages/opencode/src/command/command.ts deleted file mode 100644 index 4ea1325240..0000000000 --- a/packages/opencode/src/command/command.ts +++ /dev/null @@ -1,186 +0,0 @@ -import { BusEvent } from "@/bus/bus-event" -import { 
InstanceState } from "@/effect" -import { EffectBridge } from "@/effect" -import type { InstanceContext } from "@/project/instance" -import { SessionID, MessageID } from "@/session/schema" -import { Effect, Layer, Context } from "effect" -import z from "zod" -import { Config } from "../config" -import { MCP } from "../mcp" -import { Skill } from "../skill" -import PROMPT_INITIALIZE from "./template/initialize.txt" -import PROMPT_REVIEW from "./template/review.txt" - -type State = { - commands: Record -} - -export const Event = { - Executed: BusEvent.define( - "command.executed", - z.object({ - name: z.string(), - sessionID: SessionID.zod, - arguments: z.string(), - messageID: MessageID.zod, - }), - ), -} - -export const Info = z - .object({ - name: z.string(), - description: z.string().optional(), - agent: z.string().optional(), - model: z.string().optional(), - source: z.enum(["command", "mcp", "skill"]).optional(), - // workaround for zod not supporting async functions natively so we use getters - // https://zod.dev/v4/changelog?id=zfunction - template: z.promise(z.string()).or(z.string()), - subtask: z.boolean().optional(), - hints: z.array(z.string()), - }) - .meta({ - ref: "Command", - }) - -// for some reason zod is inferring `string` for z.promise(z.string()).or(z.string()) so we have to manually override it -export type Info = Omit, "template"> & { template: Promise | string } - -export function hints(template: string) { - const result: string[] = [] - const numbered = template.match(/\$\d+/g) - if (numbered) { - for (const match of [...new Set(numbered)].sort()) result.push(match) - } - if (template.includes("$ARGUMENTS")) result.push("$ARGUMENTS") - return result -} - -export const Default = { - INIT: "init", - REVIEW: "review", -} as const - -export interface Interface { - readonly get: (name: string) => Effect.Effect - readonly list: () => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Command") {} - -export const layer = 
Layer.effect( - Service, - Effect.gen(function* () { - const config = yield* Config.Service - const mcp = yield* MCP.Service - const skill = yield* Skill.Service - - const init = Effect.fn("Command.state")(function* (ctx: InstanceContext) { - const cfg = yield* config.get() - const bridge = yield* EffectBridge.make() - const commands: Record = {} - - commands[Default.INIT] = { - name: Default.INIT, - description: "guided AGENTS.md setup", - source: "command", - get template() { - return PROMPT_INITIALIZE.replace("${path}", ctx.worktree) - }, - hints: hints(PROMPT_INITIALIZE), - } - commands[Default.REVIEW] = { - name: Default.REVIEW, - description: "review changes [commit|branch|pr], defaults to uncommitted", - source: "command", - get template() { - return PROMPT_REVIEW.replace("${path}", ctx.worktree) - }, - subtask: true, - hints: hints(PROMPT_REVIEW), - } - - for (const [name, command] of Object.entries(cfg.command ?? {})) { - commands[name] = { - name, - agent: command.agent, - model: command.model, - description: command.description, - source: "command", - get template() { - return command.template - }, - subtask: command.subtask, - hints: hints(command.template), - } - } - - for (const [name, prompt] of Object.entries(yield* mcp.prompts())) { - commands[name] = { - name, - source: "mcp", - description: prompt.description, - get template() { - return bridge.promise( - mcp - .getPrompt( - prompt.client, - prompt.name, - prompt.arguments - ? Object.fromEntries(prompt.arguments.map((argument, i) => [argument.name, `$${i + 1}`])) - : {}, - ) - .pipe( - Effect.map( - (template) => - template?.messages - .map((message) => (message.content.type === "text" ? message.content.text : "")) - .join("\n") || "", - ), - ), - ) - }, - hints: prompt.arguments?.map((_, i) => `$${i + 1}`) ?? 
[], - } - } - - for (const item of yield* skill.all()) { - if (commands[item.name]) continue - commands[item.name] = { - name: item.name, - description: item.description, - source: "skill", - get template() { - return item.content - }, - hints: [], - } - } - - return { - commands, - } - }) - - const state = yield* InstanceState.make((ctx) => init(ctx)) - - const get = Effect.fn("Command.get")(function* (name: string) { - const s = yield* InstanceState.get(state) - return s.commands[name] - }) - - const list = Effect.fn("Command.list")(function* () { - const s = yield* InstanceState.get(state) - return Object.values(s.commands) - }) - - return Service.of({ get, list }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Config.defaultLayer), - Layer.provide(MCP.defaultLayer), - Layer.provide(Skill.defaultLayer), -) diff --git a/packages/opencode/src/command/index.ts b/packages/opencode/src/command/index.ts index 2e530360c5..27ba357ecc 100644 --- a/packages/opencode/src/command/index.ts +++ b/packages/opencode/src/command/index.ts @@ -1 +1,188 @@ -export * as Command from "./command" +import { BusEvent } from "@/bus/bus-event" +import { InstanceState } from "@/effect" +import { EffectBridge } from "@/effect" +import type { InstanceContext } from "@/project/instance" +import { SessionID, MessageID } from "@/session/schema" +import { Effect, Layer, Context } from "effect" +import z from "zod" +import { Config } from "../config" +import { MCP } from "../mcp" +import { Skill } from "../skill" +import PROMPT_INITIALIZE from "./template/initialize.txt" +import PROMPT_REVIEW from "./template/review.txt" + +type State = { + commands: Record +} + +export const Event = { + Executed: BusEvent.define( + "command.executed", + z.object({ + name: z.string(), + sessionID: SessionID.zod, + arguments: z.string(), + messageID: MessageID.zod, + }), + ), +} + +export const Info = z + .object({ + name: z.string(), + description: z.string().optional(), + agent: 
z.string().optional(), + model: z.string().optional(), + source: z.enum(["command", "mcp", "skill"]).optional(), + // workaround for zod not supporting async functions natively so we use getters + // https://zod.dev/v4/changelog?id=zfunction + template: z.promise(z.string()).or(z.string()), + subtask: z.boolean().optional(), + hints: z.array(z.string()), + }) + .meta({ + ref: "Command", + }) + +// for some reason zod is inferring `string` for z.promise(z.string()).or(z.string()) so we have to manually override it +export type Info = Omit, "template"> & { template: Promise | string } + +export function hints(template: string) { + const result: string[] = [] + const numbered = template.match(/\$\d+/g) + if (numbered) { + for (const match of [...new Set(numbered)].sort()) result.push(match) + } + if (template.includes("$ARGUMENTS")) result.push("$ARGUMENTS") + return result +} + +export const Default = { + INIT: "init", + REVIEW: "review", +} as const + +export interface Interface { + readonly get: (name: string) => Effect.Effect + readonly list: () => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Command") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const config = yield* Config.Service + const mcp = yield* MCP.Service + const skill = yield* Skill.Service + + const init = Effect.fn("Command.state")(function* (ctx: InstanceContext) { + const cfg = yield* config.get() + const bridge = yield* EffectBridge.make() + const commands: Record = {} + + commands[Default.INIT] = { + name: Default.INIT, + description: "guided AGENTS.md setup", + source: "command", + get template() { + return PROMPT_INITIALIZE.replace("${path}", ctx.worktree) + }, + hints: hints(PROMPT_INITIALIZE), + } + commands[Default.REVIEW] = { + name: Default.REVIEW, + description: "review changes [commit|branch|pr], defaults to uncommitted", + source: "command", + get template() { + return PROMPT_REVIEW.replace("${path}", ctx.worktree) + 
}, + subtask: true, + hints: hints(PROMPT_REVIEW), + } + + for (const [name, command] of Object.entries(cfg.command ?? {})) { + commands[name] = { + name, + agent: command.agent, + model: command.model, + description: command.description, + source: "command", + get template() { + return command.template + }, + subtask: command.subtask, + hints: hints(command.template), + } + } + + for (const [name, prompt] of Object.entries(yield* mcp.prompts())) { + commands[name] = { + name, + source: "mcp", + description: prompt.description, + get template() { + return bridge.promise( + mcp + .getPrompt( + prompt.client, + prompt.name, + prompt.arguments + ? Object.fromEntries(prompt.arguments.map((argument, i) => [argument.name, `$${i + 1}`])) + : {}, + ) + .pipe( + Effect.map( + (template) => + template?.messages + .map((message) => (message.content.type === "text" ? message.content.text : "")) + .join("\n") || "", + ), + ), + ) + }, + hints: prompt.arguments?.map((_, i) => `$${i + 1}`) ?? [], + } + } + + for (const item of yield* skill.all()) { + if (commands[item.name]) continue + commands[item.name] = { + name: item.name, + description: item.description, + source: "skill", + get template() { + return item.content + }, + hints: [], + } + } + + return { + commands, + } + }) + + const state = yield* InstanceState.make((ctx) => init(ctx)) + + const get = Effect.fn("Command.get")(function* (name: string) { + const s = yield* InstanceState.get(state) + return s.commands[name] + }) + + const list = Effect.fn("Command.list")(function* () { + const s = yield* InstanceState.get(state) + return Object.values(s.commands) + }) + + return Service.of({ get, list }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Config.defaultLayer), + Layer.provide(MCP.defaultLayer), + Layer.provide(Skill.defaultLayer), +) + +export * as Command from "." 
From 021ab50fb105153de174c664ce90f5c90e4ba840 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 20:31:50 +0000 Subject: [PATCH 040/201] chore: generate --- packages/opencode/src/control-plane/workspace.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index 08d675b253..9c1c4c8960 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -116,7 +116,7 @@ export namespace Workspace { const env = { OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), OPENCODE_WORKSPACE_ID: config.id, - OPENCODE_EXPERIMENTAL_WORKSPACES: "true" + OPENCODE_EXPERIMENTAL_WORKSPACES: "true", } await adaptor.create(config, env) From 23f97ac49d5e39f8b9cd1f269ad3f1c33404a557 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:33:52 -0400 Subject: [PATCH 041/201] refactor: collapse global barrel into global/index.ts (#22905) --- packages/opencode/src/global/global.ts | 56 ------------------------ packages/opencode/src/global/index.ts | 59 +++++++++++++++++++++++++- 2 files changed, 58 insertions(+), 57 deletions(-) delete mode 100644 packages/opencode/src/global/global.ts diff --git a/packages/opencode/src/global/global.ts b/packages/opencode/src/global/global.ts deleted file mode 100644 index 3633e0855a..0000000000 --- a/packages/opencode/src/global/global.ts +++ /dev/null @@ -1,56 +0,0 @@ -import fs from "fs/promises" -import { xdgData, xdgCache, xdgConfig, xdgState } from "xdg-basedir" -import path from "path" -import os from "os" -import { Filesystem } from "../util" -import { Flock } from "@opencode-ai/shared/util/flock" - -const app = "opencode" - -const data = path.join(xdgData!, app) -const cache = path.join(xdgCache!, app) -const config = path.join(xdgConfig!, app) -const state = path.join(xdgState!, app) - 
-export const Path = { - // Allow override via OPENCODE_TEST_HOME for test isolation - get home() { - return process.env.OPENCODE_TEST_HOME || os.homedir() - }, - data, - bin: path.join(cache, "bin"), - log: path.join(data, "log"), - cache, - config, - state, -} - -// Initialize Flock with global state path -Flock.setGlobal({ state }) - -await Promise.all([ - fs.mkdir(Path.data, { recursive: true }), - fs.mkdir(Path.config, { recursive: true }), - fs.mkdir(Path.state, { recursive: true }), - fs.mkdir(Path.log, { recursive: true }), - fs.mkdir(Path.bin, { recursive: true }), -]) - -const CACHE_VERSION = "21" - -const version = await Filesystem.readText(path.join(Path.cache, "version")).catch(() => "0") - -if (version !== CACHE_VERSION) { - try { - const contents = await fs.readdir(Path.cache) - await Promise.all( - contents.map((item) => - fs.rm(path.join(Path.cache, item), { - recursive: true, - force: true, - }), - ), - ) - } catch {} - await Filesystem.write(path.join(Path.cache, "version"), CACHE_VERSION) -} diff --git a/packages/opencode/src/global/index.ts b/packages/opencode/src/global/index.ts index 9262bf2a93..27bac598fb 100644 --- a/packages/opencode/src/global/index.ts +++ b/packages/opencode/src/global/index.ts @@ -1 +1,58 @@ -export * as Global from "./global" +import fs from "fs/promises" +import { xdgData, xdgCache, xdgConfig, xdgState } from "xdg-basedir" +import path from "path" +import os from "os" +import { Filesystem } from "../util" +import { Flock } from "@opencode-ai/shared/util/flock" + +const app = "opencode" + +const data = path.join(xdgData!, app) +const cache = path.join(xdgCache!, app) +const config = path.join(xdgConfig!, app) +const state = path.join(xdgState!, app) + +export const Path = { + // Allow override via OPENCODE_TEST_HOME for test isolation + get home() { + return process.env.OPENCODE_TEST_HOME || os.homedir() + }, + data, + bin: path.join(cache, "bin"), + log: path.join(data, "log"), + cache, + config, + state, +} + +// 
Initialize Flock with global state path +Flock.setGlobal({ state }) + +await Promise.all([ + fs.mkdir(Path.data, { recursive: true }), + fs.mkdir(Path.config, { recursive: true }), + fs.mkdir(Path.state, { recursive: true }), + fs.mkdir(Path.log, { recursive: true }), + fs.mkdir(Path.bin, { recursive: true }), +]) + +const CACHE_VERSION = "21" + +const version = await Filesystem.readText(path.join(Path.cache, "version")).catch(() => "0") + +if (version !== CACHE_VERSION) { + try { + const contents = await fs.readdir(Path.cache) + await Promise.all( + contents.map((item) => + fs.rm(path.join(Path.cache, item), { + recursive: true, + force: true, + }), + ), + ) + } catch {} + await Filesystem.write(path.join(Path.cache, "version"), CACHE_VERSION) +} + +export * as Global from "." From cb6a9253fe8c4439bcfeff6c152b22b470de2eda Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:34:33 -0400 Subject: [PATCH 042/201] refactor: collapse sync barrel into sync/index.ts (#22907) --- packages/opencode/src/sync/index.ts | 279 ++++++++++++++++++++++- packages/opencode/src/sync/sync-event.ts | 276 ---------------------- 2 files changed, 278 insertions(+), 277 deletions(-) delete mode 100644 packages/opencode/src/sync/sync-event.ts diff --git a/packages/opencode/src/sync/index.ts b/packages/opencode/src/sync/index.ts index a6dec180bd..125d8c9550 100644 --- a/packages/opencode/src/sync/index.ts +++ b/packages/opencode/src/sync/index.ts @@ -1 +1,278 @@ -export * as SyncEvent from "./sync-event" +import z from "zod" +import type { ZodObject } from "zod" +import { Database, eq } from "@/storage" +import { GlobalBus } from "@/bus/global" +import { Bus as ProjectBus } from "@/bus" +import { BusEvent } from "@/bus/bus-event" +import { Instance } from "@/project/instance" +import { EventSequenceTable, EventTable } from "./event.sql" +import { WorkspaceContext } from "@/control-plane/workspace-context" +import { EventID } from "./schema" +import { Flag } from "@/flag/flag" 
+ +export type Definition = { + type: string + version: number + aggregate: string + schema: z.ZodObject + + // This is temporary and only exists for compatibility with bus + // event definitions + properties: z.ZodObject +} + +export type Event = { + id: string + seq: number + aggregateID: string + data: z.infer +} + +export type SerializedEvent = Event & { type: string } + +type ProjectorFunc = (db: Database.TxOrDb, data: unknown) => void + +export const registry = new Map() +let projectors: Map | undefined +const versions = new Map() +let frozen = false +let convertEvent: (type: string, event: Event["data"]) => Promise> | Record + +export function reset() { + frozen = false + projectors = undefined + convertEvent = (_, data) => data +} + +export function init(input: { projectors: Array<[Definition, ProjectorFunc]>; convertEvent?: typeof convertEvent }) { + projectors = new Map(input.projectors) + + // Install all the latest event defs to the bus. We only ever emit + // latest versions from code, and keep around old versions for + // replaying. Replaying does not go through the bus, and it + // simplifies the bus to only use unversioned latest events + for (let [type, version] of versions.entries()) { + let def = registry.get(versionedType(type, version))! + + BusEvent.define(def.type, def.properties || def.schema) + } + + // Freeze the system so it clearly errors if events are defined + // after `init` which would cause bugs + frozen = true + convertEvent = input.convertEvent || ((_, data) => data) +} + +export function versionedType(type: A): A +export function versionedType(type: A, version: B): `${A}/${B}` +export function versionedType(type: string, version?: number) { + return version ? 
`${type}.${version}` : type +} + +export function define< + Type extends string, + Agg extends string, + Schema extends ZodObject>>, + BusSchema extends ZodObject = Schema, +>(input: { type: Type; version: number; aggregate: Agg; schema: Schema; busSchema?: BusSchema }) { + if (frozen) { + throw new Error("Error defining sync event: sync system has been frozen") + } + + const def = { + type: input.type, + version: input.version, + aggregate: input.aggregate, + schema: input.schema, + properties: input.busSchema ? input.busSchema : input.schema, + } + + versions.set(def.type, Math.max(def.version, versions.get(def.type) || 0)) + + registry.set(versionedType(def.type, def.version), def) + + return def +} + +export function project( + def: Def, + func: (db: Database.TxOrDb, data: Event["data"]) => void, +): [Definition, ProjectorFunc] { + return [def, func as ProjectorFunc] +} + +function process(def: Def, event: Event, options: { publish: boolean }) { + if (projectors == null) { + throw new Error("No projectors available. 
Call `SyncEvent.init` to install projectors") + } + + const projector = projectors.get(def) + if (!projector) { + throw new Error(`Projector not found for event: ${def.type}`) + } + + // idempotent: need to ignore any events already logged + + Database.transaction((tx) => { + projector(tx, event.data) + + if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { + tx.insert(EventSequenceTable) + .values({ + aggregate_id: event.aggregateID, + seq: event.seq, + }) + .onConflictDoUpdate({ + target: EventSequenceTable.aggregate_id, + set: { seq: event.seq }, + }) + .run() + tx.insert(EventTable) + .values({ + id: event.id, + seq: event.seq, + aggregate_id: event.aggregateID, + type: versionedType(def.type, def.version), + data: event.data as Record, + }) + .run() + } + + Database.effect(() => { + if (options?.publish) { + const result = convertEvent(def.type, event.data) + if (result instanceof Promise) { + void result.then((data) => { + void ProjectBus.publish({ type: def.type, properties: def.schema }, data) + }) + } else { + void ProjectBus.publish({ type: def.type, properties: def.schema }, result) + } + + GlobalBus.emit("event", { + directory: Instance.directory, + project: Instance.project.id, + workspace: WorkspaceContext.workspaceID, + payload: { + type: "sync", + syncEvent: { + type: versionedType(def.type, def.version), + ...event, + }, + }, + }) + } + }) + }) +} + +export function replay(event: SerializedEvent, options?: { publish: boolean }) { + const def = registry.get(event.type) + if (!def) { + throw new Error(`Unknown event type: ${event.type}`) + } + + const row = Database.use((db) => + db + .select({ seq: EventSequenceTable.seq }) + .from(EventSequenceTable) + .where(eq(EventSequenceTable.aggregate_id, event.aggregateID)) + .get(), + ) + + const latest = row?.seq ?? 
-1 + if (event.seq <= latest) { + return + } + + const expected = latest + 1 + if (event.seq !== expected) { + throw new Error(`Sequence mismatch for aggregate "${event.aggregateID}": expected ${expected}, got ${event.seq}`) + } + + process(def, event, { publish: !!options?.publish }) +} + +export function replayAll(events: SerializedEvent[], options?: { publish: boolean }) { + const source = events[0]?.aggregateID + if (!source) return + if (events.some((item) => item.aggregateID !== source)) { + throw new Error("Replay events must belong to the same session") + } + const start = events[0].seq + for (const [i, item] of events.entries()) { + const seq = start + i + if (item.seq !== seq) { + throw new Error(`Replay sequence mismatch at index ${i}: expected ${seq}, got ${item.seq}`) + } + } + for (const item of events) { + replay(item, options) + } + return source +} + +export function run(def: Def, data: Event["data"], options?: { publish?: boolean }) { + const agg = (data as Record)[def.aggregate] + // This should never happen: we've enforced it via typescript in + // the definition + if (agg == null) { + throw new Error(`SyncEvent.run: "${def.aggregate}" required but not found: ${JSON.stringify(data)}`) + } + + if (def.version !== versions.get(def.type)) { + throw new Error(`SyncEvent.run: running old versions of events is not allowed: ${def.type}`) + } + + const { publish = true } = options || {} + + // Note that this is an "immediate" transaction which is critical. + // We need to make sure we can safely read and write with nothing + // else changing the data from under us + Database.transaction( + (tx) => { + const id = EventID.ascending() + const row = tx + .select({ seq: EventSequenceTable.seq }) + .from(EventSequenceTable) + .where(eq(EventSequenceTable.aggregate_id, agg)) + .get() + const seq = row?.seq != null ? 
row.seq + 1 : 0 + + const event = { id, seq, aggregateID: agg, data } + process(def, event, { publish }) + }, + { + behavior: "immediate", + }, + ) +} + +export function remove(aggregateID: string) { + Database.transaction((tx) => { + tx.delete(EventSequenceTable).where(eq(EventSequenceTable.aggregate_id, aggregateID)).run() + tx.delete(EventTable).where(eq(EventTable.aggregate_id, aggregateID)).run() + }) +} + +export function payloads() { + return registry + .entries() + .map(([type, def]) => { + return z + .object({ + type: z.literal("sync"), + name: z.literal(type), + id: z.string(), + seq: z.number(), + aggregateID: z.literal(def.aggregate), + data: def.schema, + }) + .meta({ + ref: `SyncEvent.${def.type}`, + }) + }) + .toArray() +} + +export * as SyncEvent from "." diff --git a/packages/opencode/src/sync/sync-event.ts b/packages/opencode/src/sync/sync-event.ts deleted file mode 100644 index 94c889d917..0000000000 --- a/packages/opencode/src/sync/sync-event.ts +++ /dev/null @@ -1,276 +0,0 @@ -import z from "zod" -import type { ZodObject } from "zod" -import { Database, eq } from "@/storage" -import { GlobalBus } from "@/bus/global" -import { Bus as ProjectBus } from "@/bus" -import { BusEvent } from "@/bus/bus-event" -import { Instance } from "@/project/instance" -import { EventSequenceTable, EventTable } from "./event.sql" -import { WorkspaceContext } from "@/control-plane/workspace-context" -import { EventID } from "./schema" -import { Flag } from "@/flag/flag" - -export type Definition = { - type: string - version: number - aggregate: string - schema: z.ZodObject - - // This is temporary and only exists for compatibility with bus - // event definitions - properties: z.ZodObject -} - -export type Event = { - id: string - seq: number - aggregateID: string - data: z.infer -} - -export type SerializedEvent = Event & { type: string } - -type ProjectorFunc = (db: Database.TxOrDb, data: unknown) => void - -export const registry = new Map() -let projectors: Map | 
undefined -const versions = new Map() -let frozen = false -let convertEvent: (type: string, event: Event["data"]) => Promise> | Record - -export function reset() { - frozen = false - projectors = undefined - convertEvent = (_, data) => data -} - -export function init(input: { projectors: Array<[Definition, ProjectorFunc]>; convertEvent?: typeof convertEvent }) { - projectors = new Map(input.projectors) - - // Install all the latest event defs to the bus. We only ever emit - // latest versions from code, and keep around old versions for - // replaying. Replaying does not go through the bus, and it - // simplifies the bus to only use unversioned latest events - for (let [type, version] of versions.entries()) { - let def = registry.get(versionedType(type, version))! - - BusEvent.define(def.type, def.properties || def.schema) - } - - // Freeze the system so it clearly errors if events are defined - // after `init` which would cause bugs - frozen = true - convertEvent = input.convertEvent || ((_, data) => data) -} - -export function versionedType(type: A): A -export function versionedType(type: A, version: B): `${A}/${B}` -export function versionedType(type: string, version?: number) { - return version ? `${type}.${version}` : type -} - -export function define< - Type extends string, - Agg extends string, - Schema extends ZodObject>>, - BusSchema extends ZodObject = Schema, ->(input: { type: Type; version: number; aggregate: Agg; schema: Schema; busSchema?: BusSchema }) { - if (frozen) { - throw new Error("Error defining sync event: sync system has been frozen") - } - - const def = { - type: input.type, - version: input.version, - aggregate: input.aggregate, - schema: input.schema, - properties: input.busSchema ? 
input.busSchema : input.schema, - } - - versions.set(def.type, Math.max(def.version, versions.get(def.type) || 0)) - - registry.set(versionedType(def.type, def.version), def) - - return def -} - -export function project( - def: Def, - func: (db: Database.TxOrDb, data: Event["data"]) => void, -): [Definition, ProjectorFunc] { - return [def, func as ProjectorFunc] -} - -function process(def: Def, event: Event, options: { publish: boolean }) { - if (projectors == null) { - throw new Error("No projectors available. Call `SyncEvent.init` to install projectors") - } - - const projector = projectors.get(def) - if (!projector) { - throw new Error(`Projector not found for event: ${def.type}`) - } - - // idempotent: need to ignore any events already logged - - Database.transaction((tx) => { - projector(tx, event.data) - - if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { - tx.insert(EventSequenceTable) - .values({ - aggregate_id: event.aggregateID, - seq: event.seq, - }) - .onConflictDoUpdate({ - target: EventSequenceTable.aggregate_id, - set: { seq: event.seq }, - }) - .run() - tx.insert(EventTable) - .values({ - id: event.id, - seq: event.seq, - aggregate_id: event.aggregateID, - type: versionedType(def.type, def.version), - data: event.data as Record, - }) - .run() - } - - Database.effect(() => { - if (options?.publish) { - const result = convertEvent(def.type, event.data) - if (result instanceof Promise) { - void result.then((data) => { - void ProjectBus.publish({ type: def.type, properties: def.schema }, data) - }) - } else { - void ProjectBus.publish({ type: def.type, properties: def.schema }, result) - } - - GlobalBus.emit("event", { - directory: Instance.directory, - project: Instance.project.id, - workspace: WorkspaceContext.workspaceID, - payload: { - type: "sync", - syncEvent: { - type: versionedType(def.type, def.version), - ...event, - }, - }, - }) - } - }) - }) -} - -export function replay(event: SerializedEvent, options?: { publish: boolean }) { - const def = 
registry.get(event.type) - if (!def) { - throw new Error(`Unknown event type: ${event.type}`) - } - - const row = Database.use((db) => - db - .select({ seq: EventSequenceTable.seq }) - .from(EventSequenceTable) - .where(eq(EventSequenceTable.aggregate_id, event.aggregateID)) - .get(), - ) - - const latest = row?.seq ?? -1 - if (event.seq <= latest) { - return - } - - const expected = latest + 1 - if (event.seq !== expected) { - throw new Error(`Sequence mismatch for aggregate "${event.aggregateID}": expected ${expected}, got ${event.seq}`) - } - - process(def, event, { publish: !!options?.publish }) -} - -export function replayAll(events: SerializedEvent[], options?: { publish: boolean }) { - const source = events[0]?.aggregateID - if (!source) return - if (events.some((item) => item.aggregateID !== source)) { - throw new Error("Replay events must belong to the same session") - } - const start = events[0].seq - for (const [i, item] of events.entries()) { - const seq = start + i - if (item.seq !== seq) { - throw new Error(`Replay sequence mismatch at index ${i}: expected ${seq}, got ${item.seq}`) - } - } - for (const item of events) { - replay(item, options) - } - return source -} - -export function run(def: Def, data: Event["data"], options?: { publish?: boolean }) { - const agg = (data as Record)[def.aggregate] - // This should never happen: we've enforced it via typescript in - // the definition - if (agg == null) { - throw new Error(`SyncEvent.run: "${def.aggregate}" required but not found: ${JSON.stringify(data)}`) - } - - if (def.version !== versions.get(def.type)) { - throw new Error(`SyncEvent.run: running old versions of events is not allowed: ${def.type}`) - } - - const { publish = true } = options || {} - - // Note that this is an "immediate" transaction which is critical. 
- // We need to make sure we can safely read and write with nothing - // else changing the data from under us - Database.transaction( - (tx) => { - const id = EventID.ascending() - const row = tx - .select({ seq: EventSequenceTable.seq }) - .from(EventSequenceTable) - .where(eq(EventSequenceTable.aggregate_id, agg)) - .get() - const seq = row?.seq != null ? row.seq + 1 : 0 - - const event = { id, seq, aggregateID: agg, data } - process(def, event, { publish }) - }, - { - behavior: "immediate", - }, - ) -} - -export function remove(aggregateID: string) { - Database.transaction((tx) => { - tx.delete(EventSequenceTable).where(eq(EventSequenceTable.aggregate_id, aggregateID)).run() - tx.delete(EventTable).where(eq(EventTable.aggregate_id, aggregateID)).run() - }) -} - -export function payloads() { - return registry - .entries() - .map(([type, def]) => { - return z - .object({ - type: z.literal("sync"), - name: z.literal(type), - id: z.string(), - seq: z.number(), - aggregateID: z.literal(def.aggregate), - data: def.schema, - }) - .meta({ - ref: `SyncEvent.${def.type}`, - }) - }) - .toArray() -} From 8c191b10c2bdceee607b0b549f9632f5adb5b511 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:35:04 -0400 Subject: [PATCH 043/201] refactor: collapse ide barrel into ide/index.ts (#22904) --- packages/opencode/src/ide/ide.ts | 71 ---------------------------- packages/opencode/src/ide/index.ts | 74 +++++++++++++++++++++++++++++- 2 files changed, 73 insertions(+), 72 deletions(-) delete mode 100644 packages/opencode/src/ide/ide.ts diff --git a/packages/opencode/src/ide/ide.ts b/packages/opencode/src/ide/ide.ts deleted file mode 100644 index 65e80d7f28..0000000000 --- a/packages/opencode/src/ide/ide.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { BusEvent } from "@/bus/bus-event" -import z from "zod" -import { NamedError } from "@opencode-ai/shared/util/error" -import { Log } from "../util" -import { Process } from "@/util" - -const SUPPORTED_IDES = [ - { name: 
"Windsurf" as const, cmd: "windsurf" }, - { name: "Visual Studio Code - Insiders" as const, cmd: "code-insiders" }, - { name: "Visual Studio Code" as const, cmd: "code" }, - { name: "Cursor" as const, cmd: "cursor" }, - { name: "VSCodium" as const, cmd: "codium" }, -] - -const log = Log.create({ service: "ide" }) - -export const Event = { - Installed: BusEvent.define( - "ide.installed", - z.object({ - ide: z.string(), - }), - ), -} - -export const AlreadyInstalledError = NamedError.create("AlreadyInstalledError", z.object({})) - -export const InstallFailedError = NamedError.create( - "InstallFailedError", - z.object({ - stderr: z.string(), - }), -) - -export function ide() { - if (process.env["TERM_PROGRAM"] === "vscode") { - const v = process.env["GIT_ASKPASS"] - for (const ide of SUPPORTED_IDES) { - if (v?.includes(ide.name)) return ide.name - } - } - return "unknown" -} - -export function alreadyInstalled() { - return process.env["OPENCODE_CALLER"] === "vscode" || process.env["OPENCODE_CALLER"] === "vscode-insiders" -} - -export async function install(ide: (typeof SUPPORTED_IDES)[number]["name"]) { - const cmd = SUPPORTED_IDES.find((i) => i.name === ide)?.cmd - if (!cmd) throw new Error(`Unknown IDE: ${ide}`) - - const p = await Process.run([cmd, "--install-extension", "sst-dev.opencode"], { - nothrow: true, - }) - const stdout = p.stdout.toString() - const stderr = p.stderr.toString() - - log.info("installed", { - ide, - stdout, - stderr, - }) - - if (p.code !== 0) { - throw new InstallFailedError({ stderr }) - } - if (stdout.includes("already installed")) { - throw new AlreadyInstalledError({}) - } -} diff --git a/packages/opencode/src/ide/index.ts b/packages/opencode/src/ide/index.ts index 9716ecbc74..ee80c34741 100644 --- a/packages/opencode/src/ide/index.ts +++ b/packages/opencode/src/ide/index.ts @@ -1 +1,73 @@ -export * as Ide from "./ide" +import { BusEvent } from "@/bus/bus-event" +import z from "zod" +import { NamedError } from 
"@opencode-ai/shared/util/error" +import { Log } from "../util" +import { Process } from "@/util" + +const SUPPORTED_IDES = [ + { name: "Windsurf" as const, cmd: "windsurf" }, + { name: "Visual Studio Code - Insiders" as const, cmd: "code-insiders" }, + { name: "Visual Studio Code" as const, cmd: "code" }, + { name: "Cursor" as const, cmd: "cursor" }, + { name: "VSCodium" as const, cmd: "codium" }, +] + +const log = Log.create({ service: "ide" }) + +export const Event = { + Installed: BusEvent.define( + "ide.installed", + z.object({ + ide: z.string(), + }), + ), +} + +export const AlreadyInstalledError = NamedError.create("AlreadyInstalledError", z.object({})) + +export const InstallFailedError = NamedError.create( + "InstallFailedError", + z.object({ + stderr: z.string(), + }), +) + +export function ide() { + if (process.env["TERM_PROGRAM"] === "vscode") { + const v = process.env["GIT_ASKPASS"] + for (const ide of SUPPORTED_IDES) { + if (v?.includes(ide.name)) return ide.name + } + } + return "unknown" +} + +export function alreadyInstalled() { + return process.env["OPENCODE_CALLER"] === "vscode" || process.env["OPENCODE_CALLER"] === "vscode-insiders" +} + +export async function install(ide: (typeof SUPPORTED_IDES)[number]["name"]) { + const cmd = SUPPORTED_IDES.find((i) => i.name === ide)?.cmd + if (!cmd) throw new Error(`Unknown IDE: ${ide}`) + + const p = await Process.run([cmd, "--install-extension", "sst-dev.opencode"], { + nothrow: true, + }) + const stdout = p.stdout.toString() + const stderr = p.stderr.toString() + + log.info("installed", { + ide, + stdout, + stderr, + }) + + if (p.code !== 0) { + throw new InstallFailedError({ stderr }) + } + if (stdout.includes("already installed")) { + throw new AlreadyInstalledError({}) + } +} + +export * as Ide from "." 
From bd51a0d35bcdec47a980bb3c34c1d5d7ba144866 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:35:26 -0400 Subject: [PATCH 044/201] refactor: collapse worktree barrel into worktree/index.ts (#22906) --- packages/opencode/src/worktree/index.ts | 598 ++++++++++++++++++++- packages/opencode/src/worktree/worktree.ts | 595 -------------------- 2 files changed, 597 insertions(+), 596 deletions(-) delete mode 100644 packages/opencode/src/worktree/worktree.ts diff --git a/packages/opencode/src/worktree/index.ts b/packages/opencode/src/worktree/index.ts index 39bf94d69b..aa1dc2f8f1 100644 --- a/packages/opencode/src/worktree/index.ts +++ b/packages/opencode/src/worktree/index.ts @@ -1 +1,597 @@ -export * as Worktree from "./worktree" +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" +import { Global } from "../global" +import { Instance } from "../project/instance" +import { InstanceBootstrap } from "../project/bootstrap" +import { Project } from "../project" +import { Database, eq } from "../storage" +import { ProjectTable } from "../project/project.sql" +import type { ProjectID } from "../project/schema" +import { Log } from "../util" +import { Slug } from "@opencode-ai/shared/util/slug" +import { errorMessage } from "../util/error" +import { BusEvent } from "@/bus/bus-event" +import { GlobalBus } from "@/bus/global" +import { Git } from "@/git" +import { Effect, Layer, Path, Scope, Context, Stream } from "effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import { NodePath } from "@effect/platform-node" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { BootstrapRuntime } from "@/effect/bootstrap-runtime" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { InstanceState } from "@/effect" + +const log = Log.create({ service: "worktree" }) + +export const Event = { + Ready: BusEvent.define( + "worktree.ready", + z.object({ + name: 
z.string(), + branch: z.string(), + }), + ), + Failed: BusEvent.define( + "worktree.failed", + z.object({ + message: z.string(), + }), + ), +} + +export const Info = z + .object({ + name: z.string(), + branch: z.string(), + directory: z.string(), + }) + .meta({ + ref: "Worktree", + }) + +export type Info = z.infer + +export const CreateInput = z + .object({ + name: z.string().optional(), + startCommand: z.string().optional().describe("Additional startup script to run after the project's start command"), + }) + .meta({ + ref: "WorktreeCreateInput", + }) + +export type CreateInput = z.infer + +export const RemoveInput = z + .object({ + directory: z.string(), + }) + .meta({ + ref: "WorktreeRemoveInput", + }) + +export type RemoveInput = z.infer + +export const ResetInput = z + .object({ + directory: z.string(), + }) + .meta({ + ref: "WorktreeResetInput", + }) + +export type ResetInput = z.infer + +export const NotGitError = NamedError.create( + "WorktreeNotGitError", + z.object({ + message: z.string(), + }), +) + +export const NameGenerationFailedError = NamedError.create( + "WorktreeNameGenerationFailedError", + z.object({ + message: z.string(), + }), +) + +export const CreateFailedError = NamedError.create( + "WorktreeCreateFailedError", + z.object({ + message: z.string(), + }), +) + +export const StartCommandFailedError = NamedError.create( + "WorktreeStartCommandFailedError", + z.object({ + message: z.string(), + }), +) + +export const RemoveFailedError = NamedError.create( + "WorktreeRemoveFailedError", + z.object({ + message: z.string(), + }), +) + +export const ResetFailedError = NamedError.create( + "WorktreeResetFailedError", + z.object({ + message: z.string(), + }), +) + +function slugify(input: string) { + return input + .trim() + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-+/, "") + .replace(/-+$/, "") +} + +function failedRemoves(...chunks: string[]) { + return chunks.filter(Boolean).flatMap((chunk) => + chunk + .split("\n") + .map((line) 
=> line.trim()) + .flatMap((line) => { + const match = line.match(/^warning:\s+failed to remove\s+(.+):\s+/i) + if (!match) return [] + const value = match[1]?.trim().replace(/^['"]|['"]$/g, "") + if (!value) return [] + return [value] + }), + ) +} + +// --------------------------------------------------------------------------- +// Effect service +// --------------------------------------------------------------------------- + +export interface Interface { + readonly makeWorktreeInfo: (name?: string) => Effect.Effect + readonly createFromInfo: (info: Info, startCommand?: string) => Effect.Effect + readonly create: (input?: CreateInput) => Effect.Effect + readonly remove: (input: RemoveInput) => Effect.Effect + readonly reset: (input: ResetInput) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Worktree") {} + +type GitResult = { code: number; text: string; stderr: string } + +export const layer: Layer.Layer< + Service, + never, + AppFileSystem.Service | Path.Path | ChildProcessSpawner.ChildProcessSpawner | Git.Service | Project.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const scope = yield* Scope.Scope + const fs = yield* AppFileSystem.Service + const pathSvc = yield* Path.Path + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + const gitSvc = yield* Git.Service + const project = yield* Project.Service + + const git = Effect.fnUntraced( + function* (args: string[], opts?: { cwd?: string }) { + const handle = yield* spawner.spawn( + ChildProcess.make("git", args, { cwd: opts?.cwd, extendEnv: true, stdin: "ignore" }), + ) + const [text, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + return { code, text, stderr } satisfies GitResult + }, + Effect.scoped, + Effect.catch((e) => + Effect.succeed({ code: 1, text: "", stderr: e instanceof Error ? 
e.message : String(e) } satisfies GitResult), + ), + ) + + const MAX_NAME_ATTEMPTS = 26 + const candidate = Effect.fn("Worktree.candidate")(function* (root: string, base?: string) { + const ctx = yield* InstanceState.context + for (const attempt of Array.from({ length: MAX_NAME_ATTEMPTS }, (_, i) => i)) { + const name = base ? (attempt === 0 ? base : `${base}-${Slug.create()}`) : Slug.create() + const branch = `opencode/${name}` + const directory = pathSvc.join(root, name) + + if (yield* fs.exists(directory).pipe(Effect.orDie)) continue + + const ref = `refs/heads/${branch}` + const branchCheck = yield* git(["show-ref", "--verify", "--quiet", ref], { cwd: ctx.worktree }) + if (branchCheck.code === 0) continue + + return Info.parse({ name, branch, directory }) + } + throw new NameGenerationFailedError({ message: "Failed to generate a unique worktree name" }) + }) + + const makeWorktreeInfo = Effect.fn("Worktree.makeWorktreeInfo")(function* (name?: string) { + const ctx = yield* InstanceState.context + if (ctx.project.vcs !== "git") { + throw new NotGitError({ message: "Worktrees are only supported for git projects" }) + } + + const root = pathSvc.join(Global.Path.data, "worktree", ctx.project.id) + yield* fs.makeDirectory(root, { recursive: true }).pipe(Effect.orDie) + + const base = name ? 
slugify(name) : "" + return yield* candidate(root, base || undefined) + }) + + const setup = Effect.fnUntraced(function* (info: Info) { + const ctx = yield* InstanceState.context + const created = yield* git(["worktree", "add", "--no-checkout", "-b", info.branch, info.directory], { + cwd: ctx.worktree, + }) + if (created.code !== 0) { + throw new CreateFailedError({ message: created.stderr || created.text || "Failed to create git worktree" }) + } + + yield* project.addSandbox(ctx.project.id, info.directory).pipe(Effect.catch(() => Effect.void)) + }) + + const boot = Effect.fnUntraced(function* (info: Info, startCommand?: string) { + const ctx = yield* InstanceState.context + const workspaceID = yield* InstanceState.workspaceID + const projectID = ctx.project.id + const extra = startCommand?.trim() + + const populated = yield* git(["reset", "--hard"], { cwd: info.directory }) + if (populated.code !== 0) { + const message = populated.stderr || populated.text || "Failed to populate worktree" + log.error("worktree checkout failed", { directory: info.directory, message }) + GlobalBus.emit("event", { + directory: info.directory, + project: ctx.project.id, + workspace: workspaceID, + payload: { type: Event.Failed.type, properties: { message } }, + }) + return + } + + const booted = yield* Effect.promise(() => + Instance.provide({ + directory: info.directory, + init: () => BootstrapRuntime.runPromise(InstanceBootstrap), + fn: () => undefined, + }) + .then(() => true) + .catch((error) => { + const message = errorMessage(error) + log.error("worktree bootstrap failed", { directory: info.directory, message }) + GlobalBus.emit("event", { + directory: info.directory, + project: ctx.project.id, + workspace: workspaceID, + payload: { type: Event.Failed.type, properties: { message } }, + }) + return false + }), + ) + if (!booted) return + + GlobalBus.emit("event", { + directory: info.directory, + project: ctx.project.id, + workspace: workspaceID, + payload: { + type: 
Event.Ready.type, + properties: { name: info.name, branch: info.branch }, + }, + }) + + yield* runStartScripts(info.directory, { projectID, extra }) + }) + + const createFromInfo = Effect.fn("Worktree.createFromInfo")(function* (info: Info, startCommand?: string) { + yield* setup(info) + yield* boot(info, startCommand) + }) + + const create = Effect.fn("Worktree.create")(function* (input?: CreateInput) { + const info = yield* makeWorktreeInfo(input?.name) + yield* setup(info) + yield* boot(info, input?.startCommand).pipe( + Effect.catchCause((cause) => Effect.sync(() => log.error("worktree bootstrap failed", { cause }))), + Effect.forkIn(scope), + ) + return info + }) + + const canonical = Effect.fnUntraced(function* (input: string) { + const abs = pathSvc.resolve(input) + const real = yield* fs.realPath(abs).pipe(Effect.catch(() => Effect.succeed(abs))) + const normalized = pathSvc.normalize(real) + return process.platform === "win32" ? normalized.toLowerCase() : normalized + }) + + function parseWorktreeList(text: string) { + return text + .split("\n") + .map((line) => line.trim()) + .reduce<{ path?: string; branch?: string }[]>((acc, line) => { + if (!line) return acc + if (line.startsWith("worktree ")) { + acc.push({ path: line.slice("worktree ".length).trim() }) + return acc + } + const current = acc[acc.length - 1] + if (!current) return acc + if (line.startsWith("branch ")) { + current.branch = line.slice("branch ".length).trim() + } + return acc + }, []) + } + + const locateWorktree = Effect.fnUntraced(function* ( + entries: { path?: string; branch?: string }[], + directory: string, + ) { + for (const item of entries) { + if (!item.path) continue + const key = yield* canonical(item.path) + if (key === directory) return item + } + return undefined + }) + + function stopFsmonitor(target: string) { + return fs.exists(target).pipe( + Effect.orDie, + Effect.flatMap((exists) => (exists ? 
git(["fsmonitor--daemon", "stop"], { cwd: target }) : Effect.void)), + ) + } + + function cleanDirectory(target: string) { + return Effect.promise(() => + import("fs/promises") + .then((fsp) => fsp.rm(target, { recursive: true, force: true, maxRetries: 5, retryDelay: 100 })) + .catch((error) => { + const message = errorMessage(error) + throw new RemoveFailedError({ message: message || "Failed to remove git worktree directory" }) + }), + ) + } + + const remove = Effect.fn("Worktree.remove")(function* (input: RemoveInput) { + if (Instance.project.vcs !== "git") { + throw new NotGitError({ message: "Worktrees are only supported for git projects" }) + } + + const directory = yield* canonical(input.directory) + + const list = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) + if (list.code !== 0) { + throw new RemoveFailedError({ message: list.stderr || list.text || "Failed to read git worktrees" }) + } + + const entries = parseWorktreeList(list.text) + const entry = yield* locateWorktree(entries, directory) + + if (!entry?.path) { + const directoryExists = yield* fs.exists(directory).pipe(Effect.orDie) + if (directoryExists) { + yield* stopFsmonitor(directory) + yield* cleanDirectory(directory) + } + return true + } + + yield* stopFsmonitor(entry.path) + const removed = yield* git(["worktree", "remove", "--force", entry.path], { cwd: Instance.worktree }) + if (removed.code !== 0) { + const next = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) + if (next.code !== 0) { + throw new RemoveFailedError({ + message: removed.stderr || removed.text || next.stderr || next.text || "Failed to remove git worktree", + }) + } + + const stale = yield* locateWorktree(parseWorktreeList(next.text), directory) + if (stale?.path) { + throw new RemoveFailedError({ message: removed.stderr || removed.text || "Failed to remove git worktree" }) + } + } + + yield* cleanDirectory(entry.path) + + const branch = 
entry.branch?.replace(/^refs\/heads\//, "") + if (branch) { + const deleted = yield* git(["branch", "-D", branch], { cwd: Instance.worktree }) + if (deleted.code !== 0) { + throw new RemoveFailedError({ + message: deleted.stderr || deleted.text || "Failed to delete worktree branch", + }) + } + } + + return true + }) + + const gitExpect = Effect.fnUntraced(function* ( + args: string[], + opts: { cwd: string }, + error: (r: GitResult) => Error, + ) { + const result = yield* git(args, opts) + if (result.code !== 0) throw error(result) + return result + }) + + const runStartCommand = Effect.fnUntraced( + function* (directory: string, cmd: string) { + const [shell, args] = process.platform === "win32" ? ["cmd", ["/c", cmd]] : ["bash", ["-lc", cmd]] + const handle = yield* spawner.spawn( + ChildProcess.make(shell, args, { cwd: directory, extendEnv: true, stdin: "ignore" }), + ) + // Drain stdout, capture stderr for error reporting + const [, stderr] = yield* Effect.all( + [Stream.runDrain(handle.stdout), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ).pipe(Effect.orDie) + const code = yield* handle.exitCode + return { code, stderr } + }, + Effect.scoped, + Effect.catch(() => Effect.succeed({ code: 1, stderr: "" })), + ) + + const runStartScript = Effect.fnUntraced(function* (directory: string, cmd: string, kind: string) { + const text = cmd.trim() + if (!text) return true + const result = yield* runStartCommand(directory, text) + if (result.code === 0) return true + log.error("worktree start command failed", { kind, directory, message: result.stderr }) + return false + }) + + const runStartScripts = Effect.fnUntraced(function* ( + directory: string, + input: { projectID: ProjectID; extra?: string }, + ) { + const row = yield* Effect.sync(() => + Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get()), + ) + const project = row ? 
Project.fromRow(row) : undefined + const startup = project?.commands?.start?.trim() ?? "" + const ok = yield* runStartScript(directory, startup, "project") + if (!ok) return false + yield* runStartScript(directory, input.extra ?? "", "worktree") + return true + }) + + const prune = Effect.fnUntraced(function* (root: string, entries: string[]) { + const base = yield* canonical(root) + yield* Effect.forEach( + entries, + (entry) => + Effect.gen(function* () { + const target = yield* canonical(pathSvc.resolve(root, entry)) + if (target === base) return + if (!target.startsWith(`${base}${pathSvc.sep}`)) return + yield* fs.remove(target, { recursive: true }).pipe(Effect.ignore) + }), + { concurrency: "unbounded" }, + ) + }) + + const sweep = Effect.fnUntraced(function* (root: string) { + const first = yield* git(["clean", "-ffdx"], { cwd: root }) + if (first.code === 0) return first + + const entries = failedRemoves(first.stderr, first.text) + if (!entries.length) return first + + yield* prune(root, entries) + return yield* git(["clean", "-ffdx"], { cwd: root }) + }) + + const reset = Effect.fn("Worktree.reset")(function* (input: ResetInput) { + if (Instance.project.vcs !== "git") { + throw new NotGitError({ message: "Worktrees are only supported for git projects" }) + } + + const directory = yield* canonical(input.directory) + const primary = yield* canonical(Instance.worktree) + if (directory === primary) { + throw new ResetFailedError({ message: "Cannot reset the primary workspace" }) + } + + const list = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) + if (list.code !== 0) { + throw new ResetFailedError({ message: list.stderr || list.text || "Failed to read git worktrees" }) + } + + const entry = yield* locateWorktree(parseWorktreeList(list.text), directory) + if (!entry?.path) { + throw new ResetFailedError({ message: "Worktree not found" }) + } + + const worktreePath = entry.path + + const base = yield* 
gitSvc.defaultBranch(Instance.worktree) + if (!base) { + throw new ResetFailedError({ message: "Default branch not found" }) + } + + const sep = base.ref.indexOf("/") + if (base.ref !== base.name && sep > 0) { + const remote = base.ref.slice(0, sep) + const branch = base.ref.slice(sep + 1) + yield* gitExpect( + ["fetch", remote, branch], + { cwd: Instance.worktree }, + (r) => new ResetFailedError({ message: r.stderr || r.text || `Failed to fetch ${base.ref}` }), + ) + } + + yield* gitExpect( + ["reset", "--hard", base.ref], + { cwd: worktreePath }, + (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to reset worktree to target" }), + ) + + const cleanResult = yield* sweep(worktreePath) + if (cleanResult.code !== 0) { + throw new ResetFailedError({ message: cleanResult.stderr || cleanResult.text || "Failed to clean worktree" }) + } + + yield* gitExpect( + ["submodule", "update", "--init", "--recursive", "--force"], + { cwd: worktreePath }, + (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to update submodules" }), + ) + + yield* gitExpect( + ["submodule", "foreach", "--recursive", "git", "reset", "--hard"], + { cwd: worktreePath }, + (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to reset submodules" }), + ) + + yield* gitExpect( + ["submodule", "foreach", "--recursive", "git", "clean", "-fdx"], + { cwd: worktreePath }, + (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to clean submodules" }), + ) + + const status = yield* git(["-c", "core.fsmonitor=false", "status", "--porcelain=v1"], { cwd: worktreePath }) + if (status.code !== 0) { + throw new ResetFailedError({ message: status.stderr || status.text || "Failed to read git status" }) + } + + if (status.text.trim()) { + throw new ResetFailedError({ message: `Worktree reset left local changes:\n${status.text.trim()}` }) + } + + yield* runStartScripts(worktreePath, { projectID: Instance.project.id }).pipe( + Effect.catchCause((cause) 
=> Effect.sync(() => log.error("worktree start task failed", { cause }))), + Effect.forkIn(scope), + ) + + return true + }) + + return Service.of({ makeWorktreeInfo, createFromInfo, create, remove, reset }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Git.defaultLayer), + Layer.provide(CrossSpawnSpawner.defaultLayer), + Layer.provide(Project.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(NodePath.layer), +) + +export * as Worktree from "." diff --git a/packages/opencode/src/worktree/worktree.ts b/packages/opencode/src/worktree/worktree.ts deleted file mode 100644 index d4fab2030b..0000000000 --- a/packages/opencode/src/worktree/worktree.ts +++ /dev/null @@ -1,595 +0,0 @@ -import z from "zod" -import { NamedError } from "@opencode-ai/shared/util/error" -import { Global } from "../global" -import { Instance } from "../project/instance" -import { InstanceBootstrap } from "../project/bootstrap" -import { Project } from "../project" -import { Database, eq } from "../storage" -import { ProjectTable } from "../project/project.sql" -import type { ProjectID } from "../project/schema" -import { Log } from "../util" -import { Slug } from "@opencode-ai/shared/util/slug" -import { errorMessage } from "../util/error" -import { BusEvent } from "@/bus/bus-event" -import { GlobalBus } from "@/bus/global" -import { Git } from "@/git" -import { Effect, Layer, Path, Scope, Context, Stream } from "effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import { NodePath } from "@effect/platform-node" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { BootstrapRuntime } from "@/effect/bootstrap-runtime" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { InstanceState } from "@/effect" - -const log = Log.create({ service: "worktree" }) - -export const Event = { - Ready: BusEvent.define( - "worktree.ready", - z.object({ - name: z.string(), - branch: 
z.string(), - }), - ), - Failed: BusEvent.define( - "worktree.failed", - z.object({ - message: z.string(), - }), - ), -} - -export const Info = z - .object({ - name: z.string(), - branch: z.string(), - directory: z.string(), - }) - .meta({ - ref: "Worktree", - }) - -export type Info = z.infer - -export const CreateInput = z - .object({ - name: z.string().optional(), - startCommand: z.string().optional().describe("Additional startup script to run after the project's start command"), - }) - .meta({ - ref: "WorktreeCreateInput", - }) - -export type CreateInput = z.infer - -export const RemoveInput = z - .object({ - directory: z.string(), - }) - .meta({ - ref: "WorktreeRemoveInput", - }) - -export type RemoveInput = z.infer - -export const ResetInput = z - .object({ - directory: z.string(), - }) - .meta({ - ref: "WorktreeResetInput", - }) - -export type ResetInput = z.infer - -export const NotGitError = NamedError.create( - "WorktreeNotGitError", - z.object({ - message: z.string(), - }), -) - -export const NameGenerationFailedError = NamedError.create( - "WorktreeNameGenerationFailedError", - z.object({ - message: z.string(), - }), -) - -export const CreateFailedError = NamedError.create( - "WorktreeCreateFailedError", - z.object({ - message: z.string(), - }), -) - -export const StartCommandFailedError = NamedError.create( - "WorktreeStartCommandFailedError", - z.object({ - message: z.string(), - }), -) - -export const RemoveFailedError = NamedError.create( - "WorktreeRemoveFailedError", - z.object({ - message: z.string(), - }), -) - -export const ResetFailedError = NamedError.create( - "WorktreeResetFailedError", - z.object({ - message: z.string(), - }), -) - -function slugify(input: string) { - return input - .trim() - .toLowerCase() - .replace(/[^a-z0-9]+/g, "-") - .replace(/^-+/, "") - .replace(/-+$/, "") -} - -function failedRemoves(...chunks: string[]) { - return chunks.filter(Boolean).flatMap((chunk) => - chunk - .split("\n") - .map((line) => line.trim()) - 
.flatMap((line) => { - const match = line.match(/^warning:\s+failed to remove\s+(.+):\s+/i) - if (!match) return [] - const value = match[1]?.trim().replace(/^['"]|['"]$/g, "") - if (!value) return [] - return [value] - }), - ) -} - -// --------------------------------------------------------------------------- -// Effect service -// --------------------------------------------------------------------------- - -export interface Interface { - readonly makeWorktreeInfo: (name?: string) => Effect.Effect - readonly createFromInfo: (info: Info, startCommand?: string) => Effect.Effect - readonly create: (input?: CreateInput) => Effect.Effect - readonly remove: (input: RemoveInput) => Effect.Effect - readonly reset: (input: ResetInput) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Worktree") {} - -type GitResult = { code: number; text: string; stderr: string } - -export const layer: Layer.Layer< - Service, - never, - AppFileSystem.Service | Path.Path | ChildProcessSpawner.ChildProcessSpawner | Git.Service | Project.Service -> = Layer.effect( - Service, - Effect.gen(function* () { - const scope = yield* Scope.Scope - const fs = yield* AppFileSystem.Service - const pathSvc = yield* Path.Path - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - const gitSvc = yield* Git.Service - const project = yield* Project.Service - - const git = Effect.fnUntraced( - function* (args: string[], opts?: { cwd?: string }) { - const handle = yield* spawner.spawn( - ChildProcess.make("git", args, { cwd: opts?.cwd, extendEnv: true, stdin: "ignore" }), - ) - const [text, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - return { code, text, stderr } satisfies GitResult - }, - Effect.scoped, - Effect.catch((e) => - Effect.succeed({ code: 1, text: "", stderr: e instanceof Error ? 
e.message : String(e) } satisfies GitResult), - ), - ) - - const MAX_NAME_ATTEMPTS = 26 - const candidate = Effect.fn("Worktree.candidate")(function* (root: string, base?: string) { - const ctx = yield* InstanceState.context - for (const attempt of Array.from({ length: MAX_NAME_ATTEMPTS }, (_, i) => i)) { - const name = base ? (attempt === 0 ? base : `${base}-${Slug.create()}`) : Slug.create() - const branch = `opencode/${name}` - const directory = pathSvc.join(root, name) - - if (yield* fs.exists(directory).pipe(Effect.orDie)) continue - - const ref = `refs/heads/${branch}` - const branchCheck = yield* git(["show-ref", "--verify", "--quiet", ref], { cwd: ctx.worktree }) - if (branchCheck.code === 0) continue - - return Info.parse({ name, branch, directory }) - } - throw new NameGenerationFailedError({ message: "Failed to generate a unique worktree name" }) - }) - - const makeWorktreeInfo = Effect.fn("Worktree.makeWorktreeInfo")(function* (name?: string) { - const ctx = yield* InstanceState.context - if (ctx.project.vcs !== "git") { - throw new NotGitError({ message: "Worktrees are only supported for git projects" }) - } - - const root = pathSvc.join(Global.Path.data, "worktree", ctx.project.id) - yield* fs.makeDirectory(root, { recursive: true }).pipe(Effect.orDie) - - const base = name ? 
slugify(name) : "" - return yield* candidate(root, base || undefined) - }) - - const setup = Effect.fnUntraced(function* (info: Info) { - const ctx = yield* InstanceState.context - const created = yield* git(["worktree", "add", "--no-checkout", "-b", info.branch, info.directory], { - cwd: ctx.worktree, - }) - if (created.code !== 0) { - throw new CreateFailedError({ message: created.stderr || created.text || "Failed to create git worktree" }) - } - - yield* project.addSandbox(ctx.project.id, info.directory).pipe(Effect.catch(() => Effect.void)) - }) - - const boot = Effect.fnUntraced(function* (info: Info, startCommand?: string) { - const ctx = yield* InstanceState.context - const workspaceID = yield* InstanceState.workspaceID - const projectID = ctx.project.id - const extra = startCommand?.trim() - - const populated = yield* git(["reset", "--hard"], { cwd: info.directory }) - if (populated.code !== 0) { - const message = populated.stderr || populated.text || "Failed to populate worktree" - log.error("worktree checkout failed", { directory: info.directory, message }) - GlobalBus.emit("event", { - directory: info.directory, - project: ctx.project.id, - workspace: workspaceID, - payload: { type: Event.Failed.type, properties: { message } }, - }) - return - } - - const booted = yield* Effect.promise(() => - Instance.provide({ - directory: info.directory, - init: () => BootstrapRuntime.runPromise(InstanceBootstrap), - fn: () => undefined, - }) - .then(() => true) - .catch((error) => { - const message = errorMessage(error) - log.error("worktree bootstrap failed", { directory: info.directory, message }) - GlobalBus.emit("event", { - directory: info.directory, - project: ctx.project.id, - workspace: workspaceID, - payload: { type: Event.Failed.type, properties: { message } }, - }) - return false - }), - ) - if (!booted) return - - GlobalBus.emit("event", { - directory: info.directory, - project: ctx.project.id, - workspace: workspaceID, - payload: { - type: 
Event.Ready.type, - properties: { name: info.name, branch: info.branch }, - }, - }) - - yield* runStartScripts(info.directory, { projectID, extra }) - }) - - const createFromInfo = Effect.fn("Worktree.createFromInfo")(function* (info: Info, startCommand?: string) { - yield* setup(info) - yield* boot(info, startCommand) - }) - - const create = Effect.fn("Worktree.create")(function* (input?: CreateInput) { - const info = yield* makeWorktreeInfo(input?.name) - yield* setup(info) - yield* boot(info, input?.startCommand).pipe( - Effect.catchCause((cause) => Effect.sync(() => log.error("worktree bootstrap failed", { cause }))), - Effect.forkIn(scope), - ) - return info - }) - - const canonical = Effect.fnUntraced(function* (input: string) { - const abs = pathSvc.resolve(input) - const real = yield* fs.realPath(abs).pipe(Effect.catch(() => Effect.succeed(abs))) - const normalized = pathSvc.normalize(real) - return process.platform === "win32" ? normalized.toLowerCase() : normalized - }) - - function parseWorktreeList(text: string) { - return text - .split("\n") - .map((line) => line.trim()) - .reduce<{ path?: string; branch?: string }[]>((acc, line) => { - if (!line) return acc - if (line.startsWith("worktree ")) { - acc.push({ path: line.slice("worktree ".length).trim() }) - return acc - } - const current = acc[acc.length - 1] - if (!current) return acc - if (line.startsWith("branch ")) { - current.branch = line.slice("branch ".length).trim() - } - return acc - }, []) - } - - const locateWorktree = Effect.fnUntraced(function* ( - entries: { path?: string; branch?: string }[], - directory: string, - ) { - for (const item of entries) { - if (!item.path) continue - const key = yield* canonical(item.path) - if (key === directory) return item - } - return undefined - }) - - function stopFsmonitor(target: string) { - return fs.exists(target).pipe( - Effect.orDie, - Effect.flatMap((exists) => (exists ? 
git(["fsmonitor--daemon", "stop"], { cwd: target }) : Effect.void)), - ) - } - - function cleanDirectory(target: string) { - return Effect.promise(() => - import("fs/promises") - .then((fsp) => fsp.rm(target, { recursive: true, force: true, maxRetries: 5, retryDelay: 100 })) - .catch((error) => { - const message = errorMessage(error) - throw new RemoveFailedError({ message: message || "Failed to remove git worktree directory" }) - }), - ) - } - - const remove = Effect.fn("Worktree.remove")(function* (input: RemoveInput) { - if (Instance.project.vcs !== "git") { - throw new NotGitError({ message: "Worktrees are only supported for git projects" }) - } - - const directory = yield* canonical(input.directory) - - const list = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) - if (list.code !== 0) { - throw new RemoveFailedError({ message: list.stderr || list.text || "Failed to read git worktrees" }) - } - - const entries = parseWorktreeList(list.text) - const entry = yield* locateWorktree(entries, directory) - - if (!entry?.path) { - const directoryExists = yield* fs.exists(directory).pipe(Effect.orDie) - if (directoryExists) { - yield* stopFsmonitor(directory) - yield* cleanDirectory(directory) - } - return true - } - - yield* stopFsmonitor(entry.path) - const removed = yield* git(["worktree", "remove", "--force", entry.path], { cwd: Instance.worktree }) - if (removed.code !== 0) { - const next = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) - if (next.code !== 0) { - throw new RemoveFailedError({ - message: removed.stderr || removed.text || next.stderr || next.text || "Failed to remove git worktree", - }) - } - - const stale = yield* locateWorktree(parseWorktreeList(next.text), directory) - if (stale?.path) { - throw new RemoveFailedError({ message: removed.stderr || removed.text || "Failed to remove git worktree" }) - } - } - - yield* cleanDirectory(entry.path) - - const branch = 
entry.branch?.replace(/^refs\/heads\//, "") - if (branch) { - const deleted = yield* git(["branch", "-D", branch], { cwd: Instance.worktree }) - if (deleted.code !== 0) { - throw new RemoveFailedError({ - message: deleted.stderr || deleted.text || "Failed to delete worktree branch", - }) - } - } - - return true - }) - - const gitExpect = Effect.fnUntraced(function* ( - args: string[], - opts: { cwd: string }, - error: (r: GitResult) => Error, - ) { - const result = yield* git(args, opts) - if (result.code !== 0) throw error(result) - return result - }) - - const runStartCommand = Effect.fnUntraced( - function* (directory: string, cmd: string) { - const [shell, args] = process.platform === "win32" ? ["cmd", ["/c", cmd]] : ["bash", ["-lc", cmd]] - const handle = yield* spawner.spawn( - ChildProcess.make(shell, args, { cwd: directory, extendEnv: true, stdin: "ignore" }), - ) - // Drain stdout, capture stderr for error reporting - const [, stderr] = yield* Effect.all( - [Stream.runDrain(handle.stdout), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ).pipe(Effect.orDie) - const code = yield* handle.exitCode - return { code, stderr } - }, - Effect.scoped, - Effect.catch(() => Effect.succeed({ code: 1, stderr: "" })), - ) - - const runStartScript = Effect.fnUntraced(function* (directory: string, cmd: string, kind: string) { - const text = cmd.trim() - if (!text) return true - const result = yield* runStartCommand(directory, text) - if (result.code === 0) return true - log.error("worktree start command failed", { kind, directory, message: result.stderr }) - return false - }) - - const runStartScripts = Effect.fnUntraced(function* ( - directory: string, - input: { projectID: ProjectID; extra?: string }, - ) { - const row = yield* Effect.sync(() => - Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get()), - ) - const project = row ? 
Project.fromRow(row) : undefined - const startup = project?.commands?.start?.trim() ?? "" - const ok = yield* runStartScript(directory, startup, "project") - if (!ok) return false - yield* runStartScript(directory, input.extra ?? "", "worktree") - return true - }) - - const prune = Effect.fnUntraced(function* (root: string, entries: string[]) { - const base = yield* canonical(root) - yield* Effect.forEach( - entries, - (entry) => - Effect.gen(function* () { - const target = yield* canonical(pathSvc.resolve(root, entry)) - if (target === base) return - if (!target.startsWith(`${base}${pathSvc.sep}`)) return - yield* fs.remove(target, { recursive: true }).pipe(Effect.ignore) - }), - { concurrency: "unbounded" }, - ) - }) - - const sweep = Effect.fnUntraced(function* (root: string) { - const first = yield* git(["clean", "-ffdx"], { cwd: root }) - if (first.code === 0) return first - - const entries = failedRemoves(first.stderr, first.text) - if (!entries.length) return first - - yield* prune(root, entries) - return yield* git(["clean", "-ffdx"], { cwd: root }) - }) - - const reset = Effect.fn("Worktree.reset")(function* (input: ResetInput) { - if (Instance.project.vcs !== "git") { - throw new NotGitError({ message: "Worktrees are only supported for git projects" }) - } - - const directory = yield* canonical(input.directory) - const primary = yield* canonical(Instance.worktree) - if (directory === primary) { - throw new ResetFailedError({ message: "Cannot reset the primary workspace" }) - } - - const list = yield* git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) - if (list.code !== 0) { - throw new ResetFailedError({ message: list.stderr || list.text || "Failed to read git worktrees" }) - } - - const entry = yield* locateWorktree(parseWorktreeList(list.text), directory) - if (!entry?.path) { - throw new ResetFailedError({ message: "Worktree not found" }) - } - - const worktreePath = entry.path - - const base = yield* 
gitSvc.defaultBranch(Instance.worktree) - if (!base) { - throw new ResetFailedError({ message: "Default branch not found" }) - } - - const sep = base.ref.indexOf("/") - if (base.ref !== base.name && sep > 0) { - const remote = base.ref.slice(0, sep) - const branch = base.ref.slice(sep + 1) - yield* gitExpect( - ["fetch", remote, branch], - { cwd: Instance.worktree }, - (r) => new ResetFailedError({ message: r.stderr || r.text || `Failed to fetch ${base.ref}` }), - ) - } - - yield* gitExpect( - ["reset", "--hard", base.ref], - { cwd: worktreePath }, - (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to reset worktree to target" }), - ) - - const cleanResult = yield* sweep(worktreePath) - if (cleanResult.code !== 0) { - throw new ResetFailedError({ message: cleanResult.stderr || cleanResult.text || "Failed to clean worktree" }) - } - - yield* gitExpect( - ["submodule", "update", "--init", "--recursive", "--force"], - { cwd: worktreePath }, - (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to update submodules" }), - ) - - yield* gitExpect( - ["submodule", "foreach", "--recursive", "git", "reset", "--hard"], - { cwd: worktreePath }, - (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to reset submodules" }), - ) - - yield* gitExpect( - ["submodule", "foreach", "--recursive", "git", "clean", "-fdx"], - { cwd: worktreePath }, - (r) => new ResetFailedError({ message: r.stderr || r.text || "Failed to clean submodules" }), - ) - - const status = yield* git(["-c", "core.fsmonitor=false", "status", "--porcelain=v1"], { cwd: worktreePath }) - if (status.code !== 0) { - throw new ResetFailedError({ message: status.stderr || status.text || "Failed to read git status" }) - } - - if (status.text.trim()) { - throw new ResetFailedError({ message: `Worktree reset left local changes:\n${status.text.trim()}` }) - } - - yield* runStartScripts(worktreePath, { projectID: Instance.project.id }).pipe( - Effect.catchCause((cause) 
=> Effect.sync(() => log.error("worktree start task failed", { cause }))), - Effect.forkIn(scope), - ) - - return true - }) - - return Service.of({ makeWorktreeInfo, createFromInfo, create, remove, reset }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Git.defaultLayer), - Layer.provide(CrossSpawnSpawner.defaultLayer), - Layer.provide(Project.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(NodePath.layer), -) From ae9a6966075042863cd061f798c0ce09fea3ff64 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:35:28 -0400 Subject: [PATCH 045/201] refactor: collapse installation barrel into installation/index.ts (#22910) --- packages/opencode/src/installation/index.ts | 339 +++++++++++++++++- .../opencode/src/installation/installation.ts | 336 ----------------- 2 files changed, 338 insertions(+), 337 deletions(-) delete mode 100644 packages/opencode/src/installation/installation.ts diff --git a/packages/opencode/src/installation/index.ts b/packages/opencode/src/installation/index.ts index 4e48fcd6a0..babde9dc47 100644 --- a/packages/opencode/src/installation/index.ts +++ b/packages/opencode/src/installation/index.ts @@ -1 +1,338 @@ -export * as Installation from "./installation" +import { Effect, Layer, Schema, Context, Stream } from "effect" +import { FetchHttpClient, HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { withTransientReadRetry } from "@/util/effect-http-client" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import path from "path" +import z from "zod" +import { BusEvent } from "@/bus/bus-event" +import { Flag } from "../flag/flag" +import { Log } from "../util" + +import semver from "semver" +import { InstallationChannel, InstallationVersion } from "./version" + +const log = Log.create({ service: "installation" }) + +export type Method = "curl" | "npm" | 
"yarn" | "pnpm" | "bun" | "brew" | "scoop" | "choco" | "unknown" + +export type ReleaseType = "patch" | "minor" | "major" + +export const Event = { + Updated: BusEvent.define( + "installation.updated", + z.object({ + version: z.string(), + }), + ), + UpdateAvailable: BusEvent.define( + "installation.update-available", + z.object({ + version: z.string(), + }), + ), +} + +export function getReleaseType(current: string, latest: string): ReleaseType { + const currMajor = semver.major(current) + const currMinor = semver.minor(current) + const newMajor = semver.major(latest) + const newMinor = semver.minor(latest) + + if (newMajor > currMajor) return "major" + if (newMinor > currMinor) return "minor" + return "patch" +} + +export const Info = z + .object({ + version: z.string(), + latest: z.string(), + }) + .meta({ + ref: "InstallationInfo", + }) +export type Info = z.infer + +export const USER_AGENT = `opencode/${InstallationChannel}/${InstallationVersion}/${Flag.OPENCODE_CLIENT}` + +export function isPreview() { + return InstallationChannel !== "latest" +} + +export function isLocal() { + return InstallationChannel === "local" +} + +export class UpgradeFailedError extends Schema.TaggedErrorClass()("UpgradeFailedError", { + stderr: Schema.String, +}) {} + +// Response schemas for external version APIs +const GitHubRelease = Schema.Struct({ tag_name: Schema.String }) +const NpmPackage = Schema.Struct({ version: Schema.String }) +const BrewFormula = Schema.Struct({ versions: Schema.Struct({ stable: Schema.String }) }) +const BrewInfoV2 = Schema.Struct({ + formulae: Schema.Array(Schema.Struct({ versions: Schema.Struct({ stable: Schema.String }) })), +}) +const ChocoPackage = Schema.Struct({ + d: Schema.Struct({ results: Schema.Array(Schema.Struct({ Version: Schema.String })) }), +}) +const ScoopManifest = NpmPackage + +export interface Interface { + readonly info: () => Effect.Effect + readonly method: () => Effect.Effect + readonly latest: (method?: Method) => 
Effect.Effect + readonly upgrade: (method: Method, target: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Installation") {} + +export const layer: Layer.Layer = + Layer.effect( + Service, + Effect.gen(function* () { + const http = yield* HttpClient.HttpClient + const httpOk = HttpClient.filterStatusOk(withTransientReadRetry(http)) + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + + const text = Effect.fnUntraced( + function* (cmd: string[], opts?: { cwd?: string; env?: Record }) { + const proc = ChildProcess.make(cmd[0], cmd.slice(1), { + cwd: opts?.cwd, + env: opts?.env, + extendEnv: true, + }) + const handle = yield* spawner.spawn(proc) + const out = yield* Stream.mkString(Stream.decodeText(handle.stdout)) + yield* handle.exitCode + return out + }, + Effect.scoped, + Effect.catch(() => Effect.succeed("")), + ) + + const run = Effect.fnUntraced( + function* (cmd: string[], opts?: { cwd?: string; env?: Record }) { + const proc = ChildProcess.make(cmd[0], cmd.slice(1), { + cwd: opts?.cwd, + env: opts?.env, + extendEnv: true, + }) + const handle = yield* spawner.spawn(proc) + const [stdout, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + return { code, stdout, stderr } + }, + Effect.scoped, + Effect.catch(() => Effect.succeed({ code: ChildProcessSpawner.ExitCode(1), stdout: "", stderr: "" })), + ) + + const getBrewFormula = Effect.fnUntraced(function* () { + const tapFormula = yield* text(["brew", "list", "--formula", "anomalyco/tap/opencode"]) + if (tapFormula.includes("opencode")) return "anomalyco/tap/opencode" + const coreFormula = yield* text(["brew", "list", "--formula", "opencode"]) + if (coreFormula.includes("opencode")) return "opencode" + return "opencode" + }) + + const upgradeCurl = Effect.fnUntraced( + function* (target: string) { + const 
response = yield* httpOk.execute(HttpClientRequest.get("https://opencode.ai/install")) + const body = yield* response.text + const bodyBytes = new TextEncoder().encode(body) + const proc = ChildProcess.make("bash", [], { + stdin: Stream.make(bodyBytes), + env: { VERSION: target }, + extendEnv: true, + }) + const handle = yield* spawner.spawn(proc) + const [stdout, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + return { code, stdout, stderr } + }, + Effect.scoped, + Effect.orDie, + ) + + const methodImpl = Effect.fn("Installation.method")(function* () { + if (process.execPath.includes(path.join(".opencode", "bin"))) return "curl" as Method + if (process.execPath.includes(path.join(".local", "bin"))) return "curl" as Method + const exec = process.execPath.toLowerCase() + + const checks: Array<{ name: Method; command: () => Effect.Effect }> = [ + { name: "npm", command: () => text(["npm", "list", "-g", "--depth=0"]) }, + { name: "yarn", command: () => text(["yarn", "global", "list"]) }, + { name: "pnpm", command: () => text(["pnpm", "list", "-g", "--depth=0"]) }, + { name: "bun", command: () => text(["bun", "pm", "ls", "-g"]) }, + { name: "brew", command: () => text(["brew", "list", "--formula", "opencode"]) }, + { name: "scoop", command: () => text(["scoop", "list", "opencode"]) }, + { name: "choco", command: () => text(["choco", "list", "--limit-output", "opencode"]) }, + ] + + checks.sort((a, b) => { + const aMatches = exec.includes(a.name) + const bMatches = exec.includes(b.name) + if (aMatches && !bMatches) return -1 + if (!aMatches && bMatches) return 1 + return 0 + }) + + for (const check of checks) { + const output = yield* check.command() + const installedName = + check.name === "brew" || check.name === "choco" || check.name === "scoop" ? 
"opencode" : "opencode-ai" + if (output.includes(installedName)) { + return check.name + } + } + + return "unknown" as Method + }) + + const latestImpl = Effect.fn("Installation.latest")(function* (installMethod?: Method) { + const detectedMethod = installMethod || (yield* methodImpl()) + + if (detectedMethod === "brew") { + const formula = yield* getBrewFormula() + if (formula.includes("/")) { + const infoJson = yield* text(["brew", "info", "--json=v2", formula]) + const info = yield* Schema.decodeUnknownEffect(Schema.fromJsonString(BrewInfoV2))(infoJson) + return info.formulae[0].versions.stable + } + const response = yield* httpOk.execute( + HttpClientRequest.get("https://formulae.brew.sh/api/formula/opencode.json").pipe( + HttpClientRequest.acceptJson, + ), + ) + const data = yield* HttpClientResponse.schemaBodyJson(BrewFormula)(response) + return data.versions.stable + } + + if (detectedMethod === "npm" || detectedMethod === "bun" || detectedMethod === "pnpm") { + const r = (yield* text(["npm", "config", "get", "registry"])).trim() + const reg = r || "https://registry.npmjs.org" + const registry = reg.endsWith("/") ? 
reg.slice(0, -1) : reg + const channel = InstallationChannel + const response = yield* httpOk.execute( + HttpClientRequest.get(`${registry}/opencode-ai/${channel}`).pipe(HttpClientRequest.acceptJson), + ) + const data = yield* HttpClientResponse.schemaBodyJson(NpmPackage)(response) + return data.version + } + + if (detectedMethod === "choco") { + const response = yield* httpOk.execute( + HttpClientRequest.get( + "https://community.chocolatey.org/api/v2/Packages?$filter=Id%20eq%20%27opencode%27%20and%20IsLatestVersion&$select=Version", + ).pipe(HttpClientRequest.setHeaders({ Accept: "application/json;odata=verbose" })), + ) + const data = yield* HttpClientResponse.schemaBodyJson(ChocoPackage)(response) + return data.d.results[0].Version + } + + if (detectedMethod === "scoop") { + const response = yield* httpOk.execute( + HttpClientRequest.get( + "https://raw.githubusercontent.com/ScoopInstaller/Main/master/bucket/opencode.json", + ).pipe(HttpClientRequest.setHeaders({ Accept: "application/json" })), + ) + const data = yield* HttpClientResponse.schemaBodyJson(ScoopManifest)(response) + return data.version + } + + const response = yield* httpOk.execute( + HttpClientRequest.get("https://api.github.com/repos/anomalyco/opencode/releases/latest").pipe( + HttpClientRequest.acceptJson, + ), + ) + const data = yield* HttpClientResponse.schemaBodyJson(GitHubRelease)(response) + return data.tag_name.replace(/^v/, "") + }, Effect.orDie) + + const upgradeImpl = Effect.fn("Installation.upgrade")(function* (m: Method, target: string) { + let result: { code: ChildProcessSpawner.ExitCode; stdout: string; stderr: string } | undefined + switch (m) { + case "curl": + result = yield* upgradeCurl(target) + break + case "npm": + result = yield* run(["npm", "install", "-g", `opencode-ai@${target}`]) + break + case "pnpm": + result = yield* run(["pnpm", "install", "-g", `opencode-ai@${target}`]) + break + case "bun": + result = yield* run(["bun", "install", "-g", `opencode-ai@${target}`]) + 
break + case "brew": { + const formula = yield* getBrewFormula() + const env = { HOMEBREW_NO_AUTO_UPDATE: "1" } + if (formula.includes("/")) { + const tap = yield* run(["brew", "tap", "anomalyco/tap"], { env }) + if (tap.code !== 0) { + result = tap + break + } + const repo = yield* text(["brew", "--repo", "anomalyco/tap"]) + const dir = repo.trim() + if (dir) { + const pull = yield* run(["git", "pull", "--ff-only"], { cwd: dir, env }) + if (pull.code !== 0) { + result = pull + break + } + } + } + result = yield* run(["brew", "upgrade", formula], { env }) + break + } + case "choco": + result = yield* run(["choco", "upgrade", "opencode", `--version=${target}`, "-y"]) + break + case "scoop": + result = yield* run(["scoop", "install", `opencode@${target}`]) + break + default: + return yield* new UpgradeFailedError({ stderr: `Unknown method: ${m}` }) + } + if (!result || result.code !== 0) { + const stderr = m === "choco" ? "not running from an elevated command shell" : result?.stderr || "" + return yield* new UpgradeFailedError({ stderr }) + } + log.info("upgraded", { + method: m, + target, + stdout: result.stdout, + stderr: result.stderr, + }) + yield* text([process.execPath, "--version"]) + }) + + return Service.of({ + info: Effect.fn("Installation.info")(function* () { + return { + version: InstallationVersion, + latest: yield* latestImpl(), + } + }), + method: methodImpl, + latest: latestImpl, + upgrade: upgradeImpl, + }) + }), + ) + +export const defaultLayer = layer.pipe( + Layer.provide(FetchHttpClient.layer), + Layer.provide(CrossSpawnSpawner.defaultLayer), +) + +export * as Installation from "." 
diff --git a/packages/opencode/src/installation/installation.ts b/packages/opencode/src/installation/installation.ts deleted file mode 100644 index 96a99b77a3..0000000000 --- a/packages/opencode/src/installation/installation.ts +++ /dev/null @@ -1,336 +0,0 @@ -import { Effect, Layer, Schema, Context, Stream } from "effect" -import { FetchHttpClient, HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { withTransientReadRetry } from "@/util/effect-http-client" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import path from "path" -import z from "zod" -import { BusEvent } from "@/bus/bus-event" -import { Flag } from "../flag/flag" -import { Log } from "../util" - -import semver from "semver" -import { InstallationChannel, InstallationVersion } from "./version" - -const log = Log.create({ service: "installation" }) - -export type Method = "curl" | "npm" | "yarn" | "pnpm" | "bun" | "brew" | "scoop" | "choco" | "unknown" - -export type ReleaseType = "patch" | "minor" | "major" - -export const Event = { - Updated: BusEvent.define( - "installation.updated", - z.object({ - version: z.string(), - }), - ), - UpdateAvailable: BusEvent.define( - "installation.update-available", - z.object({ - version: z.string(), - }), - ), -} - -export function getReleaseType(current: string, latest: string): ReleaseType { - const currMajor = semver.major(current) - const currMinor = semver.minor(current) - const newMajor = semver.major(latest) - const newMinor = semver.minor(latest) - - if (newMajor > currMajor) return "major" - if (newMinor > currMinor) return "minor" - return "patch" -} - -export const Info = z - .object({ - version: z.string(), - latest: z.string(), - }) - .meta({ - ref: "InstallationInfo", - }) -export type Info = z.infer - -export const USER_AGENT = `opencode/${InstallationChannel}/${InstallationVersion}/${Flag.OPENCODE_CLIENT}` - -export 
function isPreview() { - return InstallationChannel !== "latest" -} - -export function isLocal() { - return InstallationChannel === "local" -} - -export class UpgradeFailedError extends Schema.TaggedErrorClass()("UpgradeFailedError", { - stderr: Schema.String, -}) {} - -// Response schemas for external version APIs -const GitHubRelease = Schema.Struct({ tag_name: Schema.String }) -const NpmPackage = Schema.Struct({ version: Schema.String }) -const BrewFormula = Schema.Struct({ versions: Schema.Struct({ stable: Schema.String }) }) -const BrewInfoV2 = Schema.Struct({ - formulae: Schema.Array(Schema.Struct({ versions: Schema.Struct({ stable: Schema.String }) })), -}) -const ChocoPackage = Schema.Struct({ - d: Schema.Struct({ results: Schema.Array(Schema.Struct({ Version: Schema.String })) }), -}) -const ScoopManifest = NpmPackage - -export interface Interface { - readonly info: () => Effect.Effect - readonly method: () => Effect.Effect - readonly latest: (method?: Method) => Effect.Effect - readonly upgrade: (method: Method, target: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Installation") {} - -export const layer: Layer.Layer = - Layer.effect( - Service, - Effect.gen(function* () { - const http = yield* HttpClient.HttpClient - const httpOk = HttpClient.filterStatusOk(withTransientReadRetry(http)) - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - - const text = Effect.fnUntraced( - function* (cmd: string[], opts?: { cwd?: string; env?: Record }) { - const proc = ChildProcess.make(cmd[0], cmd.slice(1), { - cwd: opts?.cwd, - env: opts?.env, - extendEnv: true, - }) - const handle = yield* spawner.spawn(proc) - const out = yield* Stream.mkString(Stream.decodeText(handle.stdout)) - yield* handle.exitCode - return out - }, - Effect.scoped, - Effect.catch(() => Effect.succeed("")), - ) - - const run = Effect.fnUntraced( - function* (cmd: string[], opts?: { cwd?: string; env?: Record }) { - const proc = 
ChildProcess.make(cmd[0], cmd.slice(1), { - cwd: opts?.cwd, - env: opts?.env, - extendEnv: true, - }) - const handle = yield* spawner.spawn(proc) - const [stdout, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - return { code, stdout, stderr } - }, - Effect.scoped, - Effect.catch(() => Effect.succeed({ code: ChildProcessSpawner.ExitCode(1), stdout: "", stderr: "" })), - ) - - const getBrewFormula = Effect.fnUntraced(function* () { - const tapFormula = yield* text(["brew", "list", "--formula", "anomalyco/tap/opencode"]) - if (tapFormula.includes("opencode")) return "anomalyco/tap/opencode" - const coreFormula = yield* text(["brew", "list", "--formula", "opencode"]) - if (coreFormula.includes("opencode")) return "opencode" - return "opencode" - }) - - const upgradeCurl = Effect.fnUntraced( - function* (target: string) { - const response = yield* httpOk.execute(HttpClientRequest.get("https://opencode.ai/install")) - const body = yield* response.text - const bodyBytes = new TextEncoder().encode(body) - const proc = ChildProcess.make("bash", [], { - stdin: Stream.make(bodyBytes), - env: { VERSION: target }, - extendEnv: true, - }) - const handle = yield* spawner.spawn(proc) - const [stdout, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - return { code, stdout, stderr } - }, - Effect.scoped, - Effect.orDie, - ) - - const methodImpl = Effect.fn("Installation.method")(function* () { - if (process.execPath.includes(path.join(".opencode", "bin"))) return "curl" as Method - if (process.execPath.includes(path.join(".local", "bin"))) return "curl" as Method - const exec = process.execPath.toLowerCase() - - const checks: Array<{ name: Method; command: () => Effect.Effect }> = [ - 
{ name: "npm", command: () => text(["npm", "list", "-g", "--depth=0"]) }, - { name: "yarn", command: () => text(["yarn", "global", "list"]) }, - { name: "pnpm", command: () => text(["pnpm", "list", "-g", "--depth=0"]) }, - { name: "bun", command: () => text(["bun", "pm", "ls", "-g"]) }, - { name: "brew", command: () => text(["brew", "list", "--formula", "opencode"]) }, - { name: "scoop", command: () => text(["scoop", "list", "opencode"]) }, - { name: "choco", command: () => text(["choco", "list", "--limit-output", "opencode"]) }, - ] - - checks.sort((a, b) => { - const aMatches = exec.includes(a.name) - const bMatches = exec.includes(b.name) - if (aMatches && !bMatches) return -1 - if (!aMatches && bMatches) return 1 - return 0 - }) - - for (const check of checks) { - const output = yield* check.command() - const installedName = - check.name === "brew" || check.name === "choco" || check.name === "scoop" ? "opencode" : "opencode-ai" - if (output.includes(installedName)) { - return check.name - } - } - - return "unknown" as Method - }) - - const latestImpl = Effect.fn("Installation.latest")(function* (installMethod?: Method) { - const detectedMethod = installMethod || (yield* methodImpl()) - - if (detectedMethod === "brew") { - const formula = yield* getBrewFormula() - if (formula.includes("/")) { - const infoJson = yield* text(["brew", "info", "--json=v2", formula]) - const info = yield* Schema.decodeUnknownEffect(Schema.fromJsonString(BrewInfoV2))(infoJson) - return info.formulae[0].versions.stable - } - const response = yield* httpOk.execute( - HttpClientRequest.get("https://formulae.brew.sh/api/formula/opencode.json").pipe( - HttpClientRequest.acceptJson, - ), - ) - const data = yield* HttpClientResponse.schemaBodyJson(BrewFormula)(response) - return data.versions.stable - } - - if (detectedMethod === "npm" || detectedMethod === "bun" || detectedMethod === "pnpm") { - const r = (yield* text(["npm", "config", "get", "registry"])).trim() - const reg = r || 
"https://registry.npmjs.org" - const registry = reg.endsWith("/") ? reg.slice(0, -1) : reg - const channel = InstallationChannel - const response = yield* httpOk.execute( - HttpClientRequest.get(`${registry}/opencode-ai/${channel}`).pipe(HttpClientRequest.acceptJson), - ) - const data = yield* HttpClientResponse.schemaBodyJson(NpmPackage)(response) - return data.version - } - - if (detectedMethod === "choco") { - const response = yield* httpOk.execute( - HttpClientRequest.get( - "https://community.chocolatey.org/api/v2/Packages?$filter=Id%20eq%20%27opencode%27%20and%20IsLatestVersion&$select=Version", - ).pipe(HttpClientRequest.setHeaders({ Accept: "application/json;odata=verbose" })), - ) - const data = yield* HttpClientResponse.schemaBodyJson(ChocoPackage)(response) - return data.d.results[0].Version - } - - if (detectedMethod === "scoop") { - const response = yield* httpOk.execute( - HttpClientRequest.get( - "https://raw.githubusercontent.com/ScoopInstaller/Main/master/bucket/opencode.json", - ).pipe(HttpClientRequest.setHeaders({ Accept: "application/json" })), - ) - const data = yield* HttpClientResponse.schemaBodyJson(ScoopManifest)(response) - return data.version - } - - const response = yield* httpOk.execute( - HttpClientRequest.get("https://api.github.com/repos/anomalyco/opencode/releases/latest").pipe( - HttpClientRequest.acceptJson, - ), - ) - const data = yield* HttpClientResponse.schemaBodyJson(GitHubRelease)(response) - return data.tag_name.replace(/^v/, "") - }, Effect.orDie) - - const upgradeImpl = Effect.fn("Installation.upgrade")(function* (m: Method, target: string) { - let result: { code: ChildProcessSpawner.ExitCode; stdout: string; stderr: string } | undefined - switch (m) { - case "curl": - result = yield* upgradeCurl(target) - break - case "npm": - result = yield* run(["npm", "install", "-g", `opencode-ai@${target}`]) - break - case "pnpm": - result = yield* run(["pnpm", "install", "-g", `opencode-ai@${target}`]) - break - case "bun": - 
result = yield* run(["bun", "install", "-g", `opencode-ai@${target}`]) - break - case "brew": { - const formula = yield* getBrewFormula() - const env = { HOMEBREW_NO_AUTO_UPDATE: "1" } - if (formula.includes("/")) { - const tap = yield* run(["brew", "tap", "anomalyco/tap"], { env }) - if (tap.code !== 0) { - result = tap - break - } - const repo = yield* text(["brew", "--repo", "anomalyco/tap"]) - const dir = repo.trim() - if (dir) { - const pull = yield* run(["git", "pull", "--ff-only"], { cwd: dir, env }) - if (pull.code !== 0) { - result = pull - break - } - } - } - result = yield* run(["brew", "upgrade", formula], { env }) - break - } - case "choco": - result = yield* run(["choco", "upgrade", "opencode", `--version=${target}`, "-y"]) - break - case "scoop": - result = yield* run(["scoop", "install", `opencode@${target}`]) - break - default: - return yield* new UpgradeFailedError({ stderr: `Unknown method: ${m}` }) - } - if (!result || result.code !== 0) { - const stderr = m === "choco" ? 
"not running from an elevated command shell" : result?.stderr || "" - return yield* new UpgradeFailedError({ stderr }) - } - log.info("upgraded", { - method: m, - target, - stdout: result.stdout, - stderr: result.stderr, - }) - yield* text([process.execPath, "--version"]) - }) - - return Service.of({ - info: Effect.fn("Installation.info")(function* () { - return { - version: InstallationVersion, - latest: yield* latestImpl(), - } - }), - method: methodImpl, - latest: latestImpl, - upgrade: upgradeImpl, - }) - }), - ) - -export const defaultLayer = layer.pipe( - Layer.provide(FetchHttpClient.layer), - Layer.provide(CrossSpawnSpawner.defaultLayer), -) From 99d392a4fbff2f4203240709221f986c3ffe60a7 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:35:43 -0400 Subject: [PATCH 046/201] refactor: collapse skill barrel into skill/index.ts (#22912) --- packages/opencode/src/skill/index.ts | 265 ++++++++++++++++++++++++++- packages/opencode/src/skill/skill.ts | 262 -------------------------- 2 files changed, 264 insertions(+), 263 deletions(-) delete mode 100644 packages/opencode/src/skill/skill.ts diff --git a/packages/opencode/src/skill/index.ts b/packages/opencode/src/skill/index.ts index 6d7b428dfb..b139b39e6e 100644 --- a/packages/opencode/src/skill/index.ts +++ b/packages/opencode/src/skill/index.ts @@ -1 +1,264 @@ -export * as Skill from "./skill" +import os from "os" +import path from "path" +import { pathToFileURL } from "url" +import z from "zod" +import { Effect, Layer, Context } from "effect" +import { NamedError } from "@opencode-ai/shared/util/error" +import type { Agent } from "@/agent/agent" +import { Bus } from "@/bus" +import { InstanceState } from "@/effect" +import { Flag } from "@/flag/flag" +import { Global } from "@/global" +import { Permission } from "@/permission" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { Config } from "../config" +import { ConfigMarkdown } from "../config" +import { Glob } from 
"@opencode-ai/shared/util/glob" +import { Log } from "../util" +import { Discovery } from "./discovery" + +const log = Log.create({ service: "skill" }) +const EXTERNAL_DIRS = [".claude", ".agents"] +const EXTERNAL_SKILL_PATTERN = "skills/**/SKILL.md" +const OPENCODE_SKILL_PATTERN = "{skill,skills}/**/SKILL.md" +const SKILL_PATTERN = "**/SKILL.md" + +export const Info = z.object({ + name: z.string(), + description: z.string(), + location: z.string(), + content: z.string(), +}) +export type Info = z.infer + +export const InvalidError = NamedError.create( + "SkillInvalidError", + z.object({ + path: z.string(), + message: z.string().optional(), + issues: z.custom().optional(), + }), +) + +export const NameMismatchError = NamedError.create( + "SkillNameMismatchError", + z.object({ + path: z.string(), + expected: z.string(), + actual: z.string(), + }), +) + +type State = { + skills: Record + dirs: Set +} + +export interface Interface { + readonly get: (name: string) => Effect.Effect + readonly all: () => Effect.Effect + readonly dirs: () => Effect.Effect + readonly available: (agent?: Agent.Info) => Effect.Effect +} + +const add = Effect.fnUntraced(function* (state: State, match: string, bus: Bus.Interface) { + const md = yield* Effect.tryPromise({ + try: () => ConfigMarkdown.parse(match), + catch: (err) => err, + }).pipe( + Effect.catch( + Effect.fnUntraced(function* (err) { + const message = ConfigMarkdown.FrontmatterError.isInstance(err) + ? 
err.data.message + : `Failed to parse skill ${match}` + const { Session } = yield* Effect.promise(() => import("@/session")) + yield* bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) + log.error("failed to load skill", { skill: match, err }) + return undefined + }), + ), + ) + + if (!md) return + + const parsed = Info.pick({ name: true, description: true }).safeParse(md.data) + if (!parsed.success) return + + if (state.skills[parsed.data.name]) { + log.warn("duplicate skill name", { + name: parsed.data.name, + existing: state.skills[parsed.data.name].location, + duplicate: match, + }) + } + + state.dirs.add(path.dirname(match)) + state.skills[parsed.data.name] = { + name: parsed.data.name, + description: parsed.data.description, + location: match, + content: md.content, + } +}) + +const scan = Effect.fnUntraced(function* ( + state: State, + bus: Bus.Interface, + root: string, + pattern: string, + opts?: { dot?: boolean; scope?: string }, +) { + const matches = yield* Effect.tryPromise({ + try: () => + Glob.scan(pattern, { + cwd: root, + absolute: true, + include: "file", + symlink: true, + dot: opts?.dot, + }), + catch: (error) => error, + }).pipe( + Effect.catch((error) => { + if (!opts?.scope) return Effect.die(error) + log.error(`failed to scan ${opts.scope} skills`, { dir: root, error }) + return Effect.succeed([] as string[]) + }), + ) + + yield* Effect.forEach(matches, (match) => add(state, match, bus), { + concurrency: "unbounded", + discard: true, + }) +}) + +const loadSkills = Effect.fnUntraced(function* ( + state: State, + config: Config.Interface, + discovery: Discovery.Interface, + bus: Bus.Interface, + fsys: AppFileSystem.Interface, + directory: string, + worktree: string, +) { + if (!Flag.OPENCODE_DISABLE_EXTERNAL_SKILLS) { + for (const dir of EXTERNAL_DIRS) { + const root = path.join(Global.Path.home, dir) + if (!(yield* fsys.isDir(root))) continue + yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { 
dot: true, scope: "global" }) + } + + const upDirs = yield* fsys + .up({ targets: EXTERNAL_DIRS, start: directory, stop: worktree }) + .pipe(Effect.catch(() => Effect.succeed([] as string[]))) + + for (const root of upDirs) { + yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "project" }) + } + } + + const configDirs = yield* config.directories() + for (const dir of configDirs) { + yield* scan(state, bus, dir, OPENCODE_SKILL_PATTERN) + } + + const cfg = yield* config.get() + for (const item of cfg.skills?.paths ?? []) { + const expanded = item.startsWith("~/") ? path.join(os.homedir(), item.slice(2)) : item + const dir = path.isAbsolute(expanded) ? expanded : path.join(directory, expanded) + if (!(yield* fsys.isDir(dir))) { + log.warn("skill path not found", { path: dir }) + continue + } + + yield* scan(state, bus, dir, SKILL_PATTERN) + } + + for (const url of cfg.skills?.urls ?? []) { + const pulledDirs = yield* discovery.pull(url) + for (const dir of pulledDirs) { + state.dirs.add(dir) + yield* scan(state, bus, dir, SKILL_PATTERN) + } + } + + log.info("init", { count: Object.keys(state.skills).length }) +}) + +export class Service extends Context.Service()("@opencode/Skill") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const discovery = yield* Discovery.Service + const config = yield* Config.Service + const bus = yield* Bus.Service + const fsys = yield* AppFileSystem.Service + const state = yield* InstanceState.make( + Effect.fn("Skill.state")(function* (ctx) { + const s: State = { skills: {}, dirs: new Set() } + yield* loadSkills(s, config, discovery, bus, fsys, ctx.directory, ctx.worktree) + return s + }), + ) + + const get = Effect.fn("Skill.get")(function* (name: string) { + const s = yield* InstanceState.get(state) + return s.skills[name] + }) + + const all = Effect.fn("Skill.all")(function* () { + const s = yield* InstanceState.get(state) + return Object.values(s.skills) + }) + + const dirs = 
Effect.fn("Skill.dirs")(function* () { + const s = yield* InstanceState.get(state) + return Array.from(s.dirs) + }) + + const available = Effect.fn("Skill.available")(function* (agent?: Agent.Info) { + const s = yield* InstanceState.get(state) + const list = Object.values(s.skills).toSorted((a, b) => a.name.localeCompare(b.name)) + if (!agent) return list + return list.filter((skill) => Permission.evaluate("skill", skill.name, agent.permission).action !== "deny") + }) + + return Service.of({ get, all, dirs, available }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Discovery.defaultLayer), + Layer.provide(Config.defaultLayer), + Layer.provide(Bus.layer), + Layer.provide(AppFileSystem.defaultLayer), +) + +export function fmt(list: Info[], opts: { verbose: boolean }) { + if (list.length === 0) return "No skills are currently available." + if (opts.verbose) { + return [ + "", + ...list + .sort((a, b) => a.name.localeCompare(b.name)) + .flatMap((skill) => [ + " ", + ` ${skill.name}`, + ` ${skill.description}`, + ` ${pathToFileURL(skill.location).href}`, + " ", + ]), + "", + ].join("\n") + } + + return [ + "## Available Skills", + ...list + .toSorted((a, b) => a.name.localeCompare(b.name)) + .map((skill) => `- **${skill.name}**: ${skill.description}`), + ].join("\n") +} + +export * as Skill from "." 
diff --git a/packages/opencode/src/skill/skill.ts b/packages/opencode/src/skill/skill.ts deleted file mode 100644 index f8ff7b8f5f..0000000000 --- a/packages/opencode/src/skill/skill.ts +++ /dev/null @@ -1,262 +0,0 @@ -import os from "os" -import path from "path" -import { pathToFileURL } from "url" -import z from "zod" -import { Effect, Layer, Context } from "effect" -import { NamedError } from "@opencode-ai/shared/util/error" -import type { Agent } from "@/agent/agent" -import { Bus } from "@/bus" -import { InstanceState } from "@/effect" -import { Flag } from "@/flag/flag" -import { Global } from "@/global" -import { Permission } from "@/permission" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Config } from "../config" -import { ConfigMarkdown } from "../config" -import { Glob } from "@opencode-ai/shared/util/glob" -import { Log } from "../util" -import { Discovery } from "./discovery" - -const log = Log.create({ service: "skill" }) -const EXTERNAL_DIRS = [".claude", ".agents"] -const EXTERNAL_SKILL_PATTERN = "skills/**/SKILL.md" -const OPENCODE_SKILL_PATTERN = "{skill,skills}/**/SKILL.md" -const SKILL_PATTERN = "**/SKILL.md" - -export const Info = z.object({ - name: z.string(), - description: z.string(), - location: z.string(), - content: z.string(), -}) -export type Info = z.infer - -export const InvalidError = NamedError.create( - "SkillInvalidError", - z.object({ - path: z.string(), - message: z.string().optional(), - issues: z.custom().optional(), - }), -) - -export const NameMismatchError = NamedError.create( - "SkillNameMismatchError", - z.object({ - path: z.string(), - expected: z.string(), - actual: z.string(), - }), -) - -type State = { - skills: Record - dirs: Set -} - -export interface Interface { - readonly get: (name: string) => Effect.Effect - readonly all: () => Effect.Effect - readonly dirs: () => Effect.Effect - readonly available: (agent?: Agent.Info) => Effect.Effect -} - -const add = Effect.fnUntraced(function* 
(state: State, match: string, bus: Bus.Interface) { - const md = yield* Effect.tryPromise({ - try: () => ConfigMarkdown.parse(match), - catch: (err) => err, - }).pipe( - Effect.catch( - Effect.fnUntraced(function* (err) { - const message = ConfigMarkdown.FrontmatterError.isInstance(err) - ? err.data.message - : `Failed to parse skill ${match}` - const { Session } = yield* Effect.promise(() => import("@/session")) - yield* bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() }) - log.error("failed to load skill", { skill: match, err }) - return undefined - }), - ), - ) - - if (!md) return - - const parsed = Info.pick({ name: true, description: true }).safeParse(md.data) - if (!parsed.success) return - - if (state.skills[parsed.data.name]) { - log.warn("duplicate skill name", { - name: parsed.data.name, - existing: state.skills[parsed.data.name].location, - duplicate: match, - }) - } - - state.dirs.add(path.dirname(match)) - state.skills[parsed.data.name] = { - name: parsed.data.name, - description: parsed.data.description, - location: match, - content: md.content, - } -}) - -const scan = Effect.fnUntraced(function* ( - state: State, - bus: Bus.Interface, - root: string, - pattern: string, - opts?: { dot?: boolean; scope?: string }, -) { - const matches = yield* Effect.tryPromise({ - try: () => - Glob.scan(pattern, { - cwd: root, - absolute: true, - include: "file", - symlink: true, - dot: opts?.dot, - }), - catch: (error) => error, - }).pipe( - Effect.catch((error) => { - if (!opts?.scope) return Effect.die(error) - log.error(`failed to scan ${opts.scope} skills`, { dir: root, error }) - return Effect.succeed([] as string[]) - }), - ) - - yield* Effect.forEach(matches, (match) => add(state, match, bus), { - concurrency: "unbounded", - discard: true, - }) -}) - -const loadSkills = Effect.fnUntraced(function* ( - state: State, - config: Config.Interface, - discovery: Discovery.Interface, - bus: Bus.Interface, - fsys: 
AppFileSystem.Interface, - directory: string, - worktree: string, -) { - if (!Flag.OPENCODE_DISABLE_EXTERNAL_SKILLS) { - for (const dir of EXTERNAL_DIRS) { - const root = path.join(Global.Path.home, dir) - if (!(yield* fsys.isDir(root))) continue - yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "global" }) - } - - const upDirs = yield* fsys - .up({ targets: EXTERNAL_DIRS, start: directory, stop: worktree }) - .pipe(Effect.catch(() => Effect.succeed([] as string[]))) - - for (const root of upDirs) { - yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "project" }) - } - } - - const configDirs = yield* config.directories() - for (const dir of configDirs) { - yield* scan(state, bus, dir, OPENCODE_SKILL_PATTERN) - } - - const cfg = yield* config.get() - for (const item of cfg.skills?.paths ?? []) { - const expanded = item.startsWith("~/") ? path.join(os.homedir(), item.slice(2)) : item - const dir = path.isAbsolute(expanded) ? expanded : path.join(directory, expanded) - if (!(yield* fsys.isDir(dir))) { - log.warn("skill path not found", { path: dir }) - continue - } - - yield* scan(state, bus, dir, SKILL_PATTERN) - } - - for (const url of cfg.skills?.urls ?? 
[]) { - const pulledDirs = yield* discovery.pull(url) - for (const dir of pulledDirs) { - state.dirs.add(dir) - yield* scan(state, bus, dir, SKILL_PATTERN) - } - } - - log.info("init", { count: Object.keys(state.skills).length }) -}) - -export class Service extends Context.Service()("@opencode/Skill") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const discovery = yield* Discovery.Service - const config = yield* Config.Service - const bus = yield* Bus.Service - const fsys = yield* AppFileSystem.Service - const state = yield* InstanceState.make( - Effect.fn("Skill.state")(function* (ctx) { - const s: State = { skills: {}, dirs: new Set() } - yield* loadSkills(s, config, discovery, bus, fsys, ctx.directory, ctx.worktree) - return s - }), - ) - - const get = Effect.fn("Skill.get")(function* (name: string) { - const s = yield* InstanceState.get(state) - return s.skills[name] - }) - - const all = Effect.fn("Skill.all")(function* () { - const s = yield* InstanceState.get(state) - return Object.values(s.skills) - }) - - const dirs = Effect.fn("Skill.dirs")(function* () { - const s = yield* InstanceState.get(state) - return Array.from(s.dirs) - }) - - const available = Effect.fn("Skill.available")(function* (agent?: Agent.Info) { - const s = yield* InstanceState.get(state) - const list = Object.values(s.skills).toSorted((a, b) => a.name.localeCompare(b.name)) - if (!agent) return list - return list.filter((skill) => Permission.evaluate("skill", skill.name, agent.permission).action !== "deny") - }) - - return Service.of({ get, all, dirs, available }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Discovery.defaultLayer), - Layer.provide(Config.defaultLayer), - Layer.provide(Bus.layer), - Layer.provide(AppFileSystem.defaultLayer), -) - -export function fmt(list: Info[], opts: { verbose: boolean }) { - if (list.length === 0) return "No skills are currently available." 
- if (opts.verbose) { - return [ - "", - ...list - .sort((a, b) => a.name.localeCompare(b.name)) - .flatMap((skill) => [ - " ", - ` ${skill.name}`, - ` ${skill.description}`, - ` ${pathToFileURL(skill.location).href}`, - " ", - ]), - "", - ].join("\n") - } - - return [ - "## Available Skills", - ...list - .toSorted((a, b) => a.name.localeCompare(b.name)) - .map((skill) => `- **${skill.name}**: ${skill.description}`), - ].join("\n") -} From ab15fc1575fbdc9d9233bf7a130b4ae7b9cf8754 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:02 -0400 Subject: [PATCH 047/201] refactor: collapse npm barrel into npm/index.ts (#22911) --- packages/opencode/src/npm/index.ts | 190 ++++++++++++++++++++++++++++- packages/opencode/src/npm/npm.ts | 187 ---------------------------- 2 files changed, 189 insertions(+), 188 deletions(-) delete mode 100644 packages/opencode/src/npm/npm.ts diff --git a/packages/opencode/src/npm/index.ts b/packages/opencode/src/npm/index.ts index 856ed2a2c6..174df12974 100644 --- a/packages/opencode/src/npm/index.ts +++ b/packages/opencode/src/npm/index.ts @@ -1 +1,189 @@ -export * as Npm from "./npm" +import semver from "semver" +import z from "zod" +import { NamedError } from "@opencode-ai/shared/util/error" +import { Global } from "../global" +import { Log } from "../util" +import path from "path" +import { readdir, rm } from "fs/promises" +import { Filesystem } from "@/util" +import { Flock } from "@opencode-ai/shared/util/flock" + +const log = Log.create({ service: "npm" }) +const illegal = process.platform === "win32" ? new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined + +export const InstallFailedError = NamedError.create( + "NpmInstallFailedError", + z.object({ + pkg: z.string(), + }), +) + +export function sanitize(pkg: string) { + if (!illegal) return pkg + return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? 
"_" : char)).join("") +} + +function directory(pkg: string) { + return path.join(Global.Path.cache, "packages", sanitize(pkg)) +} + +function resolveEntryPoint(name: string, dir: string) { + let entrypoint: string | undefined + try { + entrypoint = typeof Bun !== "undefined" ? import.meta.resolve(name, dir) : import.meta.resolve(dir) + } catch {} + const result = { + directory: dir, + entrypoint, + } + return result +} + +export async function outdated(pkg: string, cachedVersion: string): Promise { + const response = await fetch(`https://registry.npmjs.org/${pkg}`) + if (!response.ok) { + log.warn("Failed to resolve latest version, using cached", { pkg, cachedVersion }) + return false + } + + const data = (await response.json()) as { "dist-tags"?: { latest?: string } } + const latestVersion = data?.["dist-tags"]?.latest + if (!latestVersion) { + log.warn("No latest version found, using cached", { pkg, cachedVersion }) + return false + } + + const range = /[\s^~*xX<>|=]/.test(cachedVersion) + if (range) return !semver.satisfies(latestVersion, cachedVersion) + + return semver.lt(cachedVersion, latestVersion) +} + +export async function add(pkg: string) { + const { Arborist } = await import("@npmcli/arborist") + const dir = directory(pkg) + await using _ = await Flock.acquire(`npm-install:${Filesystem.resolve(dir)}`) + log.info("installing package", { + pkg, + }) + + const arborist = new Arborist({ + path: dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + const tree = await arborist.loadVirtual().catch(() => {}) + if (tree) { + const first = tree.edgesOut.values().next().value?.to + if (first) { + return resolveEntryPoint(first.name, first.path) + } + } + + const result = await arborist + .reify({ + add: [pkg], + save: true, + saveType: "prod", + }) + .catch((cause) => { + throw new InstallFailedError( + { pkg }, + { + cause, + }, + ) + }) + + const first = result.edgesOut.values().next().value?.to + if (!first) throw new 
InstallFailedError({ pkg }) + return resolveEntryPoint(first.name, first.path) +} + +export async function install(dir: string) { + await using _ = await Flock.acquire(`npm-install:${dir}`) + log.info("checking dependencies", { dir }) + + const reify = async () => { + const { Arborist } = await import("@npmcli/arborist") + const arb = new Arborist({ + path: dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + await arb.reify().catch(() => {}) + } + + if (!(await Filesystem.exists(path.join(dir, "node_modules")))) { + log.info("node_modules missing, reifying") + await reify() + return + } + + const pkg = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) + const lock = await Filesystem.readJson(path.join(dir, "package-lock.json")).catch(() => ({})) + + const declared = new Set([ + ...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.devDependencies || {}), + ...Object.keys(pkg.peerDependencies || {}), + ...Object.keys(pkg.optionalDependencies || {}), + ]) + + const root = lock.packages?.[""] || {} + const locked = new Set([ + ...Object.keys(root.dependencies || {}), + ...Object.keys(root.devDependencies || {}), + ...Object.keys(root.peerDependencies || {}), + ...Object.keys(root.optionalDependencies || {}), + ]) + + for (const name of declared) { + if (!locked.has(name)) { + log.info("dependency not in lock file, reifying", { name }) + await reify() + return + } + } + + log.info("dependencies in sync") +} + +export async function which(pkg: string) { + const dir = directory(pkg) + const binDir = path.join(dir, "node_modules", ".bin") + + const pick = async () => { + const files = await readdir(binDir).catch(() => []) + if (files.length === 0) return undefined + if (files.length === 1) return files[0] + // Multiple binaries — resolve from package.json bin field like npx does + const pkgJson = await Filesystem.readJson<{ bin?: string | Record }>( + path.join(dir, "node_modules", pkg, 
"package.json"), + ).catch(() => undefined) + if (pkgJson?.bin) { + const unscoped = pkg.startsWith("@") ? pkg.split("/")[1] : pkg + const bin = pkgJson.bin + if (typeof bin === "string") return unscoped + const keys = Object.keys(bin) + if (keys.length === 1) return keys[0] + return bin[unscoped] ? unscoped : keys[0] + } + return files[0] + } + + const bin = await pick() + if (bin) return path.join(binDir, bin) + + await rm(path.join(dir, "package-lock.json"), { force: true }) + await add(pkg) + const resolved = await pick() + if (!resolved) return + return path.join(binDir, resolved) +} + +export * as Npm from "." diff --git a/packages/opencode/src/npm/npm.ts b/packages/opencode/src/npm/npm.ts deleted file mode 100644 index d74c10d555..0000000000 --- a/packages/opencode/src/npm/npm.ts +++ /dev/null @@ -1,187 +0,0 @@ -import semver from "semver" -import z from "zod" -import { NamedError } from "@opencode-ai/shared/util/error" -import { Global } from "../global" -import { Log } from "../util" -import path from "path" -import { readdir, rm } from "fs/promises" -import { Filesystem } from "@/util" -import { Flock } from "@opencode-ai/shared/util/flock" - -const log = Log.create({ service: "npm" }) -const illegal = process.platform === "win32" ? new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined - -export const InstallFailedError = NamedError.create( - "NpmInstallFailedError", - z.object({ - pkg: z.string(), - }), -) - -export function sanitize(pkg: string) { - if (!illegal) return pkg - return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? "_" : char)).join("") -} - -function directory(pkg: string) { - return path.join(Global.Path.cache, "packages", sanitize(pkg)) -} - -function resolveEntryPoint(name: string, dir: string) { - let entrypoint: string | undefined - try { - entrypoint = typeof Bun !== "undefined" ? 
import.meta.resolve(name, dir) : import.meta.resolve(dir) - } catch {} - const result = { - directory: dir, - entrypoint, - } - return result -} - -export async function outdated(pkg: string, cachedVersion: string): Promise { - const response = await fetch(`https://registry.npmjs.org/${pkg}`) - if (!response.ok) { - log.warn("Failed to resolve latest version, using cached", { pkg, cachedVersion }) - return false - } - - const data = (await response.json()) as { "dist-tags"?: { latest?: string } } - const latestVersion = data?.["dist-tags"]?.latest - if (!latestVersion) { - log.warn("No latest version found, using cached", { pkg, cachedVersion }) - return false - } - - const range = /[\s^~*xX<>|=]/.test(cachedVersion) - if (range) return !semver.satisfies(latestVersion, cachedVersion) - - return semver.lt(cachedVersion, latestVersion) -} - -export async function add(pkg: string) { - const { Arborist } = await import("@npmcli/arborist") - const dir = directory(pkg) - await using _ = await Flock.acquire(`npm-install:${Filesystem.resolve(dir)}`) - log.info("installing package", { - pkg, - }) - - const arborist = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - const tree = await arborist.loadVirtual().catch(() => {}) - if (tree) { - const first = tree.edgesOut.values().next().value?.to - if (first) { - return resolveEntryPoint(first.name, first.path) - } - } - - const result = await arborist - .reify({ - add: [pkg], - save: true, - saveType: "prod", - }) - .catch((cause) => { - throw new InstallFailedError( - { pkg }, - { - cause, - }, - ) - }) - - const first = result.edgesOut.values().next().value?.to - if (!first) throw new InstallFailedError({ pkg }) - return resolveEntryPoint(first.name, first.path) -} - -export async function install(dir: string) { - await using _ = await Flock.acquire(`npm-install:${dir}`) - log.info("checking dependencies", { dir }) - - const reify = async () => { - const { 
Arborist } = await import("@npmcli/arborist") - const arb = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - await arb.reify().catch(() => {}) - } - - if (!(await Filesystem.exists(path.join(dir, "node_modules")))) { - log.info("node_modules missing, reifying") - await reify() - return - } - - const pkg = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) - const lock = await Filesystem.readJson(path.join(dir, "package-lock.json")).catch(() => ({})) - - const declared = new Set([ - ...Object.keys(pkg.dependencies || {}), - ...Object.keys(pkg.devDependencies || {}), - ...Object.keys(pkg.peerDependencies || {}), - ...Object.keys(pkg.optionalDependencies || {}), - ]) - - const root = lock.packages?.[""] || {} - const locked = new Set([ - ...Object.keys(root.dependencies || {}), - ...Object.keys(root.devDependencies || {}), - ...Object.keys(root.peerDependencies || {}), - ...Object.keys(root.optionalDependencies || {}), - ]) - - for (const name of declared) { - if (!locked.has(name)) { - log.info("dependency not in lock file, reifying", { name }) - await reify() - return - } - } - - log.info("dependencies in sync") -} - -export async function which(pkg: string) { - const dir = directory(pkg) - const binDir = path.join(dir, "node_modules", ".bin") - - const pick = async () => { - const files = await readdir(binDir).catch(() => []) - if (files.length === 0) return undefined - if (files.length === 1) return files[0] - // Multiple binaries — resolve from package.json bin field like npx does - const pkgJson = await Filesystem.readJson<{ bin?: string | Record }>( - path.join(dir, "node_modules", pkg, "package.json"), - ).catch(() => undefined) - if (pkgJson?.bin) { - const unscoped = pkg.startsWith("@") ? 
pkg.split("/")[1] : pkg - const bin = pkgJson.bin - if (typeof bin === "string") return unscoped - const keys = Object.keys(bin) - if (keys.length === 1) return keys[0] - return bin[unscoped] ? unscoped : keys[0] - } - return files[0] - } - - const bin = await pick() - if (bin) return path.join(binDir, bin) - - await rm(path.join(dir, "package-lock.json"), { force: true }) - await add(pkg) - const resolved = await pick() - if (!resolved) return - return path.join(binDir, resolved) -} From 964474a1b1cb2d864ef982e6ab7280b5b509c26f Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:04 -0400 Subject: [PATCH 048/201] refactor: collapse permission barrel into permission/index.ts (#22915) --- packages/opencode/src/permission/index.ts | 326 +++++++++++++++++- .../opencode/src/permission/permission.ts | 323 ----------------- 2 files changed, 325 insertions(+), 324 deletions(-) delete mode 100644 packages/opencode/src/permission/permission.ts diff --git a/packages/opencode/src/permission/index.ts b/packages/opencode/src/permission/index.ts index 7d8a2fff82..b9a221155c 100644 --- a/packages/opencode/src/permission/index.ts +++ b/packages/opencode/src/permission/index.ts @@ -1 +1,325 @@ -export * as Permission from "./permission" +import { Bus } from "@/bus" +import { BusEvent } from "@/bus/bus-event" +import { ConfigPermission } from "@/config/permission" +import { InstanceState } from "@/effect" +import { ProjectID } from "@/project/schema" +import { MessageID, SessionID } from "@/session/schema" +import { PermissionTable } from "@/session/session.sql" +import { Database, eq } from "@/storage" +import { zod } from "@/util/effect-zod" +import { Log } from "@/util" +import { withStatics } from "@/util/schema" +import { Wildcard } from "@/util" +import { Deferred, Effect, Layer, Schema, Context } from "effect" +import os from "os" +import { evaluate as evalRule } from "./evaluate" +import { PermissionID } from "./schema" + +const log = Log.create({ service: 
"permission" }) + +export const Action = Schema.Literals(["allow", "deny", "ask"]) + .annotate({ identifier: "PermissionAction" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Action = Schema.Schema.Type + +export class Rule extends Schema.Class("PermissionRule")({ + permission: Schema.String, + pattern: Schema.String, + action: Action, +}) { + static readonly zod = zod(this) +} + +export const Ruleset = Schema.mutable(Schema.Array(Rule)) + .annotate({ identifier: "PermissionRuleset" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Ruleset = Schema.Schema.Type + +export class Request extends Schema.Class("PermissionRequest")({ + id: PermissionID, + sessionID: SessionID, + permission: Schema.String, + patterns: Schema.Array(Schema.String), + metadata: Schema.Record(Schema.String, Schema.Unknown), + always: Schema.Array(Schema.String), + tool: Schema.optional( + Schema.Struct({ + messageID: MessageID, + callID: Schema.String, + }), + ), +}) { + static readonly zod = zod(this) +} + +export const Reply = Schema.Literals(["once", "always", "reject"]).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Reply = Schema.Schema.Type + +const reply = { + reply: Reply, + message: Schema.optional(Schema.String), +} + +export const ReplyBody = Schema.Struct(reply) + .annotate({ identifier: "PermissionReplyBody" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type ReplyBody = Schema.Schema.Type + +export class Approval extends Schema.Class("PermissionApproval")({ + projectID: ProjectID, + patterns: Schema.Array(Schema.String), +}) { + static readonly zod = zod(this) +} + +export const Event = { + Asked: BusEvent.define("permission.asked", Request.zod), + Replied: BusEvent.define( + "permission.replied", + zod( + Schema.Struct({ + sessionID: SessionID, + requestID: PermissionID, + reply: Reply, + }), + ), + ), +} + +export class RejectedError extends Schema.TaggedErrorClass()("PermissionRejectedError", {}) { + override get message() { + 
return "The user rejected permission to use this specific tool call." + } +} + +export class CorrectedError extends Schema.TaggedErrorClass()("PermissionCorrectedError", { + feedback: Schema.String, +}) { + override get message() { + return `The user rejected permission to use this specific tool call with the following feedback: ${this.feedback}` + } +} + +export class DeniedError extends Schema.TaggedErrorClass()("PermissionDeniedError", { + ruleset: Schema.Any, +}) { + override get message() { + return `The user has specified a rule which prevents you from using this specific tool call. Here are some of the relevant rules ${JSON.stringify(this.ruleset)}` + } +} + +export type Error = DeniedError | RejectedError | CorrectedError + +export const AskInput = Schema.Struct({ + ...Request.fields, + id: Schema.optional(PermissionID), + ruleset: Ruleset, +}) + .annotate({ identifier: "PermissionAskInput" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type AskInput = Schema.Schema.Type + +export const ReplyInput = Schema.Struct({ + requestID: PermissionID, + ...reply, +}) + .annotate({ identifier: "PermissionReplyInput" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type ReplyInput = Schema.Schema.Type + +export interface Interface { + readonly ask: (input: AskInput) => Effect.Effect + readonly reply: (input: ReplyInput) => Effect.Effect + readonly list: () => Effect.Effect> +} + +interface PendingEntry { + info: Request + deferred: Deferred.Deferred +} + +interface State { + pending: Map + approved: Ruleset +} + +export function evaluate(permission: string, pattern: string, ...rulesets: Ruleset[]): Rule { + log.info("evaluate", { permission, pattern, ruleset: rulesets.flat() }) + return evalRule(permission, pattern, ...rulesets) +} + +export class Service extends Context.Service()("@opencode/Permission") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + const state = yield* 
InstanceState.make( + Effect.fn("Permission.state")(function* (ctx) { + const row = Database.use((db) => + db.select().from(PermissionTable).where(eq(PermissionTable.project_id, ctx.project.id)).get(), + ) + const state = { + pending: new Map(), + approved: row?.data ?? [], + } + + yield* Effect.addFinalizer(() => + Effect.gen(function* () { + for (const item of state.pending.values()) { + yield* Deferred.fail(item.deferred, new RejectedError()) + } + state.pending.clear() + }), + ) + + return state + }), + ) + + const ask = Effect.fn("Permission.ask")(function* (input: AskInput) { + const { approved, pending } = yield* InstanceState.get(state) + const { ruleset, ...request } = input + let needsAsk = false + + for (const pattern of request.patterns) { + const rule = evaluate(request.permission, pattern, ruleset, approved) + log.info("evaluated", { permission: request.permission, pattern, action: rule }) + if (rule.action === "deny") { + return yield* new DeniedError({ + ruleset: ruleset.filter((rule) => Wildcard.match(request.permission, rule.permission)), + }) + } + if (rule.action === "allow") continue + needsAsk = true + } + + if (!needsAsk) return + + const id = request.id ?? 
PermissionID.ascending() + const info = Schema.decodeUnknownSync(Request)({ + id, + ...request, + }) + log.info("asking", { id, permission: info.permission, patterns: info.patterns }) + + const deferred = yield* Deferred.make() + pending.set(id, { info, deferred }) + yield* bus.publish(Event.Asked, info) + return yield* Effect.ensuring( + Deferred.await(deferred), + Effect.sync(() => { + pending.delete(id) + }), + ) + }) + + const reply = Effect.fn("Permission.reply")(function* (input: ReplyInput) { + const { approved, pending } = yield* InstanceState.get(state) + const existing = pending.get(input.requestID) + if (!existing) return + + pending.delete(input.requestID) + yield* bus.publish(Event.Replied, { + sessionID: existing.info.sessionID, + requestID: existing.info.id, + reply: input.reply, + }) + + if (input.reply === "reject") { + yield* Deferred.fail( + existing.deferred, + input.message ? new CorrectedError({ feedback: input.message }) : new RejectedError(), + ) + + for (const [id, item] of pending.entries()) { + if (item.info.sessionID !== existing.info.sessionID) continue + pending.delete(id) + yield* bus.publish(Event.Replied, { + sessionID: item.info.sessionID, + requestID: item.info.id, + reply: "reject", + }) + yield* Deferred.fail(item.deferred, new RejectedError()) + } + return + } + + yield* Deferred.succeed(existing.deferred, undefined) + if (input.reply === "once") return + + for (const pattern of existing.info.always) { + approved.push({ + permission: existing.info.permission, + pattern, + action: "allow", + }) + } + + for (const [id, item] of pending.entries()) { + if (item.info.sessionID !== existing.info.sessionID) continue + const ok = item.info.patterns.every( + (pattern) => evaluate(item.info.permission, pattern, approved).action === "allow", + ) + if (!ok) continue + pending.delete(id) + yield* bus.publish(Event.Replied, { + sessionID: item.info.sessionID, + requestID: item.info.id, + reply: "always", + }) + yield* 
Deferred.succeed(item.deferred, undefined) + } + }) + + const list = Effect.fn("Permission.list")(function* () { + const pending = (yield* InstanceState.get(state)).pending + return Array.from(pending.values(), (item) => item.info) + }) + + return Service.of({ ask, reply, list }) + }), +) + +function expand(pattern: string): string { + if (pattern.startsWith("~/")) return os.homedir() + pattern.slice(1) + if (pattern === "~") return os.homedir() + if (pattern.startsWith("$HOME/")) return os.homedir() + pattern.slice(5) + if (pattern.startsWith("$HOME")) return os.homedir() + pattern.slice(5) + return pattern +} + +export function fromConfig(permission: ConfigPermission.Info) { + const ruleset: Ruleset = [] + for (const [key, value] of Object.entries(permission)) { + if (typeof value === "string") { + ruleset.push({ permission: key, action: value, pattern: "*" }) + continue + } + ruleset.push( + ...Object.entries(value).map(([pattern, action]) => ({ permission: key, pattern: expand(pattern), action })), + ) + } + return ruleset +} + +export function merge(...rulesets: Ruleset[]): Ruleset { + return rulesets.flat() +} + +const EDIT_TOOLS = ["edit", "write", "apply_patch", "multiedit"] + +export function disabled(tools: string[], ruleset: Ruleset): Set { + const result = new Set() + for (const tool of tools) { + const permission = EDIT_TOOLS.includes(tool) ? "edit" : tool + const rule = ruleset.findLast((rule) => Wildcard.match(permission, rule.permission)) + if (!rule) continue + if (rule.pattern === "*" && rule.action === "deny") result.add(tool) + } + return result +} + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) + +export * as Permission from "." 
diff --git a/packages/opencode/src/permission/permission.ts b/packages/opencode/src/permission/permission.ts deleted file mode 100644 index 44dac3b1db..0000000000 --- a/packages/opencode/src/permission/permission.ts +++ /dev/null @@ -1,323 +0,0 @@ -import { Bus } from "@/bus" -import { BusEvent } from "@/bus/bus-event" -import { ConfigPermission } from "@/config/permission" -import { InstanceState } from "@/effect" -import { ProjectID } from "@/project/schema" -import { MessageID, SessionID } from "@/session/schema" -import { PermissionTable } from "@/session/session.sql" -import { Database, eq } from "@/storage" -import { zod } from "@/util/effect-zod" -import { Log } from "@/util" -import { withStatics } from "@/util/schema" -import { Wildcard } from "@/util" -import { Deferred, Effect, Layer, Schema, Context } from "effect" -import os from "os" -import { evaluate as evalRule } from "./evaluate" -import { PermissionID } from "./schema" - -const log = Log.create({ service: "permission" }) - -export const Action = Schema.Literals(["allow", "deny", "ask"]) - .annotate({ identifier: "PermissionAction" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Action = Schema.Schema.Type - -export class Rule extends Schema.Class("PermissionRule")({ - permission: Schema.String, - pattern: Schema.String, - action: Action, -}) { - static readonly zod = zod(this) -} - -export const Ruleset = Schema.mutable(Schema.Array(Rule)) - .annotate({ identifier: "PermissionRuleset" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Ruleset = Schema.Schema.Type - -export class Request extends Schema.Class("PermissionRequest")({ - id: PermissionID, - sessionID: SessionID, - permission: Schema.String, - patterns: Schema.Array(Schema.String), - metadata: Schema.Record(Schema.String, Schema.Unknown), - always: Schema.Array(Schema.String), - tool: Schema.optional( - Schema.Struct({ - messageID: MessageID, - callID: Schema.String, - }), - ), -}) { - static readonly zod = 
zod(this) -} - -export const Reply = Schema.Literals(["once", "always", "reject"]).pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Reply = Schema.Schema.Type - -const reply = { - reply: Reply, - message: Schema.optional(Schema.String), -} - -export const ReplyBody = Schema.Struct(reply) - .annotate({ identifier: "PermissionReplyBody" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type ReplyBody = Schema.Schema.Type - -export class Approval extends Schema.Class("PermissionApproval")({ - projectID: ProjectID, - patterns: Schema.Array(Schema.String), -}) { - static readonly zod = zod(this) -} - -export const Event = { - Asked: BusEvent.define("permission.asked", Request.zod), - Replied: BusEvent.define( - "permission.replied", - zod( - Schema.Struct({ - sessionID: SessionID, - requestID: PermissionID, - reply: Reply, - }), - ), - ), -} - -export class RejectedError extends Schema.TaggedErrorClass()("PermissionRejectedError", {}) { - override get message() { - return "The user rejected permission to use this specific tool call." - } -} - -export class CorrectedError extends Schema.TaggedErrorClass()("PermissionCorrectedError", { - feedback: Schema.String, -}) { - override get message() { - return `The user rejected permission to use this specific tool call with the following feedback: ${this.feedback}` - } -} - -export class DeniedError extends Schema.TaggedErrorClass()("PermissionDeniedError", { - ruleset: Schema.Any, -}) { - override get message() { - return `The user has specified a rule which prevents you from using this specific tool call. 
Here are some of the relevant rules ${JSON.stringify(this.ruleset)}` - } -} - -export type Error = DeniedError | RejectedError | CorrectedError - -export const AskInput = Schema.Struct({ - ...Request.fields, - id: Schema.optional(PermissionID), - ruleset: Ruleset, -}) - .annotate({ identifier: "PermissionAskInput" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type AskInput = Schema.Schema.Type - -export const ReplyInput = Schema.Struct({ - requestID: PermissionID, - ...reply, -}) - .annotate({ identifier: "PermissionReplyInput" }) - .pipe(withStatics((s) => ({ zod: zod(s) }))) -export type ReplyInput = Schema.Schema.Type - -export interface Interface { - readonly ask: (input: AskInput) => Effect.Effect - readonly reply: (input: ReplyInput) => Effect.Effect - readonly list: () => Effect.Effect> -} - -interface PendingEntry { - info: Request - deferred: Deferred.Deferred -} - -interface State { - pending: Map - approved: Ruleset -} - -export function evaluate(permission: string, pattern: string, ...rulesets: Ruleset[]): Rule { - log.info("evaluate", { permission, pattern, ruleset: rulesets.flat() }) - return evalRule(permission, pattern, ...rulesets) -} - -export class Service extends Context.Service()("@opencode/Permission") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const state = yield* InstanceState.make( - Effect.fn("Permission.state")(function* (ctx) { - const row = Database.use((db) => - db.select().from(PermissionTable).where(eq(PermissionTable.project_id, ctx.project.id)).get(), - ) - const state = { - pending: new Map(), - approved: row?.data ?? 
[], - } - - yield* Effect.addFinalizer(() => - Effect.gen(function* () { - for (const item of state.pending.values()) { - yield* Deferred.fail(item.deferred, new RejectedError()) - } - state.pending.clear() - }), - ) - - return state - }), - ) - - const ask = Effect.fn("Permission.ask")(function* (input: AskInput) { - const { approved, pending } = yield* InstanceState.get(state) - const { ruleset, ...request } = input - let needsAsk = false - - for (const pattern of request.patterns) { - const rule = evaluate(request.permission, pattern, ruleset, approved) - log.info("evaluated", { permission: request.permission, pattern, action: rule }) - if (rule.action === "deny") { - return yield* new DeniedError({ - ruleset: ruleset.filter((rule) => Wildcard.match(request.permission, rule.permission)), - }) - } - if (rule.action === "allow") continue - needsAsk = true - } - - if (!needsAsk) return - - const id = request.id ?? PermissionID.ascending() - const info = Schema.decodeUnknownSync(Request)({ - id, - ...request, - }) - log.info("asking", { id, permission: info.permission, patterns: info.patterns }) - - const deferred = yield* Deferred.make() - pending.set(id, { info, deferred }) - yield* bus.publish(Event.Asked, info) - return yield* Effect.ensuring( - Deferred.await(deferred), - Effect.sync(() => { - pending.delete(id) - }), - ) - }) - - const reply = Effect.fn("Permission.reply")(function* (input: ReplyInput) { - const { approved, pending } = yield* InstanceState.get(state) - const existing = pending.get(input.requestID) - if (!existing) return - - pending.delete(input.requestID) - yield* bus.publish(Event.Replied, { - sessionID: existing.info.sessionID, - requestID: existing.info.id, - reply: input.reply, - }) - - if (input.reply === "reject") { - yield* Deferred.fail( - existing.deferred, - input.message ? 
new CorrectedError({ feedback: input.message }) : new RejectedError(), - ) - - for (const [id, item] of pending.entries()) { - if (item.info.sessionID !== existing.info.sessionID) continue - pending.delete(id) - yield* bus.publish(Event.Replied, { - sessionID: item.info.sessionID, - requestID: item.info.id, - reply: "reject", - }) - yield* Deferred.fail(item.deferred, new RejectedError()) - } - return - } - - yield* Deferred.succeed(existing.deferred, undefined) - if (input.reply === "once") return - - for (const pattern of existing.info.always) { - approved.push({ - permission: existing.info.permission, - pattern, - action: "allow", - }) - } - - for (const [id, item] of pending.entries()) { - if (item.info.sessionID !== existing.info.sessionID) continue - const ok = item.info.patterns.every( - (pattern) => evaluate(item.info.permission, pattern, approved).action === "allow", - ) - if (!ok) continue - pending.delete(id) - yield* bus.publish(Event.Replied, { - sessionID: item.info.sessionID, - requestID: item.info.id, - reply: "always", - }) - yield* Deferred.succeed(item.deferred, undefined) - } - }) - - const list = Effect.fn("Permission.list")(function* () { - const pending = (yield* InstanceState.get(state)).pending - return Array.from(pending.values(), (item) => item.info) - }) - - return Service.of({ ask, reply, list }) - }), -) - -function expand(pattern: string): string { - if (pattern.startsWith("~/")) return os.homedir() + pattern.slice(1) - if (pattern === "~") return os.homedir() - if (pattern.startsWith("$HOME/")) return os.homedir() + pattern.slice(5) - if (pattern.startsWith("$HOME")) return os.homedir() + pattern.slice(5) - return pattern -} - -export function fromConfig(permission: ConfigPermission.Info) { - const ruleset: Ruleset = [] - for (const [key, value] of Object.entries(permission)) { - if (typeof value === "string") { - ruleset.push({ permission: key, action: value, pattern: "*" }) - continue - } - ruleset.push( - 
...Object.entries(value).map(([pattern, action]) => ({ permission: key, pattern: expand(pattern), action })), - ) - } - return ruleset -} - -export function merge(...rulesets: Ruleset[]): Ruleset { - return rulesets.flat() -} - -const EDIT_TOOLS = ["edit", "write", "apply_patch", "multiedit"] - -export function disabled(tools: string[], ruleset: Ruleset): Set { - const result = new Set() - for (const tool of tools) { - const permission = EDIT_TOOLS.includes(tool) ? "edit" : tool - const rule = ruleset.findLast((rule) => Wildcard.match(permission, rule.permission)) - if (!rule) continue - if (rule.pattern === "*" && rule.action === "deny") result.add(tool) - } - return result -} - -export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) From 664b2c36e8200c70d4b6a70702aceaf0604cd00a Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:07 -0400 Subject: [PATCH 049/201] refactor: collapse git barrel into git/index.ts (#22909) --- packages/opencode/src/git/git.ts | 258 ---------------------------- packages/opencode/src/git/index.ts | 261 ++++++++++++++++++++++++++++- 2 files changed, 260 insertions(+), 259 deletions(-) delete mode 100644 packages/opencode/src/git/git.ts diff --git a/packages/opencode/src/git/git.ts b/packages/opencode/src/git/git.ts deleted file mode 100644 index 908c718521..0000000000 --- a/packages/opencode/src/git/git.ts +++ /dev/null @@ -1,258 +0,0 @@ -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { Effect, Layer, Context, Stream } from "effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" - -const cfg = [ - "--no-optional-locks", - "-c", - "core.autocrlf=false", - "-c", - "core.fsmonitor=false", - "-c", - "core.longpaths=true", - "-c", - "core.symlinks=true", - "-c", - "core.quotepath=false", -] as const - -const out = (result: { text(): string }) => result.text().trim() -const nuls = (text: string) => text.split("\0").filter(Boolean) -const fail = (err: 
unknown) => - ({ - exitCode: 1, - text: () => "", - stdout: Buffer.alloc(0), - stderr: Buffer.from(err instanceof Error ? err.message : String(err)), - }) satisfies Result - -export type Kind = "added" | "deleted" | "modified" - -export type Base = { - readonly name: string - readonly ref: string -} - -export type Item = { - readonly file: string - readonly code: string - readonly status: Kind -} - -export type Stat = { - readonly file: string - readonly additions: number - readonly deletions: number -} - -export interface Result { - readonly exitCode: number - readonly text: () => string - readonly stdout: Buffer - readonly stderr: Buffer -} - -export interface Options { - readonly cwd: string - readonly env?: Record -} - -export interface Interface { - readonly run: (args: string[], opts: Options) => Effect.Effect - readonly branch: (cwd: string) => Effect.Effect - readonly prefix: (cwd: string) => Effect.Effect - readonly defaultBranch: (cwd: string) => Effect.Effect - readonly hasHead: (cwd: string) => Effect.Effect - readonly mergeBase: (cwd: string, base: string, head?: string) => Effect.Effect - readonly show: (cwd: string, ref: string, file: string, prefix?: string) => Effect.Effect - readonly status: (cwd: string) => Effect.Effect - readonly diff: (cwd: string, ref: string) => Effect.Effect - readonly stats: (cwd: string, ref: string) => Effect.Effect -} - -const kind = (code: string): Kind => { - if (code === "??") return "added" - if (code.includes("U")) return "modified" - if (code.includes("A") && !code.includes("D")) return "added" - if (code.includes("D") && !code.includes("A")) return "deleted" - return "modified" -} - -export class Service extends Context.Service()("@opencode/Git") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - - const run = Effect.fn("Git.run")( - function* (args: string[], opts: Options) { - const proc = ChildProcess.make("git", 
[...cfg, ...args], { - cwd: opts.cwd, - env: opts.env, - extendEnv: true, - stdin: "ignore", - stdout: "pipe", - stderr: "pipe", - }) - const handle = yield* spawner.spawn(proc) - const [stdout, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - return { - exitCode: yield* handle.exitCode, - text: () => stdout, - stdout: Buffer.from(stdout), - stderr: Buffer.from(stderr), - } satisfies Result - }, - Effect.scoped, - Effect.catch((err) => Effect.succeed(fail(err))), - ) - - const text = Effect.fn("Git.text")(function* (args: string[], opts: Options) { - return (yield* run(args, opts)).text() - }) - - const lines = Effect.fn("Git.lines")(function* (args: string[], opts: Options) { - return (yield* text(args, opts)) - .split(/\r?\n/) - .map((item) => item.trim()) - .filter(Boolean) - }) - - const refs = Effect.fnUntraced(function* (cwd: string) { - return yield* lines(["for-each-ref", "--format=%(refname:short)", "refs/heads"], { cwd }) - }) - - const configured = Effect.fnUntraced(function* (cwd: string, list: string[]) { - const result = yield* run(["config", "init.defaultBranch"], { cwd }) - const name = out(result) - if (!name || !list.includes(name)) return - return { name, ref: name } satisfies Base - }) - - const primary = Effect.fnUntraced(function* (cwd: string) { - const list = yield* lines(["remote"], { cwd }) - if (list.includes("origin")) return "origin" - if (list.length === 1) return list[0] - if (list.includes("upstream")) return "upstream" - return list[0] - }) - - const branch = Effect.fn("Git.branch")(function* (cwd: string) { - const result = yield* run(["symbolic-ref", "--quiet", "--short", "HEAD"], { cwd }) - if (result.exitCode !== 0) return - const text = out(result) - return text || undefined - }) - - const prefix = Effect.fn("Git.prefix")(function* (cwd: string) { - const result = yield* run(["rev-parse", "--show-prefix"], { cwd }) 
- if (result.exitCode !== 0) return "" - return out(result) - }) - - const defaultBranch = Effect.fn("Git.defaultBranch")(function* (cwd: string) { - const remote = yield* primary(cwd) - if (remote) { - const head = yield* run(["symbolic-ref", `refs/remotes/${remote}/HEAD`], { cwd }) - if (head.exitCode === 0) { - const ref = out(head).replace(/^refs\/remotes\//, "") - const name = ref.startsWith(`${remote}/`) ? ref.slice(`${remote}/`.length) : "" - if (name) return { name, ref } satisfies Base - } - } - - const list = yield* refs(cwd) - const next = yield* configured(cwd, list) - if (next) return next - if (list.includes("main")) return { name: "main", ref: "main" } satisfies Base - if (list.includes("master")) return { name: "master", ref: "master" } satisfies Base - }) - - const hasHead = Effect.fn("Git.hasHead")(function* (cwd: string) { - const result = yield* run(["rev-parse", "--verify", "HEAD"], { cwd }) - return result.exitCode === 0 - }) - - const mergeBase = Effect.fn("Git.mergeBase")(function* (cwd: string, base: string, head = "HEAD") { - const result = yield* run(["merge-base", base, head], { cwd }) - if (result.exitCode !== 0) return - const text = out(result) - return text || undefined - }) - - const show = Effect.fn("Git.show")(function* (cwd: string, ref: string, file: string, prefix = "") { - const target = prefix ? 
`${prefix}${file}` : file - const result = yield* run(["show", `${ref}:${target}`], { cwd }) - if (result.exitCode !== 0) return "" - if (result.stdout.includes(0)) return "" - return result.text() - }) - - const status = Effect.fn("Git.status")(function* (cwd: string) { - return nuls( - yield* text(["status", "--porcelain=v1", "--untracked-files=all", "--no-renames", "-z", "--", "."], { - cwd, - }), - ).flatMap((item) => { - const file = item.slice(3) - if (!file) return [] - const code = item.slice(0, 2) - return [{ file, code, status: kind(code) } satisfies Item] - }) - }) - - const diff = Effect.fn("Git.diff")(function* (cwd: string, ref: string) { - const list = nuls( - yield* text(["diff", "--no-ext-diff", "--no-renames", "--name-status", "-z", ref, "--", "."], { cwd }), - ) - return list.flatMap((code, idx) => { - if (idx % 2 !== 0) return [] - const file = list[idx + 1] - if (!code || !file) return [] - return [{ file, code, status: kind(code) } satisfies Item] - }) - }) - - const stats = Effect.fn("Git.stats")(function* (cwd: string, ref: string) { - return nuls( - yield* text(["diff", "--no-ext-diff", "--no-renames", "--numstat", "-z", ref, "--", "."], { cwd }), - ).flatMap((item) => { - const a = item.indexOf("\t") - const b = item.indexOf("\t", a + 1) - if (a === -1 || b === -1) return [] - const file = item.slice(b + 1) - if (!file) return [] - const adds = item.slice(0, a) - const dels = item.slice(a + 1, b) - const additions = adds === "-" ? 0 : Number.parseInt(adds || "0", 10) - const deletions = dels === "-" ? 0 : Number.parseInt(dels || "0", 10) - return [ - { - file, - additions: Number.isFinite(additions) ? additions : 0, - deletions: Number.isFinite(deletions) ? 
deletions : 0, - } satisfies Stat, - ] - }) - }) - - return Service.of({ - run, - branch, - prefix, - defaultBranch, - hasHead, - mergeBase, - show, - status, - diff, - stats, - }) - }), -) - -export const defaultLayer = layer.pipe(Layer.provide(CrossSpawnSpawner.defaultLayer)) diff --git a/packages/opencode/src/git/index.ts b/packages/opencode/src/git/index.ts index 019819d6e3..719b5607fb 100644 --- a/packages/opencode/src/git/index.ts +++ b/packages/opencode/src/git/index.ts @@ -1 +1,260 @@ -export * as Git from "./git" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { Effect, Layer, Context, Stream } from "effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" + +const cfg = [ + "--no-optional-locks", + "-c", + "core.autocrlf=false", + "-c", + "core.fsmonitor=false", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + "-c", + "core.quotepath=false", +] as const + +const out = (result: { text(): string }) => result.text().trim() +const nuls = (text: string) => text.split("\0").filter(Boolean) +const fail = (err: unknown) => + ({ + exitCode: 1, + text: () => "", + stdout: Buffer.alloc(0), + stderr: Buffer.from(err instanceof Error ? 
err.message : String(err)), + }) satisfies Result + +export type Kind = "added" | "deleted" | "modified" + +export type Base = { + readonly name: string + readonly ref: string +} + +export type Item = { + readonly file: string + readonly code: string + readonly status: Kind +} + +export type Stat = { + readonly file: string + readonly additions: number + readonly deletions: number +} + +export interface Result { + readonly exitCode: number + readonly text: () => string + readonly stdout: Buffer + readonly stderr: Buffer +} + +export interface Options { + readonly cwd: string + readonly env?: Record +} + +export interface Interface { + readonly run: (args: string[], opts: Options) => Effect.Effect + readonly branch: (cwd: string) => Effect.Effect + readonly prefix: (cwd: string) => Effect.Effect + readonly defaultBranch: (cwd: string) => Effect.Effect + readonly hasHead: (cwd: string) => Effect.Effect + readonly mergeBase: (cwd: string, base: string, head?: string) => Effect.Effect + readonly show: (cwd: string, ref: string, file: string, prefix?: string) => Effect.Effect + readonly status: (cwd: string) => Effect.Effect + readonly diff: (cwd: string, ref: string) => Effect.Effect + readonly stats: (cwd: string, ref: string) => Effect.Effect +} + +const kind = (code: string): Kind => { + if (code === "??") return "added" + if (code.includes("U")) return "modified" + if (code.includes("A") && !code.includes("D")) return "added" + if (code.includes("D") && !code.includes("A")) return "deleted" + return "modified" +} + +export class Service extends Context.Service()("@opencode/Git") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + + const run = Effect.fn("Git.run")( + function* (args: string[], opts: Options) { + const proc = ChildProcess.make("git", [...cfg, ...args], { + cwd: opts.cwd, + env: opts.env, + extendEnv: true, + stdin: "ignore", + stdout: "pipe", + stderr: 
"pipe", + }) + const handle = yield* spawner.spawn(proc) + const [stdout, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + return { + exitCode: yield* handle.exitCode, + text: () => stdout, + stdout: Buffer.from(stdout), + stderr: Buffer.from(stderr), + } satisfies Result + }, + Effect.scoped, + Effect.catch((err) => Effect.succeed(fail(err))), + ) + + const text = Effect.fn("Git.text")(function* (args: string[], opts: Options) { + return (yield* run(args, opts)).text() + }) + + const lines = Effect.fn("Git.lines")(function* (args: string[], opts: Options) { + return (yield* text(args, opts)) + .split(/\r?\n/) + .map((item) => item.trim()) + .filter(Boolean) + }) + + const refs = Effect.fnUntraced(function* (cwd: string) { + return yield* lines(["for-each-ref", "--format=%(refname:short)", "refs/heads"], { cwd }) + }) + + const configured = Effect.fnUntraced(function* (cwd: string, list: string[]) { + const result = yield* run(["config", "init.defaultBranch"], { cwd }) + const name = out(result) + if (!name || !list.includes(name)) return + return { name, ref: name } satisfies Base + }) + + const primary = Effect.fnUntraced(function* (cwd: string) { + const list = yield* lines(["remote"], { cwd }) + if (list.includes("origin")) return "origin" + if (list.length === 1) return list[0] + if (list.includes("upstream")) return "upstream" + return list[0] + }) + + const branch = Effect.fn("Git.branch")(function* (cwd: string) { + const result = yield* run(["symbolic-ref", "--quiet", "--short", "HEAD"], { cwd }) + if (result.exitCode !== 0) return + const text = out(result) + return text || undefined + }) + + const prefix = Effect.fn("Git.prefix")(function* (cwd: string) { + const result = yield* run(["rev-parse", "--show-prefix"], { cwd }) + if (result.exitCode !== 0) return "" + return out(result) + }) + + const defaultBranch = 
Effect.fn("Git.defaultBranch")(function* (cwd: string) { + const remote = yield* primary(cwd) + if (remote) { + const head = yield* run(["symbolic-ref", `refs/remotes/${remote}/HEAD`], { cwd }) + if (head.exitCode === 0) { + const ref = out(head).replace(/^refs\/remotes\//, "") + const name = ref.startsWith(`${remote}/`) ? ref.slice(`${remote}/`.length) : "" + if (name) return { name, ref } satisfies Base + } + } + + const list = yield* refs(cwd) + const next = yield* configured(cwd, list) + if (next) return next + if (list.includes("main")) return { name: "main", ref: "main" } satisfies Base + if (list.includes("master")) return { name: "master", ref: "master" } satisfies Base + }) + + const hasHead = Effect.fn("Git.hasHead")(function* (cwd: string) { + const result = yield* run(["rev-parse", "--verify", "HEAD"], { cwd }) + return result.exitCode === 0 + }) + + const mergeBase = Effect.fn("Git.mergeBase")(function* (cwd: string, base: string, head = "HEAD") { + const result = yield* run(["merge-base", base, head], { cwd }) + if (result.exitCode !== 0) return + const text = out(result) + return text || undefined + }) + + const show = Effect.fn("Git.show")(function* (cwd: string, ref: string, file: string, prefix = "") { + const target = prefix ? 
`${prefix}${file}` : file + const result = yield* run(["show", `${ref}:${target}`], { cwd }) + if (result.exitCode !== 0) return "" + if (result.stdout.includes(0)) return "" + return result.text() + }) + + const status = Effect.fn("Git.status")(function* (cwd: string) { + return nuls( + yield* text(["status", "--porcelain=v1", "--untracked-files=all", "--no-renames", "-z", "--", "."], { + cwd, + }), + ).flatMap((item) => { + const file = item.slice(3) + if (!file) return [] + const code = item.slice(0, 2) + return [{ file, code, status: kind(code) } satisfies Item] + }) + }) + + const diff = Effect.fn("Git.diff")(function* (cwd: string, ref: string) { + const list = nuls( + yield* text(["diff", "--no-ext-diff", "--no-renames", "--name-status", "-z", ref, "--", "."], { cwd }), + ) + return list.flatMap((code, idx) => { + if (idx % 2 !== 0) return [] + const file = list[idx + 1] + if (!code || !file) return [] + return [{ file, code, status: kind(code) } satisfies Item] + }) + }) + + const stats = Effect.fn("Git.stats")(function* (cwd: string, ref: string) { + return nuls( + yield* text(["diff", "--no-ext-diff", "--no-renames", "--numstat", "-z", ref, "--", "."], { cwd }), + ).flatMap((item) => { + const a = item.indexOf("\t") + const b = item.indexOf("\t", a + 1) + if (a === -1 || b === -1) return [] + const file = item.slice(b + 1) + if (!file) return [] + const adds = item.slice(0, a) + const dels = item.slice(a + 1, b) + const additions = adds === "-" ? 0 : Number.parseInt(adds || "0", 10) + const deletions = dels === "-" ? 0 : Number.parseInt(dels || "0", 10) + return [ + { + file, + additions: Number.isFinite(additions) ? additions : 0, + deletions: Number.isFinite(deletions) ? 
deletions : 0, + } satisfies Stat, + ] + }) + }) + + return Service.of({ + run, + branch, + prefix, + defaultBranch, + hasHead, + mergeBase, + show, + status, + diff, + stats, + }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(CrossSpawnSpawner.defaultLayer)) + +export * as Git from "." From 5fccdc9fc7979be5f5b04ae9701d550fbec21535 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:23 -0400 Subject: [PATCH 050/201] refactor: collapse mcp barrel into mcp/index.ts (#22913) --- packages/opencode/src/mcp/index.ts | 934 ++++++++++++++++++++++++++++- packages/opencode/src/mcp/mcp.ts | 931 ---------------------------- 2 files changed, 933 insertions(+), 932 deletions(-) delete mode 100644 packages/opencode/src/mcp/mcp.ts diff --git a/packages/opencode/src/mcp/index.ts b/packages/opencode/src/mcp/index.ts index c42b9eb5c1..ba53e7c0b5 100644 --- a/packages/opencode/src/mcp/index.ts +++ b/packages/opencode/src/mcp/index.ts @@ -1 +1,933 @@ -export * as MCP from "./mcp" +import { dynamicTool, type Tool, jsonSchema, type JSONSchema7 } from "ai" +import { Client } from "@modelcontextprotocol/sdk/client/index.js" +import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js" +import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js" +import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js" +import { UnauthorizedError } from "@modelcontextprotocol/sdk/client/auth.js" +import { + CallToolResultSchema, + type Tool as MCPToolDef, + ToolListChangedNotificationSchema, +} from "@modelcontextprotocol/sdk/types.js" +import { Config } from "../config" +import { ConfigMCP } from "../config/mcp" +import { Log } from "../util" +import { NamedError } from "@opencode-ai/shared/util/error" +import z from "zod/v4" +import { Instance } from "../project/instance" +import { Installation } from "../installation" +import { InstallationVersion } from "../installation/version" +import 
{ withTimeout } from "@/util/timeout" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { McpOAuthProvider } from "./oauth-provider" +import { McpOAuthCallback } from "./oauth-callback" +import { McpAuth } from "./auth" +import { BusEvent } from "../bus/bus-event" +import { Bus } from "@/bus" +import { TuiEvent } from "@/cli/cmd/tui/event" +import open from "open" +import { Effect, Exit, Layer, Option, Context, Stream } from "effect" +import { EffectBridge } from "@/effect" +import { InstanceState } from "@/effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" + +const log = Log.create({ service: "mcp" }) +const DEFAULT_TIMEOUT = 30_000 + +export const Resource = z + .object({ + name: z.string(), + uri: z.string(), + description: z.string().optional(), + mimeType: z.string().optional(), + client: z.string(), + }) + .meta({ ref: "McpResource" }) +export type Resource = z.infer + +export const ToolsChanged = BusEvent.define( + "mcp.tools.changed", + z.object({ + server: z.string(), + }), +) + +export const BrowserOpenFailed = BusEvent.define( + "mcp.browser.open.failed", + z.object({ + mcpName: z.string(), + url: z.string(), + }), +) + +export const Failed = NamedError.create( + "MCPFailed", + z.object({ + name: z.string(), + }), +) + +type MCPClient = Client + +export const Status = z + .discriminatedUnion("status", [ + z + .object({ + status: z.literal("connected"), + }) + .meta({ + ref: "MCPStatusConnected", + }), + z + .object({ + status: z.literal("disabled"), + }) + .meta({ + ref: "MCPStatusDisabled", + }), + z + .object({ + status: z.literal("failed"), + error: z.string(), + }) + .meta({ + ref: "MCPStatusFailed", + }), + z + .object({ + status: z.literal("needs_auth"), + }) + .meta({ + ref: "MCPStatusNeedsAuth", + }), + z + .object({ + status: z.literal("needs_client_registration"), + error: z.string(), + }) + .meta({ + ref: 
"MCPStatusNeedsClientRegistration", + }), + ]) + .meta({ + ref: "MCPStatus", + }) +export type Status = z.infer + +// Store transports for OAuth servers to allow finishing auth +type TransportWithAuth = StreamableHTTPClientTransport | SSEClientTransport +const pendingOAuthTransports = new Map() + +// Prompt cache types +type PromptInfo = Awaited>["prompts"][number] +type ResourceInfo = Awaited>["resources"][number] +type McpEntry = NonNullable[string] + +function isMcpConfigured(entry: McpEntry): entry is ConfigMCP.Info { + return typeof entry === "object" && entry !== null && "type" in entry +} + +const sanitize = (s: string) => s.replace(/[^a-zA-Z0-9_-]/g, "_") + +// Convert MCP tool definition to AI SDK Tool type +function convertMcpTool(mcpTool: MCPToolDef, client: MCPClient, timeout?: number): Tool { + const inputSchema = mcpTool.inputSchema + + // Spread first, then override type to ensure it's always "object" + const schema: JSONSchema7 = { + ...(inputSchema as JSONSchema7), + type: "object", + properties: (inputSchema.properties ?? {}) as JSONSchema7["properties"], + additionalProperties: false, + } + + return dynamicTool({ + description: mcpTool.description ?? "", + inputSchema: jsonSchema(schema), + execute: async (args: unknown) => { + return client.callTool( + { + name: mcpTool.name, + arguments: (args || {}) as Record, + }, + CallToolResultSchema, + { + resetTimeoutOnProgress: true, + timeout, + }, + ) + }, + }) +} + +function defs(key: string, client: MCPClient, timeout?: number) { + return Effect.tryPromise({ + try: () => withTimeout(client.listTools(), timeout ?? DEFAULT_TIMEOUT), + catch: (err) => (err instanceof Error ? 
err : new Error(String(err))), + }).pipe( + Effect.map((result) => result.tools), + Effect.catch((err) => { + log.error("failed to get tools from client", { key, error: err }) + return Effect.succeed(undefined) + }), + ) +} + +function fetchFromClient( + clientName: string, + client: Client, + listFn: (c: Client) => Promise, + label: string, +) { + return Effect.tryPromise({ + try: () => listFn(client), + catch: (e: any) => { + log.error(`failed to get ${label}`, { clientName, error: e.message }) + return e + }, + }).pipe( + Effect.map((items) => { + const out: Record = {} + const sanitizedClient = sanitize(clientName) + for (const item of items) { + out[sanitizedClient + ":" + sanitize(item.name)] = { ...item, client: clientName } + } + return out + }), + Effect.orElseSucceed(() => undefined), + ) +} + +interface CreateResult { + mcpClient?: MCPClient + status: Status + defs?: MCPToolDef[] +} + +interface AuthResult { + authorizationUrl: string + oauthState: string + client?: MCPClient +} + +// --- Effect Service --- + +interface State { + status: Record + clients: Record + defs: Record +} + +export interface Interface { + readonly status: () => Effect.Effect> + readonly clients: () => Effect.Effect> + readonly tools: () => Effect.Effect> + readonly prompts: () => Effect.Effect> + readonly resources: () => Effect.Effect> + readonly add: (name: string, mcp: ConfigMCP.Info) => Effect.Effect<{ status: Record | Status }> + readonly connect: (name: string) => Effect.Effect + readonly disconnect: (name: string) => Effect.Effect + readonly getPrompt: ( + clientName: string, + name: string, + args?: Record, + ) => Effect.Effect> | undefined> + readonly readResource: ( + clientName: string, + resourceUri: string, + ) => Effect.Effect> | undefined> + readonly startAuth: (mcpName: string) => Effect.Effect<{ authorizationUrl: string; oauthState: string }> + readonly authenticate: (mcpName: string) => Effect.Effect + readonly finishAuth: (mcpName: string, authorizationCode: 
string) => Effect.Effect + readonly removeAuth: (mcpName: string) => Effect.Effect + readonly supportsOAuth: (mcpName: string) => Effect.Effect + readonly hasStoredTokens: (mcpName: string) => Effect.Effect + readonly getAuthStatus: (mcpName: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/MCP") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + const auth = yield* McpAuth.Service + const bus = yield* Bus.Service + + type Transport = StdioClientTransport | StreamableHTTPClientTransport | SSEClientTransport + + /** + * Connect a client via the given transport with resource safety: + * on failure the transport is closed; on success the caller owns it. + */ + const connectTransport = (transport: Transport, timeout: number) => + Effect.acquireUseRelease( + Effect.succeed(transport), + (t) => + Effect.tryPromise({ + try: () => { + const client = new Client({ name: "opencode", version: InstallationVersion }) + return withTimeout(client.connect(t), timeout).then(() => client) + }, + catch: (e) => (e instanceof Error ? e : new Error(String(e))), + }), + (t, exit) => (Exit.isFailure(exit) ? Effect.tryPromise(() => t.close()).pipe(Effect.ignore) : Effect.void), + ) + + const DISABLED_RESULT: CreateResult = { status: { status: "disabled" } } + + const connectRemote = Effect.fn("MCP.connectRemote")(function* ( + key: string, + mcp: ConfigMCP.Info & { type: "remote" }, + ) { + const oauthDisabled = mcp.oauth === false + const oauthConfig = typeof mcp.oauth === "object" ? 
mcp.oauth : undefined + let authProvider: McpOAuthProvider | undefined + + if (!oauthDisabled) { + authProvider = new McpOAuthProvider( + key, + mcp.url, + { + clientId: oauthConfig?.clientId, + clientSecret: oauthConfig?.clientSecret, + scope: oauthConfig?.scope, + redirectUri: oauthConfig?.redirectUri, + }, + { + onRedirect: async (url) => { + log.info("oauth redirect requested", { key, url: url.toString() }) + }, + }, + auth, + ) + } + + const transports: Array<{ name: string; transport: TransportWithAuth }> = [ + { + name: "StreamableHTTP", + transport: new StreamableHTTPClientTransport(new URL(mcp.url), { + authProvider, + requestInit: mcp.headers ? { headers: mcp.headers } : undefined, + }), + }, + { + name: "SSE", + transport: new SSEClientTransport(new URL(mcp.url), { + authProvider, + requestInit: mcp.headers ? { headers: mcp.headers } : undefined, + }), + }, + ] + + const connectTimeout = mcp.timeout ?? DEFAULT_TIMEOUT + let lastStatus: Status | undefined + + for (const { name, transport } of transports) { + const result = yield* connectTransport(transport, connectTimeout).pipe( + Effect.map((client) => ({ client, transportName: name })), + Effect.catch((error) => { + const lastError = error instanceof Error ? error : new Error(String(error)) + const isAuthError = + error instanceof UnauthorizedError || (authProvider && lastError.message.includes("OAuth")) + + if (isAuthError) { + log.info("mcp server requires authentication", { key, transport: name }) + + if (lastError.message.includes("registration") || lastError.message.includes("client_id")) { + lastStatus = { + status: "needs_client_registration" as const, + error: "Server does not support dynamic client registration. Please provide clientId in config.", + } + return bus + .publish(TuiEvent.ToastShow, { + title: "MCP Authentication Required", + message: `Server "${key}" requires a pre-registered client ID. 
Add clientId to your config.`, + variant: "warning", + duration: 8000, + }) + .pipe(Effect.ignore, Effect.as(undefined)) + } else { + pendingOAuthTransports.set(key, transport) + lastStatus = { status: "needs_auth" as const } + return bus + .publish(TuiEvent.ToastShow, { + title: "MCP Authentication Required", + message: `Server "${key}" requires authentication. Run: opencode mcp auth ${key}`, + variant: "warning", + duration: 8000, + }) + .pipe(Effect.ignore, Effect.as(undefined)) + } + } + + log.debug("transport connection failed", { + key, + transport: name, + url: mcp.url, + error: lastError.message, + }) + lastStatus = { status: "failed" as const, error: lastError.message } + return Effect.succeed(undefined) + }), + ) + if (result) { + log.info("connected", { key, transport: result.transportName }) + return { client: result.client as MCPClient | undefined, status: { status: "connected" } as Status } + } + // If this was an auth error, stop trying other transports + if (lastStatus?.status === "needs_auth" || lastStatus?.status === "needs_client_registration") break + } + + return { + client: undefined as MCPClient | undefined, + status: (lastStatus ?? { status: "failed", error: "Unknown error" }) as Status, + } + }) + + const connectLocal = Effect.fn("MCP.connectLocal")(function* ( + key: string, + mcp: ConfigMCP.Info & { type: "local" }, + ) { + const [cmd, ...args] = mcp.command + const cwd = Instance.directory + const transport = new StdioClientTransport({ + stderr: "pipe", + command: cmd, + args, + cwd, + env: { + ...process.env, + ...(cmd === "opencode" ? { BUN_BE_BUN: "1" } : {}), + ...mcp.environment, + }, + }) + transport.stderr?.on("data", (chunk: Buffer) => { + log.info(`mcp stderr: ${chunk.toString()}`, { key }) + }) + + const connectTimeout = mcp.timeout ?? 
DEFAULT_TIMEOUT + return yield* connectTransport(transport, connectTimeout).pipe( + Effect.map((client): { client: MCPClient | undefined; status: Status } => ({ + client, + status: { status: "connected" }, + })), + Effect.catch((error): Effect.Effect<{ client: MCPClient | undefined; status: Status }> => { + const msg = error instanceof Error ? error.message : String(error) + log.error("local mcp startup failed", { key, command: mcp.command, cwd, error: msg }) + return Effect.succeed({ client: undefined, status: { status: "failed", error: msg } }) + }), + ) + }) + + const create = Effect.fn("MCP.create")(function* (key: string, mcp: ConfigMCP.Info) { + if (mcp.enabled === false) { + log.info("mcp server disabled", { key }) + return DISABLED_RESULT + } + + log.info("found", { key, type: mcp.type }) + + const { client: mcpClient, status } = + mcp.type === "remote" + ? yield* connectRemote(key, mcp as ConfigMCP.Info & { type: "remote" }) + : yield* connectLocal(key, mcp as ConfigMCP.Info & { type: "local" }) + + if (!mcpClient) { + return { status } satisfies CreateResult + } + + const listed = yield* defs(key, mcpClient, mcp.timeout) + if (!listed) { + yield* Effect.tryPromise(() => mcpClient.close()).pipe(Effect.ignore) + return { status: { status: "failed", error: "Failed to get tools" } } satisfies CreateResult + } + + log.info("create() successfully created client", { key, toolCount: listed.length }) + return { mcpClient, status, defs: listed } satisfies CreateResult + }) + const cfgSvc = yield* Config.Service + + const descendants = Effect.fnUntraced( + function* (pid: number) { + if (process.platform === "win32") return [] as number[] + const pids: number[] = [] + const queue = [pid] + while (queue.length > 0) { + const current = queue.shift()! 
+ const handle = yield* spawner.spawn(ChildProcess.make("pgrep", ["-P", String(current)], { stdin: "ignore" })) + const text = yield* Stream.mkString(Stream.decodeText(handle.stdout)) + yield* handle.exitCode + for (const tok of text.split("\n")) { + const cpid = parseInt(tok, 10) + if (!isNaN(cpid) && !pids.includes(cpid)) { + pids.push(cpid) + queue.push(cpid) + } + } + } + return pids + }, + Effect.scoped, + Effect.catch(() => Effect.succeed([] as number[])), + ) + + function watch(s: State, name: string, client: MCPClient, bridge: EffectBridge.Shape, timeout?: number) { + client.setNotificationHandler(ToolListChangedNotificationSchema, async () => { + log.info("tools list changed notification received", { server: name }) + if (s.clients[name] !== client || s.status[name]?.status !== "connected") return + + const listed = await bridge.promise(defs(name, client, timeout)) + if (!listed) return + if (s.clients[name] !== client || s.status[name]?.status !== "connected") return + + s.defs[name] = listed + await bridge.promise(bus.publish(ToolsChanged, { server: name }).pipe(Effect.ignore)) + }) + } + + const state = yield* InstanceState.make( + Effect.fn("MCP.state")(function* () { + const cfg = yield* cfgSvc.get() + const bridge = yield* EffectBridge.make() + const config = cfg.mcp ?? {} + const s: State = { + status: {}, + clients: {}, + defs: {}, + } + + yield* Effect.forEach( + Object.entries(config), + ([key, mcp]) => + Effect.gen(function* () { + if (!isMcpConfigured(mcp)) { + log.error("Ignoring MCP config entry without type", { key }) + return + } + + if (mcp.enabled === false) { + s.status[key] = { status: "disabled" } + return + } + + const result = yield* create(key, mcp).pipe(Effect.catch(() => Effect.void)) + if (!result) return + + s.status[key] = result.status + if (result.mcpClient) { + s.clients[key] = result.mcpClient + s.defs[key] = result.defs! 
+ watch(s, key, result.mcpClient, bridge, mcp.timeout) + } + }), + { concurrency: "unbounded" }, + ) + + yield* Effect.addFinalizer(() => + Effect.gen(function* () { + yield* Effect.forEach( + Object.values(s.clients), + (client) => + Effect.gen(function* () { + const pid = client.transport instanceof StdioClientTransport ? client.transport.pid : null + if (typeof pid === "number") { + const pids = yield* descendants(pid) + for (const dpid of pids) { + try { + process.kill(dpid, "SIGTERM") + } catch {} + } + } + yield* Effect.tryPromise(() => client.close()).pipe(Effect.ignore) + }), + { concurrency: "unbounded" }, + ) + pendingOAuthTransports.clear() + }), + ) + + return s + }), + ) + + function closeClient(s: State, name: string) { + const client = s.clients[name] + delete s.defs[name] + if (!client) return Effect.void + return Effect.tryPromise(() => client.close()).pipe(Effect.ignore) + } + + const storeClient = Effect.fnUntraced(function* ( + s: State, + name: string, + client: MCPClient, + listed: MCPToolDef[], + timeout?: number, + ) { + const bridge = yield* EffectBridge.make() + yield* closeClient(s, name) + s.status[name] = { status: "connected" } + s.clients[name] = client + s.defs[name] = listed + watch(s, name, client, bridge, timeout) + return s.status[name] + }) + + const status = Effect.fn("MCP.status")(function* () { + const s = yield* InstanceState.get(state) + + const cfg = yield* cfgSvc.get() + const config = cfg.mcp ?? {} + const result: Record = {} + + for (const [key, mcp] of Object.entries(config)) { + if (!isMcpConfigured(mcp)) continue + result[key] = s.status[key] ?? 
{ status: "disabled" } + } + + return result + }) + + const clients = Effect.fn("MCP.clients")(function* () { + const s = yield* InstanceState.get(state) + return s.clients + }) + + const createAndStore = Effect.fn("MCP.createAndStore")(function* (name: string, mcp: ConfigMCP.Info) { + const s = yield* InstanceState.get(state) + const result = yield* create(name, mcp) + + s.status[name] = result.status + if (!result.mcpClient) { + yield* closeClient(s, name) + delete s.clients[name] + return result.status + } + + return yield* storeClient(s, name, result.mcpClient, result.defs!, mcp.timeout) + }) + + const add = Effect.fn("MCP.add")(function* (name: string, mcp: ConfigMCP.Info) { + yield* createAndStore(name, mcp) + const s = yield* InstanceState.get(state) + return { status: s.status } + }) + + const connect = Effect.fn("MCP.connect")(function* (name: string) { + const mcp = yield* getMcpConfig(name) + if (!mcp) { + log.error("MCP config not found or invalid", { name }) + return + } + yield* createAndStore(name, { ...mcp, enabled: true }) + }) + + const disconnect = Effect.fn("MCP.disconnect")(function* (name: string) { + const s = yield* InstanceState.get(state) + yield* closeClient(s, name) + delete s.clients[name] + s.status[name] = { status: "disabled" } + }) + + const tools = Effect.fn("MCP.tools")(function* () { + const result: Record = {} + const s = yield* InstanceState.get(state) + + const cfg = yield* cfgSvc.get() + const config = cfg.mcp ?? {} + const defaultTimeout = cfg.experimental?.mcp_timeout + + const connectedClients = Object.entries(s.clients).filter( + ([clientName]) => s.status[clientName]?.status === "connected", + ) + + yield* Effect.forEach( + connectedClients, + ([clientName, client]) => + Effect.gen(function* () { + const mcpConfig = config[clientName] + const entry = mcpConfig && isMcpConfigured(mcpConfig) ? 
mcpConfig : undefined + + const listed = s.defs[clientName] + if (!listed) { + log.warn("missing cached tools for connected server", { clientName }) + return + } + + const timeout = entry?.timeout ?? defaultTimeout + for (const mcpTool of listed) { + result[sanitize(clientName) + "_" + sanitize(mcpTool.name)] = convertMcpTool(mcpTool, client, timeout) + } + }), + { concurrency: "unbounded" }, + ) + return result + }) + + function collectFromConnected( + s: State, + listFn: (c: Client) => Promise, + label: string, + ) { + return Effect.forEach( + Object.entries(s.clients).filter(([name]) => s.status[name]?.status === "connected"), + ([clientName, client]) => + fetchFromClient(clientName, client, listFn, label).pipe(Effect.map((items) => Object.entries(items ?? {}))), + { concurrency: "unbounded" }, + ).pipe(Effect.map((results) => Object.fromEntries(results.flat()))) + } + + const prompts = Effect.fn("MCP.prompts")(function* () { + const s = yield* InstanceState.get(state) + return yield* collectFromConnected(s, (c) => c.listPrompts().then((r) => r.prompts), "prompts") + }) + + const resources = Effect.fn("MCP.resources")(function* () { + const s = yield* InstanceState.get(state) + return yield* collectFromConnected(s, (c) => c.listResources().then((r) => r.resources), "resources") + }) + + const withClient = Effect.fnUntraced(function* ( + clientName: string, + fn: (client: MCPClient) => Promise, + label: string, + meta?: Record, + ) { + const s = yield* InstanceState.get(state) + const client = s.clients[clientName] + if (!client) { + log.warn(`client not found for ${label}`, { clientName }) + return undefined + } + return yield* Effect.tryPromise({ + try: () => fn(client), + catch: (e: any) => { + log.error(`failed to ${label}`, { clientName, ...meta, error: e?.message }) + return e + }, + }).pipe(Effect.orElseSucceed(() => undefined)) + }) + + const getPrompt = Effect.fn("MCP.getPrompt")(function* ( + clientName: string, + name: string, + args?: Record, + ) { + 
return yield* withClient(clientName, (client) => client.getPrompt({ name, arguments: args }), "getPrompt", { + promptName: name, + }) + }) + + const readResource = Effect.fn("MCP.readResource")(function* (clientName: string, resourceUri: string) { + return yield* withClient(clientName, (client) => client.readResource({ uri: resourceUri }), "readResource", { + resourceUri, + }) + }) + + const getMcpConfig = Effect.fnUntraced(function* (mcpName: string) { + const cfg = yield* cfgSvc.get() + const mcpConfig = cfg.mcp?.[mcpName] + if (!mcpConfig || !isMcpConfigured(mcpConfig)) return undefined + return mcpConfig + }) + + const startAuth = Effect.fn("MCP.startAuth")(function* (mcpName: string) { + const mcpConfig = yield* getMcpConfig(mcpName) + if (!mcpConfig) throw new Error(`MCP server ${mcpName} not found or disabled`) + if (mcpConfig.type !== "remote") throw new Error(`MCP server ${mcpName} is not a remote server`) + if (mcpConfig.oauth === false) throw new Error(`MCP server ${mcpName} has OAuth explicitly disabled`) + + // OAuth config is optional - if not provided, we'll use auto-discovery + const oauthConfig = typeof mcpConfig.oauth === "object" ? 
mcpConfig.oauth : undefined + + // Start the callback server with custom redirectUri if configured + yield* Effect.promise(() => McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)) + + const oauthState = Array.from(crypto.getRandomValues(new Uint8Array(32))) + .map((b) => b.toString(16).padStart(2, "0")) + .join("") + yield* auth.updateOAuthState(mcpName, oauthState) + let capturedUrl: URL | undefined + const authProvider = new McpOAuthProvider( + mcpName, + mcpConfig.url, + { + clientId: oauthConfig?.clientId, + clientSecret: oauthConfig?.clientSecret, + scope: oauthConfig?.scope, + redirectUri: oauthConfig?.redirectUri, + }, + { + onRedirect: async (url) => { + capturedUrl = url + }, + }, + auth, + ) + + const transport = new StreamableHTTPClientTransport(new URL(mcpConfig.url), { authProvider }) + + return yield* Effect.tryPromise({ + try: () => { + const client = new Client({ name: "opencode", version: InstallationVersion }) + return client + .connect(transport) + .then(() => ({ authorizationUrl: "", oauthState, client }) satisfies AuthResult) + }, + catch: (error) => error, + }).pipe( + Effect.catch((error) => { + if (error instanceof UnauthorizedError && capturedUrl) { + pendingOAuthTransports.set(mcpName, transport) + return Effect.succeed({ authorizationUrl: capturedUrl.toString(), oauthState } satisfies AuthResult) + } + return Effect.die(error) + }), + ) + }) + + const authenticate = Effect.fn("MCP.authenticate")(function* (mcpName: string) { + const result = yield* startAuth(mcpName) + if (!result.authorizationUrl) { + const client = "client" in result ? result.client : undefined + const mcpConfig = yield* getMcpConfig(mcpName) + if (!mcpConfig) { + yield* Effect.tryPromise(() => client?.close() ?? Promise.resolve()).pipe(Effect.ignore) + return { status: "failed", error: "MCP config not found after auth" } as Status + } + + const listed = client ? 
yield* defs(mcpName, client, mcpConfig.timeout) : undefined + if (!client || !listed) { + yield* Effect.tryPromise(() => client?.close() ?? Promise.resolve()).pipe(Effect.ignore) + return { status: "failed", error: "Failed to get tools" } as Status + } + + const s = yield* InstanceState.get(state) + yield* auth.clearOAuthState(mcpName) + return yield* storeClient(s, mcpName, client, listed, mcpConfig.timeout) + } + + log.info("opening browser for oauth", { mcpName, url: result.authorizationUrl, state: result.oauthState }) + + const callbackPromise = McpOAuthCallback.waitForCallback(result.oauthState, mcpName) + + yield* Effect.tryPromise(() => open(result.authorizationUrl)).pipe( + Effect.flatMap((subprocess) => + Effect.callback((resume) => { + const timer = setTimeout(() => resume(Effect.void), 500) + subprocess.on("error", (err) => { + clearTimeout(timer) + resume(Effect.fail(err)) + }) + subprocess.on("exit", (code) => { + if (code !== null && code !== 0) { + clearTimeout(timer) + resume(Effect.fail(new Error(`Browser open failed with exit code ${code}`))) + } + }) + }), + ), + Effect.catch(() => { + log.warn("failed to open browser, user must open URL manually", { mcpName }) + return bus.publish(BrowserOpenFailed, { mcpName, url: result.authorizationUrl }).pipe(Effect.ignore) + }), + ) + + const code = yield* Effect.promise(() => callbackPromise) + + const storedState = yield* auth.getOAuthState(mcpName) + if (storedState !== result.oauthState) { + yield* auth.clearOAuthState(mcpName) + throw new Error("OAuth state mismatch - potential CSRF attack") + } + yield* auth.clearOAuthState(mcpName) + return yield* finishAuth(mcpName, code) + }) + + const finishAuth = Effect.fn("MCP.finishAuth")(function* (mcpName: string, authorizationCode: string) { + const transport = pendingOAuthTransports.get(mcpName) + if (!transport) throw new Error(`No pending OAuth flow for MCP server: ${mcpName}`) + + const result = yield* Effect.tryPromise({ + try: () => 
transport.finishAuth(authorizationCode).then(() => true as const), + catch: (error) => { + log.error("failed to finish oauth", { mcpName, error }) + return error + }, + }).pipe(Effect.option) + + if (Option.isNone(result)) { + return { status: "failed", error: "OAuth completion failed" } as Status + } + + yield* auth.clearCodeVerifier(mcpName) + pendingOAuthTransports.delete(mcpName) + + const mcpConfig = yield* getMcpConfig(mcpName) + if (!mcpConfig) return { status: "failed", error: "MCP config not found after auth" } as Status + + return yield* createAndStore(mcpName, mcpConfig) + }) + + const removeAuth = Effect.fn("MCP.removeAuth")(function* (mcpName: string) { + yield* auth.remove(mcpName) + McpOAuthCallback.cancelPending(mcpName) + pendingOAuthTransports.delete(mcpName) + log.info("removed oauth credentials", { mcpName }) + }) + + const supportsOAuth = Effect.fn("MCP.supportsOAuth")(function* (mcpName: string) { + const mcpConfig = yield* getMcpConfig(mcpName) + if (!mcpConfig) return false + return mcpConfig.type === "remote" && mcpConfig.oauth !== false + }) + + const hasStoredTokens = Effect.fn("MCP.hasStoredTokens")(function* (mcpName: string) { + const entry = yield* auth.get(mcpName) + return !!entry?.tokens + }) + + const getAuthStatus = Effect.fn("MCP.getAuthStatus")(function* (mcpName: string) { + const entry = yield* auth.get(mcpName) + if (!entry?.tokens) return "not_authenticated" as AuthStatus + const expired = yield* auth.isTokenExpired(mcpName) + return (expired ? 
"expired" : "authenticated") as AuthStatus + }) + + return Service.of({ + status, + clients, + tools, + prompts, + resources, + add, + connect, + disconnect, + getPrompt, + readResource, + startAuth, + authenticate, + finishAuth, + removeAuth, + supportsOAuth, + hasStoredTokens, + getAuthStatus, + }) + }), +) + +export type AuthStatus = "authenticated" | "expired" | "not_authenticated" + +// --- Per-service runtime --- + +export const defaultLayer = layer.pipe( + Layer.provide(McpAuth.layer), + Layer.provide(Bus.layer), + Layer.provide(Config.defaultLayer), + Layer.provide(CrossSpawnSpawner.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), +) + +export * as MCP from "." diff --git a/packages/opencode/src/mcp/mcp.ts b/packages/opencode/src/mcp/mcp.ts deleted file mode 100644 index 61201ce76d..0000000000 --- a/packages/opencode/src/mcp/mcp.ts +++ /dev/null @@ -1,931 +0,0 @@ -import { dynamicTool, type Tool, jsonSchema, type JSONSchema7 } from "ai" -import { Client } from "@modelcontextprotocol/sdk/client/index.js" -import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js" -import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js" -import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js" -import { UnauthorizedError } from "@modelcontextprotocol/sdk/client/auth.js" -import { - CallToolResultSchema, - type Tool as MCPToolDef, - ToolListChangedNotificationSchema, -} from "@modelcontextprotocol/sdk/types.js" -import { Config } from "../config" -import { ConfigMCP } from "../config/mcp" -import { Log } from "../util" -import { NamedError } from "@opencode-ai/shared/util/error" -import z from "zod/v4" -import { Instance } from "../project/instance" -import { Installation } from "../installation" -import { InstallationVersion } from "../installation/version" -import { withTimeout } from "@/util/timeout" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { 
McpOAuthProvider } from "./oauth-provider" -import { McpOAuthCallback } from "./oauth-callback" -import { McpAuth } from "./auth" -import { BusEvent } from "../bus/bus-event" -import { Bus } from "@/bus" -import { TuiEvent } from "@/cli/cmd/tui/event" -import open from "open" -import { Effect, Exit, Layer, Option, Context, Stream } from "effect" -import { EffectBridge } from "@/effect" -import { InstanceState } from "@/effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" - -const log = Log.create({ service: "mcp" }) -const DEFAULT_TIMEOUT = 30_000 - -export const Resource = z - .object({ - name: z.string(), - uri: z.string(), - description: z.string().optional(), - mimeType: z.string().optional(), - client: z.string(), - }) - .meta({ ref: "McpResource" }) -export type Resource = z.infer - -export const ToolsChanged = BusEvent.define( - "mcp.tools.changed", - z.object({ - server: z.string(), - }), -) - -export const BrowserOpenFailed = BusEvent.define( - "mcp.browser.open.failed", - z.object({ - mcpName: z.string(), - url: z.string(), - }), -) - -export const Failed = NamedError.create( - "MCPFailed", - z.object({ - name: z.string(), - }), -) - -type MCPClient = Client - -export const Status = z - .discriminatedUnion("status", [ - z - .object({ - status: z.literal("connected"), - }) - .meta({ - ref: "MCPStatusConnected", - }), - z - .object({ - status: z.literal("disabled"), - }) - .meta({ - ref: "MCPStatusDisabled", - }), - z - .object({ - status: z.literal("failed"), - error: z.string(), - }) - .meta({ - ref: "MCPStatusFailed", - }), - z - .object({ - status: z.literal("needs_auth"), - }) - .meta({ - ref: "MCPStatusNeedsAuth", - }), - z - .object({ - status: z.literal("needs_client_registration"), - error: z.string(), - }) - .meta({ - ref: "MCPStatusNeedsClientRegistration", - }), - ]) - .meta({ - ref: "MCPStatus", - }) -export type Status = z.infer - -// Store 
transports for OAuth servers to allow finishing auth -type TransportWithAuth = StreamableHTTPClientTransport | SSEClientTransport -const pendingOAuthTransports = new Map() - -// Prompt cache types -type PromptInfo = Awaited>["prompts"][number] -type ResourceInfo = Awaited>["resources"][number] -type McpEntry = NonNullable[string] - -function isMcpConfigured(entry: McpEntry): entry is ConfigMCP.Info { - return typeof entry === "object" && entry !== null && "type" in entry -} - -const sanitize = (s: string) => s.replace(/[^a-zA-Z0-9_-]/g, "_") - -// Convert MCP tool definition to AI SDK Tool type -function convertMcpTool(mcpTool: MCPToolDef, client: MCPClient, timeout?: number): Tool { - const inputSchema = mcpTool.inputSchema - - // Spread first, then override type to ensure it's always "object" - const schema: JSONSchema7 = { - ...(inputSchema as JSONSchema7), - type: "object", - properties: (inputSchema.properties ?? {}) as JSONSchema7["properties"], - additionalProperties: false, - } - - return dynamicTool({ - description: mcpTool.description ?? "", - inputSchema: jsonSchema(schema), - execute: async (args: unknown) => { - return client.callTool( - { - name: mcpTool.name, - arguments: (args || {}) as Record, - }, - CallToolResultSchema, - { - resetTimeoutOnProgress: true, - timeout, - }, - ) - }, - }) -} - -function defs(key: string, client: MCPClient, timeout?: number) { - return Effect.tryPromise({ - try: () => withTimeout(client.listTools(), timeout ?? DEFAULT_TIMEOUT), - catch: (err) => (err instanceof Error ? 
err : new Error(String(err))), - }).pipe( - Effect.map((result) => result.tools), - Effect.catch((err) => { - log.error("failed to get tools from client", { key, error: err }) - return Effect.succeed(undefined) - }), - ) -} - -function fetchFromClient( - clientName: string, - client: Client, - listFn: (c: Client) => Promise, - label: string, -) { - return Effect.tryPromise({ - try: () => listFn(client), - catch: (e: any) => { - log.error(`failed to get ${label}`, { clientName, error: e.message }) - return e - }, - }).pipe( - Effect.map((items) => { - const out: Record = {} - const sanitizedClient = sanitize(clientName) - for (const item of items) { - out[sanitizedClient + ":" + sanitize(item.name)] = { ...item, client: clientName } - } - return out - }), - Effect.orElseSucceed(() => undefined), - ) -} - -interface CreateResult { - mcpClient?: MCPClient - status: Status - defs?: MCPToolDef[] -} - -interface AuthResult { - authorizationUrl: string - oauthState: string - client?: MCPClient -} - -// --- Effect Service --- - -interface State { - status: Record - clients: Record - defs: Record -} - -export interface Interface { - readonly status: () => Effect.Effect> - readonly clients: () => Effect.Effect> - readonly tools: () => Effect.Effect> - readonly prompts: () => Effect.Effect> - readonly resources: () => Effect.Effect> - readonly add: (name: string, mcp: ConfigMCP.Info) => Effect.Effect<{ status: Record | Status }> - readonly connect: (name: string) => Effect.Effect - readonly disconnect: (name: string) => Effect.Effect - readonly getPrompt: ( - clientName: string, - name: string, - args?: Record, - ) => Effect.Effect> | undefined> - readonly readResource: ( - clientName: string, - resourceUri: string, - ) => Effect.Effect> | undefined> - readonly startAuth: (mcpName: string) => Effect.Effect<{ authorizationUrl: string; oauthState: string }> - readonly authenticate: (mcpName: string) => Effect.Effect - readonly finishAuth: (mcpName: string, authorizationCode: 
string) => Effect.Effect - readonly removeAuth: (mcpName: string) => Effect.Effect - readonly supportsOAuth: (mcpName: string) => Effect.Effect - readonly hasStoredTokens: (mcpName: string) => Effect.Effect - readonly getAuthStatus: (mcpName: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/MCP") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - const auth = yield* McpAuth.Service - const bus = yield* Bus.Service - - type Transport = StdioClientTransport | StreamableHTTPClientTransport | SSEClientTransport - - /** - * Connect a client via the given transport with resource safety: - * on failure the transport is closed; on success the caller owns it. - */ - const connectTransport = (transport: Transport, timeout: number) => - Effect.acquireUseRelease( - Effect.succeed(transport), - (t) => - Effect.tryPromise({ - try: () => { - const client = new Client({ name: "opencode", version: InstallationVersion }) - return withTimeout(client.connect(t), timeout).then(() => client) - }, - catch: (e) => (e instanceof Error ? e : new Error(String(e))), - }), - (t, exit) => (Exit.isFailure(exit) ? Effect.tryPromise(() => t.close()).pipe(Effect.ignore) : Effect.void), - ) - - const DISABLED_RESULT: CreateResult = { status: { status: "disabled" } } - - const connectRemote = Effect.fn("MCP.connectRemote")(function* ( - key: string, - mcp: ConfigMCP.Info & { type: "remote" }, - ) { - const oauthDisabled = mcp.oauth === false - const oauthConfig = typeof mcp.oauth === "object" ? 
mcp.oauth : undefined - let authProvider: McpOAuthProvider | undefined - - if (!oauthDisabled) { - authProvider = new McpOAuthProvider( - key, - mcp.url, - { - clientId: oauthConfig?.clientId, - clientSecret: oauthConfig?.clientSecret, - scope: oauthConfig?.scope, - redirectUri: oauthConfig?.redirectUri, - }, - { - onRedirect: async (url) => { - log.info("oauth redirect requested", { key, url: url.toString() }) - }, - }, - auth, - ) - } - - const transports: Array<{ name: string; transport: TransportWithAuth }> = [ - { - name: "StreamableHTTP", - transport: new StreamableHTTPClientTransport(new URL(mcp.url), { - authProvider, - requestInit: mcp.headers ? { headers: mcp.headers } : undefined, - }), - }, - { - name: "SSE", - transport: new SSEClientTransport(new URL(mcp.url), { - authProvider, - requestInit: mcp.headers ? { headers: mcp.headers } : undefined, - }), - }, - ] - - const connectTimeout = mcp.timeout ?? DEFAULT_TIMEOUT - let lastStatus: Status | undefined - - for (const { name, transport } of transports) { - const result = yield* connectTransport(transport, connectTimeout).pipe( - Effect.map((client) => ({ client, transportName: name })), - Effect.catch((error) => { - const lastError = error instanceof Error ? error : new Error(String(error)) - const isAuthError = - error instanceof UnauthorizedError || (authProvider && lastError.message.includes("OAuth")) - - if (isAuthError) { - log.info("mcp server requires authentication", { key, transport: name }) - - if (lastError.message.includes("registration") || lastError.message.includes("client_id")) { - lastStatus = { - status: "needs_client_registration" as const, - error: "Server does not support dynamic client registration. Please provide clientId in config.", - } - return bus - .publish(TuiEvent.ToastShow, { - title: "MCP Authentication Required", - message: `Server "${key}" requires a pre-registered client ID. 
Add clientId to your config.`, - variant: "warning", - duration: 8000, - }) - .pipe(Effect.ignore, Effect.as(undefined)) - } else { - pendingOAuthTransports.set(key, transport) - lastStatus = { status: "needs_auth" as const } - return bus - .publish(TuiEvent.ToastShow, { - title: "MCP Authentication Required", - message: `Server "${key}" requires authentication. Run: opencode mcp auth ${key}`, - variant: "warning", - duration: 8000, - }) - .pipe(Effect.ignore, Effect.as(undefined)) - } - } - - log.debug("transport connection failed", { - key, - transport: name, - url: mcp.url, - error: lastError.message, - }) - lastStatus = { status: "failed" as const, error: lastError.message } - return Effect.succeed(undefined) - }), - ) - if (result) { - log.info("connected", { key, transport: result.transportName }) - return { client: result.client as MCPClient | undefined, status: { status: "connected" } as Status } - } - // If this was an auth error, stop trying other transports - if (lastStatus?.status === "needs_auth" || lastStatus?.status === "needs_client_registration") break - } - - return { - client: undefined as MCPClient | undefined, - status: (lastStatus ?? { status: "failed", error: "Unknown error" }) as Status, - } - }) - - const connectLocal = Effect.fn("MCP.connectLocal")(function* ( - key: string, - mcp: ConfigMCP.Info & { type: "local" }, - ) { - const [cmd, ...args] = mcp.command - const cwd = Instance.directory - const transport = new StdioClientTransport({ - stderr: "pipe", - command: cmd, - args, - cwd, - env: { - ...process.env, - ...(cmd === "opencode" ? { BUN_BE_BUN: "1" } : {}), - ...mcp.environment, - }, - }) - transport.stderr?.on("data", (chunk: Buffer) => { - log.info(`mcp stderr: ${chunk.toString()}`, { key }) - }) - - const connectTimeout = mcp.timeout ?? 
DEFAULT_TIMEOUT - return yield* connectTransport(transport, connectTimeout).pipe( - Effect.map((client): { client: MCPClient | undefined; status: Status } => ({ - client, - status: { status: "connected" }, - })), - Effect.catch((error): Effect.Effect<{ client: MCPClient | undefined; status: Status }> => { - const msg = error instanceof Error ? error.message : String(error) - log.error("local mcp startup failed", { key, command: mcp.command, cwd, error: msg }) - return Effect.succeed({ client: undefined, status: { status: "failed", error: msg } }) - }), - ) - }) - - const create = Effect.fn("MCP.create")(function* (key: string, mcp: ConfigMCP.Info) { - if (mcp.enabled === false) { - log.info("mcp server disabled", { key }) - return DISABLED_RESULT - } - - log.info("found", { key, type: mcp.type }) - - const { client: mcpClient, status } = - mcp.type === "remote" - ? yield* connectRemote(key, mcp as ConfigMCP.Info & { type: "remote" }) - : yield* connectLocal(key, mcp as ConfigMCP.Info & { type: "local" }) - - if (!mcpClient) { - return { status } satisfies CreateResult - } - - const listed = yield* defs(key, mcpClient, mcp.timeout) - if (!listed) { - yield* Effect.tryPromise(() => mcpClient.close()).pipe(Effect.ignore) - return { status: { status: "failed", error: "Failed to get tools" } } satisfies CreateResult - } - - log.info("create() successfully created client", { key, toolCount: listed.length }) - return { mcpClient, status, defs: listed } satisfies CreateResult - }) - const cfgSvc = yield* Config.Service - - const descendants = Effect.fnUntraced( - function* (pid: number) { - if (process.platform === "win32") return [] as number[] - const pids: number[] = [] - const queue = [pid] - while (queue.length > 0) { - const current = queue.shift()! 
- const handle = yield* spawner.spawn(ChildProcess.make("pgrep", ["-P", String(current)], { stdin: "ignore" })) - const text = yield* Stream.mkString(Stream.decodeText(handle.stdout)) - yield* handle.exitCode - for (const tok of text.split("\n")) { - const cpid = parseInt(tok, 10) - if (!isNaN(cpid) && !pids.includes(cpid)) { - pids.push(cpid) - queue.push(cpid) - } - } - } - return pids - }, - Effect.scoped, - Effect.catch(() => Effect.succeed([] as number[])), - ) - - function watch(s: State, name: string, client: MCPClient, bridge: EffectBridge.Shape, timeout?: number) { - client.setNotificationHandler(ToolListChangedNotificationSchema, async () => { - log.info("tools list changed notification received", { server: name }) - if (s.clients[name] !== client || s.status[name]?.status !== "connected") return - - const listed = await bridge.promise(defs(name, client, timeout)) - if (!listed) return - if (s.clients[name] !== client || s.status[name]?.status !== "connected") return - - s.defs[name] = listed - await bridge.promise(bus.publish(ToolsChanged, { server: name }).pipe(Effect.ignore)) - }) - } - - const state = yield* InstanceState.make( - Effect.fn("MCP.state")(function* () { - const cfg = yield* cfgSvc.get() - const bridge = yield* EffectBridge.make() - const config = cfg.mcp ?? {} - const s: State = { - status: {}, - clients: {}, - defs: {}, - } - - yield* Effect.forEach( - Object.entries(config), - ([key, mcp]) => - Effect.gen(function* () { - if (!isMcpConfigured(mcp)) { - log.error("Ignoring MCP config entry without type", { key }) - return - } - - if (mcp.enabled === false) { - s.status[key] = { status: "disabled" } - return - } - - const result = yield* create(key, mcp).pipe(Effect.catch(() => Effect.void)) - if (!result) return - - s.status[key] = result.status - if (result.mcpClient) { - s.clients[key] = result.mcpClient - s.defs[key] = result.defs! 
- watch(s, key, result.mcpClient, bridge, mcp.timeout) - } - }), - { concurrency: "unbounded" }, - ) - - yield* Effect.addFinalizer(() => - Effect.gen(function* () { - yield* Effect.forEach( - Object.values(s.clients), - (client) => - Effect.gen(function* () { - const pid = client.transport instanceof StdioClientTransport ? client.transport.pid : null - if (typeof pid === "number") { - const pids = yield* descendants(pid) - for (const dpid of pids) { - try { - process.kill(dpid, "SIGTERM") - } catch {} - } - } - yield* Effect.tryPromise(() => client.close()).pipe(Effect.ignore) - }), - { concurrency: "unbounded" }, - ) - pendingOAuthTransports.clear() - }), - ) - - return s - }), - ) - - function closeClient(s: State, name: string) { - const client = s.clients[name] - delete s.defs[name] - if (!client) return Effect.void - return Effect.tryPromise(() => client.close()).pipe(Effect.ignore) - } - - const storeClient = Effect.fnUntraced(function* ( - s: State, - name: string, - client: MCPClient, - listed: MCPToolDef[], - timeout?: number, - ) { - const bridge = yield* EffectBridge.make() - yield* closeClient(s, name) - s.status[name] = { status: "connected" } - s.clients[name] = client - s.defs[name] = listed - watch(s, name, client, bridge, timeout) - return s.status[name] - }) - - const status = Effect.fn("MCP.status")(function* () { - const s = yield* InstanceState.get(state) - - const cfg = yield* cfgSvc.get() - const config = cfg.mcp ?? {} - const result: Record = {} - - for (const [key, mcp] of Object.entries(config)) { - if (!isMcpConfigured(mcp)) continue - result[key] = s.status[key] ?? 
{ status: "disabled" } - } - - return result - }) - - const clients = Effect.fn("MCP.clients")(function* () { - const s = yield* InstanceState.get(state) - return s.clients - }) - - const createAndStore = Effect.fn("MCP.createAndStore")(function* (name: string, mcp: ConfigMCP.Info) { - const s = yield* InstanceState.get(state) - const result = yield* create(name, mcp) - - s.status[name] = result.status - if (!result.mcpClient) { - yield* closeClient(s, name) - delete s.clients[name] - return result.status - } - - return yield* storeClient(s, name, result.mcpClient, result.defs!, mcp.timeout) - }) - - const add = Effect.fn("MCP.add")(function* (name: string, mcp: ConfigMCP.Info) { - yield* createAndStore(name, mcp) - const s = yield* InstanceState.get(state) - return { status: s.status } - }) - - const connect = Effect.fn("MCP.connect")(function* (name: string) { - const mcp = yield* getMcpConfig(name) - if (!mcp) { - log.error("MCP config not found or invalid", { name }) - return - } - yield* createAndStore(name, { ...mcp, enabled: true }) - }) - - const disconnect = Effect.fn("MCP.disconnect")(function* (name: string) { - const s = yield* InstanceState.get(state) - yield* closeClient(s, name) - delete s.clients[name] - s.status[name] = { status: "disabled" } - }) - - const tools = Effect.fn("MCP.tools")(function* () { - const result: Record = {} - const s = yield* InstanceState.get(state) - - const cfg = yield* cfgSvc.get() - const config = cfg.mcp ?? {} - const defaultTimeout = cfg.experimental?.mcp_timeout - - const connectedClients = Object.entries(s.clients).filter( - ([clientName]) => s.status[clientName]?.status === "connected", - ) - - yield* Effect.forEach( - connectedClients, - ([clientName, client]) => - Effect.gen(function* () { - const mcpConfig = config[clientName] - const entry = mcpConfig && isMcpConfigured(mcpConfig) ? 
mcpConfig : undefined - - const listed = s.defs[clientName] - if (!listed) { - log.warn("missing cached tools for connected server", { clientName }) - return - } - - const timeout = entry?.timeout ?? defaultTimeout - for (const mcpTool of listed) { - result[sanitize(clientName) + "_" + sanitize(mcpTool.name)] = convertMcpTool(mcpTool, client, timeout) - } - }), - { concurrency: "unbounded" }, - ) - return result - }) - - function collectFromConnected( - s: State, - listFn: (c: Client) => Promise, - label: string, - ) { - return Effect.forEach( - Object.entries(s.clients).filter(([name]) => s.status[name]?.status === "connected"), - ([clientName, client]) => - fetchFromClient(clientName, client, listFn, label).pipe(Effect.map((items) => Object.entries(items ?? {}))), - { concurrency: "unbounded" }, - ).pipe(Effect.map((results) => Object.fromEntries(results.flat()))) - } - - const prompts = Effect.fn("MCP.prompts")(function* () { - const s = yield* InstanceState.get(state) - return yield* collectFromConnected(s, (c) => c.listPrompts().then((r) => r.prompts), "prompts") - }) - - const resources = Effect.fn("MCP.resources")(function* () { - const s = yield* InstanceState.get(state) - return yield* collectFromConnected(s, (c) => c.listResources().then((r) => r.resources), "resources") - }) - - const withClient = Effect.fnUntraced(function* ( - clientName: string, - fn: (client: MCPClient) => Promise, - label: string, - meta?: Record, - ) { - const s = yield* InstanceState.get(state) - const client = s.clients[clientName] - if (!client) { - log.warn(`client not found for ${label}`, { clientName }) - return undefined - } - return yield* Effect.tryPromise({ - try: () => fn(client), - catch: (e: any) => { - log.error(`failed to ${label}`, { clientName, ...meta, error: e?.message }) - return e - }, - }).pipe(Effect.orElseSucceed(() => undefined)) - }) - - const getPrompt = Effect.fn("MCP.getPrompt")(function* ( - clientName: string, - name: string, - args?: Record, - ) { - 
return yield* withClient(clientName, (client) => client.getPrompt({ name, arguments: args }), "getPrompt", { - promptName: name, - }) - }) - - const readResource = Effect.fn("MCP.readResource")(function* (clientName: string, resourceUri: string) { - return yield* withClient(clientName, (client) => client.readResource({ uri: resourceUri }), "readResource", { - resourceUri, - }) - }) - - const getMcpConfig = Effect.fnUntraced(function* (mcpName: string) { - const cfg = yield* cfgSvc.get() - const mcpConfig = cfg.mcp?.[mcpName] - if (!mcpConfig || !isMcpConfigured(mcpConfig)) return undefined - return mcpConfig - }) - - const startAuth = Effect.fn("MCP.startAuth")(function* (mcpName: string) { - const mcpConfig = yield* getMcpConfig(mcpName) - if (!mcpConfig) throw new Error(`MCP server ${mcpName} not found or disabled`) - if (mcpConfig.type !== "remote") throw new Error(`MCP server ${mcpName} is not a remote server`) - if (mcpConfig.oauth === false) throw new Error(`MCP server ${mcpName} has OAuth explicitly disabled`) - - // OAuth config is optional - if not provided, we'll use auto-discovery - const oauthConfig = typeof mcpConfig.oauth === "object" ? 
mcpConfig.oauth : undefined - - // Start the callback server with custom redirectUri if configured - yield* Effect.promise(() => McpOAuthCallback.ensureRunning(oauthConfig?.redirectUri)) - - const oauthState = Array.from(crypto.getRandomValues(new Uint8Array(32))) - .map((b) => b.toString(16).padStart(2, "0")) - .join("") - yield* auth.updateOAuthState(mcpName, oauthState) - let capturedUrl: URL | undefined - const authProvider = new McpOAuthProvider( - mcpName, - mcpConfig.url, - { - clientId: oauthConfig?.clientId, - clientSecret: oauthConfig?.clientSecret, - scope: oauthConfig?.scope, - redirectUri: oauthConfig?.redirectUri, - }, - { - onRedirect: async (url) => { - capturedUrl = url - }, - }, - auth, - ) - - const transport = new StreamableHTTPClientTransport(new URL(mcpConfig.url), { authProvider }) - - return yield* Effect.tryPromise({ - try: () => { - const client = new Client({ name: "opencode", version: InstallationVersion }) - return client - .connect(transport) - .then(() => ({ authorizationUrl: "", oauthState, client }) satisfies AuthResult) - }, - catch: (error) => error, - }).pipe( - Effect.catch((error) => { - if (error instanceof UnauthorizedError && capturedUrl) { - pendingOAuthTransports.set(mcpName, transport) - return Effect.succeed({ authorizationUrl: capturedUrl.toString(), oauthState } satisfies AuthResult) - } - return Effect.die(error) - }), - ) - }) - - const authenticate = Effect.fn("MCP.authenticate")(function* (mcpName: string) { - const result = yield* startAuth(mcpName) - if (!result.authorizationUrl) { - const client = "client" in result ? result.client : undefined - const mcpConfig = yield* getMcpConfig(mcpName) - if (!mcpConfig) { - yield* Effect.tryPromise(() => client?.close() ?? Promise.resolve()).pipe(Effect.ignore) - return { status: "failed", error: "MCP config not found after auth" } as Status - } - - const listed = client ? 
yield* defs(mcpName, client, mcpConfig.timeout) : undefined - if (!client || !listed) { - yield* Effect.tryPromise(() => client?.close() ?? Promise.resolve()).pipe(Effect.ignore) - return { status: "failed", error: "Failed to get tools" } as Status - } - - const s = yield* InstanceState.get(state) - yield* auth.clearOAuthState(mcpName) - return yield* storeClient(s, mcpName, client, listed, mcpConfig.timeout) - } - - log.info("opening browser for oauth", { mcpName, url: result.authorizationUrl, state: result.oauthState }) - - const callbackPromise = McpOAuthCallback.waitForCallback(result.oauthState, mcpName) - - yield* Effect.tryPromise(() => open(result.authorizationUrl)).pipe( - Effect.flatMap((subprocess) => - Effect.callback((resume) => { - const timer = setTimeout(() => resume(Effect.void), 500) - subprocess.on("error", (err) => { - clearTimeout(timer) - resume(Effect.fail(err)) - }) - subprocess.on("exit", (code) => { - if (code !== null && code !== 0) { - clearTimeout(timer) - resume(Effect.fail(new Error(`Browser open failed with exit code ${code}`))) - } - }) - }), - ), - Effect.catch(() => { - log.warn("failed to open browser, user must open URL manually", { mcpName }) - return bus.publish(BrowserOpenFailed, { mcpName, url: result.authorizationUrl }).pipe(Effect.ignore) - }), - ) - - const code = yield* Effect.promise(() => callbackPromise) - - const storedState = yield* auth.getOAuthState(mcpName) - if (storedState !== result.oauthState) { - yield* auth.clearOAuthState(mcpName) - throw new Error("OAuth state mismatch - potential CSRF attack") - } - yield* auth.clearOAuthState(mcpName) - return yield* finishAuth(mcpName, code) - }) - - const finishAuth = Effect.fn("MCP.finishAuth")(function* (mcpName: string, authorizationCode: string) { - const transport = pendingOAuthTransports.get(mcpName) - if (!transport) throw new Error(`No pending OAuth flow for MCP server: ${mcpName}`) - - const result = yield* Effect.tryPromise({ - try: () => 
transport.finishAuth(authorizationCode).then(() => true as const), - catch: (error) => { - log.error("failed to finish oauth", { mcpName, error }) - return error - }, - }).pipe(Effect.option) - - if (Option.isNone(result)) { - return { status: "failed", error: "OAuth completion failed" } as Status - } - - yield* auth.clearCodeVerifier(mcpName) - pendingOAuthTransports.delete(mcpName) - - const mcpConfig = yield* getMcpConfig(mcpName) - if (!mcpConfig) return { status: "failed", error: "MCP config not found after auth" } as Status - - return yield* createAndStore(mcpName, mcpConfig) - }) - - const removeAuth = Effect.fn("MCP.removeAuth")(function* (mcpName: string) { - yield* auth.remove(mcpName) - McpOAuthCallback.cancelPending(mcpName) - pendingOAuthTransports.delete(mcpName) - log.info("removed oauth credentials", { mcpName }) - }) - - const supportsOAuth = Effect.fn("MCP.supportsOAuth")(function* (mcpName: string) { - const mcpConfig = yield* getMcpConfig(mcpName) - if (!mcpConfig) return false - return mcpConfig.type === "remote" && mcpConfig.oauth !== false - }) - - const hasStoredTokens = Effect.fn("MCP.hasStoredTokens")(function* (mcpName: string) { - const entry = yield* auth.get(mcpName) - return !!entry?.tokens - }) - - const getAuthStatus = Effect.fn("MCP.getAuthStatus")(function* (mcpName: string) { - const entry = yield* auth.get(mcpName) - if (!entry?.tokens) return "not_authenticated" as AuthStatus - const expired = yield* auth.isTokenExpired(mcpName) - return (expired ? 
"expired" : "authenticated") as AuthStatus - }) - - return Service.of({ - status, - clients, - tools, - prompts, - resources, - add, - connect, - disconnect, - getPrompt, - readResource, - startAuth, - authenticate, - finishAuth, - removeAuth, - supportsOAuth, - hasStoredTokens, - getAuthStatus, - }) - }), -) - -export type AuthStatus = "authenticated" | "expired" | "not_authenticated" - -// --- Per-service runtime --- - -export const defaultLayer = layer.pipe( - Layer.provide(McpAuth.layer), - Layer.provide(Bus.layer), - Layer.provide(Config.defaultLayer), - Layer.provide(CrossSpawnSpawner.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), -) From 49bbea5aed2c4662c9740745b760817c1a88cd56 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:36:45 -0400 Subject: [PATCH 051/201] refactor: collapse snapshot barrel into snapshot/index.ts (#22916) --- packages/opencode/src/snapshot/index.ts | 778 ++++++++++++++++++++- packages/opencode/src/snapshot/snapshot.ts | 775 -------------------- 2 files changed, 777 insertions(+), 776 deletions(-) delete mode 100644 packages/opencode/src/snapshot/snapshot.ts diff --git a/packages/opencode/src/snapshot/index.ts b/packages/opencode/src/snapshot/index.ts index 49eafe4450..d38034e998 100644 --- a/packages/opencode/src/snapshot/index.ts +++ b/packages/opencode/src/snapshot/index.ts @@ -1 +1,777 @@ -export * as Snapshot from "./snapshot" +import { Cause, Duration, Effect, Layer, Schedule, Semaphore, Context, Stream } from "effect" +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" +import { formatPatch, structuredPatch } from "diff" +import path from "path" +import z from "zod" +import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { InstanceState } from "@/effect" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { Hash } from "@opencode-ai/shared/util/hash" +import { Config } from "../config" +import { Global } from "../global" +import { Log 
} from "../util" + +export const Patch = z.object({ + hash: z.string(), + files: z.string().array(), +}) +export type Patch = z.infer + +export const FileDiff = z + .object({ + file: z.string(), + patch: z.string(), + additions: z.number(), + deletions: z.number(), + status: z.enum(["added", "deleted", "modified"]).optional(), + }) + .meta({ + ref: "SnapshotFileDiff", + }) +export type FileDiff = z.infer + +const log = Log.create({ service: "snapshot" }) +const prune = "7.days" +const limit = 2 * 1024 * 1024 +const core = ["-c", "core.longpaths=true", "-c", "core.symlinks=true"] +const cfg = ["-c", "core.autocrlf=false", ...core] +const quote = [...cfg, "-c", "core.quotepath=false"] +interface GitResult { + readonly code: ChildProcessSpawner.ExitCode + readonly text: string + readonly stderr: string +} + +type State = Omit + +export interface Interface { + readonly init: () => Effect.Effect + readonly cleanup: () => Effect.Effect + readonly track: () => Effect.Effect + readonly patch: (hash: string) => Effect.Effect + readonly restore: (snapshot: string) => Effect.Effect + readonly revert: (patches: Patch[]) => Effect.Effect + readonly diff: (hash: string) => Effect.Effect + readonly diffFull: (from: string, to: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Snapshot") {} + +export const layer: Layer.Layer< + Service, + never, + AppFileSystem.Service | ChildProcessSpawner.ChildProcessSpawner | Config.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const fs = yield* AppFileSystem.Service + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + const config = yield* Config.Service + const locks = new Map() + + const lock = (key: string) => { + const hit = locks.get(key) + if (hit) return hit + + const next = Semaphore.makeUnsafe(1) + locks.set(key, next) + return next + } + + const state = yield* InstanceState.make( + Effect.fn("Snapshot.state")(function* (ctx) { + const state = { + directory: 
ctx.directory, + worktree: ctx.worktree, + gitdir: path.join(Global.Path.data, "snapshot", ctx.project.id, Hash.fast(ctx.worktree)), + vcs: ctx.project.vcs, + } + + const args = (cmd: string[]) => ["--git-dir", state.gitdir, "--work-tree", state.worktree, ...cmd] + + const enc = new TextEncoder() + const feed = (list: string[]) => Stream.make(enc.encode(list.join("\0") + "\0")) + + const git = Effect.fnUntraced( + function* ( + cmd: string[], + opts?: { cwd?: string; env?: Record; stdin?: ChildProcess.CommandInput }, + ) { + const proc = ChildProcess.make("git", cmd, { + cwd: opts?.cwd, + env: opts?.env, + extendEnv: true, + stdin: opts?.stdin, + }) + const handle = yield* spawner.spawn(proc) + const [text, stderr] = yield* Effect.all( + [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + return { code, text, stderr } satisfies GitResult + }, + Effect.scoped, + Effect.catch((err) => + Effect.succeed({ + code: ChildProcessSpawner.ExitCode(1), + text: "", + stderr: err instanceof Error ? 
err.message : String(err), + }), + ), + ) + + const ignore = Effect.fnUntraced(function* (files: string[]) { + if (!files.length) return new Set() + const check = yield* git( + [ + ...quote, + "--git-dir", + path.join(state.worktree, ".git"), + "--work-tree", + state.worktree, + "check-ignore", + "--no-index", + "--stdin", + "-z", + ], + { + cwd: state.directory, + stdin: feed(files), + }, + ) + if (check.code !== 0 && check.code !== 1) return new Set() + return new Set(check.text.split("\0").filter(Boolean)) + }) + + const drop = Effect.fnUntraced(function* (files: string[]) { + if (!files.length) return + yield* git( + [ + ...cfg, + ...args(["rm", "--cached", "-f", "--ignore-unmatch", "--pathspec-from-file=-", "--pathspec-file-nul"]), + ], + { + cwd: state.directory, + stdin: feed(files), + }, + ) + }) + + const stage = Effect.fnUntraced(function* (files: string[]) { + if (!files.length) return + const result = yield* git( + [...cfg, ...args(["add", "--all", "--sparse", "--pathspec-from-file=-", "--pathspec-file-nul"])], + { + cwd: state.directory, + stdin: feed(files), + }, + ) + if (result.code === 0) return + log.warn("failed to add snapshot files", { + exitCode: result.code, + stderr: result.stderr, + }) + }) + + const exists = (file: string) => fs.exists(file).pipe(Effect.orDie) + const read = (file: string) => fs.readFileString(file).pipe(Effect.catch(() => Effect.succeed(""))) + const remove = (file: string) => fs.remove(file).pipe(Effect.catch(() => Effect.void)) + const locked = (fx: Effect.Effect) => lock(state.gitdir).withPermits(1)(fx) + + const enabled = Effect.fnUntraced(function* () { + if (state.vcs !== "git") return false + return (yield* config.get()).snapshot !== false + }) + + const excludes = Effect.fnUntraced(function* () { + const result = yield* git(["rev-parse", "--path-format=absolute", "--git-path", "info/exclude"], { + cwd: state.worktree, + }) + const file = result.text.trim() + if (!file) return + if (!(yield* exists(file))) return + 
return file + }) + + const sync = Effect.fnUntraced(function* (list: string[] = []) { + const file = yield* excludes() + const target = path.join(state.gitdir, "info", "exclude") + const text = [ + file ? (yield* read(file)).trimEnd() : "", + ...list.map((item) => `/${item.replaceAll("\\", "/")}`), + ] + .filter(Boolean) + .join("\n") + yield* fs.ensureDir(path.join(state.gitdir, "info")).pipe(Effect.orDie) + yield* fs.writeFileString(target, text ? `${text}\n` : "").pipe(Effect.orDie) + }) + + const add = Effect.fnUntraced(function* () { + yield* sync() + const [diff, other] = yield* Effect.all( + [ + git([...quote, ...args(["diff-files", "--name-only", "-z", "--", "."])], { + cwd: state.directory, + }), + git([...quote, ...args(["ls-files", "--others", "--exclude-standard", "-z", "--", "."])], { + cwd: state.directory, + }), + ], + { concurrency: 2 }, + ) + if (diff.code !== 0 || other.code !== 0) { + log.warn("failed to list snapshot files", { + diffCode: diff.code, + diffStderr: diff.stderr, + otherCode: other.code, + otherStderr: other.stderr, + }) + return + } + + const tracked = diff.text.split("\0").filter(Boolean) + const untracked = other.text.split("\0").filter(Boolean) + const all = Array.from(new Set([...tracked, ...untracked])) + if (!all.length) return + + // Resolve source-repo ignore rules against the exact candidate set. + // --no-index keeps this pattern-based even when a path is already tracked. 
+ const ignored = yield* ignore(all) + + // Remove newly-ignored files from snapshot index to prevent re-adding + if (ignored.size > 0) { + const ignoredFiles = Array.from(ignored) + log.info("removing gitignored files from snapshot", { count: ignoredFiles.length }) + yield* drop(ignoredFiles) + } + + const allow = all.filter((item) => !ignored.has(item)) + if (!allow.length) return + + const large = new Set( + (yield* Effect.all( + allow.map((item) => + fs + .stat(path.join(state.directory, item)) + .pipe(Effect.catch(() => Effect.void)) + .pipe( + Effect.map((stat) => { + if (!stat || stat.type !== "File") return + const size = typeof stat.size === "bigint" ? Number(stat.size) : stat.size + return size > limit ? item : undefined + }), + ), + ), + { concurrency: 8 }, + )).filter((item): item is string => Boolean(item)), + ) + const block = new Set(untracked.filter((item) => large.has(item))) + yield* sync(Array.from(block)) + // Stage only the allowed candidate paths so snapshot updates stay scoped. 
+ yield* stage(allow.filter((item) => !block.has(item))) + }) + + const cleanup = Effect.fnUntraced(function* () { + return yield* locked( + Effect.gen(function* () { + if (!(yield* enabled())) return + if (!(yield* exists(state.gitdir))) return + const result = yield* git(args(["gc", `--prune=${prune}`]), { cwd: state.directory }) + if (result.code !== 0) { + log.warn("cleanup failed", { + exitCode: result.code, + stderr: result.stderr, + }) + return + } + log.info("cleanup", { prune }) + }), + ) + }) + + const track = Effect.fnUntraced(function* () { + return yield* locked( + Effect.gen(function* () { + if (!(yield* enabled())) return + const existed = yield* exists(state.gitdir) + yield* fs.ensureDir(state.gitdir).pipe(Effect.orDie) + if (!existed) { + yield* git(["init"], { + env: { GIT_DIR: state.gitdir, GIT_WORK_TREE: state.worktree }, + }) + yield* git(["--git-dir", state.gitdir, "config", "core.autocrlf", "false"]) + yield* git(["--git-dir", state.gitdir, "config", "core.longpaths", "true"]) + yield* git(["--git-dir", state.gitdir, "config", "core.symlinks", "true"]) + yield* git(["--git-dir", state.gitdir, "config", "core.fsmonitor", "false"]) + log.info("initialized") + } + yield* add() + const result = yield* git(args(["write-tree"]), { cwd: state.directory }) + const hash = result.text.trim() + log.info("tracking", { hash, cwd: state.directory, git: state.gitdir }) + return hash + }), + ) + }) + + const patch = Effect.fnUntraced(function* (hash: string) { + return yield* locked( + Effect.gen(function* () { + yield* add() + const result = yield* git( + [...quote, ...args(["diff", "--cached", "--no-ext-diff", "--name-only", hash, "--", "."])], + { + cwd: state.directory, + }, + ) + if (result.code !== 0) { + log.warn("failed to get diff", { hash, exitCode: result.code }) + return { hash, files: [] } + } + const files = result.text + .trim() + .split("\n") + .map((x) => x.trim()) + .filter(Boolean) + + // Hide ignored-file removals from the user-facing 
patch output. + const ignored = yield* ignore(files) + + return { + hash, + files: files + .filter((item) => !ignored.has(item)) + .map((x) => path.join(state.worktree, x).replaceAll("\\", "/")), + } + }), + ) + }) + + const restore = Effect.fnUntraced(function* (snapshot: string) { + return yield* locked( + Effect.gen(function* () { + log.info("restore", { commit: snapshot }) + const result = yield* git([...core, ...args(["read-tree", snapshot])], { cwd: state.worktree }) + if (result.code === 0) { + const checkout = yield* git([...core, ...args(["checkout-index", "-a", "-f"])], { + cwd: state.worktree, + }) + if (checkout.code === 0) return + log.error("failed to restore snapshot", { + snapshot, + exitCode: checkout.code, + stderr: checkout.stderr, + }) + return + } + log.error("failed to restore snapshot", { + snapshot, + exitCode: result.code, + stderr: result.stderr, + }) + }), + ) + }) + + const revert = Effect.fnUntraced(function* (patches: Patch[]) { + return yield* locked( + Effect.gen(function* () { + const ops: { hash: string; file: string; rel: string }[] = [] + const seen = new Set() + for (const item of patches) { + for (const file of item.files) { + if (seen.has(file)) continue + seen.add(file) + ops.push({ + hash: item.hash, + file, + rel: path.relative(state.worktree, file).replaceAll("\\", "/"), + }) + } + } + + const single = Effect.fnUntraced(function* (op: (typeof ops)[number]) { + log.info("reverting", { file: op.file, hash: op.hash }) + const result = yield* git([...core, ...args(["checkout", op.hash, "--", op.file])], { + cwd: state.worktree, + }) + if (result.code === 0) return + const tree = yield* git([...core, ...args(["ls-tree", op.hash, "--", op.rel])], { + cwd: state.worktree, + }) + if (tree.code === 0 && tree.text.trim()) { + log.info("file existed in snapshot but checkout failed, keeping", { file: op.file, hash: op.hash }) + return + } + log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash }) + yield* 
remove(op.file) + }) + + const clash = (a: string, b: string) => a === b || a.startsWith(`${b}/`) || b.startsWith(`${a}/`) + + for (let i = 0; i < ops.length; ) { + const first = ops[i]! + const run = [first] + let j = i + 1 + // Only batch adjacent files when their paths cannot affect each other. + while (j < ops.length && run.length < 100) { + const next = ops[j]! + if (next.hash !== first.hash) break + if (run.some((item) => clash(item.rel, next.rel))) break + run.push(next) + j += 1 + } + + if (run.length === 1) { + yield* single(first) + i = j + continue + } + + const tree = yield* git( + [...core, ...args(["ls-tree", "--name-only", first.hash, "--", ...run.map((item) => item.rel)])], + { + cwd: state.worktree, + }, + ) + + if (tree.code !== 0) { + log.info("batched ls-tree failed, falling back to single-file revert", { + hash: first.hash, + files: run.length, + }) + for (const op of run) { + yield* single(op) + } + i = j + continue + } + + const have = new Set( + tree.text + .trim() + .split("\n") + .map((item) => item.trim()) + .filter(Boolean), + ) + const list = run.filter((item) => have.has(item.rel)) + if (list.length) { + log.info("reverting", { hash: first.hash, files: list.length }) + const result = yield* git( + [...core, ...args(["checkout", first.hash, "--", ...list.map((item) => item.file)])], + { + cwd: state.worktree, + }, + ) + if (result.code !== 0) { + log.info("batched checkout failed, falling back to single-file revert", { + hash: first.hash, + files: list.length, + }) + for (const op of run) { + yield* single(op) + } + i = j + continue + } + } + + for (const op of run) { + if (have.has(op.rel)) continue + log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash }) + yield* remove(op.file) + } + + i = j + } + }), + ) + }) + + const diff = Effect.fnUntraced(function* (hash: string) { + return yield* locked( + Effect.gen(function* () { + yield* add() + const result = yield* git([...quote, ...args(["diff", 
"--cached", "--no-ext-diff", hash, "--", "."])], { + cwd: state.worktree, + }) + if (result.code !== 0) { + log.warn("failed to get diff", { + hash, + exitCode: result.code, + stderr: result.stderr, + }) + return "" + } + return result.text.trim() + }), + ) + }) + + const diffFull = Effect.fnUntraced(function* (from: string, to: string) { + return yield* locked( + Effect.gen(function* () { + type Row = { + file: string + status: "added" | "deleted" | "modified" + binary: boolean + additions: number + deletions: number + } + + type Ref = { + file: string + side: "before" | "after" + ref: string + } + + const show = Effect.fnUntraced(function* (row: Row) { + if (row.binary) return ["", ""] + if (row.status === "added") { + return [ + "", + yield* git([...cfg, ...args(["show", `${to}:${row.file}`])]).pipe(Effect.map((item) => item.text)), + ] + } + if (row.status === "deleted") { + return [ + yield* git([...cfg, ...args(["show", `${from}:${row.file}`])]).pipe( + Effect.map((item) => item.text), + ), + "", + ] + } + return yield* Effect.all( + [ + git([...cfg, ...args(["show", `${from}:${row.file}`])]).pipe(Effect.map((item) => item.text)), + git([...cfg, ...args(["show", `${to}:${row.file}`])]).pipe(Effect.map((item) => item.text)), + ], + { concurrency: 2 }, + ) + }) + + const load = Effect.fnUntraced( + function* (rows: Row[]) { + const refs = rows.flatMap((row) => { + if (row.binary) return [] + if (row.status === "added") + return [{ file: row.file, side: "after", ref: `${to}:${row.file}` } satisfies Ref] + if (row.status === "deleted") { + return [{ file: row.file, side: "before", ref: `${from}:${row.file}` } satisfies Ref] + } + return [ + { file: row.file, side: "before", ref: `${from}:${row.file}` } satisfies Ref, + { file: row.file, side: "after", ref: `${to}:${row.file}` } satisfies Ref, + ] + }) + if (!refs.length) return new Map() + + const proc = ChildProcess.make("git", [...cfg, ...args(["cat-file", "--batch"])], { + cwd: state.directory, + extendEnv: 
true, + stdin: Stream.make(new TextEncoder().encode(refs.map((item) => item.ref).join("\n") + "\n")), + }) + const handle = yield* spawner.spawn(proc) + const [out, err] = yield* Effect.all( + [Stream.mkUint8Array(handle.stdout), Stream.mkString(Stream.decodeText(handle.stderr))], + { concurrency: 2 }, + ) + const code = yield* handle.exitCode + if (code !== 0) { + log.info("git cat-file --batch failed during snapshot diff, falling back to per-file git show", { + stderr: err, + refs: refs.length, + }) + return + } + + const fail = (msg: string, extra?: Record) => { + log.info(msg, { ...extra, refs: refs.length }) + return undefined + } + + const map = new Map() + const dec = new TextDecoder() + let i = 0 + for (const ref of refs) { + let end = i + while (end < out.length && out[end] !== 10) end += 1 + if (end >= out.length) { + return fail( + "git cat-file --batch returned a truncated header during snapshot diff, falling back to per-file git show", + ) + } + + const head = dec.decode(out.slice(i, end)) + i = end + 1 + const hit = map.get(ref.file) ?? 
{ before: "", after: "" } + if (head.endsWith(" missing")) { + map.set(ref.file, hit) + continue + } + + const match = head.match(/^[0-9a-f]+ blob (\d+)$/) + if (!match) { + return fail( + "git cat-file --batch returned an unexpected header during snapshot diff, falling back to per-file git show", + { head }, + ) + } + + const size = Number(match[1]) + if (!Number.isInteger(size) || size < 0 || i + size >= out.length || out[i + size] !== 10) { + return fail( + "git cat-file --batch returned truncated content during snapshot diff, falling back to per-file git show", + { head }, + ) + } + + const text = dec.decode(out.slice(i, i + size)) + if (ref.side === "before") hit.before = text + if (ref.side === "after") hit.after = text + map.set(ref.file, hit) + i += size + 1 + } + + if (i !== out.length) { + return fail( + "git cat-file --batch returned trailing data during snapshot diff, falling back to per-file git show", + ) + } + + return map + }, + Effect.scoped, + Effect.catch(() => + Effect.succeed | undefined>(undefined), + ), + ) + + const result: FileDiff[] = [] + const status = new Map() + + const statuses = yield* git( + [...quote, ...args(["diff", "--no-ext-diff", "--name-status", "--no-renames", from, to, "--", "."])], + { cwd: state.directory }, + ) + + for (const line of statuses.text.trim().split("\n")) { + if (!line) continue + const [code, file] = line.split("\t") + if (!code || !file) continue + status.set(file, code.startsWith("A") ? "added" : code.startsWith("D") ? "deleted" : "modified") + } + + const numstat = yield* git( + [...quote, ...args(["diff", "--no-ext-diff", "--no-renames", "--numstat", from, to, "--", "."])], + { + cwd: state.directory, + }, + ) + + const rows = numstat.text + .trim() + .split("\n") + .filter(Boolean) + .flatMap((line) => { + const [adds, dels, file] = line.split("\t") + if (!file) return [] + const binary = adds === "-" && dels === "-" + const additions = binary ? 0 : parseInt(adds) + const deletions = binary ? 
0 : parseInt(dels) + return [ + { + file, + status: status.get(file) ?? "modified", + binary, + additions: Number.isFinite(additions) ? additions : 0, + deletions: Number.isFinite(deletions) ? deletions : 0, + } satisfies Row, + ] + }) + + // Hide ignored-file removals from the user-facing diff output. + const ignored = yield* ignore(rows.map((r) => r.file)) + if (ignored.size > 0) { + const filtered = rows.filter((r) => !ignored.has(r.file)) + rows.length = 0 + rows.push(...filtered) + } + + const step = 100 + const patch = (file: string, before: string, after: string) => + formatPatch(structuredPatch(file, file, before, after, "", "", { context: Number.MAX_SAFE_INTEGER })) + + for (let i = 0; i < rows.length; i += step) { + const run = rows.slice(i, i + step) + const text = yield* load(run) + + for (const row of run) { + const hit = text?.get(row.file) ?? { before: "", after: "" } + const [before, after] = row.binary ? ["", ""] : text ? [hit.before, hit.after] : yield* show(row) + result.push({ + file: row.file, + patch: row.binary ? 
"" : patch(row.file, before, after), + additions: row.additions, + deletions: row.deletions, + status: row.status, + }) + } + } + + return result + }), + ) + }) + + yield* cleanup().pipe( + Effect.catchCause((cause) => { + log.error("cleanup loop failed", { cause: Cause.pretty(cause) }) + return Effect.void + }), + Effect.repeat(Schedule.spaced(Duration.hours(1))), + Effect.delay(Duration.minutes(1)), + Effect.forkScoped, + ) + + return { cleanup, track, patch, restore, revert, diff, diffFull } + }), + ) + + return Service.of({ + init: Effect.fn("Snapshot.init")(function* () { + yield* InstanceState.get(state) + }), + cleanup: Effect.fn("Snapshot.cleanup")(function* () { + return yield* InstanceState.useEffect(state, (s) => s.cleanup()) + }), + track: Effect.fn("Snapshot.track")(function* () { + return yield* InstanceState.useEffect(state, (s) => s.track()) + }), + patch: Effect.fn("Snapshot.patch")(function* (hash: string) { + return yield* InstanceState.useEffect(state, (s) => s.patch(hash)) + }), + restore: Effect.fn("Snapshot.restore")(function* (snapshot: string) { + return yield* InstanceState.useEffect(state, (s) => s.restore(snapshot)) + }), + revert: Effect.fn("Snapshot.revert")(function* (patches: Patch[]) { + return yield* InstanceState.useEffect(state, (s) => s.revert(patches)) + }), + diff: Effect.fn("Snapshot.diff")(function* (hash: string) { + return yield* InstanceState.useEffect(state, (s) => s.diff(hash)) + }), + diffFull: Effect.fn("Snapshot.diffFull")(function* (from: string, to: string) { + return yield* InstanceState.useEffect(state, (s) => s.diffFull(from, to)) + }), + }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(CrossSpawnSpawner.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(Config.defaultLayer), +) + +export * as Snapshot from "." 
diff --git a/packages/opencode/src/snapshot/snapshot.ts b/packages/opencode/src/snapshot/snapshot.ts deleted file mode 100644 index 7a5c0a4dca..0000000000 --- a/packages/opencode/src/snapshot/snapshot.ts +++ /dev/null @@ -1,775 +0,0 @@ -import { Cause, Duration, Effect, Layer, Schedule, Semaphore, Context, Stream } from "effect" -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" -import { formatPatch, structuredPatch } from "diff" -import path from "path" -import z from "zod" -import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" -import { InstanceState } from "@/effect" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Hash } from "@opencode-ai/shared/util/hash" -import { Config } from "../config" -import { Global } from "../global" -import { Log } from "../util" - -export const Patch = z.object({ - hash: z.string(), - files: z.string().array(), -}) -export type Patch = z.infer - -export const FileDiff = z - .object({ - file: z.string(), - patch: z.string(), - additions: z.number(), - deletions: z.number(), - status: z.enum(["added", "deleted", "modified"]).optional(), - }) - .meta({ - ref: "SnapshotFileDiff", - }) -export type FileDiff = z.infer - -const log = Log.create({ service: "snapshot" }) -const prune = "7.days" -const limit = 2 * 1024 * 1024 -const core = ["-c", "core.longpaths=true", "-c", "core.symlinks=true"] -const cfg = ["-c", "core.autocrlf=false", ...core] -const quote = [...cfg, "-c", "core.quotepath=false"] -interface GitResult { - readonly code: ChildProcessSpawner.ExitCode - readonly text: string - readonly stderr: string -} - -type State = Omit - -export interface Interface { - readonly init: () => Effect.Effect - readonly cleanup: () => Effect.Effect - readonly track: () => Effect.Effect - readonly patch: (hash: string) => Effect.Effect - readonly restore: (snapshot: string) => Effect.Effect - readonly revert: (patches: Patch[]) => Effect.Effect - readonly diff: (hash: string) => 
Effect.Effect - readonly diffFull: (from: string, to: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Snapshot") {} - -export const layer: Layer.Layer< - Service, - never, - AppFileSystem.Service | ChildProcessSpawner.ChildProcessSpawner | Config.Service -> = Layer.effect( - Service, - Effect.gen(function* () { - const fs = yield* AppFileSystem.Service - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - const config = yield* Config.Service - const locks = new Map() - - const lock = (key: string) => { - const hit = locks.get(key) - if (hit) return hit - - const next = Semaphore.makeUnsafe(1) - locks.set(key, next) - return next - } - - const state = yield* InstanceState.make( - Effect.fn("Snapshot.state")(function* (ctx) { - const state = { - directory: ctx.directory, - worktree: ctx.worktree, - gitdir: path.join(Global.Path.data, "snapshot", ctx.project.id, Hash.fast(ctx.worktree)), - vcs: ctx.project.vcs, - } - - const args = (cmd: string[]) => ["--git-dir", state.gitdir, "--work-tree", state.worktree, ...cmd] - - const enc = new TextEncoder() - const feed = (list: string[]) => Stream.make(enc.encode(list.join("\0") + "\0")) - - const git = Effect.fnUntraced( - function* ( - cmd: string[], - opts?: { cwd?: string; env?: Record; stdin?: ChildProcess.CommandInput }, - ) { - const proc = ChildProcess.make("git", cmd, { - cwd: opts?.cwd, - env: opts?.env, - extendEnv: true, - stdin: opts?.stdin, - }) - const handle = yield* spawner.spawn(proc) - const [text, stderr] = yield* Effect.all( - [Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - return { code, text, stderr } satisfies GitResult - }, - Effect.scoped, - Effect.catch((err) => - Effect.succeed({ - code: ChildProcessSpawner.ExitCode(1), - text: "", - stderr: err instanceof Error ? 
err.message : String(err), - }), - ), - ) - - const ignore = Effect.fnUntraced(function* (files: string[]) { - if (!files.length) return new Set() - const check = yield* git( - [ - ...quote, - "--git-dir", - path.join(state.worktree, ".git"), - "--work-tree", - state.worktree, - "check-ignore", - "--no-index", - "--stdin", - "-z", - ], - { - cwd: state.directory, - stdin: feed(files), - }, - ) - if (check.code !== 0 && check.code !== 1) return new Set() - return new Set(check.text.split("\0").filter(Boolean)) - }) - - const drop = Effect.fnUntraced(function* (files: string[]) { - if (!files.length) return - yield* git( - [ - ...cfg, - ...args(["rm", "--cached", "-f", "--ignore-unmatch", "--pathspec-from-file=-", "--pathspec-file-nul"]), - ], - { - cwd: state.directory, - stdin: feed(files), - }, - ) - }) - - const stage = Effect.fnUntraced(function* (files: string[]) { - if (!files.length) return - const result = yield* git( - [...cfg, ...args(["add", "--all", "--sparse", "--pathspec-from-file=-", "--pathspec-file-nul"])], - { - cwd: state.directory, - stdin: feed(files), - }, - ) - if (result.code === 0) return - log.warn("failed to add snapshot files", { - exitCode: result.code, - stderr: result.stderr, - }) - }) - - const exists = (file: string) => fs.exists(file).pipe(Effect.orDie) - const read = (file: string) => fs.readFileString(file).pipe(Effect.catch(() => Effect.succeed(""))) - const remove = (file: string) => fs.remove(file).pipe(Effect.catch(() => Effect.void)) - const locked = (fx: Effect.Effect) => lock(state.gitdir).withPermits(1)(fx) - - const enabled = Effect.fnUntraced(function* () { - if (state.vcs !== "git") return false - return (yield* config.get()).snapshot !== false - }) - - const excludes = Effect.fnUntraced(function* () { - const result = yield* git(["rev-parse", "--path-format=absolute", "--git-path", "info/exclude"], { - cwd: state.worktree, - }) - const file = result.text.trim() - if (!file) return - if (!(yield* exists(file))) return - 
return file - }) - - const sync = Effect.fnUntraced(function* (list: string[] = []) { - const file = yield* excludes() - const target = path.join(state.gitdir, "info", "exclude") - const text = [ - file ? (yield* read(file)).trimEnd() : "", - ...list.map((item) => `/${item.replaceAll("\\", "/")}`), - ] - .filter(Boolean) - .join("\n") - yield* fs.ensureDir(path.join(state.gitdir, "info")).pipe(Effect.orDie) - yield* fs.writeFileString(target, text ? `${text}\n` : "").pipe(Effect.orDie) - }) - - const add = Effect.fnUntraced(function* () { - yield* sync() - const [diff, other] = yield* Effect.all( - [ - git([...quote, ...args(["diff-files", "--name-only", "-z", "--", "."])], { - cwd: state.directory, - }), - git([...quote, ...args(["ls-files", "--others", "--exclude-standard", "-z", "--", "."])], { - cwd: state.directory, - }), - ], - { concurrency: 2 }, - ) - if (diff.code !== 0 || other.code !== 0) { - log.warn("failed to list snapshot files", { - diffCode: diff.code, - diffStderr: diff.stderr, - otherCode: other.code, - otherStderr: other.stderr, - }) - return - } - - const tracked = diff.text.split("\0").filter(Boolean) - const untracked = other.text.split("\0").filter(Boolean) - const all = Array.from(new Set([...tracked, ...untracked])) - if (!all.length) return - - // Resolve source-repo ignore rules against the exact candidate set. - // --no-index keeps this pattern-based even when a path is already tracked. 
- const ignored = yield* ignore(all) - - // Remove newly-ignored files from snapshot index to prevent re-adding - if (ignored.size > 0) { - const ignoredFiles = Array.from(ignored) - log.info("removing gitignored files from snapshot", { count: ignoredFiles.length }) - yield* drop(ignoredFiles) - } - - const allow = all.filter((item) => !ignored.has(item)) - if (!allow.length) return - - const large = new Set( - (yield* Effect.all( - allow.map((item) => - fs - .stat(path.join(state.directory, item)) - .pipe(Effect.catch(() => Effect.void)) - .pipe( - Effect.map((stat) => { - if (!stat || stat.type !== "File") return - const size = typeof stat.size === "bigint" ? Number(stat.size) : stat.size - return size > limit ? item : undefined - }), - ), - ), - { concurrency: 8 }, - )).filter((item): item is string => Boolean(item)), - ) - const block = new Set(untracked.filter((item) => large.has(item))) - yield* sync(Array.from(block)) - // Stage only the allowed candidate paths so snapshot updates stay scoped. 
- yield* stage(allow.filter((item) => !block.has(item))) - }) - - const cleanup = Effect.fnUntraced(function* () { - return yield* locked( - Effect.gen(function* () { - if (!(yield* enabled())) return - if (!(yield* exists(state.gitdir))) return - const result = yield* git(args(["gc", `--prune=${prune}`]), { cwd: state.directory }) - if (result.code !== 0) { - log.warn("cleanup failed", { - exitCode: result.code, - stderr: result.stderr, - }) - return - } - log.info("cleanup", { prune }) - }), - ) - }) - - const track = Effect.fnUntraced(function* () { - return yield* locked( - Effect.gen(function* () { - if (!(yield* enabled())) return - const existed = yield* exists(state.gitdir) - yield* fs.ensureDir(state.gitdir).pipe(Effect.orDie) - if (!existed) { - yield* git(["init"], { - env: { GIT_DIR: state.gitdir, GIT_WORK_TREE: state.worktree }, - }) - yield* git(["--git-dir", state.gitdir, "config", "core.autocrlf", "false"]) - yield* git(["--git-dir", state.gitdir, "config", "core.longpaths", "true"]) - yield* git(["--git-dir", state.gitdir, "config", "core.symlinks", "true"]) - yield* git(["--git-dir", state.gitdir, "config", "core.fsmonitor", "false"]) - log.info("initialized") - } - yield* add() - const result = yield* git(args(["write-tree"]), { cwd: state.directory }) - const hash = result.text.trim() - log.info("tracking", { hash, cwd: state.directory, git: state.gitdir }) - return hash - }), - ) - }) - - const patch = Effect.fnUntraced(function* (hash: string) { - return yield* locked( - Effect.gen(function* () { - yield* add() - const result = yield* git( - [...quote, ...args(["diff", "--cached", "--no-ext-diff", "--name-only", hash, "--", "."])], - { - cwd: state.directory, - }, - ) - if (result.code !== 0) { - log.warn("failed to get diff", { hash, exitCode: result.code }) - return { hash, files: [] } - } - const files = result.text - .trim() - .split("\n") - .map((x) => x.trim()) - .filter(Boolean) - - // Hide ignored-file removals from the user-facing 
patch output. - const ignored = yield* ignore(files) - - return { - hash, - files: files - .filter((item) => !ignored.has(item)) - .map((x) => path.join(state.worktree, x).replaceAll("\\", "/")), - } - }), - ) - }) - - const restore = Effect.fnUntraced(function* (snapshot: string) { - return yield* locked( - Effect.gen(function* () { - log.info("restore", { commit: snapshot }) - const result = yield* git([...core, ...args(["read-tree", snapshot])], { cwd: state.worktree }) - if (result.code === 0) { - const checkout = yield* git([...core, ...args(["checkout-index", "-a", "-f"])], { - cwd: state.worktree, - }) - if (checkout.code === 0) return - log.error("failed to restore snapshot", { - snapshot, - exitCode: checkout.code, - stderr: checkout.stderr, - }) - return - } - log.error("failed to restore snapshot", { - snapshot, - exitCode: result.code, - stderr: result.stderr, - }) - }), - ) - }) - - const revert = Effect.fnUntraced(function* (patches: Patch[]) { - return yield* locked( - Effect.gen(function* () { - const ops: { hash: string; file: string; rel: string }[] = [] - const seen = new Set() - for (const item of patches) { - for (const file of item.files) { - if (seen.has(file)) continue - seen.add(file) - ops.push({ - hash: item.hash, - file, - rel: path.relative(state.worktree, file).replaceAll("\\", "/"), - }) - } - } - - const single = Effect.fnUntraced(function* (op: (typeof ops)[number]) { - log.info("reverting", { file: op.file, hash: op.hash }) - const result = yield* git([...core, ...args(["checkout", op.hash, "--", op.file])], { - cwd: state.worktree, - }) - if (result.code === 0) return - const tree = yield* git([...core, ...args(["ls-tree", op.hash, "--", op.rel])], { - cwd: state.worktree, - }) - if (tree.code === 0 && tree.text.trim()) { - log.info("file existed in snapshot but checkout failed, keeping", { file: op.file, hash: op.hash }) - return - } - log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash }) - yield* 
remove(op.file) - }) - - const clash = (a: string, b: string) => a === b || a.startsWith(`${b}/`) || b.startsWith(`${a}/`) - - for (let i = 0; i < ops.length; ) { - const first = ops[i]! - const run = [first] - let j = i + 1 - // Only batch adjacent files when their paths cannot affect each other. - while (j < ops.length && run.length < 100) { - const next = ops[j]! - if (next.hash !== first.hash) break - if (run.some((item) => clash(item.rel, next.rel))) break - run.push(next) - j += 1 - } - - if (run.length === 1) { - yield* single(first) - i = j - continue - } - - const tree = yield* git( - [...core, ...args(["ls-tree", "--name-only", first.hash, "--", ...run.map((item) => item.rel)])], - { - cwd: state.worktree, - }, - ) - - if (tree.code !== 0) { - log.info("batched ls-tree failed, falling back to single-file revert", { - hash: first.hash, - files: run.length, - }) - for (const op of run) { - yield* single(op) - } - i = j - continue - } - - const have = new Set( - tree.text - .trim() - .split("\n") - .map((item) => item.trim()) - .filter(Boolean), - ) - const list = run.filter((item) => have.has(item.rel)) - if (list.length) { - log.info("reverting", { hash: first.hash, files: list.length }) - const result = yield* git( - [...core, ...args(["checkout", first.hash, "--", ...list.map((item) => item.file)])], - { - cwd: state.worktree, - }, - ) - if (result.code !== 0) { - log.info("batched checkout failed, falling back to single-file revert", { - hash: first.hash, - files: list.length, - }) - for (const op of run) { - yield* single(op) - } - i = j - continue - } - } - - for (const op of run) { - if (have.has(op.rel)) continue - log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash }) - yield* remove(op.file) - } - - i = j - } - }), - ) - }) - - const diff = Effect.fnUntraced(function* (hash: string) { - return yield* locked( - Effect.gen(function* () { - yield* add() - const result = yield* git([...quote, ...args(["diff", 
"--cached", "--no-ext-diff", hash, "--", "."])], { - cwd: state.worktree, - }) - if (result.code !== 0) { - log.warn("failed to get diff", { - hash, - exitCode: result.code, - stderr: result.stderr, - }) - return "" - } - return result.text.trim() - }), - ) - }) - - const diffFull = Effect.fnUntraced(function* (from: string, to: string) { - return yield* locked( - Effect.gen(function* () { - type Row = { - file: string - status: "added" | "deleted" | "modified" - binary: boolean - additions: number - deletions: number - } - - type Ref = { - file: string - side: "before" | "after" - ref: string - } - - const show = Effect.fnUntraced(function* (row: Row) { - if (row.binary) return ["", ""] - if (row.status === "added") { - return [ - "", - yield* git([...cfg, ...args(["show", `${to}:${row.file}`])]).pipe(Effect.map((item) => item.text)), - ] - } - if (row.status === "deleted") { - return [ - yield* git([...cfg, ...args(["show", `${from}:${row.file}`])]).pipe( - Effect.map((item) => item.text), - ), - "", - ] - } - return yield* Effect.all( - [ - git([...cfg, ...args(["show", `${from}:${row.file}`])]).pipe(Effect.map((item) => item.text)), - git([...cfg, ...args(["show", `${to}:${row.file}`])]).pipe(Effect.map((item) => item.text)), - ], - { concurrency: 2 }, - ) - }) - - const load = Effect.fnUntraced( - function* (rows: Row[]) { - const refs = rows.flatMap((row) => { - if (row.binary) return [] - if (row.status === "added") - return [{ file: row.file, side: "after", ref: `${to}:${row.file}` } satisfies Ref] - if (row.status === "deleted") { - return [{ file: row.file, side: "before", ref: `${from}:${row.file}` } satisfies Ref] - } - return [ - { file: row.file, side: "before", ref: `${from}:${row.file}` } satisfies Ref, - { file: row.file, side: "after", ref: `${to}:${row.file}` } satisfies Ref, - ] - }) - if (!refs.length) return new Map() - - const proc = ChildProcess.make("git", [...cfg, ...args(["cat-file", "--batch"])], { - cwd: state.directory, - extendEnv: 
true, - stdin: Stream.make(new TextEncoder().encode(refs.map((item) => item.ref).join("\n") + "\n")), - }) - const handle = yield* spawner.spawn(proc) - const [out, err] = yield* Effect.all( - [Stream.mkUint8Array(handle.stdout), Stream.mkString(Stream.decodeText(handle.stderr))], - { concurrency: 2 }, - ) - const code = yield* handle.exitCode - if (code !== 0) { - log.info("git cat-file --batch failed during snapshot diff, falling back to per-file git show", { - stderr: err, - refs: refs.length, - }) - return - } - - const fail = (msg: string, extra?: Record) => { - log.info(msg, { ...extra, refs: refs.length }) - return undefined - } - - const map = new Map() - const dec = new TextDecoder() - let i = 0 - for (const ref of refs) { - let end = i - while (end < out.length && out[end] !== 10) end += 1 - if (end >= out.length) { - return fail( - "git cat-file --batch returned a truncated header during snapshot diff, falling back to per-file git show", - ) - } - - const head = dec.decode(out.slice(i, end)) - i = end + 1 - const hit = map.get(ref.file) ?? 
{ before: "", after: "" } - if (head.endsWith(" missing")) { - map.set(ref.file, hit) - continue - } - - const match = head.match(/^[0-9a-f]+ blob (\d+)$/) - if (!match) { - return fail( - "git cat-file --batch returned an unexpected header during snapshot diff, falling back to per-file git show", - { head }, - ) - } - - const size = Number(match[1]) - if (!Number.isInteger(size) || size < 0 || i + size >= out.length || out[i + size] !== 10) { - return fail( - "git cat-file --batch returned truncated content during snapshot diff, falling back to per-file git show", - { head }, - ) - } - - const text = dec.decode(out.slice(i, i + size)) - if (ref.side === "before") hit.before = text - if (ref.side === "after") hit.after = text - map.set(ref.file, hit) - i += size + 1 - } - - if (i !== out.length) { - return fail( - "git cat-file --batch returned trailing data during snapshot diff, falling back to per-file git show", - ) - } - - return map - }, - Effect.scoped, - Effect.catch(() => - Effect.succeed | undefined>(undefined), - ), - ) - - const result: FileDiff[] = [] - const status = new Map() - - const statuses = yield* git( - [...quote, ...args(["diff", "--no-ext-diff", "--name-status", "--no-renames", from, to, "--", "."])], - { cwd: state.directory }, - ) - - for (const line of statuses.text.trim().split("\n")) { - if (!line) continue - const [code, file] = line.split("\t") - if (!code || !file) continue - status.set(file, code.startsWith("A") ? "added" : code.startsWith("D") ? "deleted" : "modified") - } - - const numstat = yield* git( - [...quote, ...args(["diff", "--no-ext-diff", "--no-renames", "--numstat", from, to, "--", "."])], - { - cwd: state.directory, - }, - ) - - const rows = numstat.text - .trim() - .split("\n") - .filter(Boolean) - .flatMap((line) => { - const [adds, dels, file] = line.split("\t") - if (!file) return [] - const binary = adds === "-" && dels === "-" - const additions = binary ? 0 : parseInt(adds) - const deletions = binary ? 
0 : parseInt(dels) - return [ - { - file, - status: status.get(file) ?? "modified", - binary, - additions: Number.isFinite(additions) ? additions : 0, - deletions: Number.isFinite(deletions) ? deletions : 0, - } satisfies Row, - ] - }) - - // Hide ignored-file removals from the user-facing diff output. - const ignored = yield* ignore(rows.map((r) => r.file)) - if (ignored.size > 0) { - const filtered = rows.filter((r) => !ignored.has(r.file)) - rows.length = 0 - rows.push(...filtered) - } - - const step = 100 - const patch = (file: string, before: string, after: string) => - formatPatch(structuredPatch(file, file, before, after, "", "", { context: Number.MAX_SAFE_INTEGER })) - - for (let i = 0; i < rows.length; i += step) { - const run = rows.slice(i, i + step) - const text = yield* load(run) - - for (const row of run) { - const hit = text?.get(row.file) ?? { before: "", after: "" } - const [before, after] = row.binary ? ["", ""] : text ? [hit.before, hit.after] : yield* show(row) - result.push({ - file: row.file, - patch: row.binary ? 
"" : patch(row.file, before, after), - additions: row.additions, - deletions: row.deletions, - status: row.status, - }) - } - } - - return result - }), - ) - }) - - yield* cleanup().pipe( - Effect.catchCause((cause) => { - log.error("cleanup loop failed", { cause: Cause.pretty(cause) }) - return Effect.void - }), - Effect.repeat(Schedule.spaced(Duration.hours(1))), - Effect.delay(Duration.minutes(1)), - Effect.forkScoped, - ) - - return { cleanup, track, patch, restore, revert, diff, diffFull } - }), - ) - - return Service.of({ - init: Effect.fn("Snapshot.init")(function* () { - yield* InstanceState.get(state) - }), - cleanup: Effect.fn("Snapshot.cleanup")(function* () { - return yield* InstanceState.useEffect(state, (s) => s.cleanup()) - }), - track: Effect.fn("Snapshot.track")(function* () { - return yield* InstanceState.useEffect(state, (s) => s.track()) - }), - patch: Effect.fn("Snapshot.patch")(function* (hash: string) { - return yield* InstanceState.useEffect(state, (s) => s.patch(hash)) - }), - restore: Effect.fn("Snapshot.restore")(function* (snapshot: string) { - return yield* InstanceState.useEffect(state, (s) => s.restore(snapshot)) - }), - revert: Effect.fn("Snapshot.revert")(function* (patches: Patch[]) { - return yield* InstanceState.useEffect(state, (s) => s.revert(patches)) - }), - diff: Effect.fn("Snapshot.diff")(function* (hash: string) { - return yield* InstanceState.useEffect(state, (s) => s.diff(hash)) - }), - diffFull: Effect.fn("Snapshot.diffFull")(function* (from: string, to: string) { - return yield* InstanceState.useEffect(state, (s) => s.diffFull(from, to)) - }), - }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(CrossSpawnSpawner.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(Config.defaultLayer), -) From 2638e2acfa4dc5198dc3454986515022fac6559b Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:37:13 -0400 Subject: [PATCH 052/201] refactor: collapse plugin barrel into 
plugin/index.ts (#22914) --- packages/opencode/src/plugin/index.ts | 290 +++++++++++++++++- packages/opencode/src/plugin/plugin.ts | 287 ----------------- .../test/plugin/auth-override.test.ts | 2 +- 3 files changed, 290 insertions(+), 289 deletions(-) delete mode 100644 packages/opencode/src/plugin/plugin.ts diff --git a/packages/opencode/src/plugin/index.ts b/packages/opencode/src/plugin/index.ts index 20f38c41c2..dd2a784694 100644 --- a/packages/opencode/src/plugin/index.ts +++ b/packages/opencode/src/plugin/index.ts @@ -1 +1,289 @@ -export * as Plugin from "./plugin" +import type { + Hooks, + PluginInput, + Plugin as PluginInstance, + PluginModule, + WorkspaceAdaptor as PluginWorkspaceAdaptor, +} from "@opencode-ai/plugin" +import { Config } from "../config" +import { Bus } from "../bus" +import { Log } from "../util" +import { createOpencodeClient } from "@opencode-ai/sdk" +import { Flag } from "../flag/flag" +import { CodexAuthPlugin } from "./codex" +import { Session } from "../session" +import { NamedError } from "@opencode-ai/shared/util/error" +import { CopilotAuthPlugin } from "./github-copilot/copilot" +import { gitlabAuthPlugin as GitlabAuthPlugin } from "opencode-gitlab-auth" +import { PoeAuthPlugin } from "opencode-poe-auth" +import { CloudflareAIGatewayAuthPlugin, CloudflareWorkersAuthPlugin } from "./cloudflare" +import { Effect, Layer, Context, Stream } from "effect" +import { EffectBridge } from "@/effect" +import { InstanceState } from "@/effect" +import { errorMessage } from "@/util/error" +import { PluginLoader } from "./loader" +import { parsePluginSpecifier, readPluginId, readV1Plugin, resolvePluginId } from "./shared" +import { registerAdaptor } from "@/control-plane/adaptors" +import type { WorkspaceAdaptor } from "@/control-plane/types" + +const log = Log.create({ service: "plugin" }) + +type State = { + hooks: Hooks[] +} + +// Hook names that follow the (input, output) => Promise trigger pattern +type TriggerName = { + [K in keyof 
Hooks]-?: NonNullable extends (input: any, output: any) => Promise ? K : never +}[keyof Hooks] + +export interface Interface { + readonly trigger: < + Name extends TriggerName, + Input = Parameters[Name]>[0], + Output = Parameters[Name]>[1], + >( + name: Name, + input: Input, + output: Output, + ) => Effect.Effect + readonly list: () => Effect.Effect + readonly init: () => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Plugin") {} + +// Built-in plugins that are directly imported (not installed from npm) +const INTERNAL_PLUGINS: PluginInstance[] = [ + CodexAuthPlugin, + CopilotAuthPlugin, + GitlabAuthPlugin, + PoeAuthPlugin, + CloudflareWorkersAuthPlugin, + CloudflareAIGatewayAuthPlugin, +] + +function isServerPlugin(value: unknown): value is PluginInstance { + return typeof value === "function" +} + +function getServerPlugin(value: unknown) { + if (isServerPlugin(value)) return value + if (!value || typeof value !== "object" || !("server" in value)) return + if (!isServerPlugin(value.server)) return + return value.server +} + +function getLegacyPlugins(mod: Record) { + const seen = new Set() + const result: PluginInstance[] = [] + + for (const entry of Object.values(mod)) { + if (seen.has(entry)) continue + seen.add(entry) + const plugin = getServerPlugin(entry) + if (!plugin) throw new TypeError("Plugin export is not a function") + result.push(plugin) + } + + return result +} + +async function applyPlugin(load: PluginLoader.Loaded, input: PluginInput, hooks: Hooks[]) { + const plugin = readV1Plugin(load.mod, load.spec, "server", "detect") + if (plugin) { + await resolvePluginId(load.source, load.spec, load.target, readPluginId(plugin.id, load.spec), load.pkg) + hooks.push(await (plugin as PluginModule).server(input, load.options)) + return + } + + for (const server of getLegacyPlugins(load.mod)) { + hooks.push(await server(input, load.options)) + } +} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + 
const bus = yield* Bus.Service + const config = yield* Config.Service + + const state = yield* InstanceState.make( + Effect.fn("Plugin.state")(function* (ctx) { + const hooks: Hooks[] = [] + const bridge = yield* EffectBridge.make() + + function publishPluginError(message: string) { + bridge.fork(bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })) + } + + const { Server } = yield* Effect.promise(() => import("../server/server")) + + const client = createOpencodeClient({ + baseUrl: "http://localhost:4096", + directory: ctx.directory, + headers: Flag.OPENCODE_SERVER_PASSWORD + ? { + Authorization: `Basic ${Buffer.from(`${Flag.OPENCODE_SERVER_USERNAME ?? "opencode"}:${Flag.OPENCODE_SERVER_PASSWORD}`).toString("base64")}`, + } + : undefined, + fetch: async (...args) => (await Server.Default()).app.fetch(...args), + }) + const cfg = yield* config.get() + const input: PluginInput = { + client, + project: ctx.project, + worktree: ctx.worktree, + directory: ctx.directory, + experimental_workspace: { + register(type: string, adaptor: PluginWorkspaceAdaptor) { + registerAdaptor(ctx.project.id, type, adaptor as WorkspaceAdaptor) + }, + }, + get serverUrl(): URL { + return Server.url ?? new URL("http://localhost:4096") + }, + // @ts-expect-error + $: typeof Bun === "undefined" ? undefined : Bun.$, + } + + for (const plugin of INTERNAL_PLUGINS) { + log.info("loading internal plugin", { name: plugin.name }) + const init = yield* Effect.tryPromise({ + try: () => plugin(input), + catch: (err) => { + log.error("failed to load internal plugin", { name: plugin.name, error: err }) + }, + }).pipe(Effect.option) + if (init._tag === "Some") hooks.push(init.value) + } + + const plugins = Flag.OPENCODE_PURE ? [] : (cfg.plugin_origins ?? 
[]) + if (Flag.OPENCODE_PURE && cfg.plugin_origins?.length) { + log.info("skipping external plugins in pure mode", { count: cfg.plugin_origins.length }) + } + if (plugins.length) yield* config.waitForDependencies() + + const loaded = yield* Effect.promise(() => + PluginLoader.loadExternal({ + items: plugins, + kind: "server", + report: { + start(candidate) { + log.info("loading plugin", { path: candidate.plan.spec }) + }, + missing(candidate, _retry, message) { + log.warn("plugin has no server entrypoint", { path: candidate.plan.spec, message }) + }, + error(candidate, _retry, stage, error, resolved) { + const spec = candidate.plan.spec + const cause = error instanceof Error ? (error.cause ?? error) : error + const message = stage === "load" ? errorMessage(error) : errorMessage(cause) + + if (stage === "install") { + const parsed = parsePluginSpecifier(spec) + log.error("failed to install plugin", { pkg: parsed.pkg, version: parsed.version, error: message }) + publishPluginError(`Failed to install plugin ${parsed.pkg}@${parsed.version}: ${message}`) + return + } + + if (stage === "compatibility") { + log.warn("plugin incompatible", { path: spec, error: message }) + publishPluginError(`Plugin ${spec} skipped: ${message}`) + return + } + + if (stage === "entry") { + log.error("failed to resolve plugin server entry", { path: spec, error: message }) + publishPluginError(`Failed to load plugin ${spec}: ${message}`) + return + } + + log.error("failed to load plugin", { path: spec, target: resolved?.entry, error: message }) + publishPluginError(`Failed to load plugin ${spec}: ${message}`) + }, + }, + }), + ) + for (const load of loaded) { + if (!load) continue + + // Keep plugin execution sequential so hook registration and execution + // order remains deterministic across plugin runs. 
+ yield* Effect.tryPromise({ + try: () => applyPlugin(load, input, hooks), + catch: (err) => { + const message = errorMessage(err) + log.error("failed to load plugin", { path: load.spec, error: message }) + return message + }, + }).pipe( + Effect.catch(() => { + // TODO: make proper events for this + // bus.publish(Session.Event.Error, { + // error: new NamedError.Unknown({ + // message: `Failed to load plugin ${load.spec}: ${message}`, + // }).toObject(), + // }) + return Effect.void + }), + ) + } + + // Notify plugins of current config + for (const hook of hooks) { + yield* Effect.tryPromise({ + try: () => Promise.resolve((hook as any).config?.(cfg)), + catch: (err) => { + log.error("plugin config hook failed", { error: err }) + }, + }).pipe(Effect.ignore) + } + + // Subscribe to bus events, fiber interrupted when scope closes + yield* bus.subscribeAll().pipe( + Stream.runForEach((input) => + Effect.sync(() => { + for (const hook of hooks) { + void hook["event"]?.({ event: input as any }) + } + }), + ), + Effect.forkScoped, + ) + + return { hooks } + }), + ) + + const trigger = Effect.fn("Plugin.trigger")(function* < + Name extends TriggerName, + Input = Parameters[Name]>[0], + Output = Parameters[Name]>[1], + >(name: Name, input: Input, output: Output) { + if (!name) return output + const s = yield* InstanceState.get(state) + for (const hook of s.hooks) { + const fn = hook[name] as any + if (!fn) continue + yield* Effect.promise(async () => fn(input, output)) + } + return output + }) + + const list = Effect.fn("Plugin.list")(function* () { + const s = yield* InstanceState.get(state) + return s.hooks + }) + + const init = Effect.fn("Plugin.init")(function* () { + yield* InstanceState.get(state) + }) + + return Service.of({ trigger, list, init }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Config.defaultLayer)) + +export * as Plugin from "." 
diff --git a/packages/opencode/src/plugin/plugin.ts b/packages/opencode/src/plugin/plugin.ts deleted file mode 100644 index d1fc60d993..0000000000 --- a/packages/opencode/src/plugin/plugin.ts +++ /dev/null @@ -1,287 +0,0 @@ -import type { - Hooks, - PluginInput, - Plugin as PluginInstance, - PluginModule, - WorkspaceAdaptor as PluginWorkspaceAdaptor, -} from "@opencode-ai/plugin" -import { Config } from "../config" -import { Bus } from "../bus" -import { Log } from "../util" -import { createOpencodeClient } from "@opencode-ai/sdk" -import { Flag } from "../flag/flag" -import { CodexAuthPlugin } from "./codex" -import { Session } from "../session" -import { NamedError } from "@opencode-ai/shared/util/error" -import { CopilotAuthPlugin } from "./github-copilot/copilot" -import { gitlabAuthPlugin as GitlabAuthPlugin } from "opencode-gitlab-auth" -import { PoeAuthPlugin } from "opencode-poe-auth" -import { CloudflareAIGatewayAuthPlugin, CloudflareWorkersAuthPlugin } from "./cloudflare" -import { Effect, Layer, Context, Stream } from "effect" -import { EffectBridge } from "@/effect" -import { InstanceState } from "@/effect" -import { errorMessage } from "@/util/error" -import { PluginLoader } from "./loader" -import { parsePluginSpecifier, readPluginId, readV1Plugin, resolvePluginId } from "./shared" -import { registerAdaptor } from "@/control-plane/adaptors" -import type { WorkspaceAdaptor } from "@/control-plane/types" - -const log = Log.create({ service: "plugin" }) - -type State = { - hooks: Hooks[] -} - -// Hook names that follow the (input, output) => Promise trigger pattern -type TriggerName = { - [K in keyof Hooks]-?: NonNullable extends (input: any, output: any) => Promise ? 
K : never -}[keyof Hooks] - -export interface Interface { - readonly trigger: < - Name extends TriggerName, - Input = Parameters[Name]>[0], - Output = Parameters[Name]>[1], - >( - name: Name, - input: Input, - output: Output, - ) => Effect.Effect - readonly list: () => Effect.Effect - readonly init: () => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Plugin") {} - -// Built-in plugins that are directly imported (not installed from npm) -const INTERNAL_PLUGINS: PluginInstance[] = [ - CodexAuthPlugin, - CopilotAuthPlugin, - GitlabAuthPlugin, - PoeAuthPlugin, - CloudflareWorkersAuthPlugin, - CloudflareAIGatewayAuthPlugin, -] - -function isServerPlugin(value: unknown): value is PluginInstance { - return typeof value === "function" -} - -function getServerPlugin(value: unknown) { - if (isServerPlugin(value)) return value - if (!value || typeof value !== "object" || !("server" in value)) return - if (!isServerPlugin(value.server)) return - return value.server -} - -function getLegacyPlugins(mod: Record) { - const seen = new Set() - const result: PluginInstance[] = [] - - for (const entry of Object.values(mod)) { - if (seen.has(entry)) continue - seen.add(entry) - const plugin = getServerPlugin(entry) - if (!plugin) throw new TypeError("Plugin export is not a function") - result.push(plugin) - } - - return result -} - -async function applyPlugin(load: PluginLoader.Loaded, input: PluginInput, hooks: Hooks[]) { - const plugin = readV1Plugin(load.mod, load.spec, "server", "detect") - if (plugin) { - await resolvePluginId(load.source, load.spec, load.target, readPluginId(plugin.id, load.spec), load.pkg) - hooks.push(await (plugin as PluginModule).server(input, load.options)) - return - } - - for (const server of getLegacyPlugins(load.mod)) { - hooks.push(await server(input, load.options)) - } -} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const config = yield* Config.Service - 
- const state = yield* InstanceState.make( - Effect.fn("Plugin.state")(function* (ctx) { - const hooks: Hooks[] = [] - const bridge = yield* EffectBridge.make() - - function publishPluginError(message: string) { - bridge.fork(bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })) - } - - const { Server } = yield* Effect.promise(() => import("../server/server")) - - const client = createOpencodeClient({ - baseUrl: "http://localhost:4096", - directory: ctx.directory, - headers: Flag.OPENCODE_SERVER_PASSWORD - ? { - Authorization: `Basic ${Buffer.from(`${Flag.OPENCODE_SERVER_USERNAME ?? "opencode"}:${Flag.OPENCODE_SERVER_PASSWORD}`).toString("base64")}`, - } - : undefined, - fetch: async (...args) => (await Server.Default()).app.fetch(...args), - }) - const cfg = yield* config.get() - const input: PluginInput = { - client, - project: ctx.project, - worktree: ctx.worktree, - directory: ctx.directory, - experimental_workspace: { - register(type: string, adaptor: PluginWorkspaceAdaptor) { - registerAdaptor(ctx.project.id, type, adaptor as WorkspaceAdaptor) - }, - }, - get serverUrl(): URL { - return Server.url ?? new URL("http://localhost:4096") - }, - // @ts-expect-error - $: typeof Bun === "undefined" ? undefined : Bun.$, - } - - for (const plugin of INTERNAL_PLUGINS) { - log.info("loading internal plugin", { name: plugin.name }) - const init = yield* Effect.tryPromise({ - try: () => plugin(input), - catch: (err) => { - log.error("failed to load internal plugin", { name: plugin.name, error: err }) - }, - }).pipe(Effect.option) - if (init._tag === "Some") hooks.push(init.value) - } - - const plugins = Flag.OPENCODE_PURE ? [] : (cfg.plugin_origins ?? 
[]) - if (Flag.OPENCODE_PURE && cfg.plugin_origins?.length) { - log.info("skipping external plugins in pure mode", { count: cfg.plugin_origins.length }) - } - if (plugins.length) yield* config.waitForDependencies() - - const loaded = yield* Effect.promise(() => - PluginLoader.loadExternal({ - items: plugins, - kind: "server", - report: { - start(candidate) { - log.info("loading plugin", { path: candidate.plan.spec }) - }, - missing(candidate, _retry, message) { - log.warn("plugin has no server entrypoint", { path: candidate.plan.spec, message }) - }, - error(candidate, _retry, stage, error, resolved) { - const spec = candidate.plan.spec - const cause = error instanceof Error ? (error.cause ?? error) : error - const message = stage === "load" ? errorMessage(error) : errorMessage(cause) - - if (stage === "install") { - const parsed = parsePluginSpecifier(spec) - log.error("failed to install plugin", { pkg: parsed.pkg, version: parsed.version, error: message }) - publishPluginError(`Failed to install plugin ${parsed.pkg}@${parsed.version}: ${message}`) - return - } - - if (stage === "compatibility") { - log.warn("plugin incompatible", { path: spec, error: message }) - publishPluginError(`Plugin ${spec} skipped: ${message}`) - return - } - - if (stage === "entry") { - log.error("failed to resolve plugin server entry", { path: spec, error: message }) - publishPluginError(`Failed to load plugin ${spec}: ${message}`) - return - } - - log.error("failed to load plugin", { path: spec, target: resolved?.entry, error: message }) - publishPluginError(`Failed to load plugin ${spec}: ${message}`) - }, - }, - }), - ) - for (const load of loaded) { - if (!load) continue - - // Keep plugin execution sequential so hook registration and execution - // order remains deterministic across plugin runs. 
- yield* Effect.tryPromise({ - try: () => applyPlugin(load, input, hooks), - catch: (err) => { - const message = errorMessage(err) - log.error("failed to load plugin", { path: load.spec, error: message }) - return message - }, - }).pipe( - Effect.catch(() => { - // TODO: make proper events for this - // bus.publish(Session.Event.Error, { - // error: new NamedError.Unknown({ - // message: `Failed to load plugin ${load.spec}: ${message}`, - // }).toObject(), - // }) - return Effect.void - }), - ) - } - - // Notify plugins of current config - for (const hook of hooks) { - yield* Effect.tryPromise({ - try: () => Promise.resolve((hook as any).config?.(cfg)), - catch: (err) => { - log.error("plugin config hook failed", { error: err }) - }, - }).pipe(Effect.ignore) - } - - // Subscribe to bus events, fiber interrupted when scope closes - yield* bus.subscribeAll().pipe( - Stream.runForEach((input) => - Effect.sync(() => { - for (const hook of hooks) { - void hook["event"]?.({ event: input as any }) - } - }), - ), - Effect.forkScoped, - ) - - return { hooks } - }), - ) - - const trigger = Effect.fn("Plugin.trigger")(function* < - Name extends TriggerName, - Input = Parameters[Name]>[0], - Output = Parameters[Name]>[1], - >(name: Name, input: Input, output: Output) { - if (!name) return output - const s = yield* InstanceState.get(state) - for (const hook of s.hooks) { - const fn = hook[name] as any - if (!fn) continue - yield* Effect.promise(async () => fn(input, output)) - } - return output - }) - - const list = Effect.fn("Plugin.list")(function* () { - const s = yield* InstanceState.get(state) - return s.hooks - }) - - const init = Effect.fn("Plugin.init")(function* () { - yield* InstanceState.get(state) - }) - - return Service.of({ trigger, list, init }) - }), -) - -export const defaultLayer = layer.pipe(Layer.provide(Bus.layer), Layer.provide(Config.defaultLayer)) diff --git a/packages/opencode/test/plugin/auth-override.test.ts 
b/packages/opencode/test/plugin/auth-override.test.ts index b570d8b141..89d1641afd 100644 --- a/packages/opencode/test/plugin/auth-override.test.ts +++ b/packages/opencode/test/plugin/auth-override.test.ts @@ -63,7 +63,7 @@ describe("plugin.auth-override", () => { }, 30000) // Increased timeout for plugin installation }) -const file = path.join(import.meta.dir, "../../src/plugin/plugin.ts") +const file = path.join(import.meta.dir, "../../src/plugin/index.ts") describe("plugin.config-hook-error-isolation", () => { test("config hooks are individually error-isolated in the layer factory", async () => { From 610c036ef1e30d0209dbeb0a815c9b16e03ab1e3 Mon Sep 17 00:00:00 2001 From: thakrarsagar Date: Fri, 17 Apr 2026 02:14:58 +0530 Subject: [PATCH 053/201] fix(opencode): use low reasoning effort for GitHub Copilot gpt-5 models (#22824) Co-authored-by: opencode-agent[bot] Co-authored-by: rekram1-node --- packages/opencode/src/provider/transform.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index e527251b0f..492db40520 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -923,7 +923,7 @@ export function smallOptions(model: Provider.Model) { model.api.npm === "@ai-sdk/github-copilot" ) { if (model.api.id.includes("gpt-5")) { - if (model.api.id.includes("5.")) { + if (model.api.id.includes("5.") || model.api.id.includes("5-mini")) { return { store: false, reasoningEffort: "low" } } return { store: false, reasoningEffort: "minimal" } From cdfbb26c003a42d2fd1e2875dd6cce43e5d19678 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:55:57 -0400 Subject: [PATCH 054/201] refactor: collapse bus barrel into bus/index.ts (#22902) --- packages/opencode/src/bus/bus.ts | 191 ---------------------------- packages/opencode/src/bus/index.ts | 194 ++++++++++++++++++++++++++++- 2 files changed, 193 
insertions(+), 192 deletions(-) delete mode 100644 packages/opencode/src/bus/bus.ts diff --git a/packages/opencode/src/bus/bus.ts b/packages/opencode/src/bus/bus.ts deleted file mode 100644 index beac809925..0000000000 --- a/packages/opencode/src/bus/bus.ts +++ /dev/null @@ -1,191 +0,0 @@ -import z from "zod" -import { Effect, Exit, Layer, PubSub, Scope, Context, Stream } from "effect" -import { EffectBridge } from "@/effect" -import { Log } from "../util" -import { BusEvent } from "./bus-event" -import { GlobalBus } from "./global" -import { InstanceState } from "@/effect" -import { makeRuntime } from "@/effect/run-service" - -const log = Log.create({ service: "bus" }) - -export const InstanceDisposed = BusEvent.define( - "server.instance.disposed", - z.object({ - directory: z.string(), - }), -) - -type Payload = { - type: D["type"] - properties: z.infer -} - -type State = { - wildcard: PubSub.PubSub - typed: Map> -} - -export interface Interface { - readonly publish: ( - def: D, - properties: z.output, - ) => Effect.Effect - readonly subscribe: (def: D) => Stream.Stream> - readonly subscribeAll: () => Stream.Stream - readonly subscribeCallback: ( - def: D, - callback: (event: Payload) => unknown, - ) => Effect.Effect<() => void> - readonly subscribeAllCallback: (callback: (event: any) => unknown) => Effect.Effect<() => void> -} - -export class Service extends Context.Service()("@opencode/Bus") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const state = yield* InstanceState.make( - Effect.fn("Bus.state")(function* (ctx) { - const wildcard = yield* PubSub.unbounded() - const typed = new Map>() - - yield* Effect.addFinalizer(() => - Effect.gen(function* () { - // Publish InstanceDisposed before shutting down so subscribers see it - yield* PubSub.publish(wildcard, { - type: InstanceDisposed.type, - properties: { directory: ctx.directory }, - }) - yield* PubSub.shutdown(wildcard) - for (const ps of typed.values()) { - yield* 
PubSub.shutdown(ps) - } - }), - ) - - return { wildcard, typed } - }), - ) - - function getOrCreate(state: State, def: D) { - return Effect.gen(function* () { - let ps = state.typed.get(def.type) - if (!ps) { - ps = yield* PubSub.unbounded() - state.typed.set(def.type, ps) - } - return ps as unknown as PubSub.PubSub> - }) - } - - function publish(def: D, properties: z.output) { - return Effect.gen(function* () { - const s = yield* InstanceState.get(state) - const payload: Payload = { type: def.type, properties } - log.info("publishing", { type: def.type }) - - const ps = s.typed.get(def.type) - if (ps) yield* PubSub.publish(ps, payload) - yield* PubSub.publish(s.wildcard, payload) - - const dir = yield* InstanceState.directory - const context = yield* InstanceState.context - const workspace = yield* InstanceState.workspaceID - - GlobalBus.emit("event", { - directory: dir, - project: context.project.id, - workspace, - payload, - }) - }) - } - - function subscribe(def: D): Stream.Stream> { - log.info("subscribing", { type: def.type }) - return Stream.unwrap( - Effect.gen(function* () { - const s = yield* InstanceState.get(state) - const ps = yield* getOrCreate(s, def) - return Stream.fromPubSub(ps) - }), - ).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: def.type })))) - } - - function subscribeAll(): Stream.Stream { - log.info("subscribing", { type: "*" }) - return Stream.unwrap( - Effect.gen(function* () { - const s = yield* InstanceState.get(state) - return Stream.fromPubSub(s.wildcard) - }), - ).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: "*" })))) - } - - function on(pubsub: PubSub.PubSub, type: string, callback: (event: T) => unknown) { - return Effect.gen(function* () { - log.info("subscribing", { type }) - const bridge = yield* EffectBridge.make() - const scope = yield* Scope.make() - const subscription = yield* Scope.provide(scope)(PubSub.subscribe(pubsub)) - - yield* Scope.provide(scope)( - 
Stream.fromSubscription(subscription).pipe( - Stream.runForEach((msg) => - Effect.tryPromise({ - try: () => Promise.resolve().then(() => callback(msg)), - catch: (cause) => { - log.error("subscriber failed", { type, cause }) - }, - }).pipe(Effect.ignore), - ), - Effect.forkScoped, - ), - ) - - return () => { - log.info("unsubscribing", { type }) - bridge.fork(Scope.close(scope, Exit.void)) - } - }) - } - - const subscribeCallback = Effect.fn("Bus.subscribeCallback")(function* ( - def: D, - callback: (event: Payload) => unknown, - ) { - const s = yield* InstanceState.get(state) - const ps = yield* getOrCreate(s, def) - return yield* on(ps, def.type, callback) - }) - - const subscribeAllCallback = Effect.fn("Bus.subscribeAllCallback")(function* (callback: (event: any) => unknown) { - const s = yield* InstanceState.get(state) - return yield* on(s.wildcard, "*", callback) - }) - - return Service.of({ publish, subscribe, subscribeAll, subscribeCallback, subscribeAllCallback }) - }), -) - -export const defaultLayer = layer - -const { runPromise, runSync } = makeRuntime(Service, layer) - -// runSync is safe here because the subscribe chain (InstanceState.get, PubSub.subscribe, -// Scope.make, Effect.forkScoped) is entirely synchronous. If any step becomes async, this will throw. 
-export async function publish(def: D, properties: z.output) { - return runPromise((svc) => svc.publish(def, properties)) -} - -export function subscribe( - def: D, - callback: (event: { type: D["type"]; properties: z.infer }) => unknown, -) { - return runSync((svc) => svc.subscribeCallback(def, callback)) -} - -export function subscribeAll(callback: (event: any) => unknown) { - return runSync((svc) => svc.subscribeAllCallback(callback)) -} diff --git a/packages/opencode/src/bus/index.ts b/packages/opencode/src/bus/index.ts index 3c21d7c7d1..8a9579b599 100644 --- a/packages/opencode/src/bus/index.ts +++ b/packages/opencode/src/bus/index.ts @@ -1 +1,193 @@ -export * as Bus from "./bus" +import z from "zod" +import { Effect, Exit, Layer, PubSub, Scope, Context, Stream } from "effect" +import { EffectBridge } from "@/effect" +import { Log } from "../util" +import { BusEvent } from "./bus-event" +import { GlobalBus } from "./global" +import { InstanceState } from "@/effect" +import { makeRuntime } from "@/effect/run-service" + +const log = Log.create({ service: "bus" }) + +export const InstanceDisposed = BusEvent.define( + "server.instance.disposed", + z.object({ + directory: z.string(), + }), +) + +type Payload = { + type: D["type"] + properties: z.infer +} + +type State = { + wildcard: PubSub.PubSub + typed: Map> +} + +export interface Interface { + readonly publish: ( + def: D, + properties: z.output, + ) => Effect.Effect + readonly subscribe: (def: D) => Stream.Stream> + readonly subscribeAll: () => Stream.Stream + readonly subscribeCallback: ( + def: D, + callback: (event: Payload) => unknown, + ) => Effect.Effect<() => void> + readonly subscribeAllCallback: (callback: (event: any) => unknown) => Effect.Effect<() => void> +} + +export class Service extends Context.Service()("@opencode/Bus") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const state = yield* InstanceState.make( + Effect.fn("Bus.state")(function* (ctx) { + const 
wildcard = yield* PubSub.unbounded() + const typed = new Map>() + + yield* Effect.addFinalizer(() => + Effect.gen(function* () { + // Publish InstanceDisposed before shutting down so subscribers see it + yield* PubSub.publish(wildcard, { + type: InstanceDisposed.type, + properties: { directory: ctx.directory }, + }) + yield* PubSub.shutdown(wildcard) + for (const ps of typed.values()) { + yield* PubSub.shutdown(ps) + } + }), + ) + + return { wildcard, typed } + }), + ) + + function getOrCreate(state: State, def: D) { + return Effect.gen(function* () { + let ps = state.typed.get(def.type) + if (!ps) { + ps = yield* PubSub.unbounded() + state.typed.set(def.type, ps) + } + return ps as unknown as PubSub.PubSub> + }) + } + + function publish(def: D, properties: z.output) { + return Effect.gen(function* () { + const s = yield* InstanceState.get(state) + const payload: Payload = { type: def.type, properties } + log.info("publishing", { type: def.type }) + + const ps = s.typed.get(def.type) + if (ps) yield* PubSub.publish(ps, payload) + yield* PubSub.publish(s.wildcard, payload) + + const dir = yield* InstanceState.directory + const context = yield* InstanceState.context + const workspace = yield* InstanceState.workspaceID + + GlobalBus.emit("event", { + directory: dir, + project: context.project.id, + workspace, + payload, + }) + }) + } + + function subscribe(def: D): Stream.Stream> { + log.info("subscribing", { type: def.type }) + return Stream.unwrap( + Effect.gen(function* () { + const s = yield* InstanceState.get(state) + const ps = yield* getOrCreate(s, def) + return Stream.fromPubSub(ps) + }), + ).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: def.type })))) + } + + function subscribeAll(): Stream.Stream { + log.info("subscribing", { type: "*" }) + return Stream.unwrap( + Effect.gen(function* () { + const s = yield* InstanceState.get(state) + return Stream.fromPubSub(s.wildcard) + }), + ).pipe(Stream.ensuring(Effect.sync(() => 
log.info("unsubscribing", { type: "*" })))) + } + + function on(pubsub: PubSub.PubSub, type: string, callback: (event: T) => unknown) { + return Effect.gen(function* () { + log.info("subscribing", { type }) + const bridge = yield* EffectBridge.make() + const scope = yield* Scope.make() + const subscription = yield* Scope.provide(scope)(PubSub.subscribe(pubsub)) + + yield* Scope.provide(scope)( + Stream.fromSubscription(subscription).pipe( + Stream.runForEach((msg) => + Effect.tryPromise({ + try: () => Promise.resolve().then(() => callback(msg)), + catch: (cause) => { + log.error("subscriber failed", { type, cause }) + }, + }).pipe(Effect.ignore), + ), + Effect.forkScoped, + ), + ) + + return () => { + log.info("unsubscribing", { type }) + bridge.fork(Scope.close(scope, Exit.void)) + } + }) + } + + const subscribeCallback = Effect.fn("Bus.subscribeCallback")(function* ( + def: D, + callback: (event: Payload) => unknown, + ) { + const s = yield* InstanceState.get(state) + const ps = yield* getOrCreate(s, def) + return yield* on(ps, def.type, callback) + }) + + const subscribeAllCallback = Effect.fn("Bus.subscribeAllCallback")(function* (callback: (event: any) => unknown) { + const s = yield* InstanceState.get(state) + return yield* on(s.wildcard, "*", callback) + }) + + return Service.of({ publish, subscribe, subscribeAll, subscribeCallback, subscribeAllCallback }) + }), +) + +export const defaultLayer = layer + +const { runPromise, runSync } = makeRuntime(Service, layer) + +// runSync is safe here because the subscribe chain (InstanceState.get, PubSub.subscribe, +// Scope.make, Effect.forkScoped) is entirely synchronous. If any step becomes async, this will throw. 
+export async function publish(def: D, properties: z.output) { + return runPromise((svc) => svc.publish(def, properties)) +} + +export function subscribe( + def: D, + callback: (event: { type: D["type"]; properties: z.infer }) => unknown, +) { + return runSync((svc) => svc.subscribeCallback(def, callback)) +} + +export function subscribeAll(callback: (event: any) => unknown) { + return runSync((svc) => svc.subscribeAllCallback(callback)) +} + +export * as Bus from "." From 1694c5bfe1248c4997bbf76849f6e297e31d710d Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:56:09 -0400 Subject: [PATCH 055/201] refactor: collapse file barrel into file/index.ts (#22901) --- packages/opencode/src/file/file.ts | 654 --------------------------- packages/opencode/src/file/index.ts | 657 +++++++++++++++++++++++++++- 2 files changed, 656 insertions(+), 655 deletions(-) delete mode 100644 packages/opencode/src/file/file.ts diff --git a/packages/opencode/src/file/file.ts b/packages/opencode/src/file/file.ts deleted file mode 100644 index ee8df2b0b9..0000000000 --- a/packages/opencode/src/file/file.ts +++ /dev/null @@ -1,654 +0,0 @@ -import { BusEvent } from "@/bus/bus-event" -import { InstanceState } from "@/effect" - -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Git } from "@/git" -import { Effect, Layer, Context, Scope } from "effect" -import * as Stream from "effect/Stream" -import { formatPatch, structuredPatch } from "diff" -import fuzzysort from "fuzzysort" -import ignore from "ignore" -import path from "path" -import z from "zod" -import { Global } from "../global" -import { Instance } from "../project/instance" -import { Log } from "../util" -import { Protected } from "./protected" -import { Ripgrep } from "./ripgrep" - -export const Info = z - .object({ - path: z.string(), - added: z.number().int(), - removed: z.number().int(), - status: z.enum(["added", "deleted", "modified"]), - }) - .meta({ - ref: "File", - }) - -export type 
Info = z.infer - -export const Node = z - .object({ - name: z.string(), - path: z.string(), - absolute: z.string(), - type: z.enum(["file", "directory"]), - ignored: z.boolean(), - }) - .meta({ - ref: "FileNode", - }) -export type Node = z.infer - -export const Content = z - .object({ - type: z.enum(["text", "binary"]), - content: z.string(), - diff: z.string().optional(), - patch: z - .object({ - oldFileName: z.string(), - newFileName: z.string(), - oldHeader: z.string().optional(), - newHeader: z.string().optional(), - hunks: z.array( - z.object({ - oldStart: z.number(), - oldLines: z.number(), - newStart: z.number(), - newLines: z.number(), - lines: z.array(z.string()), - }), - ), - index: z.string().optional(), - }) - .optional(), - encoding: z.literal("base64").optional(), - mimeType: z.string().optional(), - }) - .meta({ - ref: "FileContent", - }) -export type Content = z.infer - -export const Event = { - Edited: BusEvent.define( - "file.edited", - z.object({ - file: z.string(), - }), - ), -} - -const log = Log.create({ service: "file" }) - -const binary = new Set([ - "exe", - "dll", - "pdb", - "bin", - "so", - "dylib", - "o", - "a", - "lib", - "wav", - "mp3", - "ogg", - "oga", - "ogv", - "ogx", - "flac", - "aac", - "wma", - "m4a", - "weba", - "mp4", - "avi", - "mov", - "wmv", - "flv", - "webm", - "mkv", - "zip", - "tar", - "gz", - "gzip", - "bz", - "bz2", - "bzip", - "bzip2", - "7z", - "rar", - "xz", - "lz", - "z", - "pdf", - "doc", - "docx", - "ppt", - "pptx", - "xls", - "xlsx", - "dmg", - "iso", - "img", - "vmdk", - "ttf", - "otf", - "woff", - "woff2", - "eot", - "sqlite", - "db", - "mdb", - "apk", - "ipa", - "aab", - "xapk", - "app", - "pkg", - "deb", - "rpm", - "snap", - "flatpak", - "appimage", - "msi", - "msp", - "jar", - "war", - "ear", - "class", - "kotlin_module", - "dex", - "vdex", - "odex", - "oat", - "art", - "wasm", - "wat", - "bc", - "ll", - "s", - "ko", - "sys", - "drv", - "efi", - "rom", - "com", -]) - -const image = new Set([ - "png", - 
"jpg", - "jpeg", - "gif", - "bmp", - "webp", - "ico", - "tif", - "tiff", - "svg", - "svgz", - "avif", - "apng", - "jxl", - "heic", - "heif", - "raw", - "cr2", - "nef", - "arw", - "dng", - "orf", - "raf", - "pef", - "x3f", -]) - -const text = new Set([ - "ts", - "tsx", - "mts", - "cts", - "mtsx", - "ctsx", - "js", - "jsx", - "mjs", - "cjs", - "sh", - "bash", - "zsh", - "fish", - "ps1", - "psm1", - "cmd", - "bat", - "json", - "jsonc", - "json5", - "yaml", - "yml", - "toml", - "md", - "mdx", - "txt", - "xml", - "html", - "htm", - "css", - "scss", - "sass", - "less", - "graphql", - "gql", - "sql", - "ini", - "cfg", - "conf", - "env", -]) - -const textName = new Set([ - "dockerfile", - "makefile", - ".gitignore", - ".gitattributes", - ".editorconfig", - ".npmrc", - ".nvmrc", - ".prettierrc", - ".eslintrc", -]) - -const mime: Record = { - png: "image/png", - jpg: "image/jpeg", - jpeg: "image/jpeg", - gif: "image/gif", - bmp: "image/bmp", - webp: "image/webp", - ico: "image/x-icon", - tif: "image/tiff", - tiff: "image/tiff", - svg: "image/svg+xml", - svgz: "image/svg+xml", - avif: "image/avif", - apng: "image/apng", - jxl: "image/jxl", - heic: "image/heic", - heif: "image/heif", -} - -type Entry = { files: string[]; dirs: string[] } - -const ext = (file: string) => path.extname(file).toLowerCase().slice(1) -const name = (file: string) => path.basename(file).toLowerCase() -const isImageByExtension = (file: string) => image.has(ext(file)) -const isTextByExtension = (file: string) => text.has(ext(file)) -const isTextByName = (file: string) => textName.has(name(file)) -const isBinaryByExtension = (file: string) => binary.has(ext(file)) -const isImage = (mimeType: string) => mimeType.startsWith("image/") -const getImageMimeType = (file: string) => mime[ext(file)] || "image/" + ext(file) - -function shouldEncode(mimeType: string) { - const type = mimeType.toLowerCase() - log.debug("shouldEncode", { type }) - if (!type) return false - if (type.startsWith("text/")) return false - 
if (type.includes("charset=")) return false - const top = type.split("/", 2)[0] - return ["image", "audio", "video", "font", "model", "multipart"].includes(top) -} - -const hidden = (item: string) => { - const normalized = item.replaceAll("\\", "/").replace(/\/+$/, "") - return normalized.split("/").some((part) => part.startsWith(".") && part.length > 1) -} - -const sortHiddenLast = (items: string[], prefer: boolean) => { - if (prefer) return items - const visible: string[] = [] - const hiddenItems: string[] = [] - for (const item of items) { - if (hidden(item)) hiddenItems.push(item) - else visible.push(item) - } - return [...visible, ...hiddenItems] -} - -interface State { - cache: Entry -} - -export interface Interface { - readonly init: () => Effect.Effect - readonly status: () => Effect.Effect - readonly read: (file: string) => Effect.Effect - readonly list: (dir?: string) => Effect.Effect - readonly search: (input: { - query: string - limit?: number - dirs?: boolean - type?: "file" | "directory" - }) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/File") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const appFs = yield* AppFileSystem.Service - const rg = yield* Ripgrep.Service - const git = yield* Git.Service - const scope = yield* Scope.Scope - - const state = yield* InstanceState.make( - Effect.fn("File.state")(() => - Effect.succeed({ - cache: { files: [], dirs: [] } as Entry, - }), - ), - ) - - const scan = Effect.fn("File.scan")(function* () { - if (Instance.directory === path.parse(Instance.directory).root) return - const isGlobalHome = Instance.directory === Global.Path.home && Instance.project.id === "global" - const next: Entry = { files: [], dirs: [] } - - if (isGlobalHome) { - const dirs = new Set() - const protectedNames = Protected.names() - const ignoreNested = new Set(["node_modules", "dist", "build", "target", "vendor"]) - const shouldIgnoreName = (name: string) => 
name.startsWith(".") || protectedNames.has(name) - const shouldIgnoreNested = (name: string) => name.startsWith(".") || ignoreNested.has(name) - const top = yield* appFs.readDirectoryEntries(Instance.directory).pipe(Effect.orElseSucceed(() => [])) - - for (const entry of top) { - if (entry.type !== "directory") continue - if (shouldIgnoreName(entry.name)) continue - dirs.add(entry.name + "/") - - const base = path.join(Instance.directory, entry.name) - const children = yield* appFs.readDirectoryEntries(base).pipe(Effect.orElseSucceed(() => [])) - for (const child of children) { - if (child.type !== "directory") continue - if (shouldIgnoreNested(child.name)) continue - dirs.add(entry.name + "/" + child.name + "/") - } - } - - next.dirs = Array.from(dirs).toSorted() - } else { - const files = yield* rg.files({ cwd: Instance.directory }).pipe( - Stream.runCollect, - Effect.map((chunk) => [...chunk]), - ) - const seen = new Set() - for (const file of files) { - next.files.push(file) - let current = file - while (true) { - const dir = path.dirname(current) - if (dir === ".") break - if (dir === current) break - current = dir - if (seen.has(dir)) continue - seen.add(dir) - next.dirs.push(dir + "/") - } - } - } - - const s = yield* InstanceState.get(state) - s.cache = next - }) - - let cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void))) - - const ensure = Effect.fn("File.ensure")(function* () { - yield* cachedScan - cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void))) - }) - - const gitText = Effect.fnUntraced(function* (args: string[]) { - return (yield* git.run(args, { cwd: Instance.directory })).text() - }) - - const init = Effect.fn("File.init")(function* () { - yield* ensure().pipe(Effect.forkIn(scope)) - }) - - const status = Effect.fn("File.status")(function* () { - if (Instance.project.vcs !== "git") return [] - - const diffOutput = yield* gitText([ - "-c", - "core.fsmonitor=false", - "-c", - 
"core.quotepath=false", - "diff", - "--numstat", - "HEAD", - ]) - - const changed: Info[] = [] - - if (diffOutput.trim()) { - for (const line of diffOutput.trim().split("\n")) { - const [added, removed, file] = line.split("\t") - changed.push({ - path: file, - added: added === "-" ? 0 : parseInt(added, 10), - removed: removed === "-" ? 0 : parseInt(removed, 10), - status: "modified", - }) - } - } - - const untrackedOutput = yield* gitText([ - "-c", - "core.fsmonitor=false", - "-c", - "core.quotepath=false", - "ls-files", - "--others", - "--exclude-standard", - ]) - - if (untrackedOutput.trim()) { - for (const file of untrackedOutput.trim().split("\n")) { - const content = yield* appFs - .readFileString(path.join(Instance.directory, file)) - .pipe(Effect.catch(() => Effect.succeed(undefined))) - if (content === undefined) continue - changed.push({ - path: file, - added: content.split("\n").length, - removed: 0, - status: "added", - }) - } - } - - const deletedOutput = yield* gitText([ - "-c", - "core.fsmonitor=false", - "-c", - "core.quotepath=false", - "diff", - "--name-only", - "--diff-filter=D", - "HEAD", - ]) - - if (deletedOutput.trim()) { - for (const file of deletedOutput.trim().split("\n")) { - changed.push({ - path: file, - added: 0, - removed: 0, - status: "deleted", - }) - } - } - - return changed.map((item) => { - const full = path.isAbsolute(item.path) ? 
item.path : path.join(Instance.directory, item.path) - return { - ...item, - path: path.relative(Instance.directory, full), - } - }) - }) - - const read: Interface["read"] = Effect.fn("File.read")(function* (file: string) { - using _ = log.time("read", { file }) - const full = path.join(Instance.directory, file) - - if (!Instance.containsPath(full)) throw new Error("Access denied: path escapes project directory") - - if (isImageByExtension(file)) { - const exists = yield* appFs.existsSafe(full) - if (exists) { - const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array()))) - return { - type: "text" as const, - content: Buffer.from(bytes).toString("base64"), - mimeType: getImageMimeType(file), - encoding: "base64" as const, - } - } - return { type: "text" as const, content: "" } - } - - const knownText = isTextByExtension(file) || isTextByName(file) - - if (isBinaryByExtension(file) && !knownText) return { type: "binary" as const, content: "" } - - const exists = yield* appFs.existsSafe(full) - if (!exists) return { type: "text" as const, content: "" } - - const mimeType = AppFileSystem.mimeType(full) - const encode = knownText ? 
false : shouldEncode(mimeType) - - if (encode && !isImage(mimeType)) return { type: "binary" as const, content: "", mimeType } - - if (encode) { - const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array()))) - return { - type: "text" as const, - content: Buffer.from(bytes).toString("base64"), - mimeType, - encoding: "base64" as const, - } - } - - const content = yield* appFs.readFileString(full).pipe( - Effect.map((s) => s.trim()), - Effect.catch(() => Effect.succeed("")), - ) - - if (Instance.project.vcs === "git") { - let diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--", file]) - if (!diff.trim()) { - diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--staged", "--", file]) - } - if (diff.trim()) { - const original = yield* git.show(Instance.directory, "HEAD", file) - const patch = structuredPatch(file, file, original, content, "old", "new", { - context: Infinity, - ignoreWhitespace: true, - }) - return { type: "text" as const, content, patch, diff: formatPatch(patch) } - } - return { type: "text" as const, content } - } - - return { type: "text" as const, content } - }) - - const list = Effect.fn("File.list")(function* (dir?: string) { - const exclude = [".git", ".DS_Store"] - let ignored = (_: string) => false - if (Instance.project.vcs === "git") { - const ig = ignore() - const gitignore = path.join(Instance.project.worktree, ".gitignore") - const gitignoreText = yield* appFs.readFileString(gitignore).pipe(Effect.catch(() => Effect.succeed(""))) - if (gitignoreText) ig.add(gitignoreText) - const ignoreFile = path.join(Instance.project.worktree, ".ignore") - const ignoreText = yield* appFs.readFileString(ignoreFile).pipe(Effect.catch(() => Effect.succeed(""))) - if (ignoreText) ig.add(ignoreText) - ignored = ig.ignores.bind(ig) - } - - const resolved = dir ? 
path.join(Instance.directory, dir) : Instance.directory - if (!Instance.containsPath(resolved)) throw new Error("Access denied: path escapes project directory") - - const entries = yield* appFs.readDirectoryEntries(resolved).pipe(Effect.orElseSucceed(() => [])) - - const nodes: Node[] = [] - for (const entry of entries) { - if (exclude.includes(entry.name)) continue - const absolute = path.join(resolved, entry.name) - const file = path.relative(Instance.directory, absolute) - const type = entry.type === "directory" ? "directory" : "file" - nodes.push({ - name: entry.name, - path: file, - absolute, - type, - ignored: ignored(type === "directory" ? file + "/" : file), - }) - } - return nodes.sort((a, b) => { - if (a.type !== b.type) return a.type === "directory" ? -1 : 1 - return a.name.localeCompare(b.name) - }) - }) - - const search = Effect.fn("File.search")(function* (input: { - query: string - limit?: number - dirs?: boolean - type?: "file" | "directory" - }) { - yield* ensure() - const { cache } = yield* InstanceState.get(state) - - const query = input.query.trim() - const limit = input.limit ?? 100 - const kind = input.type ?? (input.dirs === false ? "file" : "all") - log.info("search", { query, kind }) - - const preferHidden = query.startsWith(".") || query.includes("/.") - - if (!query) { - if (kind === "file") return cache.files.slice(0, limit) - return sortHiddenLast(cache.dirs.toSorted(), preferHidden).slice(0, limit) - } - - const items = kind === "file" ? cache.files : kind === "directory" ? cache.dirs : [...cache.files, ...cache.dirs] - - const searchLimit = kind === "directory" && !preferHidden ? limit * 20 : limit - const sorted = fuzzysort.go(query, items, { limit: searchLimit }).map((item) => item.target) - const output = kind === "directory" ? 
sortHiddenLast(sorted, preferHidden).slice(0, limit) : sorted - - log.info("search", { query, kind, results: output.length }) - return output - }) - - log.info("init") - return Service.of({ init, status, read, list, search }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(Ripgrep.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(Git.defaultLayer), -) diff --git a/packages/opencode/src/file/index.ts b/packages/opencode/src/file/index.ts index b65ac9d686..2f30b5400d 100644 --- a/packages/opencode/src/file/index.ts +++ b/packages/opencode/src/file/index.ts @@ -1 +1,656 @@ -export * as File from "./file" +import { BusEvent } from "@/bus/bus-event" +import { InstanceState } from "@/effect" + +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { Git } from "@/git" +import { Effect, Layer, Context, Scope } from "effect" +import * as Stream from "effect/Stream" +import { formatPatch, structuredPatch } from "diff" +import fuzzysort from "fuzzysort" +import ignore from "ignore" +import path from "path" +import z from "zod" +import { Global } from "../global" +import { Instance } from "../project/instance" +import { Log } from "../util" +import { Protected } from "./protected" +import { Ripgrep } from "./ripgrep" + +export const Info = z + .object({ + path: z.string(), + added: z.number().int(), + removed: z.number().int(), + status: z.enum(["added", "deleted", "modified"]), + }) + .meta({ + ref: "File", + }) + +export type Info = z.infer + +export const Node = z + .object({ + name: z.string(), + path: z.string(), + absolute: z.string(), + type: z.enum(["file", "directory"]), + ignored: z.boolean(), + }) + .meta({ + ref: "FileNode", + }) +export type Node = z.infer + +export const Content = z + .object({ + type: z.enum(["text", "binary"]), + content: z.string(), + diff: z.string().optional(), + patch: z + .object({ + oldFileName: z.string(), + newFileName: z.string(), + oldHeader: z.string().optional(), + 
newHeader: z.string().optional(), + hunks: z.array( + z.object({ + oldStart: z.number(), + oldLines: z.number(), + newStart: z.number(), + newLines: z.number(), + lines: z.array(z.string()), + }), + ), + index: z.string().optional(), + }) + .optional(), + encoding: z.literal("base64").optional(), + mimeType: z.string().optional(), + }) + .meta({ + ref: "FileContent", + }) +export type Content = z.infer + +export const Event = { + Edited: BusEvent.define( + "file.edited", + z.object({ + file: z.string(), + }), + ), +} + +const log = Log.create({ service: "file" }) + +const binary = new Set([ + "exe", + "dll", + "pdb", + "bin", + "so", + "dylib", + "o", + "a", + "lib", + "wav", + "mp3", + "ogg", + "oga", + "ogv", + "ogx", + "flac", + "aac", + "wma", + "m4a", + "weba", + "mp4", + "avi", + "mov", + "wmv", + "flv", + "webm", + "mkv", + "zip", + "tar", + "gz", + "gzip", + "bz", + "bz2", + "bzip", + "bzip2", + "7z", + "rar", + "xz", + "lz", + "z", + "pdf", + "doc", + "docx", + "ppt", + "pptx", + "xls", + "xlsx", + "dmg", + "iso", + "img", + "vmdk", + "ttf", + "otf", + "woff", + "woff2", + "eot", + "sqlite", + "db", + "mdb", + "apk", + "ipa", + "aab", + "xapk", + "app", + "pkg", + "deb", + "rpm", + "snap", + "flatpak", + "appimage", + "msi", + "msp", + "jar", + "war", + "ear", + "class", + "kotlin_module", + "dex", + "vdex", + "odex", + "oat", + "art", + "wasm", + "wat", + "bc", + "ll", + "s", + "ko", + "sys", + "drv", + "efi", + "rom", + "com", +]) + +const image = new Set([ + "png", + "jpg", + "jpeg", + "gif", + "bmp", + "webp", + "ico", + "tif", + "tiff", + "svg", + "svgz", + "avif", + "apng", + "jxl", + "heic", + "heif", + "raw", + "cr2", + "nef", + "arw", + "dng", + "orf", + "raf", + "pef", + "x3f", +]) + +const text = new Set([ + "ts", + "tsx", + "mts", + "cts", + "mtsx", + "ctsx", + "js", + "jsx", + "mjs", + "cjs", + "sh", + "bash", + "zsh", + "fish", + "ps1", + "psm1", + "cmd", + "bat", + "json", + "jsonc", + "json5", + "yaml", + "yml", + "toml", + "md", + "mdx", + 
"txt", + "xml", + "html", + "htm", + "css", + "scss", + "sass", + "less", + "graphql", + "gql", + "sql", + "ini", + "cfg", + "conf", + "env", +]) + +const textName = new Set([ + "dockerfile", + "makefile", + ".gitignore", + ".gitattributes", + ".editorconfig", + ".npmrc", + ".nvmrc", + ".prettierrc", + ".eslintrc", +]) + +const mime: Record = { + png: "image/png", + jpg: "image/jpeg", + jpeg: "image/jpeg", + gif: "image/gif", + bmp: "image/bmp", + webp: "image/webp", + ico: "image/x-icon", + tif: "image/tiff", + tiff: "image/tiff", + svg: "image/svg+xml", + svgz: "image/svg+xml", + avif: "image/avif", + apng: "image/apng", + jxl: "image/jxl", + heic: "image/heic", + heif: "image/heif", +} + +type Entry = { files: string[]; dirs: string[] } + +const ext = (file: string) => path.extname(file).toLowerCase().slice(1) +const name = (file: string) => path.basename(file).toLowerCase() +const isImageByExtension = (file: string) => image.has(ext(file)) +const isTextByExtension = (file: string) => text.has(ext(file)) +const isTextByName = (file: string) => textName.has(name(file)) +const isBinaryByExtension = (file: string) => binary.has(ext(file)) +const isImage = (mimeType: string) => mimeType.startsWith("image/") +const getImageMimeType = (file: string) => mime[ext(file)] || "image/" + ext(file) + +function shouldEncode(mimeType: string) { + const type = mimeType.toLowerCase() + log.debug("shouldEncode", { type }) + if (!type) return false + if (type.startsWith("text/")) return false + if (type.includes("charset=")) return false + const top = type.split("/", 2)[0] + return ["image", "audio", "video", "font", "model", "multipart"].includes(top) +} + +const hidden = (item: string) => { + const normalized = item.replaceAll("\\", "/").replace(/\/+$/, "") + return normalized.split("/").some((part) => part.startsWith(".") && part.length > 1) +} + +const sortHiddenLast = (items: string[], prefer: boolean) => { + if (prefer) return items + const visible: string[] = [] + const 
hiddenItems: string[] = [] + for (const item of items) { + if (hidden(item)) hiddenItems.push(item) + else visible.push(item) + } + return [...visible, ...hiddenItems] +} + +interface State { + cache: Entry +} + +export interface Interface { + readonly init: () => Effect.Effect + readonly status: () => Effect.Effect + readonly read: (file: string) => Effect.Effect + readonly list: (dir?: string) => Effect.Effect + readonly search: (input: { + query: string + limit?: number + dirs?: boolean + type?: "file" | "directory" + }) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/File") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const appFs = yield* AppFileSystem.Service + const rg = yield* Ripgrep.Service + const git = yield* Git.Service + const scope = yield* Scope.Scope + + const state = yield* InstanceState.make( + Effect.fn("File.state")(() => + Effect.succeed({ + cache: { files: [], dirs: [] } as Entry, + }), + ), + ) + + const scan = Effect.fn("File.scan")(function* () { + if (Instance.directory === path.parse(Instance.directory).root) return + const isGlobalHome = Instance.directory === Global.Path.home && Instance.project.id === "global" + const next: Entry = { files: [], dirs: [] } + + if (isGlobalHome) { + const dirs = new Set() + const protectedNames = Protected.names() + const ignoreNested = new Set(["node_modules", "dist", "build", "target", "vendor"]) + const shouldIgnoreName = (name: string) => name.startsWith(".") || protectedNames.has(name) + const shouldIgnoreNested = (name: string) => name.startsWith(".") || ignoreNested.has(name) + const top = yield* appFs.readDirectoryEntries(Instance.directory).pipe(Effect.orElseSucceed(() => [])) + + for (const entry of top) { + if (entry.type !== "directory") continue + if (shouldIgnoreName(entry.name)) continue + dirs.add(entry.name + "/") + + const base = path.join(Instance.directory, entry.name) + const children = yield* 
appFs.readDirectoryEntries(base).pipe(Effect.orElseSucceed(() => [])) + for (const child of children) { + if (child.type !== "directory") continue + if (shouldIgnoreNested(child.name)) continue + dirs.add(entry.name + "/" + child.name + "/") + } + } + + next.dirs = Array.from(dirs).toSorted() + } else { + const files = yield* rg.files({ cwd: Instance.directory }).pipe( + Stream.runCollect, + Effect.map((chunk) => [...chunk]), + ) + const seen = new Set() + for (const file of files) { + next.files.push(file) + let current = file + while (true) { + const dir = path.dirname(current) + if (dir === ".") break + if (dir === current) break + current = dir + if (seen.has(dir)) continue + seen.add(dir) + next.dirs.push(dir + "/") + } + } + } + + const s = yield* InstanceState.get(state) + s.cache = next + }) + + let cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void))) + + const ensure = Effect.fn("File.ensure")(function* () { + yield* cachedScan + cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void))) + }) + + const gitText = Effect.fnUntraced(function* (args: string[]) { + return (yield* git.run(args, { cwd: Instance.directory })).text() + }) + + const init = Effect.fn("File.init")(function* () { + yield* ensure().pipe(Effect.forkIn(scope)) + }) + + const status = Effect.fn("File.status")(function* () { + if (Instance.project.vcs !== "git") return [] + + const diffOutput = yield* gitText([ + "-c", + "core.fsmonitor=false", + "-c", + "core.quotepath=false", + "diff", + "--numstat", + "HEAD", + ]) + + const changed: Info[] = [] + + if (diffOutput.trim()) { + for (const line of diffOutput.trim().split("\n")) { + const [added, removed, file] = line.split("\t") + changed.push({ + path: file, + added: added === "-" ? 0 : parseInt(added, 10), + removed: removed === "-" ? 
0 : parseInt(removed, 10), + status: "modified", + }) + } + } + + const untrackedOutput = yield* gitText([ + "-c", + "core.fsmonitor=false", + "-c", + "core.quotepath=false", + "ls-files", + "--others", + "--exclude-standard", + ]) + + if (untrackedOutput.trim()) { + for (const file of untrackedOutput.trim().split("\n")) { + const content = yield* appFs + .readFileString(path.join(Instance.directory, file)) + .pipe(Effect.catch(() => Effect.succeed(undefined))) + if (content === undefined) continue + changed.push({ + path: file, + added: content.split("\n").length, + removed: 0, + status: "added", + }) + } + } + + const deletedOutput = yield* gitText([ + "-c", + "core.fsmonitor=false", + "-c", + "core.quotepath=false", + "diff", + "--name-only", + "--diff-filter=D", + "HEAD", + ]) + + if (deletedOutput.trim()) { + for (const file of deletedOutput.trim().split("\n")) { + changed.push({ + path: file, + added: 0, + removed: 0, + status: "deleted", + }) + } + } + + return changed.map((item) => { + const full = path.isAbsolute(item.path) ? 
item.path : path.join(Instance.directory, item.path) + return { + ...item, + path: path.relative(Instance.directory, full), + } + }) + }) + + const read: Interface["read"] = Effect.fn("File.read")(function* (file: string) { + using _ = log.time("read", { file }) + const full = path.join(Instance.directory, file) + + if (!Instance.containsPath(full)) throw new Error("Access denied: path escapes project directory") + + if (isImageByExtension(file)) { + const exists = yield* appFs.existsSafe(full) + if (exists) { + const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array()))) + return { + type: "text" as const, + content: Buffer.from(bytes).toString("base64"), + mimeType: getImageMimeType(file), + encoding: "base64" as const, + } + } + return { type: "text" as const, content: "" } + } + + const knownText = isTextByExtension(file) || isTextByName(file) + + if (isBinaryByExtension(file) && !knownText) return { type: "binary" as const, content: "" } + + const exists = yield* appFs.existsSafe(full) + if (!exists) return { type: "text" as const, content: "" } + + const mimeType = AppFileSystem.mimeType(full) + const encode = knownText ? 
false : shouldEncode(mimeType) + + if (encode && !isImage(mimeType)) return { type: "binary" as const, content: "", mimeType } + + if (encode) { + const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array()))) + return { + type: "text" as const, + content: Buffer.from(bytes).toString("base64"), + mimeType, + encoding: "base64" as const, + } + } + + const content = yield* appFs.readFileString(full).pipe( + Effect.map((s) => s.trim()), + Effect.catch(() => Effect.succeed("")), + ) + + if (Instance.project.vcs === "git") { + let diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--", file]) + if (!diff.trim()) { + diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--staged", "--", file]) + } + if (diff.trim()) { + const original = yield* git.show(Instance.directory, "HEAD", file) + const patch = structuredPatch(file, file, original, content, "old", "new", { + context: Infinity, + ignoreWhitespace: true, + }) + return { type: "text" as const, content, patch, diff: formatPatch(patch) } + } + return { type: "text" as const, content } + } + + return { type: "text" as const, content } + }) + + const list = Effect.fn("File.list")(function* (dir?: string) { + const exclude = [".git", ".DS_Store"] + let ignored = (_: string) => false + if (Instance.project.vcs === "git") { + const ig = ignore() + const gitignore = path.join(Instance.project.worktree, ".gitignore") + const gitignoreText = yield* appFs.readFileString(gitignore).pipe(Effect.catch(() => Effect.succeed(""))) + if (gitignoreText) ig.add(gitignoreText) + const ignoreFile = path.join(Instance.project.worktree, ".ignore") + const ignoreText = yield* appFs.readFileString(ignoreFile).pipe(Effect.catch(() => Effect.succeed(""))) + if (ignoreText) ig.add(ignoreText) + ignored = ig.ignores.bind(ig) + } + + const resolved = dir ? 
path.join(Instance.directory, dir) : Instance.directory + if (!Instance.containsPath(resolved)) throw new Error("Access denied: path escapes project directory") + + const entries = yield* appFs.readDirectoryEntries(resolved).pipe(Effect.orElseSucceed(() => [])) + + const nodes: Node[] = [] + for (const entry of entries) { + if (exclude.includes(entry.name)) continue + const absolute = path.join(resolved, entry.name) + const file = path.relative(Instance.directory, absolute) + const type = entry.type === "directory" ? "directory" : "file" + nodes.push({ + name: entry.name, + path: file, + absolute, + type, + ignored: ignored(type === "directory" ? file + "/" : file), + }) + } + return nodes.sort((a, b) => { + if (a.type !== b.type) return a.type === "directory" ? -1 : 1 + return a.name.localeCompare(b.name) + }) + }) + + const search = Effect.fn("File.search")(function* (input: { + query: string + limit?: number + dirs?: boolean + type?: "file" | "directory" + }) { + yield* ensure() + const { cache } = yield* InstanceState.get(state) + + const query = input.query.trim() + const limit = input.limit ?? 100 + const kind = input.type ?? (input.dirs === false ? "file" : "all") + log.info("search", { query, kind }) + + const preferHidden = query.startsWith(".") || query.includes("/.") + + if (!query) { + if (kind === "file") return cache.files.slice(0, limit) + return sortHiddenLast(cache.dirs.toSorted(), preferHidden).slice(0, limit) + } + + const items = kind === "file" ? cache.files : kind === "directory" ? cache.dirs : [...cache.files, ...cache.dirs] + + const searchLimit = kind === "directory" && !preferHidden ? limit * 20 : limit + const sorted = fuzzysort.go(query, items, { limit: searchLimit }).map((item) => item.target) + const output = kind === "directory" ? 
sortHiddenLast(sorted, preferHidden).slice(0, limit) : sorted + + log.info("search", { query, kind, results: output.length }) + return output + }) + + log.info("init") + return Service.of({ init, status, read, list, search }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Ripgrep.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(Git.defaultLayer), +) + +export * as File from "." From ae584332b36668ddfe4faa1b65157f5d276d34ad Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 15:56:29 -0500 Subject: [PATCH 056/201] fix: uncomment import (#22923) --- packages/opencode/src/cli/cmd/tui/plugin/runtime.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index ac1c0fc3b8..e1b2eca1dd 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -1,4 +1,4 @@ -// import "@opentui/solid/runtime-plugin-support" +import "@opentui/solid/runtime-plugin-support" import { type TuiDispose, type TuiPlugin, From 86c54c5acc7b3bf4d527cbd9fdfbd5dfc925b4d6 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 16:58:17 -0400 Subject: [PATCH 057/201] fix(tui): minor logging cleanup (#22924) --- .../src/cli/cmd/tui/component/dialog-workspace-create.tsx | 2 -- packages/opencode/src/cli/cmd/tui/context/sync.tsx | 1 - 2 files changed, 3 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx index ca504d864d..ad5cd45782 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx @@ -41,9 +41,7 @@ export async function openWorkspaceSession(input: { workspaceID: 
input.workspaceID, }) - console.log("opening!") while (true) { - console.log("creating") const result = await client.session.create({ workspace: input.workspaceID }).catch((err) => { log.error("workspace session create request failed", { workspaceID: input.workspaceID, diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 10b70d50ac..29511b8ebf 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -492,7 +492,6 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return last.time.completed ? "idle" : "working" }, async sync(sessionID: string) { - console.log("YO", sessionID, fullSyncedSessions.has(sessionID)) if (fullSyncedSessions.has(sessionID)) return const [session, messages, todo, diff] = await Promise.all([ sdk.client.session.get({ sessionID }, { throwOnError: true }), From 32548bcb4af7db393d91f315ec2222954326e766 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:59:17 -0400 Subject: [PATCH 058/201] refactor: unwrap ConfigPlugin namespace to flat exports + self-reexport (#22876) --- packages/opencode/src/config/plugin.ts | 148 ++++++++++++------------- 1 file changed, 74 insertions(+), 74 deletions(-) diff --git a/packages/opencode/src/config/plugin.ts b/packages/opencode/src/config/plugin.ts index 3a10c0a715..7d335bcc53 100644 --- a/packages/opencode/src/config/plugin.ts +++ b/packages/opencode/src/config/plugin.ts @@ -4,81 +4,81 @@ import { pathToFileURL } from "url" import { isPathPluginSpec, parsePluginSpecifier, resolvePathPluginTarget } from "@/plugin/shared" import path from "path" -export namespace ConfigPlugin { - const Options = z.record(z.string(), z.unknown()) - export type Options = z.infer +const Options = z.record(z.string(), z.unknown()) +export type Options = z.infer - // Spec is the user-config value: either just a plugin identifier, or the identifier plus 
inline options. - // It answers "what should we load?" but says nothing about where that value came from. - export const Spec = z.union([z.string(), z.tuple([z.string(), Options])]) - export type Spec = z.infer +// Spec is the user-config value: either just a plugin identifier, or the identifier plus inline options. +// It answers "what should we load?" but says nothing about where that value came from. +export const Spec = z.union([z.string(), z.tuple([z.string(), Options])]) +export type Spec = z.infer - export type Scope = "global" | "local" +export type Scope = "global" | "local" - // Origin keeps the original config provenance attached to a spec. - // After multiple config files are merged, callers still need to know which file declared the plugin - // and whether it should behave like a global or project-local plugin. - export type Origin = { - spec: Spec - source: string - scope: Scope - } - - export async function load(dir: string) { - const plugins: ConfigPlugin.Spec[] = [] - - for (const item of await Glob.scan("{plugin,plugins}/*.{ts,js}", { - cwd: dir, - absolute: true, - dot: true, - symlink: true, - })) { - plugins.push(pathToFileURL(item).href) - } - return plugins - } - - export function pluginSpecifier(plugin: Spec): string { - return Array.isArray(plugin) ? plugin[0] : plugin - } - - export function pluginOptions(plugin: Spec): Options | undefined { - return Array.isArray(plugin) ? plugin[1] : undefined - } - - // Path-like specs are resolved relative to the config file that declared them so merges later on do not - // accidentally reinterpret `./plugin.ts` relative to some other directory. 
- export async function resolvePluginSpec(plugin: Spec, configFilepath: string): Promise { - const spec = pluginSpecifier(plugin) - if (!isPathPluginSpec(spec)) return plugin - - const base = path.dirname(configFilepath) - const file = (() => { - if (spec.startsWith("file://")) return spec - if (path.isAbsolute(spec) || /^[A-Za-z]:[\\/]/.test(spec)) return pathToFileURL(spec).href - return pathToFileURL(path.resolve(base, spec)).href - })() - - const resolved = await resolvePathPluginTarget(file).catch(() => file) - - if (Array.isArray(plugin)) return [resolved, plugin[1]] - return resolved - } - - // Dedupe on the load identity (package name for npm specs, exact file URL for local specs), but keep the - // full Origin so downstream code still knows which config file won and where follow-up writes should go. - export function deduplicatePluginOrigins(plugins: Origin[]): Origin[] { - const seen = new Set() - const list: Origin[] = [] - - for (const plugin of plugins.toReversed()) { - const spec = pluginSpecifier(plugin.spec) - const name = spec.startsWith("file://") ? spec : parsePluginSpecifier(spec).pkg - if (seen.has(name)) continue - seen.add(name) - list.push(plugin) - } - - return list.toReversed() - } +// Origin keeps the original config provenance attached to a spec. +// After multiple config files are merged, callers still need to know which file declared the plugin +// and whether it should behave like a global or project-local plugin. +export type Origin = { + spec: Spec + source: string + scope: Scope } + +export async function load(dir: string) { + const plugins: Spec[] = [] + + for (const item of await Glob.scan("{plugin,plugins}/*.{ts,js}", { + cwd: dir, + absolute: true, + dot: true, + symlink: true, + })) { + plugins.push(pathToFileURL(item).href) + } + return plugins +} + +export function pluginSpecifier(plugin: Spec): string { + return Array.isArray(plugin) ? 
plugin[0] : plugin +} + +export function pluginOptions(plugin: Spec): Options | undefined { + return Array.isArray(plugin) ? plugin[1] : undefined +} + +// Path-like specs are resolved relative to the config file that declared them so merges later on do not +// accidentally reinterpret `./plugin.ts` relative to some other directory. +export async function resolvePluginSpec(plugin: Spec, configFilepath: string): Promise { + const spec = pluginSpecifier(plugin) + if (!isPathPluginSpec(spec)) return plugin + + const base = path.dirname(configFilepath) + const file = (() => { + if (spec.startsWith("file://")) return spec + if (path.isAbsolute(spec) || /^[A-Za-z]:[\\/]/.test(spec)) return pathToFileURL(spec).href + return pathToFileURL(path.resolve(base, spec)).href + })() + + const resolved = await resolvePathPluginTarget(file).catch(() => file) + + if (Array.isArray(plugin)) return [resolved, plugin[1]] + return resolved +} + +// Dedupe on the load identity (package name for npm specs, exact file URL for local specs), but keep the +// full Origin so downstream code still knows which config file won and where follow-up writes should go. +export function deduplicatePluginOrigins(plugins: Origin[]): Origin[] { + const seen = new Set() + const list: Origin[] = [] + + for (const plugin of plugins.toReversed()) { + const spec = pluginSpecifier(plugin.spec) + const name = spec.startsWith("file://") ? 
spec : parsePluginSpecifier(spec).pkg + if (seen.has(name)) continue + seen.add(name) + list.push(plugin) + } + + return list.toReversed() +} + +export * as ConfigPlugin from "./plugin" From 0e86466f990edd046867820b9fac97766d11db3f Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 16:59:30 -0400 Subject: [PATCH 059/201] refactor: unwrap Discovery namespace to flat exports + self-reexport (#22878) --- packages/opencode/src/skill/discovery.ts | 208 +++++++++++------------ 1 file changed, 104 insertions(+), 104 deletions(-) diff --git a/packages/opencode/src/skill/discovery.ts b/packages/opencode/src/skill/discovery.ts index eff64ed2bb..debd68dd3d 100644 --- a/packages/opencode/src/skill/discovery.ts +++ b/packages/opencode/src/skill/discovery.ts @@ -6,111 +6,111 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { Global } from "../global" import { Log } from "../util" -export namespace Discovery { - const skillConcurrency = 4 - const fileConcurrency = 8 +const skillConcurrency = 4 +const fileConcurrency = 8 - class IndexSkill extends Schema.Class("IndexSkill")({ - name: Schema.String, - files: Schema.Array(Schema.String), - }) {} +class IndexSkill extends Schema.Class("IndexSkill")({ + name: Schema.String, + files: Schema.Array(Schema.String), +}) {} - class Index extends Schema.Class("Index")({ - skills: Schema.Array(IndexSkill), - }) {} +class Index extends Schema.Class("Index")({ + skills: Schema.Array(IndexSkill), +}) {} - export interface Interface { - readonly pull: (url: string) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SkillDiscovery") {} - - export const layer: Layer.Layer = - Layer.effect( - Service, - Effect.gen(function* () { - const log = Log.create({ service: "skill-discovery" }) - const fs = yield* AppFileSystem.Service - const path = yield* Path.Path - const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient)) - const cache = 
path.join(Global.Path.cache, "skills") - - const download = Effect.fn("Discovery.download")(function* (url: string, dest: string) { - if (yield* fs.exists(dest).pipe(Effect.orDie)) return true - - return yield* HttpClientRequest.get(url).pipe( - http.execute, - Effect.flatMap((res) => res.arrayBuffer), - Effect.flatMap((body) => fs.writeWithDirs(dest, new Uint8Array(body))), - Effect.as(true), - Effect.catch((err) => - Effect.sync(() => { - log.error("failed to download", { url, err }) - return false - }), - ), - ) - }) - - const pull = Effect.fn("Discovery.pull")(function* (url: string) { - const base = url.endsWith("/") ? url : `${url}/` - const index = new URL("index.json", base).href - const host = base.slice(0, -1) - - log.info("fetching index", { url: index }) - - const data = yield* HttpClientRequest.get(index).pipe( - HttpClientRequest.acceptJson, - http.execute, - Effect.flatMap(HttpClientResponse.schemaBodyJson(Index)), - Effect.catch((err) => - Effect.sync(() => { - log.error("failed to fetch index", { url: index, err }) - return null - }), - ), - ) - - if (!data) return [] - - const list = data.skills.filter((skill) => { - if (!skill.files.includes("SKILL.md")) { - log.warn("skill entry missing SKILL.md", { url: index, skill: skill.name }) - return false - } - return true - }) - - const dirs = yield* Effect.forEach( - list, - (skill) => - Effect.gen(function* () { - const root = path.join(cache, skill.name) - - yield* Effect.forEach( - skill.files, - (file) => download(new URL(file, `${host}/${skill.name}/`).href, path.join(root, file)), - { - concurrency: fileConcurrency, - }, - ) - - const md = path.join(root, "SKILL.md") - return (yield* fs.exists(md).pipe(Effect.orDie)) ? 
root : null - }), - { concurrency: skillConcurrency }, - ) - - return dirs.filter((dir): dir is string => dir !== null) - }) - - return Service.of({ pull }) - }), - ) - - export const defaultLayer: Layer.Layer = layer.pipe( - Layer.provide(FetchHttpClient.layer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(NodePath.layer), - ) +export interface Interface { + readonly pull: (url: string) => Effect.Effect } + +export class Service extends Context.Service()("@opencode/SkillDiscovery") {} + +export const layer: Layer.Layer = + Layer.effect( + Service, + Effect.gen(function* () { + const log = Log.create({ service: "skill-discovery" }) + const fs = yield* AppFileSystem.Service + const path = yield* Path.Path + const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient)) + const cache = path.join(Global.Path.cache, "skills") + + const download = Effect.fn("Discovery.download")(function* (url: string, dest: string) { + if (yield* fs.exists(dest).pipe(Effect.orDie)) return true + + return yield* HttpClientRequest.get(url).pipe( + http.execute, + Effect.flatMap((res) => res.arrayBuffer), + Effect.flatMap((body) => fs.writeWithDirs(dest, new Uint8Array(body))), + Effect.as(true), + Effect.catch((err) => + Effect.sync(() => { + log.error("failed to download", { url, err }) + return false + }), + ), + ) + }) + + const pull = Effect.fn("Discovery.pull")(function* (url: string) { + const base = url.endsWith("/") ? 
url : `${url}/` + const index = new URL("index.json", base).href + const host = base.slice(0, -1) + + log.info("fetching index", { url: index }) + + const data = yield* HttpClientRequest.get(index).pipe( + HttpClientRequest.acceptJson, + http.execute, + Effect.flatMap(HttpClientResponse.schemaBodyJson(Index)), + Effect.catch((err) => + Effect.sync(() => { + log.error("failed to fetch index", { url: index, err }) + return null + }), + ), + ) + + if (!data) return [] + + const list = data.skills.filter((skill) => { + if (!skill.files.includes("SKILL.md")) { + log.warn("skill entry missing SKILL.md", { url: index, skill: skill.name }) + return false + } + return true + }) + + const dirs = yield* Effect.forEach( + list, + (skill) => + Effect.gen(function* () { + const root = path.join(cache, skill.name) + + yield* Effect.forEach( + skill.files, + (file) => download(new URL(file, `${host}/${skill.name}/`).href, path.join(root, file)), + { + concurrency: fileConcurrency, + }, + ) + + const md = path.join(root, "SKILL.md") + return (yield* fs.exists(md).pipe(Effect.orDie)) ? 
root : null + }), + { concurrency: skillConcurrency }, + ) + + return dirs.filter((dir): dir is string => dir !== null) + }) + + return Service.of({ pull }) + }), + ) + +export const defaultLayer: Layer.Layer = layer.pipe( + Layer.provide(FetchHttpClient.layer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(NodePath.layer), +) + +export * as Discovery from "./discovery" From 9f201d637034ff0a4c1e518481c76ba1a4f0eeb6 Mon Sep 17 00:00:00 2001 From: opencode Date: Thu, 16 Apr 2026 21:54:54 +0000 Subject: [PATCH 060/201] release: v1.4.7 --- bun.lock | 32 +++++++++++++------------- packages/app/package.json | 2 +- packages/console/app/package.json | 2 +- packages/console/core/package.json | 2 +- packages/console/function/package.json | 2 +- packages/console/mail/package.json | 2 +- packages/desktop-electron/package.json | 2 +- packages/desktop/package.json | 2 +- packages/enterprise/package.json | 2 +- packages/extensions/zed/extension.toml | 12 +++++----- packages/function/package.json | 2 +- packages/opencode/package.json | 2 +- packages/plugin/package.json | 2 +- packages/sdk/js/package.json | 2 +- packages/shared/package.json | 2 +- packages/slack/package.json | 2 +- packages/ui/package.json | 2 +- packages/web/package.json | 2 +- sdks/vscode/package.json | 2 +- 19 files changed, 39 insertions(+), 39 deletions(-) diff --git a/bun.lock b/bun.lock index e236f3f491..63232cb29e 100644 --- a/bun.lock +++ b/bun.lock @@ -29,7 +29,7 @@ }, "packages/app": { "name": "@opencode-ai/app", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -83,7 +83,7 @@ }, "packages/console/app": { "name": "@opencode-ai/console-app", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@cloudflare/vite-plugin": "1.15.2", "@ibm/plex": "6.4.1", @@ -117,7 +117,7 @@ }, "packages/console/core": { "name": "@opencode-ai/console-core", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { 
"@aws-sdk/client-sts": "3.782.0", "@jsx-email/render": "1.1.1", @@ -144,7 +144,7 @@ }, "packages/console/function": { "name": "@opencode-ai/console-function", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@ai-sdk/anthropic": "3.0.64", "@ai-sdk/openai": "3.0.48", @@ -168,7 +168,7 @@ }, "packages/console/mail": { "name": "@opencode-ai/console-mail", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", @@ -192,7 +192,7 @@ }, "packages/desktop": { "name": "@opencode-ai/desktop", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@opencode-ai/app": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -225,7 +225,7 @@ }, "packages/desktop-electron": { "name": "@opencode-ai/desktop-electron", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "effect": "catalog:", "electron-context-menu": "4.1.2", @@ -268,7 +268,7 @@ }, "packages/enterprise": { "name": "@opencode-ai/enterprise", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@opencode-ai/shared": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -297,7 +297,7 @@ }, "packages/function": { "name": "@opencode-ai/function", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@octokit/auth-app": "8.0.1", "@octokit/rest": "catalog:", @@ -313,7 +313,7 @@ }, "packages/opencode": { "name": "opencode", - "version": "1.4.6", + "version": "1.4.7", "bin": { "opencode": "./bin/opencode", }, @@ -458,7 +458,7 @@ }, "packages/plugin": { "name": "@opencode-ai/plugin", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@opencode-ai/sdk": "workspace:*", "effect": "catalog:", @@ -493,7 +493,7 @@ }, "packages/sdk/js": { "name": "@opencode-ai/sdk", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "cross-spawn": "catalog:", }, @@ -508,7 +508,7 @@ }, "packages/shared": { "name": "@opencode-ai/shared", - "version": "1.4.6", + "version": "1.4.7", "bin": { "opencode": "./bin/opencode", }, @@ 
-532,7 +532,7 @@ }, "packages/slack": { "name": "@opencode-ai/slack", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@opencode-ai/sdk": "workspace:*", "@slack/bolt": "^3.17.1", @@ -567,7 +567,7 @@ }, "packages/ui": { "name": "@opencode-ai/ui", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -616,7 +616,7 @@ }, "packages/web": { "name": "@opencode-ai/web", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@astrojs/cloudflare": "12.6.3", "@astrojs/markdown-remark": "6.3.1", diff --git a/packages/app/package.json b/packages/app/package.json index 483c71dc50..2941637d08 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/app", - "version": "1.4.6", + "version": "1.4.7", "description": "", "type": "module", "exports": { diff --git a/packages/console/app/package.json b/packages/console/app/package.json index 062114eebd..8783f3fd05 100644 --- a/packages/console/app/package.json +++ b/packages/console/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-app", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/console/core/package.json b/packages/console/core/package.json index 760174cf04..cdefd0e609 100644 --- a/packages/console/core/package.json +++ b/packages/console/core/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/console-core", - "version": "1.4.6", + "version": "1.4.7", "private": true, "type": "module", "license": "MIT", diff --git a/packages/console/function/package.json b/packages/console/function/package.json index 840f32742d..898c540bac 100644 --- a/packages/console/function/package.json +++ b/packages/console/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-function", - "version": "1.4.6", + "version": "1.4.7", "$schema": 
"https://json.schemastore.org/package.json", "private": true, "type": "module", diff --git a/packages/console/mail/package.json b/packages/console/mail/package.json index 5aa94224e7..46ff28b7d1 100644 --- a/packages/console/mail/package.json +++ b/packages/console/mail/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-mail", - "version": "1.4.6", + "version": "1.4.7", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", diff --git a/packages/desktop-electron/package.json b/packages/desktop-electron/package.json index cc0bad7d74..e1f69b5b20 100644 --- a/packages/desktop-electron/package.json +++ b/packages/desktop-electron/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop-electron", "private": true, - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "homepage": "https://opencode.ai", diff --git a/packages/desktop/package.json b/packages/desktop/package.json index 3fd02ad881..d8eea4ea36 100644 --- a/packages/desktop/package.json +++ b/packages/desktop/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop", "private": true, - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/enterprise/package.json b/packages/enterprise/package.json index 3c4a835f35..12a72e647f 100644 --- a/packages/enterprise/package.json +++ b/packages/enterprise/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/enterprise", - "version": "1.4.6", + "version": "1.4.7", "private": true, "type": "module", "license": "MIT", diff --git a/packages/extensions/zed/extension.toml b/packages/extensions/zed/extension.toml index d6be9c6b61..d164534cf7 100644 --- a/packages/extensions/zed/extension.toml +++ b/packages/extensions/zed/extension.toml @@ -1,7 +1,7 @@ id = "opencode" name = "OpenCode" description = "The open source coding agent." 
-version = "1.4.6" +version = "1.4.7" schema_version = 1 authors = ["Anomaly"] repository = "https://github.com/anomalyco/opencode" @@ -11,26 +11,26 @@ name = "OpenCode" icon = "./icons/opencode.svg" [agent_servers.opencode.targets.darwin-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-darwin-arm64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-arm64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.darwin-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-darwin-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-x64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-linux-arm64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-arm64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-linux-x64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-x64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.windows-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-windows-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-windows-x64.zip" cmd = "./opencode.exe" args = ["acp"] diff --git a/packages/function/package.json b/packages/function/package.json index 941f093fcc..36a9ddc321 100644 --- a/packages/function/package.json +++ b/packages/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/function", - "version": "1.4.6", + "version": "1.4.7", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": 
"module", diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 3a98d0eb93..1dabd91b8d 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.6", + "version": "1.4.7", "name": "opencode", "type": "module", "license": "MIT", diff --git a/packages/plugin/package.json b/packages/plugin/package.json index 76fe2e862f..6f9a0ea1dc 100644 --- a/packages/plugin/package.json +++ b/packages/plugin/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/plugin", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/sdk/js/package.json b/packages/sdk/js/package.json index 49c441600c..53a5893143 100644 --- a/packages/sdk/js/package.json +++ b/packages/sdk/js/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/sdk", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/shared/package.json b/packages/shared/package.json index 4d10a30a36..9dec6bdb6c 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.6", + "version": "1.4.7", "name": "@opencode-ai/shared", "type": "module", "license": "MIT", diff --git a/packages/slack/package.json b/packages/slack/package.json index 9239ac5727..a23500241e 100644 --- a/packages/slack/package.json +++ b/packages/slack/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/slack", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/ui/package.json b/packages/ui/package.json index 21974e3ec7..cd559041cc 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": 
"@opencode-ai/ui", - "version": "1.4.6", + "version": "1.4.7", "type": "module", "license": "MIT", "exports": { diff --git a/packages/web/package.json b/packages/web/package.json index 5ca11ea296..a53ef51932 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -2,7 +2,7 @@ "name": "@opencode-ai/web", "type": "module", "license": "MIT", - "version": "1.4.6", + "version": "1.4.7", "scripts": { "dev": "astro dev", "dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev", diff --git a/sdks/vscode/package.json b/sdks/vscode/package.json index 634583bd31..c499f679fe 100644 --- a/sdks/vscode/package.json +++ b/sdks/vscode/package.json @@ -2,7 +2,7 @@ "name": "opencode", "displayName": "opencode", "description": "opencode for VS Code", - "version": "1.4.6", + "version": "1.4.7", "publisher": "sst-dev", "repository": { "type": "git", From 9db40996cc5ce5877565d99e4199656345e8b80f Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 18:01:14 -0400 Subject: [PATCH 061/201] fix build script --- packages/opencode/src/cli/cmd/tui/context/sync.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 29511b8ebf..b5734e67d0 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -463,6 +463,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return store.status }, get ready() { + if (process.env.OPENCODE_FAST_BOOT) return true return store.status !== "loading" }, get path() { From dbe2ff52b25abf8cdeb878aa32614be22f28131f Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 18:40:22 -0400 Subject: [PATCH 062/201] fix tui otel profiling --- packages/opencode/src/cli/effect/runtime.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/effect/runtime.ts b/packages/opencode/src/cli/effect/runtime.ts 
index 4d85fa55b6..57b9f8ede9 100644 --- a/packages/opencode/src/cli/effect/runtime.ts +++ b/packages/opencode/src/cli/effect/runtime.ts @@ -6,7 +6,7 @@ export const memoMap = Layer.makeMemoMapUnsafe() export function makeRuntime(service: Context.Service, layer: Layer.Layer) { let rt: ManagedRuntime.ManagedRuntime | undefined const getRuntime = () => - (rt ??= ManagedRuntime.make(Layer.merge(layer, Observability.layer) as Layer.Layer, { memoMap })) + (rt ??= ManagedRuntime.make(Layer.provideMerge(layer, Observability.layer) as Layer.Layer, { memoMap })) return { runSync: (fn: (svc: S) => Effect.Effect) => getRuntime().runSync(service.use(fn)), From cb18f2ef407c49e7e91e03f0b7c4a72c2d4d05c1 Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Thu, 16 Apr 2026 17:45:35 -0500 Subject: [PATCH 063/201] fix: ensure azure sets prompt cache key by default (#22957) --- packages/opencode/src/provider/transform.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 492db40520..0ebd8bbf59 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -798,6 +798,7 @@ export function options(input: { if (input.model.api.npm === "@ai-sdk/azure") { result["store"] = true + result["promptCacheKey"] = input.sessionID } if (input.model.api.npm === "@openrouter/ai-sdk-provider") { From 23d48a7cf1af47870ef39def684eb8d569c66f4b Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:46:49 -0400 Subject: [PATCH 064/201] refactor: unwrap BusEvent namespace + self-reexport (#22962) --- packages/opencode/src/bus/bus-event.ts | 52 +++++++++++++------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/opencode/src/bus/bus-event.ts b/packages/opencode/src/bus/bus-event.ts index 369a40ed88..efaed94406 100644 --- a/packages/opencode/src/bus/bus-event.ts +++ 
b/packages/opencode/src/bus/bus-event.ts @@ -1,33 +1,33 @@ import z from "zod" import type { ZodType } from "zod" -export namespace BusEvent { - export type Definition = ReturnType +export type Definition = ReturnType - const registry = new Map() +const registry = new Map() - export function define(type: Type, properties: Properties) { - const result = { - type, - properties, - } - registry.set(type, result) - return result - } - - export function payloads() { - return registry - .entries() - .map(([type, def]) => { - return z - .object({ - type: z.literal(type), - properties: def.properties, - }) - .meta({ - ref: `Event.${def.type}`, - }) - }) - .toArray() +export function define(type: Type, properties: Properties) { + const result = { + type, + properties, } + registry.set(type, result) + return result } + +export function payloads() { + return registry + .entries() + .map(([type, def]) => { + return z + .object({ + type: z.literal(type), + properties: def.properties, + }) + .meta({ + ref: `Event.${def.type}`, + }) + }) + .toArray() +} + +export * as BusEvent from "./bus-event" From e2d161dfdd54fdd30f8e36e8cf4f46e261dab96e Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:48:24 -0400 Subject: [PATCH 065/201] refactor: unwrap Identifier namespace + self-reexport (#22963) --- packages/opencode/src/id/id.ts | 162 ++++++++++++++++----------------- 1 file changed, 81 insertions(+), 81 deletions(-) diff --git a/packages/opencode/src/id/id.ts b/packages/opencode/src/id/id.ts index 3d4cddf530..46c210fa5d 100644 --- a/packages/opencode/src/id/id.ts +++ b/packages/opencode/src/id/id.ts @@ -1,86 +1,86 @@ import z from "zod" import { randomBytes } from "crypto" -export namespace Identifier { - const prefixes = { - event: "evt", - session: "ses", - message: "msg", - permission: "per", - question: "que", - user: "usr", - part: "prt", - pty: "pty", - tool: "tool", - workspace: "wrk", - entry: "ent", - } as const +const prefixes = { + event: "evt", + session: 
"ses", + message: "msg", + permission: "per", + question: "que", + user: "usr", + part: "prt", + pty: "pty", + tool: "tool", + workspace: "wrk", + entry: "ent", +} as const - export function schema(prefix: keyof typeof prefixes) { - return z.string().startsWith(prefixes[prefix]) - } - - const LENGTH = 26 - - // State for monotonic ID generation - let lastTimestamp = 0 - let counter = 0 - - export function ascending(prefix: keyof typeof prefixes, given?: string) { - return generateID(prefix, "ascending", given) - } - - export function descending(prefix: keyof typeof prefixes, given?: string) { - return generateID(prefix, "descending", given) - } - - function generateID(prefix: keyof typeof prefixes, direction: "descending" | "ascending", given?: string): string { - if (!given) { - return create(prefixes[prefix], direction) - } - - if (!given.startsWith(prefixes[prefix])) { - throw new Error(`ID ${given} does not start with ${prefixes[prefix]}`) - } - return given - } - - function randomBase62(length: number): string { - const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" - let result = "" - const bytes = randomBytes(length) - for (let i = 0; i < length; i++) { - result += chars[bytes[i] % 62] - } - return result - } - - export function create(prefix: string, direction: "descending" | "ascending", timestamp?: number): string { - const currentTimestamp = timestamp ?? Date.now() - - if (currentTimestamp !== lastTimestamp) { - lastTimestamp = currentTimestamp - counter = 0 - } - counter++ - - let now = BigInt(currentTimestamp) * BigInt(0x1000) + BigInt(counter) - - now = direction === "descending" ? ~now : now - - const timeBytes = Buffer.alloc(6) - for (let i = 0; i < 6; i++) { - timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff)) - } - - return prefix + "_" + timeBytes.toString("hex") + randomBase62(LENGTH - 12) - } - - /** Extract timestamp from an ascending ID. Does not work with descending IDs. 
*/ - export function timestamp(id: string): number { - const prefix = id.split("_")[0] - const hex = id.slice(prefix.length + 1, prefix.length + 13) - const encoded = BigInt("0x" + hex) - return Number(encoded / BigInt(0x1000)) - } +export function schema(prefix: keyof typeof prefixes) { + return z.string().startsWith(prefixes[prefix]) } + +const LENGTH = 26 + +// State for monotonic ID generation +let lastTimestamp = 0 +let counter = 0 + +export function ascending(prefix: keyof typeof prefixes, given?: string) { + return generateID(prefix, "ascending", given) +} + +export function descending(prefix: keyof typeof prefixes, given?: string) { + return generateID(prefix, "descending", given) +} + +function generateID(prefix: keyof typeof prefixes, direction: "descending" | "ascending", given?: string): string { + if (!given) { + return create(prefixes[prefix], direction) + } + + if (!given.startsWith(prefixes[prefix])) { + throw new Error(`ID ${given} does not start with ${prefixes[prefix]}`) + } + return given +} + +function randomBase62(length: number): string { + const chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + let result = "" + const bytes = randomBytes(length) + for (let i = 0; i < length; i++) { + result += chars[bytes[i] % 62] + } + return result +} + +export function create(prefix: string, direction: "descending" | "ascending", timestamp?: number): string { + const currentTimestamp = timestamp ?? Date.now() + + if (currentTimestamp !== lastTimestamp) { + lastTimestamp = currentTimestamp + counter = 0 + } + counter++ + + let now = BigInt(currentTimestamp) * BigInt(0x1000) + BigInt(counter) + + now = direction === "descending" ? ~now : now + + const timeBytes = Buffer.alloc(6) + for (let i = 0; i < 6; i++) { + timeBytes[i] = Number((now >> BigInt(40 - 8 * i)) & BigInt(0xff)) + } + + return prefix + "_" + timeBytes.toString("hex") + randomBase62(LENGTH - 12) +} + +/** Extract timestamp from an ascending ID. 
Does not work with descending IDs. */ +export function timestamp(id: string): number { + const prefix = id.split("_")[0] + const hex = id.slice(prefix.length + 1, prefix.length + 13) + const encoded = BigInt("0x" + hex) + return Number(encoded / BigInt(0x1000)) +} + +export * as Identifier from "./id" From 30fc791480ebdabc9c62c70713e6cb52b44caff2 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:49:52 -0400 Subject: [PATCH 066/201] refactor: unwrap Ripgrep namespace + self-reexport (#22965) --- packages/opencode/src/file/ripgrep.ts | 1048 ++++++++++++------------- 1 file changed, 524 insertions(+), 524 deletions(-) diff --git a/packages/opencode/src/file/ripgrep.ts b/packages/opencode/src/file/ripgrep.ts index 9a78c5b7fb..ac450108e1 100644 --- a/packages/opencode/src/file/ripgrep.ts +++ b/packages/opencode/src/file/ripgrep.ts @@ -8,568 +8,568 @@ import { ripgrep } from "ripgrep" import { Filesystem } from "@/util" import { Log } from "@/util" -export namespace Ripgrep { - const log = Log.create({ service: "ripgrep" }) +const log = Log.create({ service: "ripgrep" }) - const Stats = z.object({ - elapsed: z.object({ - secs: z.number(), - nanos: z.number(), +const Stats = z.object({ + elapsed: z.object({ + secs: z.number(), + nanos: z.number(), + human: z.string(), + }), + searches: z.number(), + searches_with_match: z.number(), + bytes_searched: z.number(), + bytes_printed: z.number(), + matched_lines: z.number(), + matches: z.number(), +}) + +const Begin = z.object({ + type: z.literal("begin"), + data: z.object({ + path: z.object({ + text: z.string(), + }), + }), +}) + +export const Match = z.object({ + type: z.literal("match"), + data: z.object({ + path: z.object({ + text: z.string(), + }), + lines: z.object({ + text: z.string(), + }), + line_number: z.number(), + absolute_offset: z.number(), + submatches: z.array( + z.object({ + match: z.object({ + text: z.string(), + }), + start: z.number(), + end: z.number(), + }), + ), + }), +}) + +const End = 
z.object({ + type: z.literal("end"), + data: z.object({ + path: z.object({ + text: z.string(), + }), + binary_offset: z.number().nullable(), + stats: Stats, + }), +}) + +const Summary = z.object({ + type: z.literal("summary"), + data: z.object({ + elapsed_total: z.object({ human: z.string(), + nanos: z.number(), + secs: z.number(), }), - searches: z.number(), - searches_with_match: z.number(), - bytes_searched: z.number(), - bytes_printed: z.number(), - matched_lines: z.number(), - matches: z.number(), - }) + stats: Stats, + }), +}) - const Begin = z.object({ - type: z.literal("begin"), - data: z.object({ - path: z.object({ - text: z.string(), - }), - }), - }) +const Result = z.union([Begin, Match, End, Summary]) - export const Match = z.object({ - type: z.literal("match"), - data: z.object({ - path: z.object({ - text: z.string(), - }), - lines: z.object({ - text: z.string(), - }), - line_number: z.number(), - absolute_offset: z.number(), - submatches: z.array( - z.object({ - match: z.object({ - text: z.string(), - }), - start: z.number(), - end: z.number(), - }), - ), - }), - }) +export type Result = z.infer +export type Match = z.infer +export type Item = Match["data"] +export type Begin = z.infer +export type End = z.infer +export type Summary = z.infer +export type Row = Match["data"] - const End = z.object({ - type: z.literal("end"), - data: z.object({ - path: z.object({ - text: z.string(), - }), - binary_offset: z.number().nullable(), - stats: Stats, - }), - }) +export interface SearchResult { + items: Item[] + partial: boolean +} - const Summary = z.object({ - type: z.literal("summary"), - data: z.object({ - elapsed_total: z.object({ - human: z.string(), - nanos: z.number(), - secs: z.number(), - }), - stats: Stats, - }), - }) +export interface FilesInput { + cwd: string + glob?: string[] + hidden?: boolean + follow?: boolean + maxDepth?: number + signal?: AbortSignal +} - const Result = z.union([Begin, Match, End, Summary]) +export interface SearchInput { + 
cwd: string + pattern: string + glob?: string[] + limit?: number + follow?: boolean + file?: string[] + signal?: AbortSignal +} - export type Result = z.infer - export type Match = z.infer - export type Item = Match["data"] - export type Begin = z.infer - export type End = z.infer - export type Summary = z.infer - export type Row = Match["data"] +export interface TreeInput { + cwd: string + limit?: number + signal?: AbortSignal +} - export interface SearchResult { - items: Item[] - partial: boolean +export interface Interface { + readonly files: (input: FilesInput) => Stream.Stream + readonly tree: (input: TreeInput) => Effect.Effect + readonly search: (input: SearchInput) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Ripgrep") {} + +type Run = { kind: "files" | "search"; cwd: string; args: string[] } + +type WorkerResult = { + type: "result" + code: number + stdout: string + stderr: string +} + +type WorkerLine = { + type: "line" + line: string +} + +type WorkerDone = { + type: "done" + code: number + stderr: string +} + +type WorkerError = { + type: "error" + error: { + message: string + name?: string + stack?: string } +} - export interface FilesInput { - cwd: string - glob?: string[] - hidden?: boolean - follow?: boolean - maxDepth?: number - signal?: AbortSignal +function env() { + const env = Object.fromEntries( + Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined), + ) + delete env.RIPGREP_CONFIG_PATH + return env +} + +function text(input: unknown) { + if (typeof input === "string") return input + if (input instanceof ArrayBuffer) return Buffer.from(input).toString() + if (ArrayBuffer.isView(input)) return Buffer.from(input.buffer, input.byteOffset, input.byteLength).toString() + return String(input) +} + +function toError(input: unknown) { + if (input instanceof Error) return input + if (typeof input === "string") return new Error(input) + return new Error(String(input)) +} + 
+function abort(signal?: AbortSignal) { + const err = signal?.reason + if (err instanceof Error) return err + const out = new Error("Aborted") + out.name = "AbortError" + return out +} + +function error(stderr: string, code: number) { + const err = new Error(stderr.trim() || `ripgrep failed with code ${code}`) + err.name = "RipgrepError" + return err +} + +function clean(file: string) { + return path.normalize(file.replace(/^\.[\\/]/, "")) +} + +function row(data: Row): Row { + return { + ...data, + path: { + ...data.path, + text: clean(data.path.text), + }, } +} - export interface SearchInput { - cwd: string - pattern: string - glob?: string[] - limit?: number - follow?: boolean - file?: string[] - signal?: AbortSignal +function opts(cwd: string) { + return { + env: env(), + preopens: { ".": cwd }, } +} - export interface TreeInput { - cwd: string - limit?: number - signal?: AbortSignal - } - - export interface Interface { - readonly files: (input: FilesInput) => Stream.Stream - readonly tree: (input: TreeInput) => Effect.Effect - readonly search: (input: SearchInput) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/Ripgrep") {} - - type Run = { kind: "files" | "search"; cwd: string; args: string[] } - - type WorkerResult = { - type: "result" - code: number - stdout: string - stderr: string - } - - type WorkerLine = { - type: "line" - line: string - } - - type WorkerDone = { - type: "done" - code: number - stderr: string - } - - type WorkerError = { - type: "error" - error: { - message: string - name?: string - stack?: string - } - } - - function env() { - const env = Object.fromEntries( - Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined), - ) - delete env.RIPGREP_CONFIG_PATH - return env - } - - function text(input: unknown) { - if (typeof input === "string") return input - if (input instanceof ArrayBuffer) return Buffer.from(input).toString() - if (ArrayBuffer.isView(input)) return 
Buffer.from(input.buffer, input.byteOffset, input.byteLength).toString() - return String(input) - } - - function toError(input: unknown) { - if (input instanceof Error) return input - if (typeof input === "string") return new Error(input) - return new Error(String(input)) - } - - function abort(signal?: AbortSignal) { - const err = signal?.reason - if (err instanceof Error) return err - const out = new Error("Aborted") - out.name = "AbortError" - return out - } - - function error(stderr: string, code: number) { - const err = new Error(stderr.trim() || `ripgrep failed with code ${code}`) - err.name = "RipgrepError" - return err - } - - function clean(file: string) { - return path.normalize(file.replace(/^\.[\\/]/, "")) - } - - function row(data: Row): Row { - return { - ...data, - path: { - ...data.path, - text: clean(data.path.text), - }, - } - } - - function opts(cwd: string) { - return { - env: env(), - preopens: { ".": cwd }, - } - } - - function check(cwd: string) { - return Effect.tryPromise({ - try: () => fs.stat(cwd).catch(() => undefined), - catch: toError, - }).pipe( - Effect.flatMap((stat) => - stat?.isDirectory() - ? 
Effect.void - : Effect.fail( - Object.assign(new Error(`No such file or directory: '${cwd}'`), { - code: "ENOENT", - errno: -2, - path: cwd, - }), - ), - ), - ) - } - - function filesArgs(input: FilesInput) { - const args = ["--files", "--glob=!.git/*"] - if (input.follow) args.push("--follow") - if (input.hidden !== false) args.push("--hidden") - if (input.maxDepth !== undefined) args.push(`--max-depth=${input.maxDepth}`) - if (input.glob) { - for (const glob of input.glob) { - args.push(`--glob=${glob}`) - } - } - args.push(".") - return args - } - - function searchArgs(input: SearchInput) { - const args = ["--json", "--hidden", "--glob=!.git/*", "--no-messages"] - if (input.follow) args.push("--follow") - if (input.glob) { - for (const glob of input.glob) { - args.push(`--glob=${glob}`) - } - } - if (input.limit) args.push(`--max-count=${input.limit}`) - args.push("--", input.pattern, ...(input.file ?? ["."])) - return args - } - - function parse(stdout: string) { - return stdout - .trim() - .split(/\r?\n/) - .filter(Boolean) - .map((line) => Result.parse(JSON.parse(line))) - .flatMap((item) => (item.type === "match" ? [row(item.data)] : [])) - } - - declare const OPENCODE_RIPGREP_WORKER_PATH: string - - function target(): Effect.Effect { - if (typeof OPENCODE_RIPGREP_WORKER_PATH !== "undefined") { - return Effect.succeed(OPENCODE_RIPGREP_WORKER_PATH) - } - const js = new URL("./ripgrep.worker.js", import.meta.url) - return Effect.tryPromise({ - try: () => Filesystem.exists(fileURLToPath(js)), - catch: toError, - }).pipe(Effect.map((exists) => (exists ? 
js : new URL("./ripgrep.worker.ts", import.meta.url)))) - } - - function worker() { - return target().pipe(Effect.flatMap((file) => Effect.sync(() => new Worker(file, { env: env() })))) - } - - function drain(buf: string, chunk: unknown, push: (line: string) => void) { - const lines = (buf + text(chunk)).split(/\r?\n/) - buf = lines.pop() || "" - for (const line of lines) { - if (line) push(line) - } - return buf - } - - function fail(queue: Queue.Queue, err: Error) { - Queue.failCauseUnsafe(queue, Cause.fail(err)) - } - - function searchDirect(input: SearchInput) { - return Effect.tryPromise({ - try: () => - ripgrep(searchArgs(input), { - buffer: true, - ...opts(input.cwd), - }), - catch: toError, - }).pipe( - Effect.flatMap((ret) => { - const out = ret.stdout ?? "" - if (ret.code !== 0 && ret.code !== 1 && ret.code !== 2) { - return Effect.fail(error(ret.stderr ?? "", ret.code ?? 1)) - } - return Effect.sync(() => ({ - items: ret.code === 1 ? [] : parse(out), - partial: ret.code === 2, - })) - }), - ) - } - - function searchWorker(input: SearchInput) { - if (input.signal?.aborted) return Effect.fail(abort(input.signal)) - - return Effect.acquireUseRelease( - worker(), - (w) => - Effect.callback((resume, signal) => { - let open = true - const done = (effect: Effect.Effect) => { - if (!open) return - open = false - resume(effect) - } - const onabort = () => done(Effect.fail(abort(input.signal))) - - w.onerror = (evt) => { - done(Effect.fail(toError(evt.error ?? 
evt.message))) - } - w.onmessage = (evt: MessageEvent) => { - const msg = evt.data - if (msg.type === "error") { - done(Effect.fail(Object.assign(new Error(msg.error.message), msg.error))) - return - } - if (msg.code === 1) { - done(Effect.succeed({ items: [], partial: false })) - return - } - if (msg.code !== 0 && msg.code !== 1 && msg.code !== 2) { - done(Effect.fail(error(msg.stderr, msg.code))) - return - } - done( - Effect.sync(() => ({ - items: parse(msg.stdout), - partial: msg.code === 2, - })), - ) - } - - input.signal?.addEventListener("abort", onabort, { once: true }) - signal.addEventListener("abort", onabort, { once: true }) - w.postMessage({ - kind: "search", - cwd: input.cwd, - args: searchArgs(input), - } satisfies Run) - - return Effect.sync(() => { - input.signal?.removeEventListener("abort", onabort) - signal.removeEventListener("abort", onabort) - w.onerror = null - w.onmessage = null - }) - }), - (w) => Effect.sync(() => w.terminate()), - ) - } - - function filesDirect(input: FilesInput) { - return Stream.callback( - Effect.fnUntraced(function* (queue: Queue.Queue) { - let buf = "" - let err = "" - - const out = { - write(chunk: unknown) { - buf = drain(buf, chunk, (line) => { - Queue.offerUnsafe(queue, clean(line)) - }) - }, - } - - const stderr = { - write(chunk: unknown) { - err += text(chunk) - }, - } - - yield* Effect.forkScoped( - Effect.gen(function* () { - yield* check(input.cwd) - const ret = yield* Effect.tryPromise({ - try: () => - ripgrep(filesArgs(input), { - stdout: out, - stderr, - ...opts(input.cwd), - }), - catch: toError, - }) - if (buf) Queue.offerUnsafe(queue, clean(buf)) - if (ret.code === 0 || ret.code === 1) { - Queue.endUnsafe(queue) - return - } - fail(queue, error(err, ret.code ?? 
1)) - }).pipe( - Effect.catch((err) => - Effect.sync(() => { - fail(queue, err) - }), - ), +function check(cwd: string) { + return Effect.tryPromise({ + try: () => fs.stat(cwd).catch(() => undefined), + catch: toError, + }).pipe( + Effect.flatMap((stat) => + stat?.isDirectory() + ? Effect.void + : Effect.fail( + Object.assign(new Error(`No such file or directory: '${cwd}'`), { + code: "ENOENT", + errno: -2, + path: cwd, + }), ), - ) - }), - ) + ), + ) +} + +function filesArgs(input: FilesInput) { + const args = ["--files", "--glob=!.git/*"] + if (input.follow) args.push("--follow") + if (input.hidden !== false) args.push("--hidden") + if (input.maxDepth !== undefined) args.push(`--max-depth=${input.maxDepth}`) + if (input.glob) { + for (const glob of input.glob) { + args.push(`--glob=${glob}`) + } } + args.push(".") + return args +} - function filesWorker(input: FilesInput) { - return Stream.callback( - Effect.fnUntraced(function* (queue: Queue.Queue) { - if (input.signal?.aborted) { - fail(queue, abort(input.signal)) - return - } +function searchArgs(input: SearchInput) { + const args = ["--json", "--hidden", "--glob=!.git/*", "--no-messages"] + if (input.follow) args.push("--follow") + if (input.glob) { + for (const glob of input.glob) { + args.push(`--glob=${glob}`) + } + } + if (input.limit) args.push(`--max-count=${input.limit}`) + args.push("--", input.pattern, ...(input.file ?? ["."])) + return args +} - const w = yield* Effect.acquireRelease(worker(), (w) => Effect.sync(() => w.terminate())) +function parse(stdout: string) { + return stdout + .trim() + .split(/\r?\n/) + .filter(Boolean) + .map((line) => Result.parse(JSON.parse(line))) + .flatMap((item) => (item.type === "match" ? 
[row(item.data)] : [])) +} + +declare const OPENCODE_RIPGREP_WORKER_PATH: string + +function target(): Effect.Effect { + if (typeof OPENCODE_RIPGREP_WORKER_PATH !== "undefined") { + return Effect.succeed(OPENCODE_RIPGREP_WORKER_PATH) + } + const js = new URL("./ripgrep.worker.js", import.meta.url) + return Effect.tryPromise({ + try: () => Filesystem.exists(fileURLToPath(js)), + catch: toError, + }).pipe(Effect.map((exists) => (exists ? js : new URL("./ripgrep.worker.ts", import.meta.url)))) +} + +function worker() { + return target().pipe(Effect.flatMap((file) => Effect.sync(() => new Worker(file, { env: env() })))) +} + +function drain(buf: string, chunk: unknown, push: (line: string) => void) { + const lines = (buf + text(chunk)).split(/\r?\n/) + buf = lines.pop() || "" + for (const line of lines) { + if (line) push(line) + } + return buf +} + +function fail(queue: Queue.Queue, err: Error) { + Queue.failCauseUnsafe(queue, Cause.fail(err)) +} + +function searchDirect(input: SearchInput) { + return Effect.tryPromise({ + try: () => + ripgrep(searchArgs(input), { + buffer: true, + ...opts(input.cwd), + }), + catch: toError, + }).pipe( + Effect.flatMap((ret) => { + const out = ret.stdout ?? "" + if (ret.code !== 0 && ret.code !== 1 && ret.code !== 2) { + return Effect.fail(error(ret.stderr ?? "", ret.code ?? 1)) + } + return Effect.sync(() => ({ + items: ret.code === 1 ? 
[] : parse(out), + partial: ret.code === 2, + })) + }), + ) +} + +function searchWorker(input: SearchInput) { + if (input.signal?.aborted) return Effect.fail(abort(input.signal)) + + return Effect.acquireUseRelease( + worker(), + (w) => + Effect.callback((resume, signal) => { let open = true - const close = () => { - if (!open) return false + const done = (effect: Effect.Effect) => { + if (!open) return open = false - return true - } - const onabort = () => { - if (!close()) return - fail(queue, abort(input.signal)) + resume(effect) } + const onabort = () => done(Effect.fail(abort(input.signal))) w.onerror = (evt) => { - if (!close()) return - fail(queue, toError(evt.error ?? evt.message)) + done(Effect.fail(toError(evt.error ?? evt.message))) } - w.onmessage = (evt: MessageEvent) => { + w.onmessage = (evt: MessageEvent) => { const msg = evt.data - if (msg.type === "line") { - if (open) Queue.offerUnsafe(queue, msg.line) - return - } - if (!close()) return if (msg.type === "error") { - fail(queue, Object.assign(new Error(msg.error.message), msg.error)) + done(Effect.fail(Object.assign(new Error(msg.error.message), msg.error))) return } - if (msg.code === 0 || msg.code === 1) { - Queue.endUnsafe(queue) + if (msg.code === 1) { + done(Effect.succeed({ items: [], partial: false })) return } - fail(queue, error(msg.stderr, msg.code)) - } - - yield* Effect.acquireRelease( - Effect.sync(() => { - input.signal?.addEventListener("abort", onabort, { once: true }) - w.postMessage({ - kind: "files", - cwd: input.cwd, - args: filesArgs(input), - } satisfies Run) - }), - () => - Effect.sync(() => { - input.signal?.removeEventListener("abort", onabort) - w.onerror = null - w.onmessage = null - }), - ) - }), - ) - } - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const source = (input: FilesInput) => { - const useWorker = !!input.signal && typeof Worker !== "undefined" - if (!useWorker && input.signal) { - log.warn("worker unavailable, ripgrep abort 
disabled") - } - return useWorker ? filesWorker(input) : filesDirect(input) - } - - const files: Interface["files"] = (input) => source(input) - - const tree: Interface["tree"] = Effect.fn("Ripgrep.tree")(function* (input: TreeInput) { - log.info("tree", input) - const list = Array.from(yield* source({ cwd: input.cwd, signal: input.signal }).pipe(Stream.runCollect)) - - interface Node { - name: string - children: Map - } - - function child(node: Node, name: string) { - const item = node.children.get(name) - if (item) return item - const next = { name, children: new Map() } - node.children.set(name, next) - return next - } - - function count(node: Node): number { - return Array.from(node.children.values()).reduce((sum, child) => sum + 1 + count(child), 0) - } - - const root: Node = { name: "", children: new Map() } - for (const file of list) { - if (file.includes(".opencode")) continue - const parts = file.split(path.sep) - if (parts.length < 2) continue - let node = root - for (const part of parts.slice(0, -1)) { - node = child(node, part) + if (msg.code !== 0 && msg.code !== 1 && msg.code !== 2) { + done(Effect.fail(error(msg.stderr, msg.code))) + return } - } - - const total = count(root) - const limit = input.limit ?? 
total - const lines: string[] = [] - const queue: Array<{ node: Node; path: string }> = Array.from(root.children.values()) - .sort((a, b) => a.name.localeCompare(b.name)) - .map((node) => ({ node, path: node.name })) - - let used = 0 - for (let i = 0; i < queue.length && used < limit; i++) { - const item = queue[i] - lines.push(item.path) - used++ - queue.push( - ...Array.from(item.node.children.values()) - .sort((a, b) => a.name.localeCompare(b.name)) - .map((node) => ({ node, path: `${item.path}/${node.name}` })), + done( + Effect.sync(() => ({ + items: parse(msg.stdout), + partial: msg.code === 2, + })), ) } - if (total > used) lines.push(`[${total - used} truncated]`) - return lines.join("\n") - }) + input.signal?.addEventListener("abort", onabort, { once: true }) + signal.addEventListener("abort", onabort, { once: true }) + w.postMessage({ + kind: "search", + cwd: input.cwd, + args: searchArgs(input), + } satisfies Run) - const search: Interface["search"] = Effect.fn("Ripgrep.search")(function* (input: SearchInput) { - const useWorker = !!input.signal && typeof Worker !== "undefined" - if (!useWorker && input.signal) { - log.warn("worker unavailable, ripgrep abort disabled") - } - return yield* useWorker ? 
searchWorker(input) : searchDirect(input) - }) + return Effect.sync(() => { + input.signal?.removeEventListener("abort", onabort) + signal.removeEventListener("abort", onabort) + w.onerror = null + w.onmessage = null + }) + }), + (w) => Effect.sync(() => w.terminate()), + ) +} - return Service.of({ files, tree, search }) +function filesDirect(input: FilesInput) { + return Stream.callback( + Effect.fnUntraced(function* (queue: Queue.Queue) { + let buf = "" + let err = "" + + const out = { + write(chunk: unknown) { + buf = drain(buf, chunk, (line) => { + Queue.offerUnsafe(queue, clean(line)) + }) + }, + } + + const stderr = { + write(chunk: unknown) { + err += text(chunk) + }, + } + + yield* Effect.forkScoped( + Effect.gen(function* () { + yield* check(input.cwd) + const ret = yield* Effect.tryPromise({ + try: () => + ripgrep(filesArgs(input), { + stdout: out, + stderr, + ...opts(input.cwd), + }), + catch: toError, + }) + if (buf) Queue.offerUnsafe(queue, clean(buf)) + if (ret.code === 0 || ret.code === 1) { + Queue.endUnsafe(queue) + return + } + fail(queue, error(err, ret.code ?? 1)) + }).pipe( + Effect.catch((err) => + Effect.sync(() => { + fail(queue, err) + }), + ), + ), + ) }), ) - - export const defaultLayer = layer } + +function filesWorker(input: FilesInput) { + return Stream.callback( + Effect.fnUntraced(function* (queue: Queue.Queue) { + if (input.signal?.aborted) { + fail(queue, abort(input.signal)) + return + } + + const w = yield* Effect.acquireRelease(worker(), (w) => Effect.sync(() => w.terminate())) + let open = true + const close = () => { + if (!open) return false + open = false + return true + } + const onabort = () => { + if (!close()) return + fail(queue, abort(input.signal)) + } + + w.onerror = (evt) => { + if (!close()) return + fail(queue, toError(evt.error ?? 
evt.message)) + } + w.onmessage = (evt: MessageEvent) => { + const msg = evt.data + if (msg.type === "line") { + if (open) Queue.offerUnsafe(queue, msg.line) + return + } + if (!close()) return + if (msg.type === "error") { + fail(queue, Object.assign(new Error(msg.error.message), msg.error)) + return + } + if (msg.code === 0 || msg.code === 1) { + Queue.endUnsafe(queue) + return + } + fail(queue, error(msg.stderr, msg.code)) + } + + yield* Effect.acquireRelease( + Effect.sync(() => { + input.signal?.addEventListener("abort", onabort, { once: true }) + w.postMessage({ + kind: "files", + cwd: input.cwd, + args: filesArgs(input), + } satisfies Run) + }), + () => + Effect.sync(() => { + input.signal?.removeEventListener("abort", onabort) + w.onerror = null + w.onmessage = null + }), + ) + }), + ) +} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const source = (input: FilesInput) => { + const useWorker = !!input.signal && typeof Worker !== "undefined" + if (!useWorker && input.signal) { + log.warn("worker unavailable, ripgrep abort disabled") + } + return useWorker ? 
filesWorker(input) : filesDirect(input) + } + + const files: Interface["files"] = (input) => source(input) + + const tree: Interface["tree"] = Effect.fn("Ripgrep.tree")(function* (input: TreeInput) { + log.info("tree", input) + const list = Array.from(yield* source({ cwd: input.cwd, signal: input.signal }).pipe(Stream.runCollect)) + + interface Node { + name: string + children: Map + } + + function child(node: Node, name: string) { + const item = node.children.get(name) + if (item) return item + const next = { name, children: new Map() } + node.children.set(name, next) + return next + } + + function count(node: Node): number { + return Array.from(node.children.values()).reduce((sum, child) => sum + 1 + count(child), 0) + } + + const root: Node = { name: "", children: new Map() } + for (const file of list) { + if (file.includes(".opencode")) continue + const parts = file.split(path.sep) + if (parts.length < 2) continue + let node = root + for (const part of parts.slice(0, -1)) { + node = child(node, part) + } + } + + const total = count(root) + const limit = input.limit ?? 
total + const lines: string[] = [] + const queue: Array<{ node: Node; path: string }> = Array.from(root.children.values()) + .sort((a, b) => a.name.localeCompare(b.name)) + .map((node) => ({ node, path: node.name })) + + let used = 0 + for (let i = 0; i < queue.length && used < limit; i++) { + const item = queue[i] + lines.push(item.path) + used++ + queue.push( + ...Array.from(item.node.children.values()) + .sort((a, b) => a.name.localeCompare(b.name)) + .map((node) => ({ node, path: `${item.path}/${node.name}` })), + ) + } + + if (total > used) lines.push(`[${total - used} truncated]`) + return lines.join("\n") + }) + + const search: Interface["search"] = Effect.fn("Ripgrep.search")(function* (input: SearchInput) { + const useWorker = !!input.signal && typeof Worker !== "undefined" + if (!useWorker && input.signal) { + log.warn("worker unavailable, ripgrep abort disabled") + } + return yield* useWorker ? searchWorker(input) : searchDirect(input) + }) + + return Service.of({ files, tree, search }) + }), +) + +export const defaultLayer = layer + +export * as Ripgrep from "./ripgrep" From 218eca7c2bc95355f594c0fe50853326c86c469f Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:50:11 -0400 Subject: [PATCH 067/201] refactor: unwrap MDNS namespace + self-reexport (#22968) --- packages/opencode/src/server/mdns.ts | 88 ++++++++++++++-------------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/packages/opencode/src/server/mdns.ts b/packages/opencode/src/server/mdns.ts index 2011771a20..580456754d 100644 --- a/packages/opencode/src/server/mdns.ts +++ b/packages/opencode/src/server/mdns.ts @@ -3,58 +3,58 @@ import { Bonjour } from "bonjour-service" const log = Log.create({ service: "mdns" }) -export namespace MDNS { - let bonjour: Bonjour | undefined - let currentPort: number | undefined +let bonjour: Bonjour | undefined +let currentPort: number | undefined - export function publish(port: number, domain?: string) { - if (currentPort === 
port) return - if (bonjour) unpublish() +export function publish(port: number, domain?: string) { + if (currentPort === port) return + if (bonjour) unpublish() - try { - const host = domain ?? "opencode.local" - const name = `opencode-${port}` - bonjour = new Bonjour() - const service = bonjour.publish({ - name, - type: "http", - host, - port, - txt: { path: "/" }, - }) + try { + const host = domain ?? "opencode.local" + const name = `opencode-${port}` + bonjour = new Bonjour() + const service = bonjour.publish({ + name, + type: "http", + host, + port, + txt: { path: "/" }, + }) - service.on("up", () => { - log.info("mDNS service published", { name, port }) - }) + service.on("up", () => { + log.info("mDNS service published", { name, port }) + }) - service.on("error", (err) => { - log.error("mDNS service error", { error: err }) - }) + service.on("error", (err) => { + log.error("mDNS service error", { error: err }) + }) - currentPort = port - } catch (err) { - log.error("mDNS publish failed", { error: err }) - if (bonjour) { - try { - bonjour.destroy() - } catch {} - } - bonjour = undefined - currentPort = undefined - } - } - - export function unpublish() { + currentPort = port + } catch (err) { + log.error("mDNS publish failed", { error: err }) if (bonjour) { try { - bonjour.unpublishAll() bonjour.destroy() - } catch (err) { - log.error("mDNS unpublish failed", { error: err }) - } - bonjour = undefined - currentPort = undefined - log.info("mDNS service unpublished") + } catch {} } + bonjour = undefined + currentPort = undefined } } + +export function unpublish() { + if (bonjour) { + try { + bonjour.unpublishAll() + bonjour.destroy() + } catch (err) { + log.error("mDNS unpublish failed", { error: err }) + } + bonjour = undefined + currentPort = undefined + log.info("mDNS service unpublished") + } +} + +export * as MDNS from "./mdns" From 715786bbf96304f617c5f2a48ed49fe6101c90ef Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:50:15 -0400 Subject: 
[PATCH 068/201] refactor: unwrap FileTime namespace + self-reexport (#22966) --- packages/opencode/src/file/time.ts | 208 ++++++++++++++--------------- 1 file changed, 104 insertions(+), 104 deletions(-) diff --git a/packages/opencode/src/file/time.ts b/packages/opencode/src/file/time.ts index 327eadbef5..cc26682d57 100644 --- a/packages/opencode/src/file/time.ts +++ b/packages/opencode/src/file/time.ts @@ -5,109 +5,109 @@ import { Flag } from "@/flag/flag" import type { SessionID } from "@/session/schema" import { Log } from "../util" -export namespace FileTime { - const log = Log.create({ service: "file.time" }) +const log = Log.create({ service: "file.time" }) - export type Stamp = { - readonly read: Date - readonly mtime: number | undefined - readonly size: number | undefined - } - - const session = (reads: Map>, sessionID: SessionID) => { - const value = reads.get(sessionID) - if (value) return value - - const next = new Map() - reads.set(sessionID, next) - return next - } - - interface State { - reads: Map> - locks: Map - } - - export interface Interface { - readonly read: (sessionID: SessionID, file: string) => Effect.Effect - readonly get: (sessionID: SessionID, file: string) => Effect.Effect - readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect - readonly withLock: (filepath: string, fn: () => Effect.Effect) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/FileTime") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const fsys = yield* AppFileSystem.Service - const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK - - const stamp = Effect.fnUntraced(function* (file: string) { - const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void)) - return { - read: yield* DateTime.nowAsDate, - mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined, - size: info ? 
Number(info.size) : undefined, - } - }) - const state = yield* InstanceState.make( - Effect.fn("FileTime.state")(() => - Effect.succeed({ - reads: new Map>(), - locks: new Map(), - }), - ), - ) - - const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) { - filepath = AppFileSystem.normalizePath(filepath) - const locks = (yield* InstanceState.get(state)).locks - const lock = locks.get(filepath) - if (lock) return lock - - const next = Semaphore.makeUnsafe(1) - locks.set(filepath, next) - return next - }) - - const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) { - file = AppFileSystem.normalizePath(file) - const reads = (yield* InstanceState.get(state)).reads - log.info("read", { sessionID, file }) - session(reads, sessionID).set(file, yield* stamp(file)) - }) - - const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) { - file = AppFileSystem.normalizePath(file) - const reads = (yield* InstanceState.get(state)).reads - return reads.get(sessionID)?.get(file)?.read - }) - - const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) { - if (disableCheck) return - filepath = AppFileSystem.normalizePath(filepath) - - const reads = (yield* InstanceState.get(state)).reads - const time = reads.get(sessionID)?.get(filepath) - if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`) - - const next = yield* stamp(filepath) - const changed = next.mtime !== time.mtime || next.size !== time.size - if (!changed) return - - throw new Error( - `File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? 
next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`, - ) - }) - - const withLock = Effect.fn("FileTime.withLock")(function* (filepath: string, fn: () => Effect.Effect) { - return yield* fn().pipe((yield* getLock(filepath)).withPermits(1)) - }) - - return Service.of({ read, get, assert, withLock }) - }), - ).pipe(Layer.orDie) - - export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) +export type Stamp = { + readonly read: Date + readonly mtime: number | undefined + readonly size: number | undefined } + +const session = (reads: Map>, sessionID: SessionID) => { + const value = reads.get(sessionID) + if (value) return value + + const next = new Map() + reads.set(sessionID, next) + return next +} + +interface State { + reads: Map> + locks: Map +} + +export interface Interface { + readonly read: (sessionID: SessionID, file: string) => Effect.Effect + readonly get: (sessionID: SessionID, file: string) => Effect.Effect + readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect + readonly withLock: (filepath: string, fn: () => Effect.Effect) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/FileTime") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const fsys = yield* AppFileSystem.Service + const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK + + const stamp = Effect.fnUntraced(function* (file: string) { + const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void)) + return { + read: yield* DateTime.nowAsDate, + mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined, + size: info ? 
Number(info.size) : undefined, + } + }) + const state = yield* InstanceState.make( + Effect.fn("FileTime.state")(() => + Effect.succeed({ + reads: new Map>(), + locks: new Map(), + }), + ), + ) + + const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) { + filepath = AppFileSystem.normalizePath(filepath) + const locks = (yield* InstanceState.get(state)).locks + const lock = locks.get(filepath) + if (lock) return lock + + const next = Semaphore.makeUnsafe(1) + locks.set(filepath, next) + return next + }) + + const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) { + file = AppFileSystem.normalizePath(file) + const reads = (yield* InstanceState.get(state)).reads + log.info("read", { sessionID, file }) + session(reads, sessionID).set(file, yield* stamp(file)) + }) + + const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) { + file = AppFileSystem.normalizePath(file) + const reads = (yield* InstanceState.get(state)).reads + return reads.get(sessionID)?.get(file)?.read + }) + + const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) { + if (disableCheck) return + filepath = AppFileSystem.normalizePath(filepath) + + const reads = (yield* InstanceState.get(state)).reads + const time = reads.get(sessionID)?.get(filepath) + if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`) + + const next = yield* stamp(filepath) + const changed = next.mtime !== time.mtime || next.size !== time.size + if (!changed) return + + throw new Error( + `File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? 
next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`, + ) + }) + + const withLock = Effect.fn("FileTime.withLock")(function* (filepath: string, fn: () => Effect.Effect) { + return yield* fn().pipe((yield* getLock(filepath)).withPermits(1)) + }) + + return Service.of({ read, get, assert, withLock }) + }), +).pipe(Layer.orDie) + +export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) + +export * as FileTime from "./time" From 1089fa041561d76a58d72e464d95219af682eb8c Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:50:32 -0400 Subject: [PATCH 069/201] refactor: unwrap ServerProxy namespace + self-reexport (#22969) --- packages/opencode/src/server/proxy.ts | 146 +++++++++++++------------- 1 file changed, 73 insertions(+), 73 deletions(-) diff --git a/packages/opencode/src/server/proxy.ts b/packages/opencode/src/server/proxy.ts index 07703fdc80..22bc89baf2 100644 --- a/packages/opencode/src/server/proxy.ts +++ b/packages/opencode/src/server/proxy.ts @@ -101,83 +101,83 @@ const app = (upgrade: UpgradeWebSocket) => }), ) -export namespace ServerProxy { - const log = Log.Default.clone().tag("service", "server-proxy") +const log = Log.Default.clone().tag("service", "server-proxy") - export async function http( - url: string | URL, - extra: HeadersInit | undefined, - req: Request, - workspaceID: WorkspaceID, - ) { - if (!Workspace.isSyncing(workspaceID)) { - return new Response(`broken sync connection for workspace: ${workspaceID}`, { - status: 503, - headers: { - "content-type": "text/plain; charset=utf-8", - }, - }) - } - - return fetch( - new Request(url, { - method: req.method, - headers: headers(req, extra), - body: req.method === "GET" || req.method === "HEAD" ? 
undefined : req.body, - redirect: "manual", - signal: req.signal, - }), - ).then((res) => { - const sync = Fence.parse(res.headers) - const next = new Headers(res.headers) - next.delete("content-encoding") - next.delete("content-length") - - const done = sync ? Fence.wait(workspaceID, sync, req.signal) : Promise.resolve() - - return done.then(async () => { - console.log("proxy http response", { - method: req.method, - request: req.url, - url: String(url), - status: res.status, - statusText: res.statusText, - }) - return new Response(res.body, { - status: res.status, - statusText: res.statusText, - headers: next, - }) - }) +export async function http( + url: string | URL, + extra: HeadersInit | undefined, + req: Request, + workspaceID: WorkspaceID, +) { + if (!Workspace.isSyncing(workspaceID)) { + return new Response(`broken sync connection for workspace: ${workspaceID}`, { + status: 503, + headers: { + "content-type": "text/plain; charset=utf-8", + }, }) } - export function websocket( - upgrade: UpgradeWebSocket, - target: string | URL, - extra: HeadersInit | undefined, - req: Request, - env: unknown, - ) { - const proxy = new URL(req.url) - proxy.pathname = "/__workspace_ws" - proxy.search = "" - const next = new Headers(req.headers) - next.set("x-opencode-proxy-url", socket(target)) - for (const [key, value] of new Headers(extra).entries()) { - next.set(key, value) - } - log.info("proxy websocket", { - request: req.url, - target: String(target), - }) - return app(upgrade).fetch( - new Request(proxy, { + return fetch( + new Request(url, { + method: req.method, + headers: headers(req, extra), + body: req.method === "GET" || req.method === "HEAD" ? undefined : req.body, + redirect: "manual", + signal: req.signal, + }), + ).then((res) => { + const sync = Fence.parse(res.headers) + const next = new Headers(res.headers) + next.delete("content-encoding") + next.delete("content-length") + + const done = sync ? 
Fence.wait(workspaceID, sync, req.signal) : Promise.resolve() + + return done.then(async () => { + console.log("proxy http response", { method: req.method, + request: req.url, + url: String(url), + status: res.status, + statusText: res.statusText, + }) + return new Response(res.body, { + status: res.status, + statusText: res.statusText, headers: next, - signal: req.signal, - }), - env as never, - ) - } + }) + }) + }) } + +export function websocket( + upgrade: UpgradeWebSocket, + target: string | URL, + extra: HeadersInit | undefined, + req: Request, + env: unknown, +) { + const proxy = new URL(req.url) + proxy.pathname = "/__workspace_ws" + proxy.search = "" + const next = new Headers(req.headers) + next.set("x-opencode-proxy-url", socket(target)) + for (const [key, value] of new Headers(extra).entries()) { + next.set(key, value) + } + log.info("proxy websocket", { + request: req.url, + target: String(target), + }) + return app(upgrade).fetch( + new Request(proxy, { + method: req.method, + headers: next, + signal: req.signal, + }), + env as never, + ) +} + +export * as ServerProxy from "./proxy" From c03fa362572d8108d2d76c2a18bbf616a7345dac Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:51:01 -0400 Subject: [PATCH 070/201] refactor: unwrap Server namespace + self-reexport (#22970) --- packages/opencode/src/server/server.ts | 180 ++++++++++++------------- 1 file changed, 90 insertions(+), 90 deletions(-) diff --git a/packages/opencode/src/server/server.ts b/packages/opencode/src/server/server.ts index fc3b399f79..892a99a77c 100644 --- a/packages/opencode/src/server/server.ts +++ b/packages/opencode/src/server/server.ts @@ -17,37 +17,22 @@ globalThis.AI_SDK_LOG_WARNINGS = false initProjectors() -export namespace Server { - const log = Log.create({ service: "server" }) +const log = Log.create({ service: "server" }) - export type Listener = { - hostname: string - port: number - url: URL - stop: (close?: boolean) => Promise - } +export type 
Listener = { + hostname: string + port: number + url: URL + stop: (close?: boolean) => Promise +} - export const Default = lazy(() => create({})) +export const Default = lazy(() => create({})) - function create(opts: { cors?: string[] }) { - const app = new Hono() - const runtime = adapter.create(app) - - if (Flag.OPENCODE_WORKSPACE_ID) { - return { - app: app - .onError(ErrorMiddleware) - .use(AuthMiddleware) - .use(LoggerMiddleware) - .use(CompressionMiddleware) - .use(CorsMiddleware(opts)) - .use(FenceMiddleware) - .route("/", ControlPlaneRoutes()) - .route("/", InstanceRoutes(runtime.upgradeWebSocket)), - runtime, - } - } +function create(opts: { cors?: string[] }) { + const app = new Hono() + const runtime = adapter.create(app) + if (Flag.OPENCODE_WORKSPACE_ID) { return { app: app .onError(ErrorMiddleware) @@ -55,73 +40,88 @@ export namespace Server { .use(LoggerMiddleware) .use(CompressionMiddleware) .use(CorsMiddleware(opts)) + .use(FenceMiddleware) .route("/", ControlPlaneRoutes()) - .route("/", InstanceRoutes(runtime.upgradeWebSocket)) - .route("/", UIRoutes()), + .route("/", InstanceRoutes(runtime.upgradeWebSocket)), runtime, } } - export async function openapi() { - // Build a fresh app with all routes registered directly so - // hono-openapi can see describeRoute metadata (`.route()` wraps - // handlers when the sub-app has a custom errorHandler, which - // strips the metadata symbol). 
- const { app } = create({}) - const result = await generateSpecs(app, { - documentation: { - info: { - title: "opencode", - version: "1.0.0", - description: "opencode api", - }, - openapi: "3.1.1", - }, - }) - return result - } - - export let url: URL - - export async function listen(opts: { - port: number - hostname: string - mdns?: boolean - mdnsDomain?: string - cors?: string[] - }): Promise { - const built = create(opts) - const server = await built.runtime.listen(opts) - - const next = new URL("http://localhost") - next.hostname = opts.hostname - next.port = String(server.port) - url = next - - const mdns = - opts.mdns && - server.port && - opts.hostname !== "127.0.0.1" && - opts.hostname !== "localhost" && - opts.hostname !== "::1" - if (mdns) { - MDNS.publish(server.port, opts.mdnsDomain) - } else if (opts.mdns) { - log.warn("mDNS enabled but hostname is loopback; skipping mDNS publish") - } - - let closing: Promise | undefined - return { - hostname: opts.hostname, - port: server.port, - url: next, - stop(close?: boolean) { - closing ??= (async () => { - if (mdns) MDNS.unpublish() - await server.stop(close) - })() - return closing - }, - } + return { + app: app + .onError(ErrorMiddleware) + .use(AuthMiddleware) + .use(LoggerMiddleware) + .use(CompressionMiddleware) + .use(CorsMiddleware(opts)) + .route("/", ControlPlaneRoutes()) + .route("/", InstanceRoutes(runtime.upgradeWebSocket)) + .route("/", UIRoutes()), + runtime, } } + +export async function openapi() { + // Build a fresh app with all routes registered directly so + // hono-openapi can see describeRoute metadata (`.route()` wraps + // handlers when the sub-app has a custom errorHandler, which + // strips the metadata symbol). 
+ const { app } = create({}) + const result = await generateSpecs(app, { + documentation: { + info: { + title: "opencode", + version: "1.0.0", + description: "opencode api", + }, + openapi: "3.1.1", + }, + }) + return result +} + +export let url: URL + +export async function listen(opts: { + port: number + hostname: string + mdns?: boolean + mdnsDomain?: string + cors?: string[] +}): Promise { + const built = create(opts) + const server = await built.runtime.listen(opts) + + const next = new URL("http://localhost") + next.hostname = opts.hostname + next.port = String(server.port) + url = next + + const mdns = + opts.mdns && + server.port && + opts.hostname !== "127.0.0.1" && + opts.hostname !== "localhost" && + opts.hostname !== "::1" + if (mdns) { + MDNS.publish(server.port, opts.mdnsDomain) + } else if (opts.mdns) { + log.warn("mDNS enabled but hostname is loopback; skipping mDNS publish") + } + + let closing: Promise | undefined + return { + hostname: opts.hostname, + port: server.port, + url: next, + stop(close?: boolean) { + closing ??= (async () => { + if (mdns) MDNS.unpublish() + await server.stop(close) + })() + return closing + }, + } +} + +export * as Server from "./server" From 5d47ea091879b026b8efb9d09af06deb0643e46a Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 19:52:04 -0400 Subject: [PATCH 071/201] refactor: unwrap ConfigMCP namespace + self-reexport (#22948) --- .../opencode/src/cli/cmd/tui/context/kv.tsx | 2 +- packages/opencode/src/cli/error.ts | 8 +- packages/opencode/src/config/mcp.ts | 130 +++++++++--------- packages/opencode/src/lsp/lsp.ts | 9 +- packages/opencode/src/npm/index.ts | 13 +- packages/opencode/src/provider/provider.ts | 6 +- packages/opencode/src/session/session.ts | 22 +-- packages/opencode/src/tool/tool.ts | 2 +- packages/opencode/src/util/filesystem.ts | 2 +- packages/opencode/test/config/config.test.ts | 2 +- 10 files changed, 104 insertions(+), 92 deletions(-) diff --git 
a/packages/opencode/src/cli/cmd/tui/context/kv.tsx b/packages/opencode/src/cli/cmd/tui/context/kv.tsx index 39e976b0e5..803752e766 100644 --- a/packages/opencode/src/cli/cmd/tui/context/kv.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/kv.tsx @@ -12,7 +12,7 @@ export const { use: useKV, provider: KVProvider } = createSimpleContext({ const [store, setStore] = createStore>() const filePath = path.join(Global.Path.state, "kv.json") - Filesystem.readJson(filePath) + Filesystem.readJson>(filePath) .then((x) => { setStore(x) }) diff --git a/packages/opencode/src/cli/error.ts b/packages/opencode/src/cli/error.ts index 89b557e2d2..f286b5166f 100644 --- a/packages/opencode/src/cli/error.ts +++ b/packages/opencode/src/cli/error.ts @@ -28,10 +28,10 @@ export function FormatError(input: unknown) { // ProviderModelNotFoundError: { providerID: string, modelID: string, suggestions?: string[] } if (NamedError.hasName(input, "ProviderModelNotFoundError")) { const data = (input as ErrorLike).data - const suggestions = data?.suggestions as string[] | undefined + const suggestions: string[] = Array.isArray(data?.suggestions) ? data.suggestions : [] return [ `Model not found: ${data?.providerID}/${data?.modelID}`, - ...(Array.isArray(suggestions) && suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []), + ...(suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []), `Try: \`opencode models\` to list available models`, `Or check your config (opencode.json) provider/model names`, ].join("\n") @@ -64,10 +64,10 @@ export function FormatError(input: unknown) { const data = (input as ErrorLike).data const path = data?.path const message = data?.message - const issues = data?.issues as Array<{ message: string; path: string[] }> | undefined + const issues: Array<{ message: string; path: string[] }> = Array.isArray(data?.issues) ? data.issues : [] return [ `Configuration is invalid${path && path !== "config" ? ` at ${path}` : ""}` + (message ? 
`: ${message}` : ""), - ...(issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []), + ...issues.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")), ].join("\n") } diff --git a/packages/opencode/src/config/mcp.ts b/packages/opencode/src/config/mcp.ts index fb8f8caa41..fda933b421 100644 --- a/packages/opencode/src/config/mcp.ts +++ b/packages/opencode/src/config/mcp.ts @@ -1,70 +1,70 @@ import z from "zod" -export namespace ConfigMCP { - export const Local = z - .object({ - type: z.literal("local").describe("Type of MCP server connection"), - command: z.string().array().describe("Command and arguments to run the MCP server"), - environment: z - .record(z.string(), z.string()) - .optional() - .describe("Environment variables to set when running the MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpLocalConfig", - }) +export const Local = z + .object({ + type: z.literal("local").describe("Type of MCP server connection"), + command: z.string().array().describe("Command and arguments to run the MCP server"), + environment: z + .record(z.string(), z.string()) + .optional() + .describe("Environment variables to set when running the MCP server"), + enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), + timeout: z + .number() + .int() + .positive() + .optional() + .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), + }) + .strict() + .meta({ + ref: "McpLocalConfig", + }) - export const OAuth = z - .object({ - clientId: z - .string() - .optional() - .describe("OAuth client ID. 
If not provided, dynamic client registration (RFC 7591) will be attempted."), - clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), - scope: z.string().optional().describe("OAuth scopes to request during authorization"), - redirectUri: z - .string() - .optional() - .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), - }) - .strict() - .meta({ - ref: "McpOAuthConfig", - }) - export type OAuth = z.infer +export const OAuth = z + .object({ + clientId: z + .string() + .optional() + .describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."), + clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), + scope: z.string().optional().describe("OAuth scopes to request during authorization"), + redirectUri: z + .string() + .optional() + .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), + }) + .strict() + .meta({ + ref: "McpOAuthConfig", + }) +export type OAuth = z.infer - export const Remote = z - .object({ - type: z.literal("remote").describe("Type of MCP server connection"), - url: z.string().describe("URL of the remote MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), - oauth: z - .union([OAuth, z.literal(false)]) - .optional() - .describe( - "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", - ), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpRemoteConfig", - }) +export const Remote = z + .object({ + type: z.literal("remote").describe("Type of MCP server connection"), + url: z.string().describe("URL of the remote MCP server"), + enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), + headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), + oauth: z + .union([OAuth, z.literal(false)]) + .optional() + .describe( + "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", + ), + timeout: z + .number() + .int() + .positive() + .optional() + .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), + }) + .strict() + .meta({ + ref: "McpRemoteConfig", + }) - export const Info = z.discriminatedUnion("type", [Local, Remote]) - export type Info = z.infer -} +export const Info = z.discriminatedUnion("type", [Local, Remote]) +export type Info = z.infer + +export * as ConfigMCP from "./mcp" diff --git a/packages/opencode/src/lsp/lsp.ts b/packages/opencode/src/lsp/lsp.ts index d4d1e75634..d895e73256 100644 --- a/packages/opencode/src/lsp/lsp.ts +++ b/packages/opencode/src/lsp/lsp.ts @@ -440,12 +440,11 @@ export const layer = Layer.effect( const workspaceSymbol = Effect.fn("LSP.workspaceSymbol")(function* (query: string) { const results = yield* runAll((client) => client.connection - .sendRequest("workspace/symbol", { query }) - .then((result: any) => result.filter((x: Symbol) => kinds.includes(x.kind))) - .then((result: any) => result.slice(0, 10)) - .catch(() => []), + .sendRequest("workspace/symbol", { query }) + .then((result) => result.filter((x) => kinds.includes(x.kind)).slice(0, 10)) + .catch(() => [] as Symbol[]), ) - return results.flat() as Symbol[] + return results.flat() }) const prepareCallHierarchy = Effect.fn("LSP.prepareCallHierarchy")(function* 
(input: LocInput) { diff --git a/packages/opencode/src/npm/index.ts b/packages/opencode/src/npm/index.ts index 174df12974..425b27f420 100644 --- a/packages/opencode/src/npm/index.ts +++ b/packages/opencode/src/npm/index.ts @@ -124,8 +124,17 @@ export async function install(dir: string) { return } - const pkg = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) - const lock = await Filesystem.readJson(path.join(dir, "package-lock.json")).catch(() => ({})) + type PackageDeps = Record + type PackageJson = { + dependencies?: PackageDeps + devDependencies?: PackageDeps + peerDependencies?: PackageDeps + optionalDependencies?: PackageDeps + } + const pkg: PackageJson = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) + const lock: { packages?: Record } = await Filesystem.readJson<{ + packages?: Record + }>(path.join(dir, "package-lock.json")).catch(() => ({})) const declared = new Set([ ...Object.keys(pkg.dependencies || {}), diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 43ae9a5e9f..a7297634e7 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -547,12 +547,14 @@ function custom(dep: CustomDep): Record { }, async getModel(sdk: any, modelID: string, options?: Record) { if (modelID.startsWith("duo-workflow-")) { - const workflowRef = options?.workflowRef as string | undefined + const workflowRef = typeof options?.workflowRef === "string" ? options.workflowRef : undefined // Use the static mapping if it exists, otherwise use duo-workflow with selectedModelRef const sdkModelID = isWorkflowModel(modelID) ? modelID : "duo-workflow" + const workflowDefinition = + typeof options?.workflowDefinition === "string" ? 
options.workflowDefinition : undefined const model = sdk.workflowChat(sdkModelID, { featureFlags, - workflowDefinition: options?.workflowDefinition as string | undefined, + workflowDefinition, }) if (workflowRef) { model.selectedModelRef = workflowRef diff --git a/packages/opencode/src/session/session.ts b/packages/opencode/src/session/session.ts index 8c5fc29e4a..a453b19815 100644 --- a/packages/opencode/src/session/session.ts +++ b/packages/opencode/src/session/session.ts @@ -272,16 +272,18 @@ export const getUsage = (input: { model: Provider.Model; usage: LanguageModelUsa input.usage.inputTokenDetails?.cacheReadTokens ?? input.usage.cachedInputTokens ?? 0, ) const cacheWriteInputTokens = safe( - (input.usage.inputTokenDetails?.cacheWriteTokens ?? - input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ?? - // google-vertex-anthropic returns metadata under "vertex" key - // (AnthropicMessagesLanguageModel custom provider key from 'vertex.anthropic.messages') - input.metadata?.["vertex"]?.["cacheCreationInputTokens"] ?? - // @ts-expect-error - input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ?? - // @ts-expect-error - input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ?? - 0) as number, + Number( + input.usage.inputTokenDetails?.cacheWriteTokens ?? + input.metadata?.["anthropic"]?.["cacheCreationInputTokens"] ?? + // google-vertex-anthropic returns metadata under "vertex" key + // (AnthropicMessagesLanguageModel custom provider key from 'vertex.anthropic.messages') + input.metadata?.["vertex"]?.["cacheCreationInputTokens"] ?? + // @ts-expect-error + input.metadata?.["bedrock"]?.["usage"]?.["cacheWriteInputTokens"] ?? + // @ts-expect-error + input.metadata?.["venice"]?.["usage"]?.["cacheCreationInputTokens"] ?? 
+ 0, + ), ) // AI SDK v6 normalized inputTokens to include cached tokens across all providers diff --git a/packages/opencode/src/tool/tool.ts b/packages/opencode/src/tool/tool.ts index 0ea0435fb1..179149afd2 100644 --- a/packages/opencode/src/tool/tool.ts +++ b/packages/opencode/src/tool/tool.ts @@ -19,7 +19,7 @@ export type Context = { agent: string abort: AbortSignal callID?: string - extra?: { [key: string]: any } + extra?: { [key: string]: unknown } messages: MessageV2.WithParts[] metadata(input: { title?: string; metadata?: M }): Effect.Effect ask(input: Omit): Effect.Effect diff --git a/packages/opencode/src/util/filesystem.ts b/packages/opencode/src/util/filesystem.ts index 3ff2c6e3f4..6c4d455224 100644 --- a/packages/opencode/src/util/filesystem.ts +++ b/packages/opencode/src/util/filesystem.ts @@ -39,7 +39,7 @@ export async function readText(p: string): Promise { return readFile(p, "utf-8") } -export async function readJson(p: string): Promise { +export async function readJson(p: string): Promise { return JSON.parse(await readFile(p, "utf-8")) } diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index c41f395e51..3e90842e18 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -757,7 +757,7 @@ test("updates config and writes to file", async () => { const newConfig = { model: "updated/model" } await save(newConfig as any) - const writtenConfig = await Filesystem.readJson(path.join(tmp.path, "config.json")) + const writtenConfig = await Filesystem.readJson<{ model: string }>(path.join(tmp.path, "config.json")) expect(writtenConfig.model).toBe("updated/model") }, }) From f9aa3d77cd543ad3a46f86e36a2908f0cc2e652f Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Thu, 16 Apr 2026 23:53:10 +0000 Subject: [PATCH 072/201] chore: generate --- packages/opencode/src/config/mcp.ts | 4 +--- packages/opencode/src/server/proxy.ts | 7 +------ 2 files 
changed, 2 insertions(+), 9 deletions(-) diff --git a/packages/opencode/src/config/mcp.ts b/packages/opencode/src/config/mcp.ts index fda933b421..5036cd6e4f 100644 --- a/packages/opencode/src/config/mcp.ts +++ b/packages/opencode/src/config/mcp.ts @@ -49,9 +49,7 @@ export const Remote = z oauth: z .union([OAuth, z.literal(false)]) .optional() - .describe( - "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", - ), + .describe("OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection."), timeout: z .number() .int() diff --git a/packages/opencode/src/server/proxy.ts b/packages/opencode/src/server/proxy.ts index 22bc89baf2..9c1fd1f288 100644 --- a/packages/opencode/src/server/proxy.ts +++ b/packages/opencode/src/server/proxy.ts @@ -103,12 +103,7 @@ const app = (upgrade: UpgradeWebSocket) => const log = Log.Default.clone().tag("service", "server-proxy") -export async function http( - url: string | URL, - extra: HeadersInit | undefined, - req: Request, - workspaceID: WorkspaceID, -) { +export async function http(url: string | URL, extra: HeadersInit | undefined, req: Request, workspaceID: WorkspaceID) { if (!Workspace.isSyncing(workspaceID)) { return new Response(`broken sync connection for workspace: ${workspaceID}`, { status: 503, From bae80af1b4620961664076bd257c22b88b57eeaf Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:00:15 -0400 Subject: [PATCH 073/201] refactor: unwrap Workspace namespace + self-reexport (#22934) --- .../opencode/src/control-plane/workspace.ts | 886 +++++++++--------- 1 file changed, 443 insertions(+), 443 deletions(-) diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index 9c1c4c8960..3af11707e8 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -26,173 +26,239 @@ import { AppRuntime } from 
"@/effect/app-runtime" import { EventSequenceTable } from "@/sync/event.sql" import { waitEvent } from "./util" -export namespace Workspace { - export const Info = WorkspaceInfo.meta({ - ref: "Workspace", - }) - export type Info = z.infer +export const Info = WorkspaceInfo.meta({ + ref: "Workspace", +}) +export type Info = z.infer - export const ConnectionStatus = z.object({ - workspaceID: WorkspaceID.zod, - status: z.enum(["connected", "connecting", "disconnected", "error"]), - error: z.string().optional(), - }) - export type ConnectionStatus = z.infer +export const ConnectionStatus = z.object({ + workspaceID: WorkspaceID.zod, + status: z.enum(["connected", "connecting", "disconnected", "error"]), + error: z.string().optional(), +}) +export type ConnectionStatus = z.infer - const Restore = z.object({ - workspaceID: WorkspaceID.zod, - sessionID: SessionID.zod, - total: z.number().int().min(0), - step: z.number().int().min(0), - }) +const Restore = z.object({ + workspaceID: WorkspaceID.zod, + sessionID: SessionID.zod, + total: z.number().int().min(0), + step: z.number().int().min(0), +}) - export const Event = { - Ready: BusEvent.define( - "workspace.ready", - z.object({ - name: z.string(), - }), - ), - Failed: BusEvent.define( - "workspace.failed", - z.object({ - message: z.string(), - }), - ), - Restore: BusEvent.define("workspace.restore", Restore), - Status: BusEvent.define("workspace.status", ConnectionStatus), +export const Event = { + Ready: BusEvent.define( + "workspace.ready", + z.object({ + name: z.string(), + }), + ), + Failed: BusEvent.define( + "workspace.failed", + z.object({ + message: z.string(), + }), + ), + Restore: BusEvent.define("workspace.restore", Restore), + Status: BusEvent.define("workspace.status", ConnectionStatus), +} + +function fromRow(row: typeof WorkspaceTable.$inferSelect): Info { + return { + id: row.id, + type: row.type, + branch: row.branch, + name: row.name, + directory: row.directory, + extra: row.extra, + projectID: 
row.project_id, + } +} + +const CreateInput = z.object({ + id: WorkspaceID.zod.optional(), + type: Info.shape.type, + branch: Info.shape.branch, + projectID: ProjectID.zod, + extra: Info.shape.extra, +}) + +export const create = fn(CreateInput, async (input) => { + const id = WorkspaceID.ascending(input.id) + const adaptor = await getAdaptor(input.projectID, input.type) + + const config = await adaptor.configure({ ...input, id, name: Slug.create(), directory: null }) + + const info: Info = { + id, + type: config.type, + branch: config.branch ?? null, + name: config.name ?? null, + directory: config.directory ?? null, + extra: config.extra ?? null, + projectID: input.projectID, } - function fromRow(row: typeof WorkspaceTable.$inferSelect): Info { - return { - id: row.id, - type: row.type, - branch: row.branch, - name: row.name, - directory: row.directory, - extra: row.extra, - projectID: row.project_id, - } - } - - const CreateInput = z.object({ - id: WorkspaceID.zod.optional(), - type: Info.shape.type, - branch: Info.shape.branch, - projectID: ProjectID.zod, - extra: Info.shape.extra, + Database.use((db) => { + db.insert(WorkspaceTable) + .values({ + id: info.id, + type: info.type, + branch: info.branch, + name: info.name, + directory: info.directory, + extra: info.extra, + project_id: info.projectID, + }) + .run() }) - export const create = fn(CreateInput, async (input) => { - const id = WorkspaceID.ascending(input.id) - const adaptor = await getAdaptor(input.projectID, input.type) + const env = { + OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), + OPENCODE_WORKSPACE_ID: config.id, + OPENCODE_EXPERIMENTAL_WORKSPACES: "true", + } + await adaptor.create(config, env) - const config = await adaptor.configure({ ...input, id, name: Slug.create(), directory: null }) + startSync(info) - const info: Info = { - id, - type: config.type, - branch: config.branch ?? null, - name: config.name ?? 
null, - directory: config.directory ?? null, - extra: config.extra ?? null, - projectID: input.projectID, - } + await waitEvent({ + timeout: TIMEOUT, + fn(event) { + if (event.workspace === info.id && event.payload.type === Event.Status.type) { + const { status } = event.payload.properties + return status === "error" || status === "connected" + } + return false + }, + }) - Database.use((db) => { - db.insert(WorkspaceTable) - .values({ - id: info.id, - type: info.type, - branch: info.branch, - name: info.name, - directory: info.directory, - extra: info.extra, - project_id: info.projectID, - }) - .run() - }) + return info +}) - const env = { - OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), - OPENCODE_WORKSPACE_ID: config.id, - OPENCODE_EXPERIMENTAL_WORKSPACES: "true", - } - await adaptor.create(config, env) +const SessionRestoreInput = z.object({ + workspaceID: WorkspaceID.zod, + sessionID: SessionID.zod, +}) - startSync(info) +export const sessionRestore = fn(SessionRestoreInput, async (input) => { + log.info("session restore requested", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + }) + try { + const space = await get(input.workspaceID) + if (!space) throw new Error(`Workspace not found: ${input.workspaceID}`) - await waitEvent({ - timeout: TIMEOUT, - fn(event) { - if (event.workspace === info.id && event.payload.type === Event.Status.type) { - const { status } = event.payload.properties - return status === "error" || status === "connected" - } - return false + const adaptor = await getAdaptor(space.projectID, space.type) + const target = await adaptor.target(space) + + // Need to switch the workspace of the session + SyncEvent.run(Session.Event.Updated, { + sessionID: input.sessionID, + info: { + workspaceID: input.workspaceID, }, }) - return info - }) + const rows = Database.use((db) => + db + .select({ + id: EventTable.id, + aggregateID: EventTable.aggregate_id, + seq: 
EventTable.seq, + type: EventTable.type, + data: EventTable.data, + }) + .from(EventTable) + .where(eq(EventTable.aggregate_id, input.sessionID)) + .orderBy(asc(EventTable.seq)) + .all(), + ) + if (rows.length === 0) throw new Error(`No events found for session: ${input.sessionID}`) - const SessionRestoreInput = z.object({ - workspaceID: WorkspaceID.zod, - sessionID: SessionID.zod, - }) + const all = rows - export const sessionRestore = fn(SessionRestoreInput, async (input) => { - log.info("session restore requested", { + const size = 10 + const sets = Array.from({ length: Math.ceil(all.length / size) }, (_, i) => all.slice(i * size, (i + 1) * size)) + const total = sets.length + log.info("session restore prepared", { workspaceID: input.workspaceID, sessionID: input.sessionID, + workspaceType: space.type, + directory: space.directory, + target: target.type === "remote" ? String(route(target.url, "/sync/replay")) : target.directory, + events: all.length, + batches: total, + first: all[0]?.seq, + last: all.at(-1)?.seq, }) - try { - const space = await get(input.workspaceID) - if (!space) throw new Error(`Workspace not found: ${input.workspaceID}`) - - const adaptor = await getAdaptor(space.projectID, space.type) - const target = await adaptor.target(space) - - // Need to switch the workspace of the session - SyncEvent.run(Session.Event.Updated, { - sessionID: input.sessionID, - info: { + GlobalBus.emit("event", { + directory: "global", + workspace: input.workspaceID, + payload: { + type: Event.Restore.type, + properties: { workspaceID: input.workspaceID, + sessionID: input.sessionID, + total, + step: 0, }, - }) - - const rows = Database.use((db) => - db - .select({ - id: EventTable.id, - aggregateID: EventTable.aggregate_id, - seq: EventTable.seq, - type: EventTable.type, - data: EventTable.data, - }) - .from(EventTable) - .where(eq(EventTable.aggregate_id, input.sessionID)) - .orderBy(asc(EventTable.seq)) - .all(), - ) - if (rows.length === 0) throw new Error(`No 
events found for session: ${input.sessionID}`) - - const all = rows - - const size = 10 - const sets = Array.from({ length: Math.ceil(all.length / size) }, (_, i) => all.slice(i * size, (i + 1) * size)) - const total = sets.length - log.info("session restore prepared", { + }, + }) + for (const [i, events] of sets.entries()) { + log.info("session restore batch starting", { workspaceID: input.workspaceID, sessionID: input.sessionID, - workspaceType: space.type, - directory: space.directory, + step: i + 1, + total, + events: events.length, + first: events[0]?.seq, + last: events.at(-1)?.seq, target: target.type === "remote" ? String(route(target.url, "/sync/replay")) : target.directory, - events: all.length, - batches: total, - first: all[0]?.seq, - last: all.at(-1)?.seq, }) + if (target.type === "local") { + SyncEvent.replayAll(events) + log.info("session restore batch replayed locally", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + step: i + 1, + total, + events: events.length, + }) + } else { + const url = route(target.url, "/sync/replay") + const headers = new Headers(target.headers) + headers.set("content-type", "application/json") + const res = await fetch(url, { + method: "POST", + headers, + body: JSON.stringify({ + directory: space.directory ?? 
"", + events, + }), + }) + if (!res.ok) { + const body = await res.text() + log.error("session restore batch failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + step: i + 1, + total, + status: res.status, + body, + }) + throw new Error( + `Failed to replay session ${input.sessionID} into workspace ${input.workspaceID}: HTTP ${res.status} ${body}`, + ) + } + log.info("session restore batch posted", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + step: i + 1, + total, + status: res.status, + }) + } GlobalBus.emit("event", { directory: "global", workspace: input.workspaceID, @@ -202,329 +268,263 @@ export namespace Workspace { workspaceID: input.workspaceID, sessionID: input.sessionID, total, - step: 0, + step: i + 1, }, }, }) - for (const [i, events] of sets.entries()) { - log.info("session restore batch starting", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - step: i + 1, - total, - events: events.length, - first: events[0]?.seq, - last: events.at(-1)?.seq, - target: target.type === "remote" ? 
String(route(target.url, "/sync/replay")) : target.directory, + } + + log.info("session restore complete", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + batches: total, + }) + + return { + total, + } + } catch (err) { + log.error("session restore failed", { + workspaceID: input.workspaceID, + sessionID: input.sessionID, + error: errorData(err), + }) + throw err + } +}) + +export function list(project: Project.Info) { + const rows = Database.use((db) => + db.select().from(WorkspaceTable).where(eq(WorkspaceTable.project_id, project.id)).all(), + ) + const spaces = rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id)) + + for (const space of spaces) startSync(space) + return spaces +} + +function lookup(id: WorkspaceID) { + const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) + if (!row) return + return fromRow(row) +} + +export const get = fn(WorkspaceID.zod, async (id) => { + const space = lookup(id) + if (!space) return + startSync(space) + return space +}) + +export const remove = fn(WorkspaceID.zod, async (id) => { + const sessions = Database.use((db) => + db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.workspace_id, id)).all(), + ) + for (const session of sessions) { + await AppRuntime.runPromise(Session.Service.use((svc) => svc.remove(session.id))) + } + + const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) + + if (row) { + stopSync(id) + + const info = fromRow(row) + try { + const adaptor = await getAdaptor(info.projectID, row.type) + await adaptor.remove(info) + } catch { + log.error("adaptor not available when removing workspace", { type: row.type }) + } + Database.use((db) => db.delete(WorkspaceTable).where(eq(WorkspaceTable.id, id)).run()) + return info + } +}) + +const connections = new Map() +const aborts = new Map() +const TIMEOUT = 5000 + +function setStatus(id: WorkspaceID, status: 
ConnectionStatus["status"], error?: string) { + const prev = connections.get(id) + if (prev?.status === status && prev?.error === error) return + const next = { workspaceID: id, status, error } + connections.set(id, next) + + if (status === "error") { + aborts.delete(id) + } + + GlobalBus.emit("event", { + directory: "global", + workspace: id, + payload: { + type: Event.Status.type, + properties: next, + }, + }) +} + +export function status(): ConnectionStatus[] { + return [...connections.values()] +} + +function synced(state: Record) { + const ids = Object.keys(state) + if (ids.length === 0) return true + + const done = Object.fromEntries( + Database.use((db) => + db + .select({ + id: EventSequenceTable.aggregate_id, + seq: EventSequenceTable.seq, }) - if (target.type === "local") { - SyncEvent.replayAll(events) - log.info("session restore batch replayed locally", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - step: i + 1, - total, - events: events.length, - }) - } else { - const url = route(target.url, "/sync/replay") - const headers = new Headers(target.headers) - headers.set("content-type", "application/json") - const res = await fetch(url, { - method: "POST", - headers, - body: JSON.stringify({ - directory: space.directory ?? 
"", - events, - }), - }) - if (!res.ok) { - const body = await res.text() - log.error("session restore batch failed", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - step: i + 1, - total, - status: res.status, - body, - }) - throw new Error( - `Failed to replay session ${input.sessionID} into workspace ${input.workspaceID}: HTTP ${res.status} ${body}`, - ) - } - log.info("session restore batch posted", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - step: i + 1, - total, - status: res.status, - }) + .from(EventSequenceTable) + .where(inArray(EventSequenceTable.aggregate_id, ids)) + .all(), + ).map((row) => [row.id, row.seq]), + ) as Record + + return ids.every((id) => { + return (done[id] ?? -1) >= state[id] + }) +} + +export async function isSyncing(workspaceID: WorkspaceID) { + return aborts.has(workspaceID) +} + +export async function waitForSync(workspaceID: WorkspaceID, state: Record, signal?: AbortSignal) { + if (synced(state)) return + + try { + await waitEvent({ + timeout: TIMEOUT, + signal, + fn(event) { + if (event.workspace !== workspaceID && event.payload.type !== "sync") { + return false } - GlobalBus.emit("event", { - directory: "global", - workspace: input.workspaceID, - payload: { - type: Event.Restore.type, - properties: { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - total, - step: i + 1, - }, - }, - }) - } - - log.info("session restore complete", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - batches: total, - }) - - return { - total, - } - } catch (err) { - log.error("session restore failed", { - workspaceID: input.workspaceID, - sessionID: input.sessionID, - error: errorData(err), - }) - throw err - } - }) - - export function list(project: Project.Info) { - const rows = Database.use((db) => - db.select().from(WorkspaceTable).where(eq(WorkspaceTable.project_id, project.id)).all(), - ) - const spaces = rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id)) - - 
for (const space of spaces) startSync(space) - return spaces - } - - function lookup(id: WorkspaceID) { - const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) - if (!row) return - return fromRow(row) - } - - export const get = fn(WorkspaceID.zod, async (id) => { - const space = lookup(id) - if (!space) return - startSync(space) - return space - }) - - export const remove = fn(WorkspaceID.zod, async (id) => { - const sessions = Database.use((db) => - db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.workspace_id, id)).all(), - ) - for (const session of sessions) { - await AppRuntime.runPromise(Session.Service.use((svc) => svc.remove(session.id))) - } - - const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) - - if (row) { - stopSync(id) - - const info = fromRow(row) - try { - const adaptor = await getAdaptor(info.projectID, row.type) - await adaptor.remove(info) - } catch { - log.error("adaptor not available when removing workspace", { type: row.type }) - } - Database.use((db) => db.delete(WorkspaceTable).where(eq(WorkspaceTable.id, id)).run()) - return info - } - }) - - const connections = new Map() - const aborts = new Map() - const TIMEOUT = 5000 - - function setStatus(id: WorkspaceID, status: ConnectionStatus["status"], error?: string) { - const prev = connections.get(id) - if (prev?.status === status && prev?.error === error) return - const next = { workspaceID: id, status, error } - connections.set(id, next) - - if (status === "error") { - aborts.delete(id) - } - - GlobalBus.emit("event", { - directory: "global", - workspace: id, - payload: { - type: Event.Status.type, - properties: next, + return synced(state) }, }) + } catch { + if (signal?.aborted) throw signal.reason ?? 
new Error("Request aborted") + throw new Error(`Timed out waiting for sync fence: ${JSON.stringify(state)}`) } +} - export function status(): ConnectionStatus[] { - return [...connections.values()] - } +const log = Log.create({ service: "workspace-sync" }) - function synced(state: Record) { - const ids = Object.keys(state) - if (ids.length === 0) return true +function route(url: string | URL, path: string) { + const next = new URL(url) + next.pathname = `${next.pathname.replace(/\/$/, "")}${path}` + next.search = "" + next.hash = "" + return next +} - const done = Object.fromEntries( - Database.use((db) => - db - .select({ - id: EventSequenceTable.aggregate_id, - seq: EventSequenceTable.seq, - }) - .from(EventSequenceTable) - .where(inArray(EventSequenceTable.aggregate_id, ids)) - .all(), - ).map((row) => [row.id, row.seq]), - ) as Record - - return ids.every((id) => { - return (done[id] ?? -1) >= state[id] - }) - } - - export async function isSyncing(workspaceID: WorkspaceID) { - return aborts.has(workspaceID) - } - - export async function waitForSync(workspaceID: WorkspaceID, state: Record, signal?: AbortSignal) { - if (synced(state)) return - - try { - await waitEvent({ - timeout: TIMEOUT, - signal, - fn(event) { - if (event.workspace !== workspaceID && event.payload.type !== "sync") { - return false - } - return synced(state) - }, - }) - } catch { - if (signal?.aborted) throw signal.reason ?? 
new Error("Request aborted") - throw new Error(`Timed out waiting for sync fence: ${JSON.stringify(state)}`) - } - } - - const log = Log.create({ service: "workspace-sync" }) - - function route(url: string | URL, path: string) { - const next = new URL(url) - next.pathname = `${next.pathname.replace(/\/$/, "")}${path}` - next.search = "" - next.hash = "" - return next - } - - async function syncWorkspace(space: Info, signal: AbortSignal) { - while (!signal.aborted) { - log.info("connecting to global sync", { workspace: space.name }) - setStatus(space.id, "connecting") - - const adaptor = await getAdaptor(space.projectID, space.type) - const target = await adaptor.target(space) - - if (target.type === "local") return - - const res = await fetch(route(target.url, "/global/event"), { - method: "GET", - headers: target.headers, - signal, - }).catch((err: unknown) => { - setStatus(space.id, "error", err instanceof Error ? err.message : String(err)) - - log.info("failed to connect to global sync", { - workspace: space.name, - error: err, - }) - return undefined - }) - - if (!res || !res.ok || !res.body) { - const error = !res ? 
"No response from global sync" : `Global sync HTTP ${res.status}` - log.info("failed to connect to global sync", { workspace: space.name, error }) - setStatus(space.id, "error", error) - await sleep(1000) - continue - } - - log.info("global sync connected", { workspace: space.name }) - setStatus(space.id, "connected") - - await parseSSE(res.body, signal, (evt: any) => { - try { - if (!("payload" in evt)) return - - if (evt.payload.type === "sync") { - SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) - } - - GlobalBus.emit("event", { - directory: evt.directory, - project: evt.project, - workspace: space.id, - payload: evt.payload, - }) - } catch (err) { - log.info("failed to replay global event", { - workspaceID: space.id, - error: err, - }) - } - }) - - log.info("disconnected from global sync: " + space.id) - setStatus(space.id, "disconnected") - - // TODO: Implement exponential backoff - await sleep(1000) - } - } - - async function startSync(space: Info) { - if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) return +async function syncWorkspace(space: Info, signal: AbortSignal) { + while (!signal.aborted) { + log.info("connecting to global sync", { workspace: space.name }) + setStatus(space.id, "connecting") const adaptor = await getAdaptor(space.projectID, space.type) const target = await adaptor.target(space) - if (target.type === "local") { - void Filesystem.exists(target.directory).then((exists) => { - setStatus(space.id, exists ? "connected" : "error", exists ? undefined : "directory does not exist") + if (target.type === "local") return + + const res = await fetch(route(target.url, "/global/event"), { + method: "GET", + headers: target.headers, + signal, + }).catch((err: unknown) => { + setStatus(space.id, "error", err instanceof Error ? 
err.message : String(err)) + + log.info("failed to connect to global sync", { + workspace: space.name, + error: err, }) - return + return undefined + }) + + if (!res || !res.ok || !res.body) { + const error = !res ? "No response from global sync" : `Global sync HTTP ${res.status}` + log.info("failed to connect to global sync", { workspace: space.name, error }) + setStatus(space.id, "error", error) + await sleep(1000) + continue } - if (aborts.has(space.id)) return true + log.info("global sync connected", { workspace: space.name }) + setStatus(space.id, "connected") + await parseSSE(res.body, signal, (evt: any) => { + try { + if (!("payload" in evt)) return + + if (evt.payload.type === "sync") { + SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) + } + + GlobalBus.emit("event", { + directory: evt.directory, + project: evt.project, + workspace: space.id, + payload: evt.payload, + }) + } catch (err) { + log.info("failed to replay global event", { + workspaceID: space.id, + error: err, + }) + } + }) + + log.info("disconnected from global sync: " + space.id) setStatus(space.id, "disconnected") - const abort = new AbortController() - aborts.set(space.id, abort) - - void syncWorkspace(space, abort.signal).catch((error) => { - aborts.delete(space.id) - - setStatus(space.id, "error", String(error)) - log.warn("workspace listener failed", { - workspaceID: space.id, - error, - }) - }) - } - - function stopSync(id: WorkspaceID) { - aborts.get(id)?.abort() - aborts.delete(id) - connections.delete(id) + // TODO: Implement exponential backoff + await sleep(1000) } } + +async function startSync(space: Info) { + if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) return + + const adaptor = await getAdaptor(space.projectID, space.type) + const target = await adaptor.target(space) + + if (target.type === "local") { + void Filesystem.exists(target.directory).then((exists) => { + setStatus(space.id, exists ? "connected" : "error", exists ? 
undefined : "directory does not exist") + }) + return + } + + if (aborts.has(space.id)) return true + + setStatus(space.id, "disconnected") + + const abort = new AbortController() + aborts.set(space.id, abort) + + void syncWorkspace(space, abort.signal).catch((error) => { + aborts.delete(space.id) + + setStatus(space.id, "error", String(error)) + log.warn("workspace listener failed", { + workspaceID: space.id, + error, + }) + }) +} + +function stopSync(id: WorkspaceID) { + aborts.get(id)?.abort() + aborts.delete(id) + connections.delete(id) +} + +export * as Workspace from "./workspace" From 4e27804160e7df606c27bdc72c1a8acae2304629 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:00:46 -0400 Subject: [PATCH 074/201] refactor: unwrap McpOAuthCallback namespace + self-reexport (#22943) --- packages/opencode/src/mcp/oauth-callback.ts | 338 ++++++++++---------- 1 file changed, 169 insertions(+), 169 deletions(-) diff --git a/packages/opencode/src/mcp/oauth-callback.ts b/packages/opencode/src/mcp/oauth-callback.ts index 3e6169517f..fbb43d3921 100644 --- a/packages/opencode/src/mcp/oauth-callback.ts +++ b/packages/opencode/src/mcp/oauth-callback.ts @@ -56,177 +56,177 @@ interface PendingAuth { timeout: ReturnType } -export namespace McpOAuthCallback { - let server: ReturnType | undefined - const pendingAuths = new Map() - // Reverse index: mcpName → oauthState, so cancelPending(mcpName) can - // find the right entry in pendingAuths (which is keyed by oauthState). - const mcpNameToState = new Map() +let server: ReturnType | undefined +const pendingAuths = new Map() +// Reverse index: mcpName → oauthState, so cancelPending(mcpName) can +// find the right entry in pendingAuths (which is keyed by oauthState). 
+const mcpNameToState = new Map() - const CALLBACK_TIMEOUT_MS = 5 * 60 * 1000 // 5 minutes +const CALLBACK_TIMEOUT_MS = 5 * 60 * 1000 // 5 minutes - function cleanupStateIndex(oauthState: string) { - for (const [name, state] of mcpNameToState) { - if (state === oauthState) { - mcpNameToState.delete(name) - break - } +function cleanupStateIndex(oauthState: string) { + for (const [name, state] of mcpNameToState) { + if (state === oauthState) { + mcpNameToState.delete(name) + break } } - - function handleRequest(req: import("http").IncomingMessage, res: import("http").ServerResponse) { - const url = new URL(req.url || "/", `http://localhost:${currentPort}`) - - if (url.pathname !== currentPath) { - res.writeHead(404) - res.end("Not found") - return - } - - const code = url.searchParams.get("code") - const state = url.searchParams.get("state") - const error = url.searchParams.get("error") - const errorDescription = url.searchParams.get("error_description") - - log.info("received oauth callback", { hasCode: !!code, state, error }) - - // Enforce state parameter presence - if (!state) { - const errorMsg = "Missing required state parameter - potential CSRF attack" - log.error("oauth callback missing state parameter", { url: url.toString() }) - res.writeHead(400, { "Content-Type": "text/html" }) - res.end(HTML_ERROR(errorMsg)) - return - } - - if (error) { - const errorMsg = errorDescription || error - if (pendingAuths.has(state)) { - const pending = pendingAuths.get(state)! 
- clearTimeout(pending.timeout) - pendingAuths.delete(state) - cleanupStateIndex(state) - pending.reject(new Error(errorMsg)) - } - res.writeHead(200, { "Content-Type": "text/html" }) - res.end(HTML_ERROR(errorMsg)) - return - } - - if (!code) { - res.writeHead(400, { "Content-Type": "text/html" }) - res.end(HTML_ERROR("No authorization code provided")) - return - } - - // Validate state parameter - if (!pendingAuths.has(state)) { - const errorMsg = "Invalid or expired state parameter - potential CSRF attack" - log.error("oauth callback with invalid state", { state, pendingStates: Array.from(pendingAuths.keys()) }) - res.writeHead(400, { "Content-Type": "text/html" }) - res.end(HTML_ERROR(errorMsg)) - return - } - - const pending = pendingAuths.get(state)! - - clearTimeout(pending.timeout) - pendingAuths.delete(state) - cleanupStateIndex(state) - pending.resolve(code) - - res.writeHead(200, { "Content-Type": "text/html" }) - res.end(HTML_SUCCESS) - } - - export async function ensureRunning(redirectUri?: string): Promise { - // Parse the redirect URI to get port and path (uses defaults if not provided) - const { port, path } = parseRedirectUri(redirectUri) - - // If server is running on a different port/path, stop it first - if (server && (currentPort !== port || currentPath !== path)) { - log.info("stopping oauth callback server to reconfigure", { oldPort: currentPort, newPort: port }) - await stop() - } - - if (server) return - - const running = await isPortInUse(port) - if (running) { - log.info("oauth callback server already running on another instance", { port }) - return - } - - currentPort = port - currentPath = path - - server = createServer(handleRequest) - await new Promise((resolve, reject) => { - server!.listen(currentPort, () => { - log.info("oauth callback server started", { port: currentPort, path: currentPath }) - resolve() - }) - server!.on("error", reject) - }) - } - - export function waitForCallback(oauthState: string, mcpName?: string): Promise { 
- if (mcpName) mcpNameToState.set(mcpName, oauthState) - return new Promise((resolve, reject) => { - const timeout = setTimeout(() => { - if (pendingAuths.has(oauthState)) { - pendingAuths.delete(oauthState) - if (mcpName) mcpNameToState.delete(mcpName) - reject(new Error("OAuth callback timeout - authorization took too long")) - } - }, CALLBACK_TIMEOUT_MS) - - pendingAuths.set(oauthState, { resolve, reject, timeout }) - }) - } - - export function cancelPending(mcpName: string): void { - // Look up the oauthState for this mcpName via the reverse index - const oauthState = mcpNameToState.get(mcpName) - const key = oauthState ?? mcpName - const pending = pendingAuths.get(key) - if (pending) { - clearTimeout(pending.timeout) - pendingAuths.delete(key) - mcpNameToState.delete(mcpName) - pending.reject(new Error("Authorization cancelled")) - } - } - - export async function isPortInUse(port: number = OAUTH_CALLBACK_PORT): Promise { - return new Promise((resolve) => { - const socket = createConnection(port, "127.0.0.1") - socket.on("connect", () => { - socket.destroy() - resolve(true) - }) - socket.on("error", () => { - resolve(false) - }) - }) - } - - export async function stop(): Promise { - if (server) { - await new Promise((resolve) => server!.close(() => resolve())) - server = undefined - log.info("oauth callback server stopped") - } - - for (const [_name, pending] of pendingAuths) { - clearTimeout(pending.timeout) - pending.reject(new Error("OAuth callback server stopped")) - } - pendingAuths.clear() - mcpNameToState.clear() - } - - export function isRunning(): boolean { - return server !== undefined - } } + +function handleRequest(req: import("http").IncomingMessage, res: import("http").ServerResponse) { + const url = new URL(req.url || "/", `http://localhost:${currentPort}`) + + if (url.pathname !== currentPath) { + res.writeHead(404) + res.end("Not found") + return + } + + const code = url.searchParams.get("code") + const state = url.searchParams.get("state") + 
const error = url.searchParams.get("error") + const errorDescription = url.searchParams.get("error_description") + + log.info("received oauth callback", { hasCode: !!code, state, error }) + + // Enforce state parameter presence + if (!state) { + const errorMsg = "Missing required state parameter - potential CSRF attack" + log.error("oauth callback missing state parameter", { url: url.toString() }) + res.writeHead(400, { "Content-Type": "text/html" }) + res.end(HTML_ERROR(errorMsg)) + return + } + + if (error) { + const errorMsg = errorDescription || error + if (pendingAuths.has(state)) { + const pending = pendingAuths.get(state)! + clearTimeout(pending.timeout) + pendingAuths.delete(state) + cleanupStateIndex(state) + pending.reject(new Error(errorMsg)) + } + res.writeHead(200, { "Content-Type": "text/html" }) + res.end(HTML_ERROR(errorMsg)) + return + } + + if (!code) { + res.writeHead(400, { "Content-Type": "text/html" }) + res.end(HTML_ERROR("No authorization code provided")) + return + } + + // Validate state parameter + if (!pendingAuths.has(state)) { + const errorMsg = "Invalid or expired state parameter - potential CSRF attack" + log.error("oauth callback with invalid state", { state, pendingStates: Array.from(pendingAuths.keys()) }) + res.writeHead(400, { "Content-Type": "text/html" }) + res.end(HTML_ERROR(errorMsg)) + return + } + + const pending = pendingAuths.get(state)! 
+ + clearTimeout(pending.timeout) + pendingAuths.delete(state) + cleanupStateIndex(state) + pending.resolve(code) + + res.writeHead(200, { "Content-Type": "text/html" }) + res.end(HTML_SUCCESS) +} + +export async function ensureRunning(redirectUri?: string): Promise { + // Parse the redirect URI to get port and path (uses defaults if not provided) + const { port, path } = parseRedirectUri(redirectUri) + + // If server is running on a different port/path, stop it first + if (server && (currentPort !== port || currentPath !== path)) { + log.info("stopping oauth callback server to reconfigure", { oldPort: currentPort, newPort: port }) + await stop() + } + + if (server) return + + const running = await isPortInUse(port) + if (running) { + log.info("oauth callback server already running on another instance", { port }) + return + } + + currentPort = port + currentPath = path + + server = createServer(handleRequest) + await new Promise((resolve, reject) => { + server!.listen(currentPort, () => { + log.info("oauth callback server started", { port: currentPort, path: currentPath }) + resolve() + }) + server!.on("error", reject) + }) +} + +export function waitForCallback(oauthState: string, mcpName?: string): Promise { + if (mcpName) mcpNameToState.set(mcpName, oauthState) + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + if (pendingAuths.has(oauthState)) { + pendingAuths.delete(oauthState) + if (mcpName) mcpNameToState.delete(mcpName) + reject(new Error("OAuth callback timeout - authorization took too long")) + } + }, CALLBACK_TIMEOUT_MS) + + pendingAuths.set(oauthState, { resolve, reject, timeout }) + }) +} + +export function cancelPending(mcpName: string): void { + // Look up the oauthState for this mcpName via the reverse index + const oauthState = mcpNameToState.get(mcpName) + const key = oauthState ?? 
mcpName + const pending = pendingAuths.get(key) + if (pending) { + clearTimeout(pending.timeout) + pendingAuths.delete(key) + mcpNameToState.delete(mcpName) + pending.reject(new Error("Authorization cancelled")) + } +} + +export async function isPortInUse(port: number = OAUTH_CALLBACK_PORT): Promise { + return new Promise((resolve) => { + const socket = createConnection(port, "127.0.0.1") + socket.on("connect", () => { + socket.destroy() + resolve(true) + }) + socket.on("error", () => { + resolve(false) + }) + }) +} + +export async function stop(): Promise { + if (server) { + await new Promise((resolve) => server!.close(() => resolve())) + server = undefined + log.info("oauth callback server stopped") + } + + for (const [_name, pending] of pendingAuths) { + clearTimeout(pending.timeout) + pending.reject(new Error("OAuth callback server stopped")) + } + pendingAuths.clear() + mcpNameToState.clear() +} + +export function isRunning(): boolean { + return server !== undefined +} + +export * as McpOAuthCallback from "./oauth-callback" From 19d15d9ff7826db276219bccce278f78b654a431 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:00:48 -0400 Subject: [PATCH 075/201] refactor: unwrap ConfigProvider namespace + self-reexport (#22949) --- packages/opencode/src/config/provider.ts | 230 +++++++++++------------ 1 file changed, 115 insertions(+), 115 deletions(-) diff --git a/packages/opencode/src/config/provider.ts b/packages/opencode/src/config/provider.ts index 09efedf497..877677519f 100644 --- a/packages/opencode/src/config/provider.ts +++ b/packages/opencode/src/config/provider.ts @@ -1,120 +1,120 @@ import z from "zod" -export namespace ConfigProvider { - export const Model = z - .object({ - id: z.string(), - name: z.string(), - family: z.string().optional(), - release_date: z.string(), - attachment: z.boolean(), - reasoning: z.boolean(), - temperature: z.boolean(), - tool_call: z.boolean(), - interleaved: z - .union([ - z.literal(true), - z - .object({ 
- field: z.enum(["reasoning_content", "reasoning_details"]), - }) - .strict(), - ]) - .optional(), - cost: z - .object({ - input: z.number(), - output: z.number(), - cache_read: z.number().optional(), - cache_write: z.number().optional(), - context_over_200k: z - .object({ - input: z.number(), - output: z.number(), - cache_read: z.number().optional(), - cache_write: z.number().optional(), - }) - .optional(), - }) - .optional(), - limit: z.object({ - context: z.number(), - input: z.number().optional(), +export const Model = z + .object({ + id: z.string(), + name: z.string(), + family: z.string().optional(), + release_date: z.string(), + attachment: z.boolean(), + reasoning: z.boolean(), + temperature: z.boolean(), + tool_call: z.boolean(), + interleaved: z + .union([ + z.literal(true), + z + .object({ + field: z.enum(["reasoning_content", "reasoning_details"]), + }) + .strict(), + ]) + .optional(), + cost: z + .object({ + input: z.number(), output: z.number(), - }), - modalities: z - .object({ - input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - }) - .optional(), - experimental: z.boolean().optional(), - status: z.enum(["alpha", "beta", "deprecated"]).optional(), - provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(), - options: z.record(z.string(), z.any()), - headers: z.record(z.string(), z.string()).optional(), - variants: z - .record( - z.string(), - z - .object({ - disabled: z.boolean().optional().describe("Disable this variant for the model"), - }) - .catchall(z.any()), - ) - .optional() - .describe("Variant-specific configuration"), - }) - .partial() + cache_read: z.number().optional(), + cache_write: z.number().optional(), + context_over_200k: z + .object({ + input: z.number(), + output: z.number(), + cache_read: z.number().optional(), + cache_write: z.number().optional(), + }) + .optional(), + }) + .optional(), + limit: z.object({ + 
context: z.number(), + input: z.number().optional(), + output: z.number(), + }), + modalities: z + .object({ + input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), + output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), + }) + .optional(), + experimental: z.boolean().optional(), + status: z.enum(["alpha", "beta", "deprecated"]).optional(), + provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(), + options: z.record(z.string(), z.any()), + headers: z.record(z.string(), z.string()).optional(), + variants: z + .record( + z.string(), + z + .object({ + disabled: z.boolean().optional().describe("Disable this variant for the model"), + }) + .catchall(z.any()), + ) + .optional() + .describe("Variant-specific configuration"), + }) + .partial() - export const Info = z - .object({ - api: z.string().optional(), - name: z.string(), - env: z.array(z.string()), - id: z.string(), - npm: z.string().optional(), - whitelist: z.array(z.string()).optional(), - blacklist: z.array(z.string()).optional(), - options: z - .object({ - apiKey: z.string().optional(), - baseURL: z.string().optional(), - enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"), - setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"), - timeout: z - .union([ - z - .number() - .int() - .positive() - .describe( - "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", - ), - z.literal(false).describe("Disable timeout for this provider entirely."), - ]) - .optional() - .describe( - "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", - ), - chunkTimeout: z - .number() - .int() - .positive() - .optional() - .describe( - "Timeout in milliseconds between streamed SSE chunks for this provider. 
If no chunk arrives within this window, the request is aborted.", - ), - }) - .catchall(z.any()) - .optional(), - models: z.record(z.string(), Model).optional(), - }) - .partial() - .strict() - .meta({ - ref: "ProviderConfig", - }) +export const Info = z + .object({ + api: z.string().optional(), + name: z.string(), + env: z.array(z.string()), + id: z.string(), + npm: z.string().optional(), + whitelist: z.array(z.string()).optional(), + blacklist: z.array(z.string()).optional(), + options: z + .object({ + apiKey: z.string().optional(), + baseURL: z.string().optional(), + enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"), + setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"), + timeout: z + .union([ + z + .number() + .int() + .positive() + .describe( + "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", + ), + z.literal(false).describe("Disable timeout for this provider entirely."), + ]) + .optional() + .describe( + "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", + ), + chunkTimeout: z + .number() + .int() + .positive() + .optional() + .describe( + "Timeout in milliseconds between streamed SSE chunks for this provider. 
If no chunk arrives within this window, the request is aborted.", + ), + }) + .catchall(z.any()) + .optional(), + models: z.record(z.string(), Model).optional(), + }) + .partial() + .strict() + .meta({ + ref: "ProviderConfig", + }) - export type Info = z.infer -} +export type Info = z.infer + +export * as ConfigProvider from "./provider" From 1291e82bb4d881a1c0ac5cb882da2b97a169ae9d Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:00:50 -0400 Subject: [PATCH 076/201] refactor: unwrap ACP namespace + self-reexport (#22936) --- packages/opencode/src/acp/agent.ts | 3030 ++++++++++++++-------------- 1 file changed, 1515 insertions(+), 1515 deletions(-) diff --git a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts index 5d8c723ea7..7180feabcb 100644 --- a/packages/opencode/src/acp/agent.ts +++ b/packages/opencode/src/acp/agent.ts @@ -57,793 +57,262 @@ type ModelOption = { modelId: string; name: string } const DEFAULT_VARIANT_VALUE = "default" -export namespace ACP { - const log = Log.create({ service: "acp-agent" }) +const log = Log.create({ service: "acp-agent" }) - async function getContextLimit( - sdk: OpencodeClient, - providerID: ProviderID, - modelID: ModelID, - directory: string, - ): Promise { - const providers = await sdk.config - .providers({ directory }) - .then((x) => x.data?.providers ?? []) - .catch((error) => { - log.error("failed to get providers for context limit", { error }) - return [] - }) +async function getContextLimit( + sdk: OpencodeClient, + providerID: ProviderID, + modelID: ModelID, + directory: string, +): Promise { + const providers = await sdk.config + .providers({ directory }) + .then((x) => x.data?.providers ?? []) + .catch((error) => { + log.error("failed to get providers for context limit", { error }) + return [] + }) - const provider = providers.find((p) => p.id === providerID) - const model = provider?.models[modelID] - return model?.limit.context ?? 
null + const provider = providers.find((p) => p.id === providerID) + const model = provider?.models[modelID] + return model?.limit.context ?? null +} + +async function sendUsageUpdate( + connection: AgentSideConnection, + sdk: OpencodeClient, + sessionID: string, + directory: string, +): Promise { + const messages = await sdk.session + .messages({ sessionID, directory }, { throwOnError: true }) + .then((x) => x.data) + .catch((error) => { + log.error("failed to fetch messages for usage update", { error }) + return undefined + }) + + if (!messages) return + + const assistantMessages = messages.filter( + (m): m is { info: AssistantMessage; parts: SessionMessageResponse["parts"] } => m.info.role === "assistant", + ) + + const lastAssistant = assistantMessages[assistantMessages.length - 1] + if (!lastAssistant) return + + const msg = lastAssistant.info + if (!msg.providerID || !msg.modelID) return + const size = await getContextLimit(sdk, ProviderID.make(msg.providerID), ModelID.make(msg.modelID), directory) + + if (!size) { + // Cannot calculate usage without known context size + return } - async function sendUsageUpdate( - connection: AgentSideConnection, - sdk: OpencodeClient, - sessionID: string, - directory: string, - ): Promise { - const messages = await sdk.session - .messages({ sessionID, directory }, { throwOnError: true }) - .then((x) => x.data) - .catch((error) => { - log.error("failed to fetch messages for usage update", { error }) - return undefined - }) + const used = msg.tokens.input + (msg.tokens.cache?.read ?? 
0) + const totalCost = assistantMessages.reduce((sum, m) => sum + m.info.cost, 0) - if (!messages) return - - const assistantMessages = messages.filter( - (m): m is { info: AssistantMessage; parts: SessionMessageResponse["parts"] } => m.info.role === "assistant", - ) - - const lastAssistant = assistantMessages[assistantMessages.length - 1] - if (!lastAssistant) return - - const msg = lastAssistant.info - if (!msg.providerID || !msg.modelID) return - const size = await getContextLimit(sdk, ProviderID.make(msg.providerID), ModelID.make(msg.modelID), directory) - - if (!size) { - // Cannot calculate usage without known context size - return - } - - const used = msg.tokens.input + (msg.tokens.cache?.read ?? 0) - const totalCost = assistantMessages.reduce((sum, m) => sum + m.info.cost, 0) - - await connection - .sessionUpdate({ - sessionId: sessionID, - update: { - sessionUpdate: "usage_update", - used, - size, - cost: { amount: totalCost, currency: "USD" }, - }, - }) - .catch((error) => { - log.error("failed to send usage update", { error }) - }) - } - - export async function init({ sdk: _sdk }: { sdk: OpencodeClient }) { - return { - create: (connection: AgentSideConnection, fullConfig: ACPConfig) => { - return new Agent(connection, fullConfig) + await connection + .sessionUpdate({ + sessionId: sessionID, + update: { + sessionUpdate: "usage_update", + used, + size, + cost: { amount: totalCost, currency: "USD" }, }, - } + }) + .catch((error) => { + log.error("failed to send usage update", { error }) + }) +} + +export async function init({ sdk: _sdk }: { sdk: OpencodeClient }) { + return { + create: (connection: AgentSideConnection, fullConfig: ACPConfig) => { + return new Agent(connection, fullConfig) + }, + } +} + +export class Agent implements ACPAgent { + private connection: AgentSideConnection + private config: ACPConfig + private sdk: OpencodeClient + private sessionManager: ACPSessionManager + private eventAbort = new AbortController() + private eventStarted = 
false + private bashSnapshots = new Map() + private toolStarts = new Set() + private permissionQueues = new Map>() + private permissionOptions: PermissionOption[] = [ + { optionId: "once", kind: "allow_once", name: "Allow once" }, + { optionId: "always", kind: "allow_always", name: "Always allow" }, + { optionId: "reject", kind: "reject_once", name: "Reject" }, + ] + + constructor(connection: AgentSideConnection, config: ACPConfig) { + this.connection = connection + this.config = config + this.sdk = config.sdk + this.sessionManager = new ACPSessionManager(this.sdk) + this.startEventSubscription() } - export class Agent implements ACPAgent { - private connection: AgentSideConnection - private config: ACPConfig - private sdk: OpencodeClient - private sessionManager: ACPSessionManager - private eventAbort = new AbortController() - private eventStarted = false - private bashSnapshots = new Map() - private toolStarts = new Set() - private permissionQueues = new Map>() - private permissionOptions: PermissionOption[] = [ - { optionId: "once", kind: "allow_once", name: "Allow once" }, - { optionId: "always", kind: "allow_always", name: "Always allow" }, - { optionId: "reject", kind: "reject_once", name: "Reject" }, - ] + private startEventSubscription() { + if (this.eventStarted) return + this.eventStarted = true + this.runEventSubscription().catch((error) => { + if (this.eventAbort.signal.aborted) return + log.error("event subscription failed", { error }) + }) + } - constructor(connection: AgentSideConnection, config: ACPConfig) { - this.connection = connection - this.config = config - this.sdk = config.sdk - this.sessionManager = new ACPSessionManager(this.sdk) - this.startEventSubscription() - } - - private startEventSubscription() { - if (this.eventStarted) return - this.eventStarted = true - this.runEventSubscription().catch((error) => { - if (this.eventAbort.signal.aborted) return - log.error("event subscription failed", { error }) + private async 
runEventSubscription() { + while (true) { + if (this.eventAbort.signal.aborted) return + const events = await this.sdk.global.event({ + signal: this.eventAbort.signal, }) - } - - private async runEventSubscription() { - while (true) { + for await (const event of events.stream) { if (this.eventAbort.signal.aborted) return - const events = await this.sdk.global.event({ - signal: this.eventAbort.signal, + const payload = event?.payload + if (!payload) continue + await this.handleEvent(payload as Event).catch((error) => { + log.error("failed to handle event", { error, type: payload.type }) }) - for await (const event of events.stream) { - if (this.eventAbort.signal.aborted) return - const payload = event?.payload - if (!payload) continue - await this.handleEvent(payload as Event).catch((error) => { - log.error("failed to handle event", { error, type: payload.type }) - }) - } } } + } - private async handleEvent(event: Event) { - switch (event.type) { - case "permission.asked": { - const permission = event.properties - const session = this.sessionManager.tryGet(permission.sessionID) - if (!session) return + private async handleEvent(event: Event) { + switch (event.type) { + case "permission.asked": { + const permission = event.properties + const session = this.sessionManager.tryGet(permission.sessionID) + if (!session) return - const prev = this.permissionQueues.get(permission.sessionID) ?? Promise.resolve() - const next = prev - .then(async () => { - const directory = session.cwd + const prev = this.permissionQueues.get(permission.sessionID) ?? Promise.resolve() + const next = prev + .then(async () => { + const directory = session.cwd - const res = await this.connection - .requestPermission({ - sessionId: permission.sessionID, - toolCall: { - toolCallId: permission.tool?.callID ?? 
permission.id, - status: "pending", - title: permission.permission, - rawInput: permission.metadata, - kind: toToolKind(permission.permission), - locations: toLocations(permission.permission, permission.metadata), - }, - options: this.permissionOptions, + const res = await this.connection + .requestPermission({ + sessionId: permission.sessionID, + toolCall: { + toolCallId: permission.tool?.callID ?? permission.id, + status: "pending", + title: permission.permission, + rawInput: permission.metadata, + kind: toToolKind(permission.permission), + locations: toLocations(permission.permission, permission.metadata), + }, + options: this.permissionOptions, + }) + .catch(async (error) => { + log.error("failed to request permission from ACP", { + error, + permissionID: permission.id, + sessionID: permission.sessionID, }) - .catch(async (error) => { - log.error("failed to request permission from ACP", { - error, - permissionID: permission.id, - sessionID: permission.sessionID, - }) - await this.sdk.permission.reply({ - requestID: permission.id, - reply: "reject", - directory, - }) - return undefined - }) - - if (!res) return - if (res.outcome.outcome !== "selected") { await this.sdk.permission.reply({ requestID: permission.id, reply: "reject", directory, }) - return - } - - if (res.outcome.optionId !== "reject" && permission.permission == "edit") { - const metadata = permission.metadata || {} - const filepath = typeof metadata["filepath"] === "string" ? metadata["filepath"] : "" - const diff = typeof metadata["diff"] === "string" ? metadata["diff"] : "" - const content = (await Filesystem.exists(filepath)) ? 
await Filesystem.readText(filepath) : "" - const newContent = getNewContent(content, diff) - - if (newContent) { - void this.connection.writeTextFile({ - sessionId: session.id, - path: filepath, - content: newContent, - }) - } - } + return undefined + }) + if (!res) return + if (res.outcome.outcome !== "selected") { await this.sdk.permission.reply({ requestID: permission.id, - reply: res.outcome.optionId as "once" | "always" | "reject", + reply: "reject", directory, }) - }) - .catch((error) => { - log.error("failed to handle permission", { error, permissionID: permission.id }) - }) - .finally(() => { - if (this.permissionQueues.get(permission.sessionID) === next) { - this.permissionQueues.delete(permission.sessionID) + return + } + + if (res.outcome.optionId !== "reject" && permission.permission == "edit") { + const metadata = permission.metadata || {} + const filepath = typeof metadata["filepath"] === "string" ? metadata["filepath"] : "" + const diff = typeof metadata["diff"] === "string" ? metadata["diff"] : "" + const content = (await Filesystem.exists(filepath)) ? 
await Filesystem.readText(filepath) : "" + const newContent = getNewContent(content, diff) + + if (newContent) { + void this.connection.writeTextFile({ + sessionId: session.id, + path: filepath, + content: newContent, + }) } + } + + await this.sdk.permission.reply({ + requestID: permission.id, + reply: res.outcome.optionId as "once" | "always" | "reject", + directory, }) - this.permissionQueues.set(permission.sessionID, next) - return - } + }) + .catch((error) => { + log.error("failed to handle permission", { error, permissionID: permission.id }) + }) + .finally(() => { + if (this.permissionQueues.get(permission.sessionID) === next) { + this.permissionQueues.delete(permission.sessionID) + } + }) + this.permissionQueues.set(permission.sessionID, next) + return + } - case "message.part.updated": { - log.info("message part updated", { event: event.properties }) - const props = event.properties - const part = props.part - const session = this.sessionManager.tryGet(part.sessionID) - if (!session) return - const sessionId = session.id + case "message.part.updated": { + log.info("message part updated", { event: event.properties }) + const props = event.properties + const part = props.part + const session = this.sessionManager.tryGet(part.sessionID) + if (!session) return + const sessionId = session.id - if (part.type === "tool") { - await this.toolStart(sessionId, part) + if (part.type === "tool") { + await this.toolStart(sessionId, part) - switch (part.state.status) { - case "pending": - this.bashSnapshots.delete(part.callID) - return + switch (part.state.status) { + case "pending": + this.bashSnapshots.delete(part.callID) + return - case "running": - const output = this.bashOutput(part) - const content: ToolCallContent[] = [] - if (output) { - const hash = Hash.fast(output) - if (part.tool === "bash") { - if (this.bashSnapshots.get(part.callID) === hash) { - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call_update", - 
toolCallId: part.callID, - status: "in_progress", - kind: toToolKind(part.tool), - title: part.tool, - locations: toLocations(part.tool, part.state.input), - rawInput: part.state.input, - }, - }) - .catch((error) => { - log.error("failed to send tool in_progress to ACP", { error }) - }) - return - } - this.bashSnapshots.set(part.callID, hash) - } - content.push({ - type: "content", - content: { - type: "text", - text: output, - }, - }) - } - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call_update", - toolCallId: part.callID, - status: "in_progress", - kind: toToolKind(part.tool), - title: part.tool, - locations: toLocations(part.tool, part.state.input), - rawInput: part.state.input, - ...(content.length > 0 && { content }), - }, - }) - .catch((error) => { - log.error("failed to send tool in_progress to ACP", { error }) - }) - return - - case "completed": { - this.toolStarts.delete(part.callID) - this.bashSnapshots.delete(part.callID) - const kind = toToolKind(part.tool) - const content: ToolCallContent[] = [ - { - type: "content", - content: { - type: "text", - text: part.state.output, - }, - }, - ] - - if (kind === "edit") { - const input = part.state.input - const filePath = typeof input["filePath"] === "string" ? input["filePath"] : "" - const oldText = typeof input["oldString"] === "string" ? input["oldString"] : "" - const newText = - typeof input["newString"] === "string" - ? input["newString"] - : typeof input["content"] === "string" - ? 
input["content"] - : "" - content.push({ - type: "diff", - path: filePath, - oldText, - newText, - }) - } - - if (part.tool === "todowrite") { - const parsedTodos = z.array(Todo.Info).safeParse(JSON.parse(part.state.output)) - if (parsedTodos.success) { + case "running": + const output = this.bashOutput(part) + const content: ToolCallContent[] = [] + if (output) { + const hash = Hash.fast(output) + if (part.tool === "bash") { + if (this.bashSnapshots.get(part.callID) === hash) { await this.connection .sessionUpdate({ sessionId, update: { - sessionUpdate: "plan", - entries: parsedTodos.data.map((todo) => { - const status: PlanEntry["status"] = - todo.status === "cancelled" ? "completed" : (todo.status as PlanEntry["status"]) - return { - priority: "medium", - status, - content: todo.content, - } - }), + sessionUpdate: "tool_call_update", + toolCallId: part.callID, + status: "in_progress", + kind: toToolKind(part.tool), + title: part.tool, + locations: toLocations(part.tool, part.state.input), + rawInput: part.state.input, }, }) .catch((error) => { - log.error("failed to send session update for todo", { error }) + log.error("failed to send tool in_progress to ACP", { error }) }) - } else { - log.error("failed to parse todo output", { error: parsedTodos.error }) + return } + this.bashSnapshots.set(part.callID, hash) } - - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call_update", - toolCallId: part.callID, - status: "completed", - kind, - content, - title: part.state.title, - rawInput: part.state.input, - rawOutput: { - output: part.state.output, - metadata: part.state.metadata, - }, - }, - }) - .catch((error) => { - log.error("failed to send tool completed to ACP", { error }) - }) - return - } - case "error": - this.toolStarts.delete(part.callID) - this.bashSnapshots.delete(part.callID) - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call_update", - toolCallId: part.callID, - 
status: "failed", - kind: toToolKind(part.tool), - title: part.tool, - rawInput: part.state.input, - content: [ - { - type: "content", - content: { - type: "text", - text: part.state.error, - }, - }, - ], - rawOutput: { - error: part.state.error, - metadata: part.state.metadata, - }, - }, - }) - .catch((error) => { - log.error("failed to send tool error to ACP", { error }) - }) - return - } - } - - // ACP clients already know the prompt they just submitted, so replaying - // live user parts duplicates the message. We still replay user history in - // loadSession() and forkSession() via processMessage(). - if (part.type !== "text" && part.type !== "file") return - - return - } - - case "message.part.delta": { - const props = event.properties - const session = this.sessionManager.tryGet(props.sessionID) - if (!session) return - const sessionId = session.id - - const message = await this.sdk.session - .message( - { - sessionID: props.sessionID, - messageID: props.messageID, - directory: session.cwd, - }, - { throwOnError: true }, - ) - .then((x) => x.data) - .catch((error) => { - log.error("unexpected error when fetching message", { error }) - return undefined - }) - - if (!message || message.info.role !== "assistant") return - - const part = message.parts.find((p) => p.id === props.partID) - if (!part) return - - if (part.type === "text" && props.field === "text" && part.ignored !== true) { - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "agent_message_chunk", - messageId: props.messageID, - content: { - type: "text", - text: props.delta, - }, - }, - }) - .catch((error) => { - log.error("failed to send text delta to ACP", { error }) - }) - return - } - - if (part.type === "reasoning" && props.field === "text") { - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "agent_thought_chunk", - messageId: props.messageID, - content: { - type: "text", - text: props.delta, - }, - }, - }) - .catch((error) => 
{ - log.error("failed to send reasoning delta to ACP", { error }) - }) - } - return - } - } - } - - async initialize(params: InitializeRequest): Promise { - log.info("initialize", { protocolVersion: params.protocolVersion }) - - const authMethod: AuthMethod = { - description: "Run `opencode auth login` in the terminal", - name: "Login with opencode", - id: "opencode-login", - } - - // If client supports terminal-auth capability, use that instead. - if (params.clientCapabilities?._meta?.["terminal-auth"] === true) { - authMethod._meta = { - "terminal-auth": { - command: "opencode", - args: ["auth", "login"], - label: "OpenCode Login", - }, - } - } - - return { - protocolVersion: 1, - agentCapabilities: { - loadSession: true, - mcpCapabilities: { - http: true, - sse: true, - }, - promptCapabilities: { - embeddedContext: true, - image: true, - }, - sessionCapabilities: { - fork: {}, - list: {}, - resume: {}, - }, - }, - authMethods: [authMethod], - agentInfo: { - name: "OpenCode", - version: InstallationVersion, - }, - } - } - - async authenticate(_params: AuthenticateRequest) { - throw new Error("Authentication not implemented") - } - - async newSession(params: NewSessionRequest) { - const directory = params.cwd - try { - const model = await defaultModel(this.config, directory) - - // Store ACP session state - const state = await this.sessionManager.create(params.cwd, params.mcpServers, model) - const sessionId = state.id - - log.info("creating_session", { sessionId, mcpServers: params.mcpServers.length }) - - const load = await this.loadSessionMode({ - cwd: directory, - mcpServers: params.mcpServers, - sessionId, - }) - - return { - sessionId, - configOptions: load.configOptions, - models: load.models, - modes: load.modes, - _meta: load._meta, - } - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? 
"unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - async loadSession(params: LoadSessionRequest) { - const directory = params.cwd - const sessionId = params.sessionId - - try { - const model = await defaultModel(this.config, directory) - - // Store ACP session state - await this.sessionManager.load(sessionId, params.cwd, params.mcpServers, model) - - log.info("load_session", { sessionId, mcpServers: params.mcpServers.length }) - - const result = await this.loadSessionMode({ - cwd: directory, - mcpServers: params.mcpServers, - sessionId, - }) - - // Replay session history - const messages = await this.sdk.session - .messages( - { - sessionID: sessionId, - directory, - }, - { throwOnError: true }, - ) - .then((x) => x.data) - .catch((err) => { - log.error("unexpected error when fetching message", { error: err }) - return undefined - }) - - const lastUser = messages?.findLast((m) => m.info.role === "user")?.info - if (lastUser?.role === "user") { - result.models.currentModelId = `${lastUser.model.providerID}/${lastUser.model.modelID}` - this.sessionManager.setModel(sessionId, { - providerID: ProviderID.make(lastUser.model.providerID), - modelID: ModelID.make(lastUser.model.modelID), - }) - if (result.modes?.availableModes.some((m) => m.id === lastUser.agent)) { - result.modes.currentModeId = lastUser.agent - this.sessionManager.setMode(sessionId, lastUser.agent) - } - result.configOptions = buildConfigOptions({ - currentModelId: result.models.currentModelId, - availableModels: result.models.availableModels, - modes: result.modes, - }) - } - - for (const msg of messages ?? []) { - log.debug("replay message", msg) - await this.processMessage(msg) - } - - await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) - - return result - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? 
"unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - async listSessions(params: ListSessionsRequest): Promise { - try { - const cursor = params.cursor ? Number(params.cursor) : undefined - const limit = 100 - - const sessions = await this.sdk.session - .list( - { - directory: params.cwd ?? undefined, - roots: true, - }, - { throwOnError: true }, - ) - .then((x) => x.data ?? []) - - const sorted = sessions.toSorted((a, b) => b.time.updated - a.time.updated) - const filtered = cursor ? sorted.filter((s) => s.time.updated < cursor) : sorted - const page = filtered.slice(0, limit) - - const entries: SessionInfo[] = page.map((session) => ({ - sessionId: session.id, - cwd: session.directory, - title: session.title, - updatedAt: new Date(session.time.updated).toISOString(), - })) - - const last = page[page.length - 1] - const next = filtered.length > limit && last ? String(last.time.updated) : undefined - - const response: ListSessionsResponse = { - sessions: entries, - } - if (next) response.nextCursor = next - return response - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - async unstable_forkSession(params: ForkSessionRequest): Promise { - const directory = params.cwd - const mcpServers = params.mcpServers ?? 
[] - - try { - const model = await defaultModel(this.config, directory) - - const forked = await this.sdk.session - .fork( - { - sessionID: params.sessionId, - directory, - }, - { throwOnError: true }, - ) - .then((x) => x.data) - - if (!forked) { - throw new Error("Fork session returned no data") - } - - const sessionId = forked.id - await this.sessionManager.load(sessionId, directory, mcpServers, model) - - log.info("fork_session", { sessionId, mcpServers: mcpServers.length }) - - const mode = await this.loadSessionMode({ - cwd: directory, - mcpServers, - sessionId, - }) - - const messages = await this.sdk.session - .messages( - { - sessionID: sessionId, - directory, - }, - { throwOnError: true }, - ) - .then((x) => x.data) - .catch((err) => { - log.error("unexpected error when fetching message", { error: err }) - return undefined - }) - - for (const msg of messages ?? []) { - log.debug("replay message", msg) - await this.processMessage(msg) - } - - await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) - - return mode - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - async unstable_resumeSession(params: ResumeSessionRequest): Promise { - const directory = params.cwd - const sessionId = params.sessionId - const mcpServers = params.mcpServers ?? 
[] - - try { - const model = await defaultModel(this.config, directory) - await this.sessionManager.load(sessionId, directory, mcpServers, model) - - log.info("resume_session", { sessionId, mcpServers: mcpServers.length }) - - const result = await this.loadSessionMode({ - cwd: directory, - mcpServers, - sessionId, - }) - - await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) - - return result - } catch (e) { - const error = MessageV2.fromError(e, { - providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), - }) - if (LoadAPIKeyError.isInstance(error)) { - throw RequestError.authRequired() - } - throw e - } - } - - private async processMessage(message: SessionMessageResponse) { - log.debug("process message", message) - if (message.info.role !== "assistant" && message.info.role !== "user") return - const sessionId = message.info.sessionID - - for (const part of message.parts) { - if (part.type === "tool") { - await this.toolStart(sessionId, part) - switch (part.state.status) { - case "pending": - this.bashSnapshots.delete(part.callID) - break - case "running": - const output = this.bashOutput(part) - const runningContent: ToolCallContent[] = [] - if (output) { - runningContent.push({ + content.push({ type: "content", content: { type: "text", @@ -862,14 +331,15 @@ export namespace ACP { title: part.tool, locations: toLocations(part.tool, part.state.input), rawInput: part.state.input, - ...(runningContent.length > 0 && { content: runningContent }), + ...(content.length > 0 && { content }), }, }) - .catch((err) => { - log.error("failed to send tool in_progress to ACP", { error: err }) + .catch((error) => { + log.error("failed to send tool in_progress to ACP", { error }) }) - break - case "completed": + return + + case "completed": { this.toolStarts.delete(part.callID) this.bashSnapshots.delete(part.callID) const kind = toToolKind(part.tool) @@ -920,8 +390,8 @@ export namespace ACP { }), }, }) - .catch((err) => { - 
log.error("failed to send session update for todo", { error: err }) + .catch((error) => { + log.error("failed to send session update for todo", { error }) }) } else { log.error("failed to parse todo output", { error: parsedTodos.error }) @@ -945,10 +415,11 @@ export namespace ACP { }, }, }) - .catch((err) => { - log.error("failed to send tool completed to ACP", { error: err }) + .catch((error) => { + log.error("failed to send tool completed to ACP", { error }) }) - break + return + } case "error": this.toolStarts.delete(part.callID) this.bashSnapshots.delete(part.callID) @@ -977,865 +448,1394 @@ export namespace ACP { }, }, }) - .catch((err) => { - log.error("failed to send tool error to ACP", { error: err }) + .catch((error) => { + log.error("failed to send tool error to ACP", { error }) }) - break + return } - } else if (part.type === "text") { - if (part.text) { - const audience: Role[] | undefined = part.synthetic ? ["assistant"] : part.ignored ? ["user"] : undefined + } + + // ACP clients already know the prompt they just submitted, so replaying + // live user parts duplicates the message. We still replay user history in + // loadSession() and forkSession() via processMessage(). 
+ if (part.type !== "text" && part.type !== "file") return + + return + } + + case "message.part.delta": { + const props = event.properties + const session = this.sessionManager.tryGet(props.sessionID) + if (!session) return + const sessionId = session.id + + const message = await this.sdk.session + .message( + { + sessionID: props.sessionID, + messageID: props.messageID, + directory: session.cwd, + }, + { throwOnError: true }, + ) + .then((x) => x.data) + .catch((error) => { + log.error("unexpected error when fetching message", { error }) + return undefined + }) + + if (!message || message.info.role !== "assistant") return + + const part = message.parts.find((p) => p.id === props.partID) + if (!part) return + + if (part.type === "text" && props.field === "text" && part.ignored !== true) { + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "agent_message_chunk", + messageId: props.messageID, + content: { + type: "text", + text: props.delta, + }, + }, + }) + .catch((error) => { + log.error("failed to send text delta to ACP", { error }) + }) + return + } + + if (part.type === "reasoning" && props.field === "text") { + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "agent_thought_chunk", + messageId: props.messageID, + content: { + type: "text", + text: props.delta, + }, + }, + }) + .catch((error) => { + log.error("failed to send reasoning delta to ACP", { error }) + }) + } + return + } + } + } + + async initialize(params: InitializeRequest): Promise { + log.info("initialize", { protocolVersion: params.protocolVersion }) + + const authMethod: AuthMethod = { + description: "Run `opencode auth login` in the terminal", + name: "Login with opencode", + id: "opencode-login", + } + + // If client supports terminal-auth capability, use that instead. 
+ if (params.clientCapabilities?._meta?.["terminal-auth"] === true) { + authMethod._meta = { + "terminal-auth": { + command: "opencode", + args: ["auth", "login"], + label: "OpenCode Login", + }, + } + } + + return { + protocolVersion: 1, + agentCapabilities: { + loadSession: true, + mcpCapabilities: { + http: true, + sse: true, + }, + promptCapabilities: { + embeddedContext: true, + image: true, + }, + sessionCapabilities: { + fork: {}, + list: {}, + resume: {}, + }, + }, + authMethods: [authMethod], + agentInfo: { + name: "OpenCode", + version: InstallationVersion, + }, + } + } + + async authenticate(_params: AuthenticateRequest) { + throw new Error("Authentication not implemented") + } + + async newSession(params: NewSessionRequest) { + const directory = params.cwd + try { + const model = await defaultModel(this.config, directory) + + // Store ACP session state + const state = await this.sessionManager.create(params.cwd, params.mcpServers, model) + const sessionId = state.id + + log.info("creating_session", { sessionId, mcpServers: params.mcpServers.length }) + + const load = await this.loadSessionMode({ + cwd: directory, + mcpServers: params.mcpServers, + sessionId, + }) + + return { + sessionId, + configOptions: load.configOptions, + models: load.models, + modes: load.modes, + _meta: load._meta, + } + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? 
"unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + async loadSession(params: LoadSessionRequest) { + const directory = params.cwd + const sessionId = params.sessionId + + try { + const model = await defaultModel(this.config, directory) + + // Store ACP session state + await this.sessionManager.load(sessionId, params.cwd, params.mcpServers, model) + + log.info("load_session", { sessionId, mcpServers: params.mcpServers.length }) + + const result = await this.loadSessionMode({ + cwd: directory, + mcpServers: params.mcpServers, + sessionId, + }) + + // Replay session history + const messages = await this.sdk.session + .messages( + { + sessionID: sessionId, + directory, + }, + { throwOnError: true }, + ) + .then((x) => x.data) + .catch((err) => { + log.error("unexpected error when fetching message", { error: err }) + return undefined + }) + + const lastUser = messages?.findLast((m) => m.info.role === "user")?.info + if (lastUser?.role === "user") { + result.models.currentModelId = `${lastUser.model.providerID}/${lastUser.model.modelID}` + this.sessionManager.setModel(sessionId, { + providerID: ProviderID.make(lastUser.model.providerID), + modelID: ModelID.make(lastUser.model.modelID), + }) + if (result.modes?.availableModes.some((m) => m.id === lastUser.agent)) { + result.modes.currentModeId = lastUser.agent + this.sessionManager.setMode(sessionId, lastUser.agent) + } + result.configOptions = buildConfigOptions({ + currentModelId: result.models.currentModelId, + availableModels: result.models.availableModels, + modes: result.modes, + }) + } + + for (const msg of messages ?? []) { + log.debug("replay message", msg) + await this.processMessage(msg) + } + + await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) + + return result + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? 
"unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + async listSessions(params: ListSessionsRequest): Promise { + try { + const cursor = params.cursor ? Number(params.cursor) : undefined + const limit = 100 + + const sessions = await this.sdk.session + .list( + { + directory: params.cwd ?? undefined, + roots: true, + }, + { throwOnError: true }, + ) + .then((x) => x.data ?? []) + + const sorted = sessions.toSorted((a, b) => b.time.updated - a.time.updated) + const filtered = cursor ? sorted.filter((s) => s.time.updated < cursor) : sorted + const page = filtered.slice(0, limit) + + const entries: SessionInfo[] = page.map((session) => ({ + sessionId: session.id, + cwd: session.directory, + title: session.title, + updatedAt: new Date(session.time.updated).toISOString(), + })) + + const last = page[page.length - 1] + const next = filtered.length > limit && last ? String(last.time.updated) : undefined + + const response: ListSessionsResponse = { + sessions: entries, + } + if (next) response.nextCursor = next + return response + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + async unstable_forkSession(params: ForkSessionRequest): Promise { + const directory = params.cwd + const mcpServers = params.mcpServers ?? 
[] + + try { + const model = await defaultModel(this.config, directory) + + const forked = await this.sdk.session + .fork( + { + sessionID: params.sessionId, + directory, + }, + { throwOnError: true }, + ) + .then((x) => x.data) + + if (!forked) { + throw new Error("Fork session returned no data") + } + + const sessionId = forked.id + await this.sessionManager.load(sessionId, directory, mcpServers, model) + + log.info("fork_session", { sessionId, mcpServers: mcpServers.length }) + + const mode = await this.loadSessionMode({ + cwd: directory, + mcpServers, + sessionId, + }) + + const messages = await this.sdk.session + .messages( + { + sessionID: sessionId, + directory, + }, + { throwOnError: true }, + ) + .then((x) => x.data) + .catch((err) => { + log.error("unexpected error when fetching message", { error: err }) + return undefined + }) + + for (const msg of messages ?? []) { + log.debug("replay message", msg) + await this.processMessage(msg) + } + + await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) + + return mode + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + async unstable_resumeSession(params: ResumeSessionRequest): Promise { + const directory = params.cwd + const sessionId = params.sessionId + const mcpServers = params.mcpServers ?? 
[] + + try { + const model = await defaultModel(this.config, directory) + await this.sessionManager.load(sessionId, directory, mcpServers, model) + + log.info("resume_session", { sessionId, mcpServers: mcpServers.length }) + + const result = await this.loadSessionMode({ + cwd: directory, + mcpServers, + sessionId, + }) + + await sendUsageUpdate(this.connection, this.sdk, sessionId, directory) + + return result + } catch (e) { + const error = MessageV2.fromError(e, { + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), + }) + if (LoadAPIKeyError.isInstance(error)) { + throw RequestError.authRequired() + } + throw e + } + } + + private async processMessage(message: SessionMessageResponse) { + log.debug("process message", message) + if (message.info.role !== "assistant" && message.info.role !== "user") return + const sessionId = message.info.sessionID + + for (const part of message.parts) { + if (part.type === "tool") { + await this.toolStart(sessionId, part) + switch (part.state.status) { + case "pending": + this.bashSnapshots.delete(part.callID) + break + case "running": + const output = this.bashOutput(part) + const runningContent: ToolCallContent[] = [] + if (output) { + runningContent.push({ + type: "content", + content: { + type: "text", + text: output, + }, + }) + } await this.connection .sessionUpdate({ sessionId, update: { - sessionUpdate: message.info.role === "user" ? 
"user_message_chunk" : "agent_message_chunk", - messageId: message.info.id, - content: { - type: "text", - text: part.text, - ...(audience && { annotations: { audience } }), + sessionUpdate: "tool_call_update", + toolCallId: part.callID, + status: "in_progress", + kind: toToolKind(part.tool), + title: part.tool, + locations: toLocations(part.tool, part.state.input), + rawInput: part.state.input, + ...(runningContent.length > 0 && { content: runningContent }), + }, + }) + .catch((err) => { + log.error("failed to send tool in_progress to ACP", { error: err }) + }) + break + case "completed": + this.toolStarts.delete(part.callID) + this.bashSnapshots.delete(part.callID) + const kind = toToolKind(part.tool) + const content: ToolCallContent[] = [ + { + type: "content", + content: { + type: "text", + text: part.state.output, + }, + }, + ] + + if (kind === "edit") { + const input = part.state.input + const filePath = typeof input["filePath"] === "string" ? input["filePath"] : "" + const oldText = typeof input["oldString"] === "string" ? input["oldString"] : "" + const newText = + typeof input["newString"] === "string" + ? input["newString"] + : typeof input["content"] === "string" + ? input["content"] + : "" + content.push({ + type: "diff", + path: filePath, + oldText, + newText, + }) + } + + if (part.tool === "todowrite") { + const parsedTodos = z.array(Todo.Info).safeParse(JSON.parse(part.state.output)) + if (parsedTodos.success) { + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "plan", + entries: parsedTodos.data.map((todo) => { + const status: PlanEntry["status"] = + todo.status === "cancelled" ? 
"completed" : (todo.status as PlanEntry["status"]) + return { + priority: "medium", + status, + content: todo.content, + } + }), + }, + }) + .catch((err) => { + log.error("failed to send session update for todo", { error: err }) + }) + } else { + log.error("failed to parse todo output", { error: parsedTodos.error }) + } + } + + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "tool_call_update", + toolCallId: part.callID, + status: "completed", + kind, + content, + title: part.state.title, + rawInput: part.state.input, + rawOutput: { + output: part.state.output, + metadata: part.state.metadata, }, }, }) .catch((err) => { - log.error("failed to send text to ACP", { error: err }) + log.error("failed to send tool completed to ACP", { error: err }) }) - } - } else if (part.type === "file") { - // Replay file attachments as appropriate ACP content blocks. - // OpenCode stores files internally as { type: "file", url, filename, mime }. - // We convert these back to ACP blocks based on the URL scheme and MIME type: - // - file:// URLs → resource_link - // - data: URLs with image/* → image block - // - data: URLs with text/* or application/json → resource with text - // - data: URLs with other types → resource with blob - const url = part.url - const filename = part.filename ?? "file" - const mime = part.mime || "application/octet-stream" - const messageChunk = message.info.role === "user" ? 
"user_message_chunk" : "agent_message_chunk" + break + case "error": + this.toolStarts.delete(part.callID) + this.bashSnapshots.delete(part.callID) + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "tool_call_update", + toolCallId: part.callID, + status: "failed", + kind: toToolKind(part.tool), + title: part.tool, + rawInput: part.state.input, + content: [ + { + type: "content", + content: { + type: "text", + text: part.state.error, + }, + }, + ], + rawOutput: { + error: part.state.error, + metadata: part.state.metadata, + }, + }, + }) + .catch((err) => { + log.error("failed to send tool error to ACP", { error: err }) + }) + break + } + } else if (part.type === "text") { + if (part.text) { + const audience: Role[] | undefined = part.synthetic ? ["assistant"] : part.ignored ? ["user"] : undefined + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: message.info.role === "user" ? "user_message_chunk" : "agent_message_chunk", + messageId: message.info.id, + content: { + type: "text", + text: part.text, + ...(audience && { annotations: { audience } }), + }, + }, + }) + .catch((err) => { + log.error("failed to send text to ACP", { error: err }) + }) + } + } else if (part.type === "file") { + // Replay file attachments as appropriate ACP content blocks. + // OpenCode stores files internally as { type: "file", url, filename, mime }. + // We convert these back to ACP blocks based on the URL scheme and MIME type: + // - file:// URLs → resource_link + // - data: URLs with image/* → image block + // - data: URLs with text/* or application/json → resource with text + // - data: URLs with other types → resource with blob + const url = part.url + const filename = part.filename ?? "file" + const mime = part.mime || "application/octet-stream" + const messageChunk = message.info.role === "user" ? 
"user_message_chunk" : "agent_message_chunk" - if (url.startsWith("file://")) { - // Local file reference - send as resource_link + if (url.startsWith("file://")) { + // Local file reference - send as resource_link + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: messageChunk, + messageId: message.info.id, + content: { type: "resource_link", uri: url, name: filename, mimeType: mime }, + }, + }) + .catch((err) => { + log.error("failed to send resource_link to ACP", { error: err }) + }) + } else if (url.startsWith("data:")) { + // Embedded content - parse data URL and send as appropriate block type + const base64Match = url.match(/^data:([^;]+);base64,(.*)$/) + const dataMime = base64Match?.[1] + const base64Data = base64Match?.[2] ?? "" + + const effectiveMime = dataMime || mime + + if (effectiveMime.startsWith("image/")) { + // Image - send as image block await this.connection .sessionUpdate({ sessionId, update: { sessionUpdate: messageChunk, messageId: message.info.id, - content: { type: "resource_link", uri: url, name: filename, mimeType: mime }, + content: { + type: "image", + mimeType: effectiveMime, + data: base64Data, + uri: pathToFileURL(filename).href, + }, }, }) .catch((err) => { - log.error("failed to send resource_link to ACP", { error: err }) + log.error("failed to send image to ACP", { error: err }) }) - } else if (url.startsWith("data:")) { - // Embedded content - parse data URL and send as appropriate block type - const base64Match = url.match(/^data:([^;]+);base64,(.*)$/) - const dataMime = base64Match?.[1] - const base64Data = base64Match?.[2] ?? "" + } else { + // Non-image: text types get decoded, binary types stay as blob + const isText = effectiveMime.startsWith("text/") || effectiveMime === "application/json" + const fileUri = pathToFileURL(filename).href + const resource = isText + ? 
{ + uri: fileUri, + mimeType: effectiveMime, + text: Buffer.from(base64Data, "base64").toString("utf-8"), + } + : { uri: fileUri, mimeType: effectiveMime, blob: base64Data } - const effectiveMime = dataMime || mime - - if (effectiveMime.startsWith("image/")) { - // Image - send as image block - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: messageChunk, - messageId: message.info.id, - content: { - type: "image", - mimeType: effectiveMime, - data: base64Data, - uri: pathToFileURL(filename).href, - }, - }, - }) - .catch((err) => { - log.error("failed to send image to ACP", { error: err }) - }) - } else { - // Non-image: text types get decoded, binary types stay as blob - const isText = effectiveMime.startsWith("text/") || effectiveMime === "application/json" - const fileUri = pathToFileURL(filename).href - const resource = isText - ? { - uri: fileUri, - mimeType: effectiveMime, - text: Buffer.from(base64Data, "base64").toString("utf-8"), - } - : { uri: fileUri, mimeType: effectiveMime, blob: base64Data } - - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: messageChunk, - messageId: message.info.id, - content: { type: "resource", resource }, - }, - }) - .catch((err) => { - log.error("failed to send resource to ACP", { error: err }) - }) - } - } - // URLs that don't match file:// or data: are skipped (unsupported) - } else if (part.type === "reasoning") { - if (part.text) { await this.connection .sessionUpdate({ sessionId, update: { - sessionUpdate: "agent_thought_chunk", + sessionUpdate: messageChunk, messageId: message.info.id, - content: { - type: "text", - text: part.text, - }, + content: { type: "resource", resource }, }, }) .catch((err) => { - log.error("failed to send reasoning to ACP", { error: err }) + log.error("failed to send resource to ACP", { error: err }) }) } } - } - } - - private bashOutput(part: ToolPart) { - if (part.tool !== "bash") return - if (!("metadata" in part.state) || 
!part.state.metadata || typeof part.state.metadata !== "object") return - const output = part.state.metadata["output"] - if (typeof output !== "string") return - return output - } - - private async toolStart(sessionId: string, part: ToolPart) { - if (this.toolStarts.has(part.callID)) return - this.toolStarts.add(part.callID) - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call", - toolCallId: part.callID, - title: part.tool, - kind: toToolKind(part.tool), - status: "pending", - locations: [], - rawInput: {}, - }, - }) - .catch((error) => { - log.error("failed to send tool pending to ACP", { error }) - }) - } - - private async loadAvailableModes(directory: string): Promise { - const agents = await this.config.sdk.app - .agents( - { - directory, - }, - { throwOnError: true }, - ) - .then((resp) => resp.data!) - - return agents - .filter((agent) => agent.mode !== "subagent" && !agent.hidden) - .map((agent) => ({ - id: agent.name, - name: agent.name, - description: agent.description, - })) - } - - private async resolveModeState( - directory: string, - sessionId: string, - ): Promise<{ availableModes: ModeOption[]; currentModeId?: string }> { - const availableModes = await this.loadAvailableModes(directory) - const currentModeId = - this.sessionManager.get(sessionId).modeId || - (await (async () => { - if (!availableModes.length) return undefined - const defaultAgentName = await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent())) - const resolvedModeId = - availableModes.find((mode) => mode.name === defaultAgentName)?.id ?? 
availableModes[0].id - this.sessionManager.setMode(sessionId, resolvedModeId) - return resolvedModeId - })()) - - return { availableModes, currentModeId } - } - - private async loadSessionMode(params: LoadSessionRequest) { - const directory = params.cwd - const model = await defaultModel(this.config, directory) - const sessionId = params.sessionId - - const providers = await this.sdk.config.providers({ directory }).then((x) => x.data!.providers) - const entries = sortProvidersByName(providers) - const availableVariants = modelVariantsFromProviders(entries, model) - const currentVariant = this.sessionManager.getVariant(sessionId) - if (currentVariant && !availableVariants.includes(currentVariant)) { - this.sessionManager.setVariant(sessionId, undefined) - } - const availableModels = buildAvailableModels(entries, { includeVariants: true }) - const modeState = await this.resolveModeState(directory, sessionId) - const currentModeId = modeState.currentModeId - const modes = currentModeId - ? { - availableModes: modeState.availableModes, - currentModeId, - } - : undefined - - const commands = await this.config.sdk.command - .list( - { - directory, - }, - { throwOnError: true }, - ) - .then((resp) => resp.data!) - - const availableCommands = commands.map((command) => ({ - name: command.name, - description: command.description ?? 
"", - })) - const names = new Set(availableCommands.map((c) => c.name)) - if (!names.has("compact")) - availableCommands.push({ - name: "compact", - description: "compact the session", - }) - - const mcpServers: Record = {} - for (const server of params.mcpServers) { - if ("type" in server) { - mcpServers[server.name] = { - url: server.url, - headers: server.headers.reduce>((acc, { name, value }) => { - acc[name] = value - return acc - }, {}), - type: "remote", - } - } else { - mcpServers[server.name] = { - type: "local", - command: [server.command, ...server.args], - environment: server.env.reduce>((acc, { name, value }) => { - acc[name] = value - return acc - }, {}), - } - } - } - - await Promise.all( - Object.entries(mcpServers).map(async ([key, mcp]) => { - await this.sdk.mcp - .add( - { - directory, - name: key, - config: mcp, + // URLs that don't match file:// or data: are skipped (unsupported) + } else if (part.type === "reasoning") { + if (part.text) { + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "agent_thought_chunk", + messageId: message.info.id, + content: { + type: "text", + text: part.text, + }, }, - { throwOnError: true }, - ) - .catch((error) => { - log.error("failed to add mcp server", { name: key, error }) }) - }), - ) + .catch((err) => { + log.error("failed to send reasoning to ACP", { error: err }) + }) + } + } + } + } - setTimeout(() => { - void this.connection.sessionUpdate({ - sessionId, - update: { - sessionUpdate: "available_commands_update", - availableCommands, - }, - }) - }, 0) + private bashOutput(part: ToolPart) { + if (part.tool !== "bash") return + if (!("metadata" in part.state) || !part.state.metadata || typeof part.state.metadata !== "object") return + const output = part.state.metadata["output"] + if (typeof output !== "string") return + return output + } - return { + private async toolStart(sessionId: string, part: ToolPart) { + if (this.toolStarts.has(part.callID)) return + 
this.toolStarts.add(part.callID) + await this.connection + .sessionUpdate({ sessionId, - models: { - currentModelId: formatModelIdWithVariant(model, currentVariant, availableVariants, true), - availableModels, + update: { + sessionUpdate: "tool_call", + toolCallId: part.callID, + title: part.tool, + kind: toToolKind(part.tool), + status: "pending", + locations: [], + rawInput: {}, }, - modes, - configOptions: buildConfigOptions({ - currentModelId: formatModelIdWithVariant(model, currentVariant, availableVariants, true), - availableModels, - modes, - }), - _meta: buildVariantMeta({ - model, - variant: this.sessionManager.getVariant(sessionId), - availableVariants, - }), - } + }) + .catch((error) => { + log.error("failed to send tool pending to ACP", { error }) + }) + } + + private async loadAvailableModes(directory: string): Promise { + const agents = await this.config.sdk.app + .agents( + { + directory, + }, + { throwOnError: true }, + ) + .then((resp) => resp.data!) + + return agents + .filter((agent) => agent.mode !== "subagent" && !agent.hidden) + .map((agent) => ({ + id: agent.name, + name: agent.name, + description: agent.description, + })) + } + + private async resolveModeState( + directory: string, + sessionId: string, + ): Promise<{ availableModes: ModeOption[]; currentModeId?: string }> { + const availableModes = await this.loadAvailableModes(directory) + const currentModeId = + this.sessionManager.get(sessionId).modeId || + (await (async () => { + if (!availableModes.length) return undefined + const defaultAgentName = await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent())) + const resolvedModeId = + availableModes.find((mode) => mode.name === defaultAgentName)?.id ?? 
availableModes[0].id + this.sessionManager.setMode(sessionId, resolvedModeId) + return resolvedModeId + })()) + + return { availableModes, currentModeId } + } + + private async loadSessionMode(params: LoadSessionRequest) { + const directory = params.cwd + const model = await defaultModel(this.config, directory) + const sessionId = params.sessionId + + const providers = await this.sdk.config.providers({ directory }).then((x) => x.data!.providers) + const entries = sortProvidersByName(providers) + const availableVariants = modelVariantsFromProviders(entries, model) + const currentVariant = this.sessionManager.getVariant(sessionId) + if (currentVariant && !availableVariants.includes(currentVariant)) { + this.sessionManager.setVariant(sessionId, undefined) } - - async unstable_setSessionModel(params: SetSessionModelRequest) { - const session = this.sessionManager.get(params.sessionId) - const providers = await this.sdk.config - .providers({ directory: session.cwd }, { throwOnError: true }) - .then((x) => x.data!.providers) - - const selection = parseModelSelection(params.modelId, providers) - this.sessionManager.setModel(session.id, selection.model) - this.sessionManager.setVariant(session.id, selection.variant) - - const entries = sortProvidersByName(providers) - const availableVariants = modelVariantsFromProviders(entries, selection.model) - - return { - _meta: buildVariantMeta({ - model: selection.model, - variant: selection.variant, - availableVariants, - }), - } - } - - async setSessionMode(params: SetSessionModeRequest): Promise { - const session = this.sessionManager.get(params.sessionId) - const availableModes = await this.loadAvailableModes(session.cwd) - if (!availableModes.some((mode) => mode.id === params.modeId)) { - throw new Error(`Agent not found: ${params.modeId}`) - } - this.sessionManager.setMode(params.sessionId, params.modeId) - } - - async setSessionConfigOption(params: SetSessionConfigOptionRequest): Promise { - const session = 
this.sessionManager.get(params.sessionId) - const providers = await this.sdk.config - .providers({ directory: session.cwd }, { throwOnError: true }) - .then((x) => x.data!.providers) - const entries = sortProvidersByName(providers) - - if (params.configId === "model") { - if (typeof params.value !== "string") throw RequestError.invalidParams("model value must be a string") - const selection = parseModelSelection(params.value, providers) - this.sessionManager.setModel(session.id, selection.model) - this.sessionManager.setVariant(session.id, selection.variant) - } else if (params.configId === "mode") { - if (typeof params.value !== "string") throw RequestError.invalidParams("mode value must be a string") - const availableModes = await this.loadAvailableModes(session.cwd) - if (!availableModes.some((mode) => mode.id === params.value)) { - throw RequestError.invalidParams(JSON.stringify({ error: `Mode not found: ${params.value}` })) + const availableModels = buildAvailableModels(entries, { includeVariants: true }) + const modeState = await this.resolveModeState(directory, sessionId) + const currentModeId = modeState.currentModeId + const modes = currentModeId + ? { + availableModes: modeState.availableModes, + currentModeId, } - this.sessionManager.setMode(session.id, params.value) - } else { - throw RequestError.invalidParams(JSON.stringify({ error: `Unknown config option: ${params.configId}` })) - } + : undefined - const updatedSession = this.sessionManager.get(session.id) - const model = updatedSession.model ?? (await defaultModel(this.config, session.cwd)) - const availableVariants = modelVariantsFromProviders(entries, model) - const currentModelId = formatModelIdWithVariant(model, updatedSession.variant, availableVariants, true) - const availableModels = buildAvailableModels(entries, { includeVariants: true }) - const modeState = await this.resolveModeState(session.cwd, session.id) - const modes = modeState.currentModeId - ? 
{ availableModes: modeState.availableModes, currentModeId: modeState.currentModeId } - : undefined + const commands = await this.config.sdk.command + .list( + { + directory, + }, + { throwOnError: true }, + ) + .then((resp) => resp.data!) - return { - configOptions: buildConfigOptions({ currentModelId, availableModels, modes }), - } - } - - async prompt(params: PromptRequest) { - const sessionID = params.sessionId - const session = this.sessionManager.get(sessionID) - const directory = session.cwd - - const current = session.model - const model = current ?? (await defaultModel(this.config, directory)) - if (!current) { - this.sessionManager.setModel(session.id, model) - } - const agent = - session.modeId ?? (await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent()))) - - const parts: Array< - | { type: "text"; text: string; synthetic?: boolean; ignored?: boolean } - | { type: "file"; url: string; filename: string; mime: string } - > = [] - for (const part of params.prompt) { - switch (part.type) { - case "text": - const audience = part.annotations?.audience - const forAssistant = audience?.length === 1 && audience[0] === "assistant" - const forUser = audience?.length === 1 && audience[0] === "user" - parts.push({ - type: "text" as const, - text: part.text, - ...(forAssistant && { synthetic: true }), - ...(forUser && { ignored: true }), - }) - break - case "image": { - const parsed = parseUri(part.uri ?? "") - const filename = parsed.type === "file" ? 
parsed.filename : "image" - if (part.data) { - parts.push({ - type: "file", - url: `data:${part.mimeType};base64,${part.data}`, - filename, - mime: part.mimeType, - }) - } else if (part.uri && part.uri.startsWith("http:")) { - parts.push({ - type: "file", - url: part.uri, - filename, - mime: part.mimeType, - }) - } - break - } - - case "resource_link": - const parsed = parseUri(part.uri) - // Use the name from resource_link if available - if (part.name && parsed.type === "file") { - parsed.filename = part.name - } - parts.push(parsed) - - break - - case "resource": { - const resource = part.resource - if ("text" in resource && resource.text) { - parts.push({ - type: "text", - text: resource.text, - }) - } else if ("blob" in resource && resource.blob && resource.mimeType) { - // Binary resource (PDFs, etc.): store as file part with data URL - const parsed = parseUri(resource.uri ?? "") - const filename = parsed.type === "file" ? parsed.filename : "file" - parts.push({ - type: "file", - url: `data:${resource.mimeType};base64,${resource.blob}`, - filename, - mime: resource.mimeType, - }) - } - break - } - - default: - break - } - } - - log.info("parts", { parts }) - - const cmd = (() => { - const text = parts - .filter((p): p is { type: "text"; text: string } => p.type === "text") - .map((p) => p.text) - .join("") - .trim() - - if (!text.startsWith("/")) return - - const [name, ...rest] = text.slice(1).split(/\s+/) - return { name, args: rest.join(" ").trim() } - })() - - const buildUsage = (msg: AssistantMessage): Usage => ({ - totalTokens: - msg.tokens.input + - msg.tokens.output + - msg.tokens.reasoning + - (msg.tokens.cache?.read ?? 0) + - (msg.tokens.cache?.write ?? 
0), - inputTokens: msg.tokens.input, - outputTokens: msg.tokens.output, - thoughtTokens: msg.tokens.reasoning || undefined, - cachedReadTokens: msg.tokens.cache?.read || undefined, - cachedWriteTokens: msg.tokens.cache?.write || undefined, + const availableCommands = commands.map((command) => ({ + name: command.name, + description: command.description ?? "", + })) + const names = new Set(availableCommands.map((c) => c.name)) + if (!names.has("compact")) + availableCommands.push({ + name: "compact", + description: "compact the session", }) - if (!cmd) { - const response = await this.sdk.session.prompt({ - sessionID, - model: { - providerID: model.providerID, - modelID: model.modelID, - }, - variant: this.sessionManager.getVariant(sessionID), - parts, - agent, - directory, - }) - const msg = response.data?.info - - await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) - - return { - stopReason: "end_turn" as const, - usage: msg ? buildUsage(msg) : undefined, - _meta: {}, + const mcpServers: Record = {} + for (const server of params.mcpServers) { + if ("type" in server) { + mcpServers[server.name] = { + url: server.url, + headers: server.headers.reduce>((acc, { name, value }) => { + acc[name] = value + return acc + }, {}), + type: "remote", + } + } else { + mcpServers[server.name] = { + type: "local", + command: [server.command, ...server.args], + environment: server.env.reduce>((acc, { name, value }) => { + acc[name] = value + return acc + }, {}), } } + } - const command = await this.config.sdk.command - .list({ directory }, { throwOnError: true }) - .then((x) => x.data!.find((c) => c.name === cmd.name)) - if (command) { - const response = await this.sdk.session.command({ - sessionID, - command: command.name, - arguments: cmd.args, - model: model.providerID + "/" + model.modelID, - agent, - directory, - }) - const msg = response.data?.info - - await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) - - return { - stopReason: "end_turn" 
as const, - usage: msg ? buildUsage(msg) : undefined, - _meta: {}, - } - } - - switch (cmd.name) { - case "compact": - await this.config.sdk.session.summarize( + await Promise.all( + Object.entries(mcpServers).map(async ([key, mcp]) => { + await this.sdk.mcp + .add( { - sessionID, directory, - providerID: model.providerID, - modelID: model.modelID, + name: key, + config: mcp, }, { throwOnError: true }, ) + .catch((error) => { + log.error("failed to add mcp server", { name: key, error }) + }) + }), + ) + + setTimeout(() => { + void this.connection.sessionUpdate({ + sessionId, + update: { + sessionUpdate: "available_commands_update", + availableCommands, + }, + }) + }, 0) + + return { + sessionId, + models: { + currentModelId: formatModelIdWithVariant(model, currentVariant, availableVariants, true), + availableModels, + }, + modes, + configOptions: buildConfigOptions({ + currentModelId: formatModelIdWithVariant(model, currentVariant, availableVariants, true), + availableModels, + modes, + }), + _meta: buildVariantMeta({ + model, + variant: this.sessionManager.getVariant(sessionId), + availableVariants, + }), + } + } + + async unstable_setSessionModel(params: SetSessionModelRequest) { + const session = this.sessionManager.get(params.sessionId) + const providers = await this.sdk.config + .providers({ directory: session.cwd }, { throwOnError: true }) + .then((x) => x.data!.providers) + + const selection = parseModelSelection(params.modelId, providers) + this.sessionManager.setModel(session.id, selection.model) + this.sessionManager.setVariant(session.id, selection.variant) + + const entries = sortProvidersByName(providers) + const availableVariants = modelVariantsFromProviders(entries, selection.model) + + return { + _meta: buildVariantMeta({ + model: selection.model, + variant: selection.variant, + availableVariants, + }), + } + } + + async setSessionMode(params: SetSessionModeRequest): Promise { + const session = this.sessionManager.get(params.sessionId) + const 
availableModes = await this.loadAvailableModes(session.cwd) + if (!availableModes.some((mode) => mode.id === params.modeId)) { + throw new Error(`Agent not found: ${params.modeId}`) + } + this.sessionManager.setMode(params.sessionId, params.modeId) + } + + async setSessionConfigOption(params: SetSessionConfigOptionRequest): Promise { + const session = this.sessionManager.get(params.sessionId) + const providers = await this.sdk.config + .providers({ directory: session.cwd }, { throwOnError: true }) + .then((x) => x.data!.providers) + const entries = sortProvidersByName(providers) + + if (params.configId === "model") { + if (typeof params.value !== "string") throw RequestError.invalidParams("model value must be a string") + const selection = parseModelSelection(params.value, providers) + this.sessionManager.setModel(session.id, selection.model) + this.sessionManager.setVariant(session.id, selection.variant) + } else if (params.configId === "mode") { + if (typeof params.value !== "string") throw RequestError.invalidParams("mode value must be a string") + const availableModes = await this.loadAvailableModes(session.cwd) + if (!availableModes.some((mode) => mode.id === params.value)) { + throw RequestError.invalidParams(JSON.stringify({ error: `Mode not found: ${params.value}` })) + } + this.sessionManager.setMode(session.id, params.value) + } else { + throw RequestError.invalidParams(JSON.stringify({ error: `Unknown config option: ${params.configId}` })) + } + + const updatedSession = this.sessionManager.get(session.id) + const model = updatedSession.model ?? 
(await defaultModel(this.config, session.cwd)) + const availableVariants = modelVariantsFromProviders(entries, model) + const currentModelId = formatModelIdWithVariant(model, updatedSession.variant, availableVariants, true) + const availableModels = buildAvailableModels(entries, { includeVariants: true }) + const modeState = await this.resolveModeState(session.cwd, session.id) + const modes = modeState.currentModeId + ? { availableModes: modeState.availableModes, currentModeId: modeState.currentModeId } + : undefined + + return { + configOptions: buildConfigOptions({ currentModelId, availableModels, modes }), + } + } + + async prompt(params: PromptRequest) { + const sessionID = params.sessionId + const session = this.sessionManager.get(sessionID) + const directory = session.cwd + + const current = session.model + const model = current ?? (await defaultModel(this.config, directory)) + if (!current) { + this.sessionManager.setModel(session.id, model) + } + const agent = + session.modeId ?? (await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent()))) + + const parts: Array< + | { type: "text"; text: string; synthetic?: boolean; ignored?: boolean } + | { type: "file"; url: string; filename: string; mime: string } + > = [] + for (const part of params.prompt) { + switch (part.type) { + case "text": + const audience = part.annotations?.audience + const forAssistant = audience?.length === 1 && audience[0] === "assistant" + const forUser = audience?.length === 1 && audience[0] === "user" + parts.push({ + type: "text" as const, + text: part.text, + ...(forAssistant && { synthetic: true }), + ...(forUser && { ignored: true }), + }) + break + case "image": { + const parsed = parseUri(part.uri ?? "") + const filename = parsed.type === "file" ? 
parsed.filename : "image" + if (part.data) { + parts.push({ + type: "file", + url: `data:${part.mimeType};base64,${part.data}`, + filename, + mime: part.mimeType, + }) + } else if (part.uri && part.uri.startsWith("http:")) { + parts.push({ + type: "file", + url: part.uri, + filename, + mime: part.mimeType, + }) + } + break + } + + case "resource_link": + const parsed = parseUri(part.uri) + // Use the name from resource_link if available + if (part.name && parsed.type === "file") { + parsed.filename = part.name + } + parts.push(parsed) + + break + + case "resource": { + const resource = part.resource + if ("text" in resource && resource.text) { + parts.push({ + type: "text", + text: resource.text, + }) + } else if ("blob" in resource && resource.blob && resource.mimeType) { + // Binary resource (PDFs, etc.): store as file part with data URL + const parsed = parseUri(resource.uri ?? "") + const filename = parsed.type === "file" ? parsed.filename : "file" + parts.push({ + type: "file", + url: `data:${resource.mimeType};base64,${resource.blob}`, + filename, + mime: resource.mimeType, + }) + } + break + } + + default: break } + } + + log.info("parts", { parts }) + + const cmd = (() => { + const text = parts + .filter((p): p is { type: "text"; text: string } => p.type === "text") + .map((p) => p.text) + .join("") + .trim() + + if (!text.startsWith("/")) return + + const [name, ...rest] = text.slice(1).split(/\s+/) + return { name, args: rest.join(" ").trim() } + })() + + const buildUsage = (msg: AssistantMessage): Usage => ({ + totalTokens: + msg.tokens.input + + msg.tokens.output + + msg.tokens.reasoning + + (msg.tokens.cache?.read ?? 0) + + (msg.tokens.cache?.write ?? 
0), + inputTokens: msg.tokens.input, + outputTokens: msg.tokens.output, + thoughtTokens: msg.tokens.reasoning || undefined, + cachedReadTokens: msg.tokens.cache?.read || undefined, + cachedWriteTokens: msg.tokens.cache?.write || undefined, + }) + + if (!cmd) { + const response = await this.sdk.session.prompt({ + sessionID, + model: { + providerID: model.providerID, + modelID: model.modelID, + }, + variant: this.sessionManager.getVariant(sessionID), + parts, + agent, + directory, + }) + const msg = response.data?.info await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) return { stopReason: "end_turn" as const, + usage: msg ? buildUsage(msg) : undefined, _meta: {}, } } - async cancel(params: CancelNotification) { - const session = this.sessionManager.get(params.sessionId) - await this.config.sdk.session.abort( - { - sessionID: params.sessionId, - directory: session.cwd, - }, - { throwOnError: true }, - ) - } - } - - function toToolKind(toolName: string): ToolKind { - const tool = toolName.toLocaleLowerCase() - switch (tool) { - case "bash": - return "execute" - case "webfetch": - return "fetch" - - case "edit": - case "patch": - case "write": - return "edit" - - case "grep": - case "glob": - case "context7_resolve_library_id": - case "context7_get_library_docs": - return "search" - - case "read": - return "read" - - default: - return "other" - } - } - - function toLocations(toolName: string, input: Record): { path: string }[] { - const tool = toolName.toLocaleLowerCase() - switch (tool) { - case "read": - case "edit": - case "write": - return input["filePath"] ? [{ path: input["filePath"] }] : [] - case "glob": - case "grep": - return input["path"] ? 
[{ path: input["path"] }] : [] - case "bash": - return [] - default: - return [] - } - } - - async function defaultModel(config: ACPConfig, cwd?: string): Promise<{ providerID: ProviderID; modelID: ModelID }> { - const sdk = config.sdk - const configured = config.defaultModel - if (configured) return configured - - const directory = cwd ?? process.cwd() - - const specified = await sdk.config - .get({ directory }, { throwOnError: true }) - .then((resp) => { - const cfg = resp.data - if (!cfg || !cfg.model) return undefined - return Provider.parseModel(cfg.model) - }) - .catch((error) => { - log.error("failed to load user config for default model", { error }) - return undefined + const command = await this.config.sdk.command + .list({ directory }, { throwOnError: true }) + .then((x) => x.data!.find((c) => c.name === cmd.name)) + if (command) { + const response = await this.sdk.session.command({ + sessionID, + command: command.name, + arguments: cmd.args, + model: model.providerID + "/" + model.modelID, + agent, + directory, }) + const msg = response.data?.info - const providers = await sdk.config - .providers({ directory }, { throwOnError: true }) - .then((x) => x.data?.providers ?? 
[]) - .catch((error) => { - log.error("failed to list providers for default model", { error }) - return [] - }) + await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) - if (specified && providers.length) { - const provider = providers.find((p) => p.id === specified.providerID) - if (provider && provider.models[specified.modelID]) return specified - } - - if (specified && !providers.length) return specified - - const opencodeProvider = providers.find((p) => p.id === "opencode") - if (opencodeProvider) { - if (opencodeProvider.models["big-pickle"]) { - return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } - } - const [best] = Provider.sort(Object.values(opencodeProvider.models)) - if (best) { - return { - providerID: ProviderID.make(best.providerID), - modelID: ModelID.make(best.id), - } + return { + stopReason: "end_turn" as const, + usage: msg ? buildUsage(msg) : undefined, + _meta: {}, } } - const models = providers.flatMap((p) => Object.values(p.models)) - const [best] = Provider.sort(models) + switch (cmd.name) { + case "compact": + await this.config.sdk.session.summarize( + { + sessionID, + directory, + providerID: model.providerID, + modelID: model.modelID, + }, + { throwOnError: true }, + ) + break + } + + await sendUsageUpdate(this.connection, this.sdk, sessionID, directory) + + return { + stopReason: "end_turn" as const, + _meta: {}, + } + } + + async cancel(params: CancelNotification) { + const session = this.sessionManager.get(params.sessionId) + await this.config.sdk.session.abort( + { + sessionID: params.sessionId, + directory: session.cwd, + }, + { throwOnError: true }, + ) + } +} + +function toToolKind(toolName: string): ToolKind { + const tool = toolName.toLocaleLowerCase() + switch (tool) { + case "bash": + return "execute" + case "webfetch": + return "fetch" + + case "edit": + case "patch": + case "write": + return "edit" + + case "grep": + case "glob": + case "context7_resolve_library_id": + case 
"context7_get_library_docs": + return "search" + + case "read": + return "read" + + default: + return "other" + } +} + +function toLocations(toolName: string, input: Record): { path: string }[] { + const tool = toolName.toLocaleLowerCase() + switch (tool) { + case "read": + case "edit": + case "write": + return input["filePath"] ? [{ path: input["filePath"] }] : [] + case "glob": + case "grep": + return input["path"] ? [{ path: input["path"] }] : [] + case "bash": + return [] + default: + return [] + } +} + +async function defaultModel(config: ACPConfig, cwd?: string): Promise<{ providerID: ProviderID; modelID: ModelID }> { + const sdk = config.sdk + const configured = config.defaultModel + if (configured) return configured + + const directory = cwd ?? process.cwd() + + const specified = await sdk.config + .get({ directory }, { throwOnError: true }) + .then((resp) => { + const cfg = resp.data + if (!cfg || !cfg.model) return undefined + return Provider.parseModel(cfg.model) + }) + .catch((error) => { + log.error("failed to load user config for default model", { error }) + return undefined + }) + + const providers = await sdk.config + .providers({ directory }, { throwOnError: true }) + .then((x) => x.data?.providers ?? 
[]) + .catch((error) => { + log.error("failed to list providers for default model", { error }) + return [] + }) + + if (specified && providers.length) { + const provider = providers.find((p) => p.id === specified.providerID) + if (provider && provider.models[specified.modelID]) return specified + } + + if (specified && !providers.length) return specified + + const opencodeProvider = providers.find((p) => p.id === "opencode") + if (opencodeProvider) { + if (opencodeProvider.models["big-pickle"]) { + return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } + } + const [best] = Provider.sort(Object.values(opencodeProvider.models)) if (best) { return { providerID: ProviderID.make(best.providerID), modelID: ModelID.make(best.id), } } - - if (specified) return specified - - return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } } - function parseUri( - uri: string, - ): { type: "file"; url: string; filename: string; mime: string } | { type: "text"; text: string } { - try { - if (uri.startsWith("file://")) { - const path = uri.slice(7) + const models = providers.flatMap((p) => Object.values(p.models)) + const [best] = Provider.sort(models) + if (best) { + return { + providerID: ProviderID.make(best.providerID), + modelID: ModelID.make(best.id), + } + } + + if (specified) return specified + + return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } +} + +function parseUri( + uri: string, +): { type: "file"; url: string; filename: string; mime: string } | { type: "text"; text: string } { + try { + if (uri.startsWith("file://")) { + const path = uri.slice(7) + const name = path.split("/").pop() || path + return { + type: "file", + url: uri, + filename: name, + mime: "text/plain", + } + } + if (uri.startsWith("zed://")) { + const url = new URL(uri) + const path = url.searchParams.get("path") + if (path) { const name = path.split("/").pop() || path return { type: "file", - url: uri, + url: 
pathToFileURL(path).href, filename: name, mime: "text/plain", } } - if (uri.startsWith("zed://")) { - const url = new URL(uri) - const path = url.searchParams.get("path") - if (path) { - const name = path.split("/").pop() || path - return { - type: "file", - url: pathToFileURL(path).href, - filename: name, - mime: "text/plain", - } - } - } - return { - type: "text", - text: uri, - } - } catch { - return { - type: "text", - text: uri, - } } - } - - function getNewContent(fileOriginal: string, unifiedDiff: string): string | undefined { - const result = applyPatch(fileOriginal, unifiedDiff) - if (result === false) { - log.error("Failed to apply unified diff (context mismatch)") - return undefined - } - return result - } - - function sortProvidersByName(providers: T[]): T[] { - return [...providers].sort((a, b) => { - const nameA = a.name.toLowerCase() - const nameB = b.name.toLowerCase() - if (nameA < nameB) return -1 - if (nameA > nameB) return 1 - return 0 - }) - } - - function modelVariantsFromProviders( - providers: Array<{ id: string; models: Record }> }>, - model: { providerID: ProviderID; modelID: ModelID }, - ): string[] { - const provider = providers.find((entry) => entry.id === model.providerID) - if (!provider) return [] - const modelInfo = provider.models[model.modelID] - if (!modelInfo?.variants) return [] - return Object.keys(modelInfo.variants) - } - - function buildAvailableModels( - providers: Array<{ id: string; name: string; models: Record }>, - options: { includeVariants?: boolean } = {}, - ): ModelOption[] { - const includeVariants = options.includeVariants ?? 
false - return providers.flatMap((provider) => { - const unsorted: Array<{ id: string; name: string; variants?: Record }> = Object.values( - provider.models, - ) - const models = Provider.sort(unsorted) - return models.flatMap((model) => { - const base: ModelOption = { - modelId: `${provider.id}/${model.id}`, - name: `${provider.name}/${model.name}`, - } - if (!includeVariants || !model.variants) return [base] - const variants = Object.keys(model.variants).filter((variant) => variant !== DEFAULT_VARIANT_VALUE) - const variantOptions = variants.map((variant) => ({ - modelId: `${provider.id}/${model.id}/${variant}`, - name: `${provider.name}/${model.name} (${variant})`, - })) - return [base, ...variantOptions] - }) - }) - } - - function formatModelIdWithVariant( - model: { providerID: ProviderID; modelID: ModelID }, - variant: string | undefined, - availableVariants: string[], - includeVariant: boolean, - ) { - const base = `${model.providerID}/${model.modelID}` - if (!includeVariant || !variant || !availableVariants.includes(variant)) return base - return `${base}/${variant}` - } - - function buildVariantMeta(input: { - model: { providerID: ProviderID; modelID: ModelID } - variant?: string - availableVariants: string[] - }) { return { - opencode: { - modelId: `${input.model.providerID}/${input.model.modelID}`, - variant: input.variant ?? 
null, - availableVariants: input.availableVariants, - }, + type: "text", + text: uri, + } + } catch { + return { + type: "text", + text: uri, } } +} - function parseModelSelection( - modelId: string, - providers: Array<{ id: string; models: Record }> }>, - ): { model: { providerID: ProviderID; modelID: ModelID }; variant?: string } { - const parsed = Provider.parseModel(modelId) - const provider = providers.find((p) => p.id === parsed.providerID) - if (!provider) { - return { model: parsed, variant: undefined } - } +function getNewContent(fileOriginal: string, unifiedDiff: string): string | undefined { + const result = applyPatch(fileOriginal, unifiedDiff) + if (result === false) { + log.error("Failed to apply unified diff (context mismatch)") + return undefined + } + return result +} - // Check if modelID exists directly - if (provider.models[parsed.modelID]) { - return { model: parsed, variant: undefined } - } +function sortProvidersByName(providers: T[]): T[] { + return [...providers].sort((a, b) => { + const nameA = a.name.toLowerCase() + const nameB = b.name.toLowerCase() + if (nameA < nameB) return -1 + if (nameA > nameB) return 1 + return 0 + }) +} - // Try to extract variant from end of modelID (e.g., "claude-sonnet-4/high" -> model: "claude-sonnet-4", variant: "high") - const segments = parsed.modelID.split("/") - if (segments.length > 1) { - const candidateVariant = segments[segments.length - 1] - const baseModelId = segments.slice(0, -1).join("/") - const baseModelInfo = provider.models[baseModelId] - if (baseModelInfo?.variants && candidateVariant in baseModelInfo.variants) { - return { - model: { providerID: parsed.providerID, modelID: ModelID.make(baseModelId) }, - variant: candidateVariant, - } +function modelVariantsFromProviders( + providers: Array<{ id: string; models: Record }> }>, + model: { providerID: ProviderID; modelID: ModelID }, +): string[] { + const provider = providers.find((entry) => entry.id === model.providerID) + if (!provider) 
return [] + const modelInfo = provider.models[model.modelID] + if (!modelInfo?.variants) return [] + return Object.keys(modelInfo.variants) +} + +function buildAvailableModels( + providers: Array<{ id: string; name: string; models: Record }>, + options: { includeVariants?: boolean } = {}, +): ModelOption[] { + const includeVariants = options.includeVariants ?? false + return providers.flatMap((provider) => { + const unsorted: Array<{ id: string; name: string; variants?: Record }> = Object.values( + provider.models, + ) + const models = Provider.sort(unsorted) + return models.flatMap((model) => { + const base: ModelOption = { + modelId: `${provider.id}/${model.id}`, + name: `${provider.name}/${model.name}`, } - } + if (!includeVariants || !model.variants) return [base] + const variants = Object.keys(model.variants).filter((variant) => variant !== DEFAULT_VARIANT_VALUE) + const variantOptions = variants.map((variant) => ({ + modelId: `${provider.id}/${model.id}/${variant}`, + name: `${provider.name}/${model.name} (${variant})`, + })) + return [base, ...variantOptions] + }) + }) +} +function formatModelIdWithVariant( + model: { providerID: ProviderID; modelID: ModelID }, + variant: string | undefined, + availableVariants: string[], + includeVariant: boolean, +) { + const base = `${model.providerID}/${model.modelID}` + if (!includeVariant || !variant || !availableVariants.includes(variant)) return base + return `${base}/${variant}` +} + +function buildVariantMeta(input: { + model: { providerID: ProviderID; modelID: ModelID } + variant?: string + availableVariants: string[] +}) { + return { + opencode: { + modelId: `${input.model.providerID}/${input.model.modelID}`, + variant: input.variant ?? 
null, + availableVariants: input.availableVariants, + }, + } +} + +function parseModelSelection( + modelId: string, + providers: Array<{ id: string; models: Record }> }>, +): { model: { providerID: ProviderID; modelID: ModelID }; variant?: string } { + const parsed = Provider.parseModel(modelId) + const provider = providers.find((p) => p.id === parsed.providerID) + if (!provider) { return { model: parsed, variant: undefined } } - function buildConfigOptions(input: { - currentModelId: string - availableModels: ModelOption[] - modes?: { availableModes: ModeOption[]; currentModeId: string } | undefined - }): SessionConfigOption[] { - const options: SessionConfigOption[] = [ - { - id: "model", - name: "Model", - category: "model", - type: "select", - currentValue: input.currentModelId, - options: input.availableModels.map((m) => ({ value: m.modelId, name: m.name })), - }, - ] - if (input.modes) { - options.push({ - id: "mode", - name: "Session Mode", - category: "mode", - type: "select", - currentValue: input.modes.currentModeId, - options: input.modes.availableModes.map((m) => ({ - value: m.id, - name: m.name, - ...(m.description ? 
{ description: m.description } : {}), - })), - }) - } - return options + // Check if modelID exists directly + if (provider.models[parsed.modelID]) { + return { model: parsed, variant: undefined } } + + // Try to extract variant from end of modelID (e.g., "claude-sonnet-4/high" -> model: "claude-sonnet-4", variant: "high") + const segments = parsed.modelID.split("/") + if (segments.length > 1) { + const candidateVariant = segments[segments.length - 1] + const baseModelId = segments.slice(0, -1).join("/") + const baseModelInfo = provider.models[baseModelId] + if (baseModelInfo?.variants && candidateVariant in baseModelInfo.variants) { + return { + model: { providerID: parsed.providerID, modelID: ModelID.make(baseModelId) }, + variant: candidateVariant, + } + } + } + + return { model: parsed, variant: undefined } } + +function buildConfigOptions(input: { + currentModelId: string + availableModels: ModelOption[] + modes?: { availableModes: ModeOption[]; currentModeId: string } | undefined +}): SessionConfigOption[] { + const options: SessionConfigOption[] = [ + { + id: "model", + name: "Model", + category: "model", + type: "select", + currentValue: input.currentModelId, + options: input.availableModels.map((m) => ({ value: m.modelId, name: m.name })), + }, + ] + if (input.modes) { + options.push({ + id: "mode", + name: "Session Mode", + category: "mode", + type: "select", + currentValue: input.modes.currentModeId, + options: input.modes.availableModes.map((m) => ({ + value: m.id, + name: m.name, + ...(m.description ? 
{ description: m.description } : {}), + })), + }) + } + return options +} + +export * as ACP from "./agent" From cde105e7a8832b9c6d9d0a43d5699b4768156533 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:09 -0400 Subject: [PATCH 077/201] refactor: unwrap CopilotModels namespace + self-reexport (#22947) --- .../src/plugin/github-copilot/models.ts | 266 +++++++++--------- 1 file changed, 133 insertions(+), 133 deletions(-) diff --git a/packages/opencode/src/plugin/github-copilot/models.ts b/packages/opencode/src/plugin/github-copilot/models.ts index dfd6ceceaa..71d21afbe4 100644 --- a/packages/opencode/src/plugin/github-copilot/models.ts +++ b/packages/opencode/src/plugin/github-copilot/models.ts @@ -1,146 +1,146 @@ import { z } from "zod" import type { Model } from "@opencode-ai/sdk/v2" -export namespace CopilotModels { - export const schema = z.object({ - data: z.array( - z.object({ - model_picker_enabled: z.boolean(), - id: z.string(), - name: z.string(), - // every version looks like: `{model.id}-YYYY-MM-DD` - version: z.string(), - supported_endpoints: z.array(z.string()).optional(), - capabilities: z.object({ - family: z.string(), - limits: z.object({ - max_context_window_tokens: z.number(), - max_output_tokens: z.number(), - max_prompt_tokens: z.number(), - vision: z - .object({ - max_prompt_image_size: z.number(), - max_prompt_images: z.number(), - supported_media_types: z.array(z.string()), - }) - .optional(), - }), - supports: z.object({ - adaptive_thinking: z.boolean().optional(), - max_thinking_budget: z.number().optional(), - min_thinking_budget: z.number().optional(), - reasoning_effort: z.array(z.string()).optional(), - streaming: z.boolean(), - structured_outputs: z.boolean().optional(), - tool_calls: z.boolean(), - vision: z.boolean().optional(), - }), +export const schema = z.object({ + data: z.array( + z.object({ + model_picker_enabled: z.boolean(), + id: z.string(), + name: z.string(), + // every version looks like: 
`{model.id}-YYYY-MM-DD` + version: z.string(), + supported_endpoints: z.array(z.string()).optional(), + capabilities: z.object({ + family: z.string(), + limits: z.object({ + max_context_window_tokens: z.number(), + max_output_tokens: z.number(), + max_prompt_tokens: z.number(), + vision: z + .object({ + max_prompt_image_size: z.number(), + max_prompt_images: z.number(), + supported_media_types: z.array(z.string()), + }) + .optional(), + }), + supports: z.object({ + adaptive_thinking: z.boolean().optional(), + max_thinking_budget: z.number().optional(), + min_thinking_budget: z.number().optional(), + reasoning_effort: z.array(z.string()).optional(), + streaming: z.boolean(), + structured_outputs: z.boolean().optional(), + tool_calls: z.boolean(), + vision: z.boolean().optional(), }), }), - ), - }) + }), + ), +}) - type Item = z.infer["data"][number] +type Item = z.infer["data"][number] - function build(key: string, remote: Item, url: string, prev?: Model): Model { - const reasoning = - !!remote.capabilities.supports.adaptive_thinking || - !!remote.capabilities.supports.reasoning_effort?.length || - remote.capabilities.supports.max_thinking_budget !== undefined || - remote.capabilities.supports.min_thinking_budget !== undefined - const image = - (remote.capabilities.supports.vision ?? false) || - (remote.capabilities.limits.vision?.supported_media_types ?? []).some((item) => item.startsWith("image/")) +function build(key: string, remote: Item, url: string, prev?: Model): Model { + const reasoning = + !!remote.capabilities.supports.adaptive_thinking || + !!remote.capabilities.supports.reasoning_effort?.length || + remote.capabilities.supports.max_thinking_budget !== undefined || + remote.capabilities.supports.min_thinking_budget !== undefined + const image = + (remote.capabilities.supports.vision ?? false) || + (remote.capabilities.limits.vision?.supported_media_types ?? 
[]).some((item) => item.startsWith("image/")) - const isMsgApi = remote.supported_endpoints?.includes("/v1/messages") + const isMsgApi = remote.supported_endpoints?.includes("/v1/messages") - return { - id: key, - providerID: "github-copilot", - api: { - id: remote.id, - url: isMsgApi ? `${url}/v1` : url, - npm: isMsgApi ? "@ai-sdk/anthropic" : "@ai-sdk/github-copilot", + return { + id: key, + providerID: "github-copilot", + api: { + id: remote.id, + url: isMsgApi ? `${url}/v1` : url, + npm: isMsgApi ? "@ai-sdk/anthropic" : "@ai-sdk/github-copilot", + }, + // API response wins + status: "active", + limit: { + context: remote.capabilities.limits.max_context_window_tokens, + input: remote.capabilities.limits.max_prompt_tokens, + output: remote.capabilities.limits.max_output_tokens, + }, + capabilities: { + temperature: prev?.capabilities.temperature ?? true, + reasoning: prev?.capabilities.reasoning ?? reasoning, + attachment: prev?.capabilities.attachment ?? true, + toolcall: remote.capabilities.supports.tool_calls, + input: { + text: true, + audio: false, + image, + video: false, + pdf: false, }, - // API response wins - status: "active", - limit: { - context: remote.capabilities.limits.max_context_window_tokens, - input: remote.capabilities.limits.max_prompt_tokens, - output: remote.capabilities.limits.max_output_tokens, + output: { + text: true, + audio: false, + image: false, + video: false, + pdf: false, }, - capabilities: { - temperature: prev?.capabilities.temperature ?? true, - reasoning: prev?.capabilities.reasoning ?? reasoning, - attachment: prev?.capabilities.attachment ?? true, - toolcall: remote.capabilities.supports.tool_calls, - input: { - text: true, - audio: false, - image, - video: false, - pdf: false, - }, - output: { - text: true, - audio: false, - image: false, - video: false, - pdf: false, - }, - interleaved: false, - }, - // existing wins - family: prev?.family ?? remote.capabilities.family, - name: prev?.name ?? 
remote.name, - cost: { - input: 0, - output: 0, - cache: { read: 0, write: 0 }, - }, - options: prev?.options ?? {}, - headers: prev?.headers ?? {}, - release_date: - prev?.release_date ?? - (remote.version.startsWith(`${remote.id}-`) ? remote.version.slice(remote.id.length + 1) : remote.version), - variants: prev?.variants ?? {}, - } - } - - export async function get( - baseURL: string, - headers: HeadersInit = {}, - existing: Record = {}, - ): Promise> { - const data = await fetch(`${baseURL}/models`, { - headers, - signal: AbortSignal.timeout(5_000), - }).then(async (res) => { - if (!res.ok) { - throw new Error(`Failed to fetch models: ${res.status}`) - } - return schema.parse(await res.json()) - }) - - const result = { ...existing } - const remote = new Map(data.data.filter((m) => m.model_picker_enabled).map((m) => [m.id, m] as const)) - - // prune existing models whose api.id isn't in the endpoint response - for (const [key, model] of Object.entries(result)) { - const m = remote.get(model.api.id) - if (!m) { - delete result[key] - continue - } - result[key] = build(key, m, baseURL, model) - } - - // add new endpoint models not already keyed in result - for (const [id, m] of remote) { - if (id in result) continue - result[id] = build(id, m, baseURL) - } - - return result + interleaved: false, + }, + // existing wins + family: prev?.family ?? remote.capabilities.family, + name: prev?.name ?? remote.name, + cost: { + input: 0, + output: 0, + cache: { read: 0, write: 0 }, + }, + options: prev?.options ?? {}, + headers: prev?.headers ?? {}, + release_date: + prev?.release_date ?? + (remote.version.startsWith(`${remote.id}-`) ? remote.version.slice(remote.id.length + 1) : remote.version), + variants: prev?.variants ?? 
{}, } } + +export async function get( + baseURL: string, + headers: HeadersInit = {}, + existing: Record = {}, +): Promise> { + const data = await fetch(`${baseURL}/models`, { + headers, + signal: AbortSignal.timeout(5_000), + }).then(async (res) => { + if (!res.ok) { + throw new Error(`Failed to fetch models: ${res.status}`) + } + return schema.parse(await res.json()) + }) + + const result = { ...existing } + const remote = new Map(data.data.filter((m) => m.model_picker_enabled).map((m) => [m.id, m] as const)) + + // prune existing models whose api.id isn't in the endpoint response + for (const [key, model] of Object.entries(result)) { + const m = remote.get(model.api.id) + if (!m) { + delete result[key] + continue + } + result[key] = build(key, m, baseURL, model) + } + + // add new endpoint models not already keyed in result + for (const [id, m] of remote) { + if (id in result) continue + result[id] = build(id, m, baseURL) + } + + return result +} + +export * as CopilotModels from "./models" From fdd5b77bfd9c525a2ae6656a011c1419748761e3 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:12 -0400 Subject: [PATCH 078/201] refactor: unwrap McpAuth namespace + self-reexport (#22942) --- packages/opencode/src/mcp/auth.ts | 266 +++++++++++++++--------------- 1 file changed, 133 insertions(+), 133 deletions(-) diff --git a/packages/opencode/src/mcp/auth.ts b/packages/opencode/src/mcp/auth.ts index 85f9e1d8c9..efb046d7a7 100644 --- a/packages/opencode/src/mcp/auth.ts +++ b/packages/opencode/src/mcp/auth.ts @@ -4,141 +4,141 @@ import { Global } from "../global" import { Effect, Layer, Context } from "effect" import { AppFileSystem } from "@opencode-ai/shared/filesystem" -export namespace McpAuth { - export const Tokens = z.object({ - accessToken: z.string(), - refreshToken: z.string().optional(), - expiresAt: z.number().optional(), - scope: z.string().optional(), - }) - export type Tokens = z.infer +export const Tokens = z.object({ + accessToken: 
z.string(), + refreshToken: z.string().optional(), + expiresAt: z.number().optional(), + scope: z.string().optional(), +}) +export type Tokens = z.infer - export const ClientInfo = z.object({ - clientId: z.string(), - clientSecret: z.string().optional(), - clientIdIssuedAt: z.number().optional(), - clientSecretExpiresAt: z.number().optional(), - }) - export type ClientInfo = z.infer +export const ClientInfo = z.object({ + clientId: z.string(), + clientSecret: z.string().optional(), + clientIdIssuedAt: z.number().optional(), + clientSecretExpiresAt: z.number().optional(), +}) +export type ClientInfo = z.infer - export const Entry = z.object({ - tokens: Tokens.optional(), - clientInfo: ClientInfo.optional(), - codeVerifier: z.string().optional(), - oauthState: z.string().optional(), - serverUrl: z.string().optional(), - }) - export type Entry = z.infer +export const Entry = z.object({ + tokens: Tokens.optional(), + clientInfo: ClientInfo.optional(), + codeVerifier: z.string().optional(), + oauthState: z.string().optional(), + serverUrl: z.string().optional(), +}) +export type Entry = z.infer - const filepath = path.join(Global.Path.data, "mcp-auth.json") +const filepath = path.join(Global.Path.data, "mcp-auth.json") - export interface Interface { - readonly all: () => Effect.Effect> - readonly get: (mcpName: string) => Effect.Effect - readonly getForUrl: (mcpName: string, serverUrl: string) => Effect.Effect - readonly set: (mcpName: string, entry: Entry, serverUrl?: string) => Effect.Effect - readonly remove: (mcpName: string) => Effect.Effect - readonly updateTokens: (mcpName: string, tokens: Tokens, serverUrl?: string) => Effect.Effect - readonly updateClientInfo: (mcpName: string, clientInfo: ClientInfo, serverUrl?: string) => Effect.Effect - readonly updateCodeVerifier: (mcpName: string, codeVerifier: string) => Effect.Effect - readonly clearCodeVerifier: (mcpName: string) => Effect.Effect - readonly updateOAuthState: (mcpName: string, oauthState: string) => 
Effect.Effect - readonly getOAuthState: (mcpName: string) => Effect.Effect - readonly clearOAuthState: (mcpName: string) => Effect.Effect - readonly isTokenExpired: (mcpName: string) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/McpAuth") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const fs = yield* AppFileSystem.Service - - const all = Effect.fn("McpAuth.all")(function* () { - return yield* fs.readJson(filepath).pipe( - Effect.map((data) => data as Record), - Effect.catch(() => Effect.succeed({} as Record)), - ) - }) - - const get = Effect.fn("McpAuth.get")(function* (mcpName: string) { - const data = yield* all() - return data[mcpName] - }) - - const getForUrl = Effect.fn("McpAuth.getForUrl")(function* (mcpName: string, serverUrl: string) { - const entry = yield* get(mcpName) - if (!entry) return undefined - if (!entry.serverUrl) return undefined - if (entry.serverUrl !== serverUrl) return undefined - return entry - }) - - const set = Effect.fn("McpAuth.set")(function* (mcpName: string, entry: Entry, serverUrl?: string) { - const data = yield* all() - if (serverUrl) entry.serverUrl = serverUrl - yield* fs.writeJson(filepath, { ...data, [mcpName]: entry }, 0o600).pipe(Effect.orDie) - }) - - const remove = Effect.fn("McpAuth.remove")(function* (mcpName: string) { - const data = yield* all() - delete data[mcpName] - yield* fs.writeJson(filepath, data, 0o600).pipe(Effect.orDie) - }) - - const updateField = (field: K, spanName: string) => - Effect.fn(`McpAuth.${spanName}`)(function* (mcpName: string, value: NonNullable, serverUrl?: string) { - const entry = (yield* get(mcpName)) ?? 
{} - entry[field] = value - yield* set(mcpName, entry, serverUrl) - }) - - const clearField = (field: K, spanName: string) => - Effect.fn(`McpAuth.${spanName}`)(function* (mcpName: string) { - const entry = yield* get(mcpName) - if (entry) { - delete entry[field] - yield* set(mcpName, entry) - } - }) - - const updateTokens = updateField("tokens", "updateTokens") - const updateClientInfo = updateField("clientInfo", "updateClientInfo") - const updateCodeVerifier = updateField("codeVerifier", "updateCodeVerifier") - const updateOAuthState = updateField("oauthState", "updateOAuthState") - const clearCodeVerifier = clearField("codeVerifier", "clearCodeVerifier") - const clearOAuthState = clearField("oauthState", "clearOAuthState") - - const getOAuthState = Effect.fn("McpAuth.getOAuthState")(function* (mcpName: string) { - const entry = yield* get(mcpName) - return entry?.oauthState - }) - - const isTokenExpired = Effect.fn("McpAuth.isTokenExpired")(function* (mcpName: string) { - const entry = yield* get(mcpName) - if (!entry?.tokens) return null - if (!entry.tokens.expiresAt) return false - return entry.tokens.expiresAt < Date.now() / 1000 - }) - - return Service.of({ - all, - get, - getForUrl, - set, - remove, - updateTokens, - updateClientInfo, - updateCodeVerifier, - clearCodeVerifier, - updateOAuthState, - getOAuthState, - clearOAuthState, - isTokenExpired, - }) - }), - ) - - export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) +export interface Interface { + readonly all: () => Effect.Effect> + readonly get: (mcpName: string) => Effect.Effect + readonly getForUrl: (mcpName: string, serverUrl: string) => Effect.Effect + readonly set: (mcpName: string, entry: Entry, serverUrl?: string) => Effect.Effect + readonly remove: (mcpName: string) => Effect.Effect + readonly updateTokens: (mcpName: string, tokens: Tokens, serverUrl?: string) => Effect.Effect + readonly updateClientInfo: (mcpName: string, clientInfo: ClientInfo, serverUrl?: 
string) => Effect.Effect + readonly updateCodeVerifier: (mcpName: string, codeVerifier: string) => Effect.Effect + readonly clearCodeVerifier: (mcpName: string) => Effect.Effect + readonly updateOAuthState: (mcpName: string, oauthState: string) => Effect.Effect + readonly getOAuthState: (mcpName: string) => Effect.Effect + readonly clearOAuthState: (mcpName: string) => Effect.Effect + readonly isTokenExpired: (mcpName: string) => Effect.Effect } + +export class Service extends Context.Service()("@opencode/McpAuth") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const fs = yield* AppFileSystem.Service + + const all = Effect.fn("McpAuth.all")(function* () { + return yield* fs.readJson(filepath).pipe( + Effect.map((data) => data as Record), + Effect.catch(() => Effect.succeed({} as Record)), + ) + }) + + const get = Effect.fn("McpAuth.get")(function* (mcpName: string) { + const data = yield* all() + return data[mcpName] + }) + + const getForUrl = Effect.fn("McpAuth.getForUrl")(function* (mcpName: string, serverUrl: string) { + const entry = yield* get(mcpName) + if (!entry) return undefined + if (!entry.serverUrl) return undefined + if (entry.serverUrl !== serverUrl) return undefined + return entry + }) + + const set = Effect.fn("McpAuth.set")(function* (mcpName: string, entry: Entry, serverUrl?: string) { + const data = yield* all() + if (serverUrl) entry.serverUrl = serverUrl + yield* fs.writeJson(filepath, { ...data, [mcpName]: entry }, 0o600).pipe(Effect.orDie) + }) + + const remove = Effect.fn("McpAuth.remove")(function* (mcpName: string) { + const data = yield* all() + delete data[mcpName] + yield* fs.writeJson(filepath, data, 0o600).pipe(Effect.orDie) + }) + + const updateField = (field: K, spanName: string) => + Effect.fn(`McpAuth.${spanName}`)(function* (mcpName: string, value: NonNullable, serverUrl?: string) { + const entry = (yield* get(mcpName)) ?? 
{} + entry[field] = value + yield* set(mcpName, entry, serverUrl) + }) + + const clearField = (field: K, spanName: string) => + Effect.fn(`McpAuth.${spanName}`)(function* (mcpName: string) { + const entry = yield* get(mcpName) + if (entry) { + delete entry[field] + yield* set(mcpName, entry) + } + }) + + const updateTokens = updateField("tokens", "updateTokens") + const updateClientInfo = updateField("clientInfo", "updateClientInfo") + const updateCodeVerifier = updateField("codeVerifier", "updateCodeVerifier") + const updateOAuthState = updateField("oauthState", "updateOAuthState") + const clearCodeVerifier = clearField("codeVerifier", "clearCodeVerifier") + const clearOAuthState = clearField("oauthState", "clearOAuthState") + + const getOAuthState = Effect.fn("McpAuth.getOAuthState")(function* (mcpName: string) { + const entry = yield* get(mcpName) + return entry?.oauthState + }) + + const isTokenExpired = Effect.fn("McpAuth.isTokenExpired")(function* (mcpName: string) { + const entry = yield* get(mcpName) + if (!entry?.tokens) return null + if (!entry.tokens.expiresAt) return false + return entry.tokens.expiresAt < Date.now() / 1000 + }) + + return Service.of({ + all, + get, + getForUrl, + set, + remove, + updateTokens, + updateClientInfo, + updateCodeVerifier, + clearCodeVerifier, + updateOAuthState, + getOAuthState, + clearOAuthState, + isTokenExpired, + }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) + +export * as McpAuth from "./auth" From f6dbb2f3e0de46d2a5e07618548c94259889a22a Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:37 -0400 Subject: [PATCH 079/201] refactor: unwrap Heap namespace + self-reexport (#22931) --- packages/opencode/src/cli/heap.ts | 82 +++++++++++++++---------------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/packages/opencode/src/cli/heap.ts b/packages/opencode/src/cli/heap.ts index cf1cffa800..87b7b2ebf9 100644 --- 
a/packages/opencode/src/cli/heap.ts +++ b/packages/opencode/src/cli/heap.ts @@ -8,52 +8,52 @@ const log = Log.create({ service: "heap" }) const MINUTE = 60_000 const LIMIT = 2 * 1024 * 1024 * 1024 -export namespace Heap { - let timer: Timer | undefined - let lock = false - let armed = true +let timer: Timer | undefined +let lock = false +let armed = true - export function start() { - if (!Flag.OPENCODE_AUTO_HEAP_SNAPSHOT) return - if (timer) return +export function start() { + if (!Flag.OPENCODE_AUTO_HEAP_SNAPSHOT) return + if (timer) return - const run = async () => { - if (lock) return + const run = async () => { + if (lock) return - const stat = process.memoryUsage() - if (stat.rss <= LIMIT) { - armed = true - return - } - if (!armed) return + const stat = process.memoryUsage() + if (stat.rss <= LIMIT) { + armed = true + return + } + if (!armed) return - lock = true - armed = false - const file = path.join( - Global.Path.log, - `heap-${process.pid}-${new Date().toISOString().replace(/[:.]/g, "")}.heapsnapshot`, - ) - log.warn("heap usage exceeded limit", { - rss: stat.rss, - heap: stat.heapUsed, - file, + lock = true + armed = false + const file = path.join( + Global.Path.log, + `heap-${process.pid}-${new Date().toISOString().replace(/[:.]/g, "")}.heapsnapshot`, + ) + log.warn("heap usage exceeded limit", { + rss: stat.rss, + heap: stat.heapUsed, + file, + }) + + await Promise.resolve() + .then(() => writeHeapSnapshot(file)) + .catch((err) => { + log.error("failed to write heap snapshot", { + error: err instanceof Error ? err.message : String(err), + file, + }) }) - await Promise.resolve() - .then(() => writeHeapSnapshot(file)) - .catch((err) => { - log.error("failed to write heap snapshot", { - error: err instanceof Error ? 
err.message : String(err), - file, - }) - }) - - lock = false - } - - timer = setInterval(() => { - void run() - }, MINUTE) - timer.unref?.() + lock = false } + + timer = setInterval(() => { + void run() + }, MINUTE) + timer.unref?.() } + +export * as Heap from "./heap" From 79732ab17560c59745eef6d151b4b6cc69e23163 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:41 -0400 Subject: [PATCH 080/201] refactor: unwrap UI namespace + self-reexport (#22951) --- packages/opencode/src/cli/ui.ts | 226 ++++++++++++++++---------------- 1 file changed, 113 insertions(+), 113 deletions(-) diff --git a/packages/opencode/src/cli/ui.ts b/packages/opencode/src/cli/ui.ts index d735a55417..46335d24a8 100644 --- a/packages/opencode/src/cli/ui.ts +++ b/packages/opencode/src/cli/ui.ts @@ -3,131 +3,131 @@ import { EOL } from "os" import { NamedError } from "@opencode-ai/shared/util/error" import { logo as glyphs } from "./logo" -export namespace UI { - const wordmark = [ - `⠀ ▄ `, - `█▀▀█ █▀▀█ █▀▀█ █▀▀▄ █▀▀▀ █▀▀█ █▀▀█ █▀▀█`, - `█ █ █ █ █▀▀▀ █ █ █ █ █ █ █ █▀▀▀`, - `▀▀▀▀ █▀▀▀ ▀▀▀▀ ▀ ▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀`, - ] +const wordmark = [ + `⠀ ▄ `, + `█▀▀█ █▀▀█ █▀▀█ █▀▀▄ █▀▀▀ █▀▀█ █▀▀█ █▀▀█`, + `█ █ █ █ █▀▀▀ █ █ █ █ █ █ █ █▀▀▀`, + `▀▀▀▀ █▀▀▀ ▀▀▀▀ ▀ ▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀`, +] - export const CancelledError = NamedError.create("UICancelledError", z.void()) +export const CancelledError = NamedError.create("UICancelledError", z.void()) - export const Style = { - TEXT_HIGHLIGHT: "\x1b[96m", - TEXT_HIGHLIGHT_BOLD: "\x1b[96m\x1b[1m", - TEXT_DIM: "\x1b[90m", - TEXT_DIM_BOLD: "\x1b[90m\x1b[1m", - TEXT_NORMAL: "\x1b[0m", - TEXT_NORMAL_BOLD: "\x1b[1m", - TEXT_WARNING: "\x1b[93m", - TEXT_WARNING_BOLD: "\x1b[93m\x1b[1m", - TEXT_DANGER: "\x1b[91m", - TEXT_DANGER_BOLD: "\x1b[91m\x1b[1m", - TEXT_SUCCESS: "\x1b[92m", - TEXT_SUCCESS_BOLD: "\x1b[92m\x1b[1m", - TEXT_INFO: "\x1b[94m", - TEXT_INFO_BOLD: "\x1b[94m\x1b[1m", - } +export const Style = { + TEXT_HIGHLIGHT: "\x1b[96m", + 
TEXT_HIGHLIGHT_BOLD: "\x1b[96m\x1b[1m", + TEXT_DIM: "\x1b[90m", + TEXT_DIM_BOLD: "\x1b[90m\x1b[1m", + TEXT_NORMAL: "\x1b[0m", + TEXT_NORMAL_BOLD: "\x1b[1m", + TEXT_WARNING: "\x1b[93m", + TEXT_WARNING_BOLD: "\x1b[93m\x1b[1m", + TEXT_DANGER: "\x1b[91m", + TEXT_DANGER_BOLD: "\x1b[91m\x1b[1m", + TEXT_SUCCESS: "\x1b[92m", + TEXT_SUCCESS_BOLD: "\x1b[92m\x1b[1m", + TEXT_INFO: "\x1b[94m", + TEXT_INFO_BOLD: "\x1b[94m\x1b[1m", +} - export function println(...message: string[]) { - print(...message) - process.stderr.write(EOL) - } +export function println(...message: string[]) { + print(...message) + process.stderr.write(EOL) +} - export function print(...message: string[]) { - blank = false - process.stderr.write(message.join(" ")) - } +export function print(...message: string[]) { + blank = false + process.stderr.write(message.join(" ")) +} - let blank = false - export function empty() { - if (blank) return - println("" + Style.TEXT_NORMAL) - blank = true - } +let blank = false +export function empty() { + if (blank) return + println("" + Style.TEXT_NORMAL) + blank = true +} - export function logo(pad?: string) { - if (!process.stdout.isTTY && !process.stderr.isTTY) { - const result = [] - for (const row of wordmark) { - if (pad) result.push(pad) - result.push(row) - result.push(EOL) - } - return result.join("").trimEnd() - } - - const result: string[] = [] - const reset = "\x1b[0m" - const left = { - fg: "\x1b[90m", - shadow: "\x1b[38;5;235m", - bg: "\x1b[48;5;235m", - } - const right = { - fg: reset, - shadow: "\x1b[38;5;238m", - bg: "\x1b[48;5;238m", - } - const gap = " " - const draw = (line: string, fg: string, shadow: string, bg: string) => { - const parts: string[] = [] - for (const char of line) { - if (char === "_") { - parts.push(bg, " ", reset) - continue - } - if (char === "^") { - parts.push(fg, bg, "▀", reset) - continue - } - if (char === "~") { - parts.push(shadow, "▀", reset) - continue - } - if (char === " ") { - parts.push(" ") - continue - } - 
parts.push(fg, char, reset) - } - return parts.join("") - } - glyphs.left.forEach((row, index) => { +export function logo(pad?: string) { + if (!process.stdout.isTTY && !process.stderr.isTTY) { + const result = [] + for (const row of wordmark) { if (pad) result.push(pad) - result.push(draw(row, left.fg, left.shadow, left.bg)) - result.push(gap) - const other = glyphs.right[index] ?? "" - result.push(draw(other, right.fg, right.shadow, right.bg)) + result.push(row) result.push(EOL) - }) + } return result.join("").trimEnd() } - export async function input(prompt: string): Promise { - const readline = require("readline") - const rl = readline.createInterface({ - input: process.stdin, - output: process.stdout, - }) - - return new Promise((resolve) => { - rl.question(prompt, (answer: string) => { - rl.close() - resolve(answer.trim()) - }) - }) + const result: string[] = [] + const reset = "\x1b[0m" + const left = { + fg: "\x1b[90m", + shadow: "\x1b[38;5;235m", + bg: "\x1b[48;5;235m", } - - export function error(message: string) { - if (message.startsWith("Error: ")) { - message = message.slice("Error: ".length) + const right = { + fg: reset, + shadow: "\x1b[38;5;238m", + bg: "\x1b[48;5;238m", + } + const gap = " " + const draw = (line: string, fg: string, shadow: string, bg: string) => { + const parts: string[] = [] + for (const char of line) { + if (char === "_") { + parts.push(bg, " ", reset) + continue + } + if (char === "^") { + parts.push(fg, bg, "▀", reset) + continue + } + if (char === "~") { + parts.push(shadow, "▀", reset) + continue + } + if (char === " ") { + parts.push(" ") + continue + } + parts.push(fg, char, reset) } - println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message) - } - - export function markdown(text: string): string { - return text + return parts.join("") } + glyphs.left.forEach((row, index) => { + if (pad) result.push(pad) + result.push(draw(row, left.fg, left.shadow, left.bg)) + result.push(gap) + const other = 
glyphs.right[index] ?? "" + result.push(draw(other, right.fg, right.shadow, right.bg)) + result.push(EOL) + }) + return result.join("").trimEnd() } + +export async function input(prompt: string): Promise { + const readline = require("readline") + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + }) + + return new Promise((resolve) => { + rl.question(prompt, (answer: string) => { + rl.close() + resolve(answer.trim()) + }) + }) +} + +export function error(message: string) { + if (message.startsWith("Error: ")) { + message = message.slice("Error: ".length) + } + println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message) +} + +export function markdown(text: string): string { + return text +} + +export * as UI from "./ui" From fb0274446043401d48fae0aefb8e21f75a080ee8 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:01:44 -0400 Subject: [PATCH 081/201] refactor: unwrap Agent namespace + self-reexport (#22935) --- packages/opencode/src/agent/agent.ts | 766 +++++++++++++-------------- 1 file changed, 383 insertions(+), 383 deletions(-) diff --git a/packages/opencode/src/agent/agent.ts b/packages/opencode/src/agent/agent.ts index 54ca484555..07f742fe12 100644 --- a/packages/opencode/src/agent/agent.ts +++ b/packages/opencode/src/agent/agent.ts @@ -24,389 +24,389 @@ import { InstanceState } from "@/effect" import * as Option from "effect/Option" import * as OtelTracer from "@effect/opentelemetry/Tracer" -export namespace Agent { - export const Info = z - .object({ - name: z.string(), - description: z.string().optional(), - mode: z.enum(["subagent", "primary", "all"]), - native: z.boolean().optional(), - hidden: z.boolean().optional(), - topP: z.number().optional(), - temperature: z.number().optional(), - color: z.string().optional(), - permission: Permission.Ruleset.zod, - model: z - .object({ - modelID: ModelID.zod, - providerID: ProviderID.zod, - }) - .optional(), - variant: z.string().optional(), - 
prompt: z.string().optional(), - options: z.record(z.string(), z.any()), - steps: z.number().int().positive().optional(), - }) - .meta({ - ref: "Agent", - }) - export type Info = z.infer - - export interface Interface { - readonly get: (agent: string) => Effect.Effect - readonly list: () => Effect.Effect - readonly defaultAgent: () => Effect.Effect - readonly generate: (input: { - description: string - model?: { providerID: ProviderID; modelID: ModelID } - }) => Effect.Effect<{ - identifier: string - whenToUse: string - systemPrompt: string - }> - } - - type State = Omit - - export class Service extends Context.Service()("@opencode/Agent") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const config = yield* Config.Service - const auth = yield* Auth.Service - const plugin = yield* Plugin.Service - const skill = yield* Skill.Service - const provider = yield* Provider.Service - - const state = yield* InstanceState.make( - Effect.fn("Agent.state")(function* (_ctx) { - const cfg = yield* config.get() - const skillDirs = yield* skill.dirs() - const whitelistedDirs = [Truncate.GLOB, ...skillDirs.map((dir) => path.join(dir, "*"))] - - const defaults = Permission.fromConfig({ - "*": "allow", - doom_loop: "ask", - external_directory: { - "*": "ask", - ...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])), - }, - question: "deny", - plan_enter: "deny", - plan_exit: "deny", - // mirrors github.com/github/gitignore Node.gitignore pattern for .env files - read: { - "*": "allow", - "*.env": "ask", - "*.env.*": "ask", - "*.env.example": "allow", - }, - }) - - const user = Permission.fromConfig(cfg.permission ?? {}) - - const agents: Record = { - build: { - name: "build", - description: "The default agent. 
Executes tools based on configured permissions.", - options: {}, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - question: "allow", - plan_enter: "allow", - }), - user, - ), - mode: "primary", - native: true, - }, - plan: { - name: "plan", - description: "Plan mode. Disallows all edit tools.", - options: {}, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - question: "allow", - plan_exit: "allow", - external_directory: { - [path.join(Global.Path.data, "plans", "*")]: "allow", - }, - edit: { - "*": "deny", - [path.join(".opencode", "plans", "*.md")]: "allow", - [path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]: - "allow", - }, - }), - user, - ), - mode: "primary", - native: true, - }, - general: { - name: "general", - description: `General-purpose agent for researching complex questions and executing multi-step tasks. Use this agent to execute multiple units of work in parallel.`, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - todowrite: "deny", - }), - user, - ), - options: {}, - mode: "subagent", - native: true, - }, - explore: { - name: "explore", - permission: Permission.merge( - defaults, - Permission.fromConfig({ - "*": "deny", - grep: "allow", - glob: "allow", - list: "allow", - bash: "allow", - webfetch: "allow", - websearch: "allow", - codesearch: "allow", - read: "allow", - external_directory: { - "*": "ask", - ...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])), - }, - }), - user, - ), - description: `Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). 
When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.`, - prompt: PROMPT_EXPLORE, - options: {}, - mode: "subagent", - native: true, - }, - compaction: { - name: "compaction", - mode: "primary", - native: true, - hidden: true, - prompt: PROMPT_COMPACTION, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - "*": "deny", - }), - user, - ), - options: {}, - }, - title: { - name: "title", - mode: "primary", - options: {}, - native: true, - hidden: true, - temperature: 0.5, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - "*": "deny", - }), - user, - ), - prompt: PROMPT_TITLE, - }, - summary: { - name: "summary", - mode: "primary", - options: {}, - native: true, - hidden: true, - permission: Permission.merge( - defaults, - Permission.fromConfig({ - "*": "deny", - }), - user, - ), - prompt: PROMPT_SUMMARY, - }, - } - - for (const [key, value] of Object.entries(cfg.agent ?? {})) { - if (value.disable) { - delete agents[key] - continue - } - let item = agents[key] - if (!item) - item = agents[key] = { - name: key, - mode: "all", - permission: Permission.merge(defaults, user), - options: {}, - native: false, - } - if (value.model) item.model = Provider.parseModel(value.model) - item.variant = value.variant ?? item.variant - item.prompt = value.prompt ?? item.prompt - item.description = value.description ?? item.description - item.temperature = value.temperature ?? item.temperature - item.topP = value.top_p ?? item.topP - item.mode = value.mode ?? item.mode - item.color = value.color ?? item.color - item.hidden = value.hidden ?? item.hidden - item.name = value.name ?? item.name - item.steps = value.steps ?? item.steps - item.options = mergeDeep(item.options, value.options ?? 
{}) - item.permission = Permission.merge(item.permission, Permission.fromConfig(value.permission ?? {})) - } - - // Ensure Truncate.GLOB is allowed unless explicitly configured - for (const name in agents) { - const agent = agents[name] - const explicit = agent.permission.some((r) => { - if (r.permission !== "external_directory") return false - if (r.action !== "deny") return false - return r.pattern === Truncate.GLOB - }) - if (explicit) continue - - agents[name].permission = Permission.merge( - agents[name].permission, - Permission.fromConfig({ external_directory: { [Truncate.GLOB]: "allow" } }), - ) - } - - const get = Effect.fnUntraced(function* (agent: string) { - return agents[agent] - }) - - const list = Effect.fnUntraced(function* () { - const cfg = yield* config.get() - return pipe( - agents, - values(), - sortBy( - [(x) => (cfg.default_agent ? x.name === cfg.default_agent : x.name === "build"), "desc"], - [(x) => x.name, "asc"], - ), - ) - }) - - const defaultAgent = Effect.fnUntraced(function* () { - const c = yield* config.get() - if (c.default_agent) { - const agent = agents[c.default_agent] - if (!agent) throw new Error(`default agent "${c.default_agent}" not found`) - if (agent.mode === "subagent") throw new Error(`default agent "${c.default_agent}" is a subagent`) - if (agent.hidden === true) throw new Error(`default agent "${c.default_agent}" is hidden`) - return agent.name - } - const visible = Object.values(agents).find((a) => a.mode !== "subagent" && a.hidden !== true) - if (!visible) throw new Error("no primary visible agent found") - return visible.name - }) - - return { - get, - list, - defaultAgent, - } satisfies State - }), - ) - - return Service.of({ - get: Effect.fn("Agent.get")(function* (agent: string) { - return yield* InstanceState.useEffect(state, (s) => s.get(agent)) - }), - list: Effect.fn("Agent.list")(function* () { - return yield* InstanceState.useEffect(state, (s) => s.list()) - }), - defaultAgent: 
Effect.fn("Agent.defaultAgent")(function* () { - return yield* InstanceState.useEffect(state, (s) => s.defaultAgent()) - }), - generate: Effect.fn("Agent.generate")(function* (input: { - description: string - model?: { providerID: ProviderID; modelID: ModelID } - }) { - const cfg = yield* config.get() - const model = input.model ?? (yield* provider.defaultModel()) - const resolved = yield* provider.getModel(model.providerID, model.modelID) - const language = yield* provider.getLanguage(resolved) - const tracer = cfg.experimental?.openTelemetry - ? Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer)) - : undefined - - const system = [PROMPT_GENERATE] - yield* plugin.trigger("experimental.chat.system.transform", { model: resolved }, { system }) - const existing = yield* InstanceState.useEffect(state, (s) => s.list()) - - // TODO: clean this up so provider specific logic doesnt bleed over - const authInfo = yield* auth.get(model.providerID).pipe(Effect.orDie) - const isOpenaiOauth = model.providerID === "openai" && authInfo?.type === "oauth" - - const params = { - experimental_telemetry: { - isEnabled: cfg.experimental?.openTelemetry, - tracer, - metadata: { - userId: cfg.username ?? "unknown", - }, - }, - temperature: 0.3, - messages: [ - ...(isOpenaiOauth - ? 
[] - : system.map( - (item): ModelMessage => ({ - role: "system", - content: item, - }), - )), - { - role: "user", - content: `Create an agent configuration based on this request: "${input.description}".\n\nIMPORTANT: The following identifiers already exist and must NOT be used: ${existing.map((i) => i.name).join(", ")}\n Return ONLY the JSON object, no other text, do not wrap in backticks`, - }, - ], - model: language, - schema: z.object({ - identifier: z.string(), - whenToUse: z.string(), - systemPrompt: z.string(), - }), - } satisfies Parameters[0] - - if (isOpenaiOauth) { - return yield* Effect.promise(async () => { - const result = streamObject({ - ...params, - providerOptions: ProviderTransform.providerOptions(resolved, { - instructions: system.join("\n"), - store: false, - }), - onError: () => {}, - }) - for await (const part of result.fullStream) { - if (part.type === "error") throw part.error - } - return result.object - }) - } - - return yield* Effect.promise(() => generateObject(params).then((r) => r.object)) - }), +export const Info = z + .object({ + name: z.string(), + description: z.string().optional(), + mode: z.enum(["subagent", "primary", "all"]), + native: z.boolean().optional(), + hidden: z.boolean().optional(), + topP: z.number().optional(), + temperature: z.number().optional(), + color: z.string().optional(), + permission: Permission.Ruleset.zod, + model: z + .object({ + modelID: ModelID.zod, + providerID: ProviderID.zod, }) - }), - ) + .optional(), + variant: z.string().optional(), + prompt: z.string().optional(), + options: z.record(z.string(), z.any()), + steps: z.number().int().positive().optional(), + }) + .meta({ + ref: "Agent", + }) +export type Info = z.infer - export const defaultLayer = layer.pipe( - Layer.provide(Plugin.defaultLayer), - Layer.provide(Provider.defaultLayer), - Layer.provide(Auth.defaultLayer), - Layer.provide(Config.defaultLayer), - Layer.provide(Skill.defaultLayer), - ) +export interface Interface { + readonly get: 
(agent: string) => Effect.Effect + readonly list: () => Effect.Effect + readonly defaultAgent: () => Effect.Effect + readonly generate: (input: { + description: string + model?: { providerID: ProviderID; modelID: ModelID } + }) => Effect.Effect<{ + identifier: string + whenToUse: string + systemPrompt: string + }> } + +type State = Omit + +export class Service extends Context.Service()("@opencode/Agent") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const config = yield* Config.Service + const auth = yield* Auth.Service + const plugin = yield* Plugin.Service + const skill = yield* Skill.Service + const provider = yield* Provider.Service + + const state = yield* InstanceState.make( + Effect.fn("Agent.state")(function* (_ctx) { + const cfg = yield* config.get() + const skillDirs = yield* skill.dirs() + const whitelistedDirs = [Truncate.GLOB, ...skillDirs.map((dir) => path.join(dir, "*"))] + + const defaults = Permission.fromConfig({ + "*": "allow", + doom_loop: "ask", + external_directory: { + "*": "ask", + ...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])), + }, + question: "deny", + plan_enter: "deny", + plan_exit: "deny", + // mirrors github.com/github/gitignore Node.gitignore pattern for .env files + read: { + "*": "allow", + "*.env": "ask", + "*.env.*": "ask", + "*.env.example": "allow", + }, + }) + + const user = Permission.fromConfig(cfg.permission ?? {}) + + const agents: Record = { + build: { + name: "build", + description: "The default agent. Executes tools based on configured permissions.", + options: {}, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + question: "allow", + plan_enter: "allow", + }), + user, + ), + mode: "primary", + native: true, + }, + plan: { + name: "plan", + description: "Plan mode. 
Disallows all edit tools.", + options: {}, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + question: "allow", + plan_exit: "allow", + external_directory: { + [path.join(Global.Path.data, "plans", "*")]: "allow", + }, + edit: { + "*": "deny", + [path.join(".opencode", "plans", "*.md")]: "allow", + [path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]: + "allow", + }, + }), + user, + ), + mode: "primary", + native: true, + }, + general: { + name: "general", + description: `General-purpose agent for researching complex questions and executing multi-step tasks. Use this agent to execute multiple units of work in parallel.`, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + todowrite: "deny", + }), + user, + ), + options: {}, + mode: "subagent", + native: true, + }, + explore: { + name: "explore", + permission: Permission.merge( + defaults, + Permission.fromConfig({ + "*": "deny", + grep: "allow", + glob: "allow", + list: "allow", + bash: "allow", + webfetch: "allow", + websearch: "allow", + codesearch: "allow", + read: "allow", + external_directory: { + "*": "ask", + ...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])), + }, + }), + user, + ), + description: `Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). 
When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.`, + prompt: PROMPT_EXPLORE, + options: {}, + mode: "subagent", + native: true, + }, + compaction: { + name: "compaction", + mode: "primary", + native: true, + hidden: true, + prompt: PROMPT_COMPACTION, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + "*": "deny", + }), + user, + ), + options: {}, + }, + title: { + name: "title", + mode: "primary", + options: {}, + native: true, + hidden: true, + temperature: 0.5, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + "*": "deny", + }), + user, + ), + prompt: PROMPT_TITLE, + }, + summary: { + name: "summary", + mode: "primary", + options: {}, + native: true, + hidden: true, + permission: Permission.merge( + defaults, + Permission.fromConfig({ + "*": "deny", + }), + user, + ), + prompt: PROMPT_SUMMARY, + }, + } + + for (const [key, value] of Object.entries(cfg.agent ?? {})) { + if (value.disable) { + delete agents[key] + continue + } + let item = agents[key] + if (!item) + item = agents[key] = { + name: key, + mode: "all", + permission: Permission.merge(defaults, user), + options: {}, + native: false, + } + if (value.model) item.model = Provider.parseModel(value.model) + item.variant = value.variant ?? item.variant + item.prompt = value.prompt ?? item.prompt + item.description = value.description ?? item.description + item.temperature = value.temperature ?? item.temperature + item.topP = value.top_p ?? item.topP + item.mode = value.mode ?? item.mode + item.color = value.color ?? item.color + item.hidden = value.hidden ?? item.hidden + item.name = value.name ?? item.name + item.steps = value.steps ?? item.steps + item.options = mergeDeep(item.options, value.options ?? 
{}) + item.permission = Permission.merge(item.permission, Permission.fromConfig(value.permission ?? {})) + } + + // Ensure Truncate.GLOB is allowed unless explicitly configured + for (const name in agents) { + const agent = agents[name] + const explicit = agent.permission.some((r) => { + if (r.permission !== "external_directory") return false + if (r.action !== "deny") return false + return r.pattern === Truncate.GLOB + }) + if (explicit) continue + + agents[name].permission = Permission.merge( + agents[name].permission, + Permission.fromConfig({ external_directory: { [Truncate.GLOB]: "allow" } }), + ) + } + + const get = Effect.fnUntraced(function* (agent: string) { + return agents[agent] + }) + + const list = Effect.fnUntraced(function* () { + const cfg = yield* config.get() + return pipe( + agents, + values(), + sortBy( + [(x) => (cfg.default_agent ? x.name === cfg.default_agent : x.name === "build"), "desc"], + [(x) => x.name, "asc"], + ), + ) + }) + + const defaultAgent = Effect.fnUntraced(function* () { + const c = yield* config.get() + if (c.default_agent) { + const agent = agents[c.default_agent] + if (!agent) throw new Error(`default agent "${c.default_agent}" not found`) + if (agent.mode === "subagent") throw new Error(`default agent "${c.default_agent}" is a subagent`) + if (agent.hidden === true) throw new Error(`default agent "${c.default_agent}" is hidden`) + return agent.name + } + const visible = Object.values(agents).find((a) => a.mode !== "subagent" && a.hidden !== true) + if (!visible) throw new Error("no primary visible agent found") + return visible.name + }) + + return { + get, + list, + defaultAgent, + } satisfies State + }), + ) + + return Service.of({ + get: Effect.fn("Agent.get")(function* (agent: string) { + return yield* InstanceState.useEffect(state, (s) => s.get(agent)) + }), + list: Effect.fn("Agent.list")(function* () { + return yield* InstanceState.useEffect(state, (s) => s.list()) + }), + defaultAgent: 
Effect.fn("Agent.defaultAgent")(function* () { + return yield* InstanceState.useEffect(state, (s) => s.defaultAgent()) + }), + generate: Effect.fn("Agent.generate")(function* (input: { + description: string + model?: { providerID: ProviderID; modelID: ModelID } + }) { + const cfg = yield* config.get() + const model = input.model ?? (yield* provider.defaultModel()) + const resolved = yield* provider.getModel(model.providerID, model.modelID) + const language = yield* provider.getLanguage(resolved) + const tracer = cfg.experimental?.openTelemetry + ? Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer)) + : undefined + + const system = [PROMPT_GENERATE] + yield* plugin.trigger("experimental.chat.system.transform", { model: resolved }, { system }) + const existing = yield* InstanceState.useEffect(state, (s) => s.list()) + + // TODO: clean this up so provider specific logic doesnt bleed over + const authInfo = yield* auth.get(model.providerID).pipe(Effect.orDie) + const isOpenaiOauth = model.providerID === "openai" && authInfo?.type === "oauth" + + const params = { + experimental_telemetry: { + isEnabled: cfg.experimental?.openTelemetry, + tracer, + metadata: { + userId: cfg.username ?? "unknown", + }, + }, + temperature: 0.3, + messages: [ + ...(isOpenaiOauth + ? 
[] + : system.map( + (item): ModelMessage => ({ + role: "system", + content: item, + }), + )), + { + role: "user", + content: `Create an agent configuration based on this request: "${input.description}".\n\nIMPORTANT: The following identifiers already exist and must NOT be used: ${existing.map((i) => i.name).join(", ")}\n Return ONLY the JSON object, no other text, do not wrap in backticks`, + }, + ], + model: language, + schema: z.object({ + identifier: z.string(), + whenToUse: z.string(), + systemPrompt: z.string(), + }), + } satisfies Parameters[0] + + if (isOpenaiOauth) { + return yield* Effect.promise(async () => { + const result = streamObject({ + ...params, + providerOptions: ProviderTransform.providerOptions(resolved, { + instructions: system.join("\n"), + store: false, + }), + onError: () => {}, + }) + for await (const part of result.fullStream) { + if (part.type === "error") throw part.error + } + return result.object + }) + } + + return yield* Effect.promise(() => generateObject(params).then((r) => r.object)) + }), + }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(Plugin.defaultLayer), + Layer.provide(Provider.defaultLayer), + Layer.provide(Auth.defaultLayer), + Layer.provide(Config.defaultLayer), + Layer.provide(Skill.defaultLayer), +) + +export * as Agent from "./agent" From 974fa1b8b1990c2e51172e32ef321e5fdce0843d Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:05 -0400 Subject: [PATCH 082/201] refactor: unwrap PluginMeta namespace + self-reexport (#22945) --- packages/opencode/src/plugin/meta.ts | 312 +++++++++++++-------------- 1 file changed, 156 insertions(+), 156 deletions(-) diff --git a/packages/opencode/src/plugin/meta.ts b/packages/opencode/src/plugin/meta.ts index 89955d1dfb..86ad8fbab1 100644 --- a/packages/opencode/src/plugin/meta.ts +++ b/packages/opencode/src/plugin/meta.ts @@ -8,181 +8,181 @@ import { Flock } from "@opencode-ai/shared/util/flock" import { parsePluginSpecifier, pluginSource 
} from "./shared" -export namespace PluginMeta { - type Source = "file" | "npm" +type Source = "file" | "npm" - export type Theme = { - src: string - dest: string - mtime?: number - size?: number - } +export type Theme = { + src: string + dest: string + mtime?: number + size?: number +} - export type Entry = { - id: string - source: Source - spec: string - target: string - requested?: string - version?: string - modified?: number - first_time: number - last_time: number - time_changed: number - load_count: number - fingerprint: string - themes?: Record - } +export type Entry = { + id: string + source: Source + spec: string + target: string + requested?: string + version?: string + modified?: number + first_time: number + last_time: number + time_changed: number + load_count: number + fingerprint: string + themes?: Record +} - export type State = "first" | "updated" | "same" +export type State = "first" | "updated" | "same" - export type Touch = { - spec: string - target: string - id: string - } +export type Touch = { + spec: string + target: string + id: string +} - type Store = Record - type Core = Omit - type Row = Touch & { core: Core } +type Store = Record +type Core = Omit +type Row = Touch & { core: Core } - function storePath() { - return Flag.OPENCODE_PLUGIN_META_FILE ?? path.join(Global.Path.state, "plugin-meta.json") - } +function storePath() { + return Flag.OPENCODE_PLUGIN_META_FILE ?? 
path.join(Global.Path.state, "plugin-meta.json") +} - function lock(file: string) { - return `plugin-meta:${file}` - } +function lock(file: string) { + return `plugin-meta:${file}` +} - function fileTarget(spec: string, target: string) { - if (spec.startsWith("file://")) return fileURLToPath(spec) - if (target.startsWith("file://")) return fileURLToPath(target) - return - } +function fileTarget(spec: string, target: string) { + if (spec.startsWith("file://")) return fileURLToPath(spec) + if (target.startsWith("file://")) return fileURLToPath(target) + return +} - async function modifiedAt(file: string) { - const stat = await Filesystem.statAsync(file) - if (!stat) return - const mtime = stat.mtimeMs - return Math.floor(typeof mtime === "bigint" ? Number(mtime) : mtime) - } +async function modifiedAt(file: string) { + const stat = await Filesystem.statAsync(file) + if (!stat) return + const mtime = stat.mtimeMs + return Math.floor(typeof mtime === "bigint" ? Number(mtime) : mtime) +} - function resolvedTarget(target: string) { - if (target.startsWith("file://")) return fileURLToPath(target) - return target - } +function resolvedTarget(target: string) { + if (target.startsWith("file://")) return fileURLToPath(target) + return target +} - async function npmVersion(target: string) { - const resolved = resolvedTarget(target) - const stat = await Filesystem.statAsync(resolved) - const dir = stat?.isDirectory() ? resolved : path.dirname(resolved) - return Filesystem.readJson<{ version?: string }>(path.join(dir, "package.json")) - .then((item) => item.version) - .catch(() => undefined) - } - - async function entryCore(item: Touch): Promise { - const spec = item.spec - const target = item.target - const source = pluginSource(spec) - if (source === "file") { - const file = fileTarget(spec, target) - return { - id: item.id, - source, - spec, - target, - modified: file ? 
await modifiedAt(file) : undefined, - } - } +async function npmVersion(target: string) { + const resolved = resolvedTarget(target) + const stat = await Filesystem.statAsync(resolved) + const dir = stat?.isDirectory() ? resolved : path.dirname(resolved) + return Filesystem.readJson<{ version?: string }>(path.join(dir, "package.json")) + .then((item) => item.version) + .catch(() => undefined) +} +async function entryCore(item: Touch): Promise { + const spec = item.spec + const target = item.target + const source = pluginSource(spec) + if (source === "file") { + const file = fileTarget(spec, target) return { id: item.id, source, spec, target, - requested: parsePluginSpecifier(spec).version, - version: await npmVersion(target), + modified: file ? await modifiedAt(file) : undefined, } } - function fingerprint(value: Core) { - if (value.source === "file") return [value.target, value.modified ?? ""].join("|") - return [value.target, value.requested ?? "", value.version ?? ""].join("|") - } - - async function read(file: string): Promise { - return Filesystem.readJson(file).catch(() => ({}) as Store) - } - - async function row(item: Touch): Promise { - return { - ...item, - core: await entryCore(item), - } - } - - function next(prev: Entry | undefined, core: Core, now: number): { state: State; entry: Entry } { - const entry: Entry = { - ...core, - first_time: prev?.first_time ?? now, - last_time: now, - time_changed: prev?.time_changed ?? now, - load_count: (prev?.load_count ?? 0) + 1, - fingerprint: fingerprint(core), - themes: prev?.themes, - } - const state: State = !prev ? "first" : prev.fingerprint === entry.fingerprint ? 
"same" : "updated" - if (state === "updated") entry.time_changed = now - return { - state, - entry, - } - } - - export async function touchMany(items: Touch[]): Promise> { - if (!items.length) return [] - const file = storePath() - const rows = await Promise.all(items.map((item) => row(item))) - - return Flock.withLock(lock(file), async () => { - const store = await read(file) - const now = Date.now() - const out: Array<{ state: State; entry: Entry }> = [] - for (const item of rows) { - const hit = next(store[item.id], item.core, now) - store[item.id] = hit.entry - out.push(hit) - } - await Filesystem.writeJson(file, store) - return out - }) - } - - export async function touch(spec: string, target: string, id: string): Promise<{ state: State; entry: Entry }> { - return touchMany([{ spec, target, id }]).then((item) => { - const hit = item[0] - if (hit) return hit - throw new Error("Failed to touch plugin metadata.") - }) - } - - export async function setTheme(id: string, name: string, theme: Theme): Promise { - const file = storePath() - await Flock.withLock(lock(file), async () => { - const store = await read(file) - const entry = store[id] - if (!entry) return - entry.themes = { - ...entry.themes, - [name]: theme, - } - await Filesystem.writeJson(file, store) - }) - } - - export async function list(): Promise { - const file = storePath() - return Flock.withLock(lock(file), async () => read(file)) + return { + id: item.id, + source, + spec, + target, + requested: parsePluginSpecifier(spec).version, + version: await npmVersion(target), } } + +function fingerprint(value: Core) { + if (value.source === "file") return [value.target, value.modified ?? ""].join("|") + return [value.target, value.requested ?? "", value.version ?? 
""].join("|") +} + +async function read(file: string): Promise { + return Filesystem.readJson(file).catch(() => ({}) as Store) +} + +async function row(item: Touch): Promise { + return { + ...item, + core: await entryCore(item), + } +} + +function next(prev: Entry | undefined, core: Core, now: number): { state: State; entry: Entry } { + const entry: Entry = { + ...core, + first_time: prev?.first_time ?? now, + last_time: now, + time_changed: prev?.time_changed ?? now, + load_count: (prev?.load_count ?? 0) + 1, + fingerprint: fingerprint(core), + themes: prev?.themes, + } + const state: State = !prev ? "first" : prev.fingerprint === entry.fingerprint ? "same" : "updated" + if (state === "updated") entry.time_changed = now + return { + state, + entry, + } +} + +export async function touchMany(items: Touch[]): Promise> { + if (!items.length) return [] + const file = storePath() + const rows = await Promise.all(items.map((item) => row(item))) + + return Flock.withLock(lock(file), async () => { + const store = await read(file) + const now = Date.now() + const out: Array<{ state: State; entry: Entry }> = [] + for (const item of rows) { + const hit = next(store[item.id], item.core, now) + store[item.id] = hit.entry + out.push(hit) + } + await Filesystem.writeJson(file, store) + return out + }) +} + +export async function touch(spec: string, target: string, id: string): Promise<{ state: State; entry: Entry }> { + return touchMany([{ spec, target, id }]).then((item) => { + const hit = item[0] + if (hit) return hit + throw new Error("Failed to touch plugin metadata.") + }) +} + +export async function setTheme(id: string, name: string, theme: Theme): Promise { + const file = storePath() + await Flock.withLock(lock(file), async () => { + const store = await read(file) + const entry = store[id] + if (!entry) return + entry.themes = { + ...entry.themes, + [name]: theme, + } + await Filesystem.writeJson(file, store) + }) +} + +export async function list(): Promise { + const file 
= storePath() + return Flock.withLock(lock(file), async () => read(file)) +} + +export * as PluginMeta from "./meta" From 06d247c70982b9bba0bb25f4b990fb59e3374650 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:08 -0400 Subject: [PATCH 083/201] refactor: unwrap FileIgnore namespace + self-reexport (#22937) --- packages/opencode/src/file/ignore.ts | 140 +++++++++++++-------------- 1 file changed, 70 insertions(+), 70 deletions(-) diff --git a/packages/opencode/src/file/ignore.ts b/packages/opencode/src/file/ignore.ts index 63f2f594eb..efce872808 100644 --- a/packages/opencode/src/file/ignore.ts +++ b/packages/opencode/src/file/ignore.ts @@ -1,81 +1,81 @@ import { Glob } from "@opencode-ai/shared/util/glob" -export namespace FileIgnore { - const FOLDERS = new Set([ - "node_modules", - "bower_components", - ".pnpm-store", - "vendor", - ".npm", - "dist", - "build", - "out", - ".next", - "target", - "bin", - "obj", - ".git", - ".svn", - ".hg", - ".vscode", - ".idea", - ".turbo", - ".output", - "desktop", - ".sst", - ".cache", - ".webkit-cache", - "__pycache__", - ".pytest_cache", - "mypy_cache", - ".history", - ".gradle", - ]) +const FOLDERS = new Set([ + "node_modules", + "bower_components", + ".pnpm-store", + "vendor", + ".npm", + "dist", + "build", + "out", + ".next", + "target", + "bin", + "obj", + ".git", + ".svn", + ".hg", + ".vscode", + ".idea", + ".turbo", + ".output", + "desktop", + ".sst", + ".cache", + ".webkit-cache", + "__pycache__", + ".pytest_cache", + "mypy_cache", + ".history", + ".gradle", +]) - const FILES = [ - "**/*.swp", - "**/*.swo", +const FILES = [ + "**/*.swp", + "**/*.swo", - "**/*.pyc", + "**/*.pyc", - // OS - "**/.DS_Store", - "**/Thumbs.db", + // OS + "**/.DS_Store", + "**/Thumbs.db", - // Logs & temp - "**/logs/**", - "**/tmp/**", - "**/temp/**", - "**/*.log", + // Logs & temp + "**/logs/**", + "**/tmp/**", + "**/temp/**", + "**/*.log", - // Coverage/test outputs - "**/coverage/**", - "**/.nyc_output/**", - ] + // 
Coverage/test outputs + "**/coverage/**", + "**/.nyc_output/**", +] - export const PATTERNS = [...FILES, ...FOLDERS] +export const PATTERNS = [...FILES, ...FOLDERS] - export function match( - filepath: string, - opts?: { - extra?: string[] - whitelist?: string[] - }, - ) { - for (const pattern of opts?.whitelist || []) { - if (Glob.match(pattern, filepath)) return false - } - - const parts = filepath.split(/[/\\]/) - for (let i = 0; i < parts.length; i++) { - if (FOLDERS.has(parts[i])) return true - } - - const extra = opts?.extra || [] - for (const pattern of [...FILES, ...extra]) { - if (Glob.match(pattern, filepath)) return true - } - - return false +export function match( + filepath: string, + opts?: { + extra?: string[] + whitelist?: string[] + }, +) { + for (const pattern of opts?.whitelist || []) { + if (Glob.match(pattern, filepath)) return false } + + const parts = filepath.split(/[/\\]/) + for (let i = 0; i < parts.length; i++) { + if (FOLDERS.has(parts[i])) return true + } + + const extra = opts?.extra || [] + for (const pattern of [...FILES, ...extra]) { + if (Glob.match(pattern, filepath)) return true + } + + return false } + +export * as FileIgnore from "./ignore" From 2704ad9110e6705dacd1b018a7245c1950a3ae80 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:24 -0400 Subject: [PATCH 084/201] refactor: unwrap TuiConfig namespace + self-reexport (#22952) --- .../opencode/src/cli/cmd/tui/config/tui.ts | 360 +++++++++--------- 1 file changed, 180 insertions(+), 180 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index e8eb9ff5d3..d264273bca 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -17,197 +17,197 @@ import { InstallationLocal, InstallationVersion } from "@/installation/version" import { makeRuntime } from "@/cli/effect/runtime" import { Filesystem, Log } from "@/util" -export 
namespace TuiConfig { - const log = Log.create({ service: "tui.config" }) +const log = Log.create({ service: "tui.config" }) - export const Info = TuiInfo +export const Info = TuiInfo - type Acc = { - result: Info - } +type Acc = { + result: Info +} - type State = { - config: Info - deps: Array> - } +type State = { + config: Info + deps: Array> +} - export type Info = z.output & { - // Internal resolved plugin list used by runtime loading. - plugin_origins?: ConfigPlugin.Origin[] - } +export type Info = z.output & { + // Internal resolved plugin list used by runtime loading. + plugin_origins?: ConfigPlugin.Origin[] +} - export interface Interface { - readonly get: () => Effect.Effect - readonly waitForDependencies: () => Effect.Effect - } +export interface Interface { + readonly get: () => Effect.Effect + readonly waitForDependencies: () => Effect.Effect +} - export class Service extends Context.Service()("@opencode/TuiConfig") {} +export class Service extends Context.Service()("@opencode/TuiConfig") {} - function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { - if (Filesystem.contains(ctx.directory, file)) return "local" - // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" - return "global" - } +function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { + if (Filesystem.contains(ctx.directory, file)) return "local" + // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" + return "global" +} - function customPath() { - return Flag.OPENCODE_TUI_CONFIG - } +function customPath() { + return Flag.OPENCODE_TUI_CONFIG +} - function normalize(raw: Record) { - const data = { ...raw } - if (!("tui" in data)) return data - if (!isRecord(data.tui)) { - delete data.tui - return data - } - - const tui = data.tui +function normalize(raw: Record) { + const data = { ...raw } + if (!("tui" in data)) return data + if (!isRecord(data.tui)) { delete data.tui - 
return { - ...tui, - ...data, - } + return data } - async function resolvePlugins(config: Info, configFilepath: string) { - if (!config.plugin) return config - for (let i = 0; i < config.plugin.length; i++) { - config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) - } - return config - } - - async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { - const data = await loadFile(file) - acc.result = mergeDeep(acc.result, data) - if (!data.plugin?.length) return - - const scope = pluginScope(file, ctx) - const plugins = ConfigPlugin.deduplicatePluginOrigins([ - ...(acc.result.plugin_origins ?? []), - ...data.plugin.map((spec) => ({ spec, scope, source: file })), - ]) - acc.result.plugin = plugins.map((item) => item.spec) - acc.result.plugin_origins = plugins - } - - async function loadState(ctx: { directory: string }) { - let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) - const directories = await ConfigPaths.directories(ctx.directory) - const custom = customPath() - await migrateTuiConfig({ directories, custom, cwd: ctx.directory }) - // Re-compute after migration since migrateTuiConfig may have created new tui.json files - projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? 
[] : await ConfigPaths.projectFiles("tui", ctx.directory) - - const acc: Acc = { - result: {}, - } - - for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { - await mergeFile(acc, file, ctx) - } - - if (custom) { - await mergeFile(acc, custom, ctx) - log.debug("loaded custom tui config", { path: custom }) - } - - for (const file of projectFiles) { - await mergeFile(acc, file, ctx) - } - - const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) - - for (const dir of dirs) { - if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue - for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { - await mergeFile(acc, file, ctx) - } - } - - const keybinds = { ...(acc.result.keybinds ?? {}) } - if (process.platform === "win32") { - // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. - keybinds.terminal_suspend = "none" - keybinds.input_undo ??= unique([ - "ctrl+z", - ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), - ]).join(",") - } - acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) - - return { - config: acc.result, - dirs: acc.result.plugin?.length ? dirs : [], - } - } - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const directory = yield* CurrentWorkingDirectory - const npm = yield* Npm.Service - const data = yield* Effect.promise(() => loadState({ directory })) - const deps = yield* Effect.forEach( - data.dirs, - (dir) => - npm - .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? 
"" : "@" + InstallationVersion)], - }) - .pipe(Effect.forkScoped), - { - concurrency: "unbounded", - }, - ) - - const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) - - const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => - Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), - ) - return Service.of({ get, waitForDependencies }) - }).pipe(Effect.withSpan("TuiConfig.layer")), - ) - - export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) - - const { runPromise } = makeRuntime(Service, defaultLayer) - - export async function waitForDependencies() { - await runPromise((svc) => svc.waitForDependencies()) - } - - export async function get() { - return runPromise((svc) => svc.get()) - } - - async function loadFile(filepath: string): Promise { - const text = await ConfigPaths.readFile(filepath) - if (!text) return {} - return load(text, filepath).catch((error) => { - log.warn("failed to load tui config", { path: filepath, error }) - return {} - }) - } - - async function load(text: string, configFilepath: string): Promise { - return ConfigParse.load(Info, text, { - type: "path", - path: configFilepath, - missing: "empty", - normalize: (data) => { - if (!isRecord(data)) return {} - - // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json - // (mirroring the old opencode.json shape) still get their settings applied. 
- return normalize(data) - }, - }) - .then((data) => resolvePlugins(data, configFilepath)) - .catch((error) => { - log.warn("invalid tui config", { path: configFilepath, error }) - return {} - }) + const tui = data.tui + delete data.tui + return { + ...tui, + ...data, } } + +async function resolvePlugins(config: Info, configFilepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) + } + return config +} + +async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { + const data = await loadFile(file) + acc.result = mergeDeep(acc.result, data) + if (!data.plugin?.length) return + + const scope = pluginScope(file, ctx) + const plugins = ConfigPlugin.deduplicatePluginOrigins([ + ...(acc.result.plugin_origins ?? []), + ...data.plugin.map((spec) => ({ spec, scope, source: file })), + ]) + acc.result.plugin = plugins.map((item) => item.spec) + acc.result.plugin_origins = plugins +} + +async function loadState(ctx: { directory: string }) { + let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) + const directories = await ConfigPaths.directories(ctx.directory) + const custom = customPath() + await migrateTuiConfig({ directories, custom, cwd: ctx.directory }) + // Re-compute after migration since migrateTuiConfig may have created new tui.json files + projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? 
[] : await ConfigPaths.projectFiles("tui", ctx.directory) + + const acc: Acc = { + result: {}, + } + + for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { + await mergeFile(acc, file, ctx) + } + + if (custom) { + await mergeFile(acc, custom, ctx) + log.debug("loaded custom tui config", { path: custom }) + } + + for (const file of projectFiles) { + await mergeFile(acc, file, ctx) + } + + const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) + + for (const dir of dirs) { + if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue + for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { + await mergeFile(acc, file, ctx) + } + } + + const keybinds = { ...(acc.result.keybinds ?? {}) } + if (process.platform === "win32") { + // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. + keybinds.terminal_suspend = "none" + keybinds.input_undo ??= unique([ + "ctrl+z", + ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), + ]).join(",") + } + acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) + + return { + config: acc.result, + dirs: acc.result.plugin?.length ? dirs : [], + } +} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const directory = yield* CurrentWorkingDirectory + const npm = yield* Npm.Service + const data = yield* Effect.promise(() => loadState({ directory })) + const deps = yield* Effect.forEach( + data.dirs, + (dir) => + npm + .install(dir, { + add: ["@opencode-ai/plugin" + (InstallationLocal ? 
"" : "@" + InstallationVersion)], + }) + .pipe(Effect.forkScoped), + { + concurrency: "unbounded", + }, + ) + + const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) + + const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => + Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), + ) + return Service.of({ get, waitForDependencies }) + }).pipe(Effect.withSpan("TuiConfig.layer")), +) + +export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) + +const { runPromise } = makeRuntime(Service, defaultLayer) + +export async function waitForDependencies() { + await runPromise((svc) => svc.waitForDependencies()) +} + +export async function get() { + return runPromise((svc) => svc.get()) +} + +async function loadFile(filepath: string): Promise { + const text = await ConfigPaths.readFile(filepath) + if (!text) return {} + return load(text, filepath).catch((error) => { + log.warn("failed to load tui config", { path: filepath, error }) + return {} + }) +} + +async function load(text: string, configFilepath: string): Promise { + return ConfigParse.load(Info, text, { + type: "path", + path: configFilepath, + missing: "empty", + normalize: (data) => { + if (!isRecord(data)) return {} + + // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json + // (mirroring the old opencode.json shape) still get their settings applied. 
+ return normalize(data) + }, + }) + .then((data) => resolvePlugins(data, configFilepath)) + .catch((error) => { + log.warn("invalid tui config", { path: configFilepath, error }) + return {} + }) +} + +export * as TuiConfig from "./tui" From 059b32c2124da9bb7a9e3cc1e9a49e3a72f29740 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:51 -0400 Subject: [PATCH 085/201] refactor: unwrap Protected namespace + self-reexport (#22938) --- packages/opencode/src/file/protected.ts | 38 ++++++++++++------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/packages/opencode/src/file/protected.ts b/packages/opencode/src/file/protected.ts index d519746193..a316e790b8 100644 --- a/packages/opencode/src/file/protected.ts +++ b/packages/opencode/src/file/protected.ts @@ -37,23 +37,23 @@ const DARWIN_ROOT = ["/.DocumentRevisions-V100", "/.Spotlight-V100", "/.Trashes" const WIN32_HOME = ["AppData", "Downloads", "Desktop", "Documents", "Pictures", "Music", "Videos", "OneDrive"] -export namespace Protected { - /** Directory basenames to skip when scanning the home directory. */ - export function names(): ReadonlySet { - if (process.platform === "darwin") return new Set(DARWIN_HOME) - if (process.platform === "win32") return new Set(WIN32_HOME) - return new Set() - } - - /** Absolute paths that should never be watched, stated, or scanned. */ - export function paths(): string[] { - if (process.platform === "darwin") - return [ - ...DARWIN_HOME.map((n) => path.join(home, n)), - ...DARWIN_LIBRARY.map((n) => path.join(home, "Library", n)), - ...DARWIN_ROOT, - ] - if (process.platform === "win32") return WIN32_HOME.map((n) => path.join(home, n)) - return [] - } +/** Directory basenames to skip when scanning the home directory. 
*/ +export function names(): ReadonlySet { + if (process.platform === "darwin") return new Set(DARWIN_HOME) + if (process.platform === "win32") return new Set(WIN32_HOME) + return new Set() } + +/** Absolute paths that should never be watched, stated, or scanned. */ +export function paths(): string[] { + if (process.platform === "darwin") + return [ + ...DARWIN_HOME.map((n) => path.join(home, n)), + ...DARWIN_LIBRARY.map((n) => path.join(home, "Library", n)), + ...DARWIN_ROOT, + ] + if (process.platform === "win32") return WIN32_HOME.map((n) => path.join(home, n)) + return [] +} + +export * as Protected from "./protected" From 635970b0a117481603d9894975ca502bc3887224 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:02:53 -0400 Subject: [PATCH 086/201] refactor: unwrap ConfigSkills namespace + self-reexport (#22950) --- packages/opencode/src/config/skills.ts | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/opencode/src/config/skills.ts b/packages/opencode/src/config/skills.ts index bdc63f5d6a..38cbf99e7d 100644 --- a/packages/opencode/src/config/skills.ts +++ b/packages/opencode/src/config/skills.ts @@ -1,13 +1,13 @@ import z from "zod" -export namespace ConfigSkills { - export const Info = z.object({ - paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), - urls: z - .array(z.string()) - .optional() - .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), - }) +export const Info = z.object({ + paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), + urls: z + .array(z.string()) + .optional() + .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), +}) - export type Info = z.infer -} +export type Info = z.infer + +export * as ConfigSkills from "./skills" From 53dc7b164940edbc5793bac83f91d7fca7b78fe5 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 
00:04:01 +0000 Subject: [PATCH 087/201] chore: generate --- packages/opencode/src/acp/agent.ts | 10 +++------- packages/opencode/src/agent/agent.ts | 3 +-- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts index 7180feabcb..f12328153b 100644 --- a/packages/opencode/src/acp/agent.ts +++ b/packages/opencode/src/acp/agent.ts @@ -1162,8 +1162,7 @@ export class Agent implements ACPAgent { (await (async () => { if (!availableModes.length) return undefined const defaultAgentName = await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent())) - const resolvedModeId = - availableModes.find((mode) => mode.name === defaultAgentName)?.id ?? availableModes[0].id + const resolvedModeId = availableModes.find((mode) => mode.name === defaultAgentName)?.id ?? availableModes[0].id this.sessionManager.setMode(sessionId, resolvedModeId) return resolvedModeId })()) @@ -1362,8 +1361,7 @@ export class Agent implements ACPAgent { if (!current) { this.sessionManager.setModel(session.id, model) } - const agent = - session.modeId ?? (await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent()))) + const agent = session.modeId ?? (await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent()))) const parts: Array< | { type: "text"; text: string; synthetic?: boolean; ignored?: boolean } @@ -1729,9 +1727,7 @@ function buildAvailableModels( ): ModelOption[] { const includeVariants = options.includeVariants ?? 
false return providers.flatMap((provider) => { - const unsorted: Array<{ id: string; name: string; variants?: Record }> = Object.values( - provider.models, - ) + const unsorted: Array<{ id: string; name: string; variants?: Record }> = Object.values(provider.models) const models = Provider.sort(unsorted) return models.flatMap((model) => { const base: ModelOption = { diff --git a/packages/opencode/src/agent/agent.ts b/packages/opencode/src/agent/agent.ts index 07f742fe12..355718b6bf 100644 --- a/packages/opencode/src/agent/agent.ts +++ b/packages/opencode/src/agent/agent.ts @@ -136,8 +136,7 @@ export const layer = Layer.effect( edit: { "*": "deny", [path.join(".opencode", "plans", "*.md")]: "allow", - [path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]: - "allow", + [path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]: "allow", }, }), user, From c0bfccc15ea6e2baea1d5b67f73d689317caa2af Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:11:17 -0400 Subject: [PATCH 088/201] tooling: add unwrap-and-self-reexport + batch-unwrap-pr scripts (#22929) --- packages/opencode/script/batch-unwrap-pr.ts | 230 +++++++++++++++++ .../script/unwrap-and-self-reexport.ts | 241 ++++++++++++++++++ 2 files changed, 471 insertions(+) create mode 100644 packages/opencode/script/batch-unwrap-pr.ts create mode 100644 packages/opencode/script/unwrap-and-self-reexport.ts diff --git a/packages/opencode/script/batch-unwrap-pr.ts b/packages/opencode/script/batch-unwrap-pr.ts new file mode 100644 index 0000000000..5730501412 --- /dev/null +++ b/packages/opencode/script/batch-unwrap-pr.ts @@ -0,0 +1,230 @@ +#!/usr/bin/env bun +/** + * Automate the full per-file namespace→self-reexport migration: + * + * 1. Create a worktree at ../opencode-worktrees/ns- on a new branch + * `kit/ns-` off `origin/dev`. + * 2. 
Symlink `node_modules` from the main repo into the worktree root so + * builds work without a fresh `bun install`. + * 3. Run `script/unwrap-and-self-reexport.ts` on the target file inside the worktree. + * 4. Verify: + * - `bunx --bun tsgo --noEmit` (pre-existing plugin.ts cross-worktree + * noise ignored — we compare against a pre-change baseline captured + * via `git stash`, so only NEW errors fail). + * - `bun run --conditions=browser ./src/index.ts generate`. + * - Relevant tests under `test/` if that directory exists. + * 5. Commit, push with `--no-verify`, and open a PR titled after the + * namespace. + * + * Usage: + * + * bun script/batch-unwrap-pr.ts src/file/ignore.ts + * bun script/batch-unwrap-pr.ts src/file/ignore.ts src/file/watcher.ts # multiple + * bun script/batch-unwrap-pr.ts --dry-run src/file/ignore.ts # plan only + * + * Repo assumptions: + * + * - Main checkout at /Users/kit/code/open-source/opencode (configurable via + * --repo-root=...). + * - Worktree root at /Users/kit/code/open-source/opencode-worktrees + * (configurable via --worktree-root=...). + * + * The script does NOT enable auto-merge; that's a separate manual step if we + * want it. + */ + +import fs from "node:fs" +import path from "node:path" +import { spawnSync, type SpawnSyncReturns } from "node:child_process" + +type Cmd = string[] + +function run( + cwd: string, + cmd: Cmd, + opts: { capture?: boolean; allowFail?: boolean; stdin?: string } = {}, +): SpawnSyncReturns { + const result = spawnSync(cmd[0], cmd.slice(1), { + cwd, + stdio: opts.capture ? ["pipe", "pipe", "pipe"] : ["inherit", "inherit", "inherit"], + encoding: "utf-8", + input: opts.stdin, + }) + if (!opts.allowFail && result.status !== 0) { + const label = `${path.basename(cmd[0])} ${cmd.slice(1).join(" ")}` + console.error(`[fail] ${label} (cwd=${cwd})`) + if (opts.capture) { + if (result.stdout) console.error(result.stdout) + if (result.stderr) console.error(result.stderr) + } + process.exit(result.status ?? 
1) + } + return result +} + +function fileSlug(fileArg: string): string { + // src/file/ignore.ts → file-ignore + return fileArg + .replace(/^src\//, "") + .replace(/\.tsx?$/, "") + .replace(/[\/_]/g, "-") +} + +function readNamespace(absFile: string): string { + const content = fs.readFileSync(absFile, "utf-8") + const match = content.match(/^export\s+namespace\s+(\w+)\s*\{/m) + if (!match) { + console.error(`no \`export namespace\` found in ${absFile}`) + process.exit(1) + } + return match[1] +} + +// --------------------------------------------------------------------------- + +const args = process.argv.slice(2) +const dryRun = args.includes("--dry-run") +const repoRoot = ( + args.find((a) => a.startsWith("--repo-root=")) ?? "--repo-root=/Users/kit/code/open-source/opencode" +).split("=")[1] +const worktreeRoot = ( + args.find((a) => a.startsWith("--worktree-root=")) ?? "--worktree-root=/Users/kit/code/open-source/opencode-worktrees" +).split("=")[1] +const targets = args.filter((a) => !a.startsWith("--")) + +if (targets.length === 0) { + console.error("Usage: bun script/batch-unwrap-pr.ts [more files...] [--dry-run]") + process.exit(1) +} + +if (!fs.existsSync(worktreeRoot)) fs.mkdirSync(worktreeRoot, { recursive: true }) + +for (const rel of targets) { + const absSrc = path.join(repoRoot, "packages", "opencode", rel) + if (!fs.existsSync(absSrc)) { + console.error(`skip ${rel}: file does not exist under ${repoRoot}/packages/opencode`) + continue + } + const slug = fileSlug(rel) + const branch = `kit/ns-${slug}` + const wt = path.join(worktreeRoot, `ns-${slug}`) + const ns = readNamespace(absSrc) + + console.log(`\n=== ${rel} → ${ns} (branch=${branch} wt=${path.basename(wt)}) ===`) + + if (dryRun) { + console.log(` would create worktree ${wt}`) + console.log(` would run unwrap on packages/opencode/${rel}`) + console.log(` would commit, push, and open PR`) + continue + } + + // Sync dev (fetch only; we branch off origin/dev directly). 
+ run(repoRoot, ["git", "fetch", "origin", "dev", "--quiet"]) + + // Create worktree + branch. + if (fs.existsSync(wt)) { + console.log(` worktree already exists at ${wt}; skipping`) + continue + } + run(repoRoot, ["git", "worktree", "add", "-b", branch, wt, "origin/dev"]) + + // Symlink node_modules so bun/tsgo work without a full install. + // We link both the repo root and packages/opencode, since the opencode + // package has its own local node_modules (including bunfig.toml preload deps + // like @opentui/solid) that aren't hoisted to the root. + const wtRootNodeModules = path.join(wt, "node_modules") + if (!fs.existsSync(wtRootNodeModules)) { + fs.symlinkSync(path.join(repoRoot, "node_modules"), wtRootNodeModules) + } + const wtOpencode = path.join(wt, "packages", "opencode") + const wtOpencodeNodeModules = path.join(wtOpencode, "node_modules") + if (!fs.existsSync(wtOpencodeNodeModules)) { + fs.symlinkSync(path.join(repoRoot, "packages", "opencode", "node_modules"), wtOpencodeNodeModules) + } + const wtTarget = path.join(wt, "packages", "opencode", rel) + + // Baseline tsgo output (pre-change). + const baselinePath = path.join(wt, ".ns-baseline.txt") + const baseline = run(wtOpencode, ["bunx", "--bun", "tsgo", "--noEmit"], { capture: true, allowFail: true }) + fs.writeFileSync(baselinePath, (baseline.stdout ?? "") + (baseline.stderr ?? "")) + + // Run the unwrap script from the MAIN repo checkout (where the tooling + // lives) targeting the worktree's file by absolute path. We run from the + // worktree root (not `packages/opencode`) to avoid triggering the + // bunfig.toml preload, which needs `@opentui/solid` that only the TUI + // workspace has installed. + const unwrapScript = path.join(repoRoot, "packages", "opencode", "script", "unwrap-and-self-reexport.ts") + run(wt, ["bun", unwrapScript, wtTarget]) + + // Post-change tsgo. 
+ const after = run(wtOpencode, ["bunx", "--bun", "tsgo", "--noEmit"], { capture: true, allowFail: true }) + const afterText = (after.stdout ?? "") + (after.stderr ?? "") + + // Compare line-sets to detect NEW tsgo errors. + const sanitize = (s: string) => + s + .split("\n") + .map((l) => l.replace(/\s+$/, "")) + .filter(Boolean) + .sort() + .join("\n") + const baselineSorted = sanitize(fs.readFileSync(baselinePath, "utf-8")) + const afterSorted = sanitize(afterText) + if (baselineSorted !== afterSorted) { + console.log(` tsgo output differs from baseline. Showing diff:`) + const diffResult = spawnSync("diff", ["-u", baselinePath, "-"], { input: afterText, encoding: "utf-8" }) + if (diffResult.stdout) console.log(diffResult.stdout) + if (diffResult.stderr) console.log(diffResult.stderr) + console.error(` aborting ${rel}; investigate manually in ${wt}`) + process.exit(1) + } + + // SDK build. + run(wtOpencode, ["bun", "run", "--conditions=browser", "./src/index.ts", "generate"], { capture: true }) + + // Run tests for the directory, if a matching test dir exists. + const dirName = path.basename(path.dirname(rel)) + const testDir = path.join(wt, "packages", "opencode", "test", dirName) + if (fs.existsSync(testDir)) { + const testResult = run(wtOpencode, ["bun", "run", "test", `test/${dirName}`], { capture: true, allowFail: true }) + const combined = (testResult.stdout ?? "") + (testResult.stderr ?? "") + if (testResult.status !== 0) { + console.error(combined) + console.error(` tests failed for ${rel}; aborting`) + process.exit(1) + } + // Surface the summary line if present. + const summary = combined + .split("\n") + .filter((l) => /\bpass\b|\bfail\b/.test(l)) + .slice(-3) + .join("\n") + if (summary) console.log(` tests: ${summary.replace(/\n/g, " | ")}`) + } else { + console.log(` tests: no test/${dirName} directory, skipping`) + } + + // Clean up baseline file before committing. + fs.unlinkSync(baselinePath) + + // Commit, push, open PR. 
+ const commitMsg = `refactor: unwrap ${ns} namespace + self-reexport` + run(wt, ["git", "add", "-A"]) + run(wt, ["git", "commit", "-m", commitMsg]) + run(wt, ["git", "push", "-u", "origin", branch, "--no-verify"]) + + const prBody = [ + "## Summary", + `- Unwrap the \`${ns}\` namespace in \`packages/opencode/${rel}\` to flat top-level exports.`, + `- Append \`export * as ${ns} from "./${path.basename(rel, ".ts")}"\` so consumers keep the same \`${ns}.x\` import ergonomics.`, + "", + "## Verification (local)", + "- `bunx --bun tsgo --noEmit` — no new errors vs baseline.", + "- `bun run --conditions=browser ./src/index.ts generate` — clean.", + `- \`bun run test test/${dirName}\` — all pass (if applicable).`, + ].join("\n") + run(wt, ["gh", "pr", "create", "--title", commitMsg, "--base", "dev", "--body", prBody]) + + console.log(` PR opened for ${rel}`) +} diff --git a/packages/opencode/script/unwrap-and-self-reexport.ts b/packages/opencode/script/unwrap-and-self-reexport.ts new file mode 100644 index 0000000000..5ae703182e --- /dev/null +++ b/packages/opencode/script/unwrap-and-self-reexport.ts @@ -0,0 +1,241 @@ +#!/usr/bin/env bun +/** + * Unwrap a single `export namespace` in a file into flat top-level exports + * plus a self-reexport at the bottom of the same file. + * + * Usage: + * + * bun script/unwrap-and-self-reexport.ts src/file/ignore.ts + * bun script/unwrap-and-self-reexport.ts src/file/ignore.ts --dry-run + * + * Input file shape: + * + * // imports ... + * + * export namespace FileIgnore { + * export function ...(...) { ... } + * const helper = ... + * } + * + * Output shape: + * + * // imports ... + * + * export function ...(...) { ... } + * const helper = ... + * + * export * as FileIgnore from "./ignore" + * + * What the script does: + * + * 1. Uses ast-grep to locate the single `export namespace Foo { ... }` block. + * 2. Removes the `export namespace Foo {` line and the matching closing `}`. + * 3. Dedents the body by one indent level (2 spaces). 
+ * 4. Rewrites `Foo.Bar` self-references inside the file to just `Bar` + * (but only for names that are actually exported from the namespace — + * non-exported members get the same treatment so references remain valid). + * 5. Appends `export * as Foo from "./"` at the end of the file. + * + * What it does NOT do: + * + * - Does not create or modify barrel `index.ts` files. + * - Does not rewrite any consumer imports. Consumers already import from + * the file path itself (e.g. `import { FileIgnore } from "../file/ignore"`); + * the self-reexport keeps that import working unchanged. + * - Does not handle files with more than one `export namespace` declaration. + * The script refuses that case. + * + * Requires: ast-grep (`brew install ast-grep`). + */ + +import fs from "node:fs" +import path from "node:path" + +const args = process.argv.slice(2) +const dryRun = args.includes("--dry-run") +const targetArg = args.find((a) => !a.startsWith("--")) + +if (!targetArg) { + console.error("Usage: bun script/unwrap-and-self-reexport.ts [--dry-run]") + process.exit(1) +} + +const absPath = path.resolve(targetArg) +if (!fs.existsSync(absPath) || !fs.statSync(absPath).isFile()) { + console.error(`Not a file: ${absPath}`) + process.exit(1) +} + +// Locate the namespace block with ast-grep (accurate AST boundaries). 
+const ast = Bun.spawnSync( + ["ast-grep", "run", "--pattern", "export namespace $NAME { $$$BODY }", "--lang", "typescript", "--json", absPath], + { stdout: "pipe", stderr: "pipe" }, +) +if (ast.exitCode !== 0) { + console.error("ast-grep failed:", ast.stderr.toString()) + process.exit(1) +} + +type AstMatch = { + range: { start: { line: number; column: number }; end: { line: number; column: number } } + metaVariables: { single: Record } +} +const matches = JSON.parse(ast.stdout.toString()) as AstMatch[] +if (matches.length === 0) { + console.error(`No \`export namespace\` found in ${path.relative(process.cwd(), absPath)}`) + process.exit(1) +} +if (matches.length > 1) { + console.error(`File has ${matches.length} \`export namespace\` declarations — this script handles one per file.`) + for (const m of matches) console.error(` ${m.metaVariables.single.NAME.text} (line ${m.range.start.line + 1})`) + process.exit(1) +} + +const match = matches[0] +const nsName = match.metaVariables.single.NAME.text +const startLine = match.range.start.line +const endLine = match.range.end.line + +const original = fs.readFileSync(absPath, "utf-8") +const lines = original.split("\n") + +// Split the file into before/body/after. +const before = lines.slice(0, startLine) +const body = lines.slice(startLine + 1, endLine) +const after = lines.slice(endLine + 1) + +// Dedent body by one indent level (2 spaces). +const dedented = body.map((line) => { + if (line === "") return "" + if (line.startsWith(" ")) return line.slice(2) + return line +}) + +// Collect all top-level declared identifiers inside the namespace body so we can +// rewrite `Foo.X` → `X` when X is one of them. We gather BOTH exported and +// non-exported names because the namespace body might reference its own +// non-exported helpers via `Foo.helper` too. 
+const declaredNames = new Set() +const declRe = + /^\s*(?:export\s+)?(?:abstract\s+)?(?:async\s+)?(?:const|let|var|function|class|interface|type|enum)\s+(\w+)/ +for (const line of dedented) { + const m = line.match(declRe) + if (m) declaredNames.add(m[1]) +} +// Also capture `export { X, Y }` re-exports inside the namespace. +const reExportRe = /export\s*\{\s*([^}]+)\}/g +for (const line of dedented) { + for (const reExport of line.matchAll(reExportRe)) { + for (const part of reExport[1].split(",")) { + const name = part + .trim() + .split(/\s+as\s+/) + .pop()! + .trim() + if (name) declaredNames.add(name) + } + } +} + +// Rewrite `Foo.X` → `X` inside the body, avoiding matches in strings, comments, +// templates. We walk the line char-by-char rather than using a regex so we can +// skip over those segments cleanly. +let rewriteCount = 0 +function rewriteLine(line: string): string { + const out: string[] = [] + let i = 0 + let stringQuote: string | null = null + while (i < line.length) { + const ch = line[i] + // String / template literal pass-through. + if (stringQuote) { + out.push(ch) + if (ch === "\\" && i + 1 < line.length) { + out.push(line[i + 1]) + i += 2 + continue + } + if (ch === stringQuote) stringQuote = null + i++ + continue + } + if (ch === '"' || ch === "'" || ch === "`") { + stringQuote = ch + out.push(ch) + i++ + continue + } + // Line comment: emit the rest of the line untouched. + if (ch === "/" && line[i + 1] === "/") { + out.push(line.slice(i)) + i = line.length + continue + } + // Block comment: emit until "*/" if present on same line; else rest of line. + if (ch === "/" && line[i + 1] === "*") { + const end = line.indexOf("*/", i + 2) + if (end === -1) { + out.push(line.slice(i)) + i = line.length + } else { + out.push(line.slice(i, end + 2)) + i = end + 2 + } + continue + } + // Try to match `Foo.` at this position. 
+ if (line.startsWith(nsName + ".", i)) { + // Make sure the char before is NOT a word character (otherwise we'd be in the middle of another identifier). + const prev = i === 0 ? "" : line[i - 1] + if (!/\w/.test(prev)) { + const after = line.slice(i + nsName.length + 1) + const nameMatch = after.match(/^([A-Za-z_$][\w$]*)/) + if (nameMatch && declaredNames.has(nameMatch[1])) { + out.push(nameMatch[1]) + i += nsName.length + 1 + nameMatch[1].length + rewriteCount++ + continue + } + } + } + out.push(ch) + i++ + } + return out.join("") +} +const rewrittenBody = dedented.map(rewriteLine) + +// Assemble the new file. Collapse multiple trailing blank lines so the +// self-reexport sits cleanly at the end. +const basename = path.basename(absPath, ".ts") +const assembled = [...before, ...rewrittenBody, ...after].join("\n") +const trimmed = assembled.replace(/\s+$/g, "") +const output = `${trimmed}\n\nexport * as ${nsName} from "./${basename}"\n` + +if (dryRun) { + console.log(`--- dry run: ${path.relative(process.cwd(), absPath)} ---`) + console.log(`namespace: ${nsName}`) + console.log(`body lines: ${body.length}`) + console.log(`declared names: ${Array.from(declaredNames).join(", ") || "(none)"}`) + console.log(`self-refs rewr: ${rewriteCount}`) + console.log(`self-reexport: export * as ${nsName} from "./${basename}"`) + console.log(`output preview (last 10 lines):`) + const outputLines = output.split("\n") + for (const l of outputLines.slice(Math.max(0, outputLines.length - 10))) { + console.log(` ${l}`) + } + process.exit(0) +} + +fs.writeFileSync(absPath, output) +console.log(`unwrapped ${path.relative(process.cwd(), absPath)} → ${nsName}`) +console.log(` body lines: ${body.length}`) +console.log(` self-refs rewr: ${rewriteCount}`) +console.log(` self-reexport: export * as ${nsName} from "./${basename}"`) +console.log("") +console.log("Next: verify with") +console.log(" bunx --bun tsgo --noEmit") +console.log(" bun run --conditions=browser ./src/index.ts generate") 
+console.log( + ` bun run test test/${path.relative(path.join(path.dirname(absPath), "..", ".."), absPath).replace(/\.ts$/, "")}*`, +) From 54078c4caea1adadea25ca1c4ec1479f3ab4e423 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:11:19 -0400 Subject: [PATCH 089/201] refactor: unwrap Shell namespace + self-reexport (#22964) --- packages/opencode/src/shell/shell.ts | 186 +++++++++++++-------------- 1 file changed, 93 insertions(+), 93 deletions(-) diff --git a/packages/opencode/src/shell/shell.ts b/packages/opencode/src/shell/shell.ts index 056a794dc8..60643c10b0 100644 --- a/packages/opencode/src/shell/shell.ts +++ b/packages/opencode/src/shell/shell.ts @@ -8,103 +8,103 @@ import { setTimeout as sleep } from "node:timers/promises" const SIGKILL_TIMEOUT_MS = 200 -export namespace Shell { - const BLACKLIST = new Set(["fish", "nu"]) - const LOGIN = new Set(["bash", "dash", "fish", "ksh", "sh", "zsh"]) - const POSIX = new Set(["bash", "dash", "ksh", "sh", "zsh"]) +const BLACKLIST = new Set(["fish", "nu"]) +const LOGIN = new Set(["bash", "dash", "fish", "ksh", "sh", "zsh"]) +const POSIX = new Set(["bash", "dash", "ksh", "sh", "zsh"]) - export async function killTree(proc: ChildProcess, opts?: { exited?: () => boolean }): Promise { - const pid = proc.pid - if (!pid || opts?.exited?.()) return +export async function killTree(proc: ChildProcess, opts?: { exited?: () => boolean }): Promise { + const pid = proc.pid + if (!pid || opts?.exited?.()) return - if (process.platform === "win32") { - await new Promise((resolve) => { - const killer = spawn("taskkill", ["/pid", String(pid), "/f", "/t"], { - stdio: "ignore", - windowsHide: true, - }) - killer.once("exit", () => resolve()) - killer.once("error", () => resolve()) + if (process.platform === "win32") { + await new Promise((resolve) => { + const killer = spawn("taskkill", ["/pid", String(pid), "/f", "/t"], { + stdio: "ignore", + windowsHide: true, }) - return - } + killer.once("exit", () => resolve()) + 
killer.once("error", () => resolve()) + }) + return + } - try { - process.kill(-pid, "SIGTERM") - await sleep(SIGKILL_TIMEOUT_MS) - if (!opts?.exited?.()) { - process.kill(-pid, "SIGKILL") - } - } catch (_e) { - proc.kill("SIGTERM") - await sleep(SIGKILL_TIMEOUT_MS) - if (!opts?.exited?.()) { - proc.kill("SIGKILL") - } + try { + process.kill(-pid, "SIGTERM") + await sleep(SIGKILL_TIMEOUT_MS) + if (!opts?.exited?.()) { + process.kill(-pid, "SIGKILL") + } + } catch (_e) { + proc.kill("SIGTERM") + await sleep(SIGKILL_TIMEOUT_MS) + if (!opts?.exited?.()) { + proc.kill("SIGKILL") } } - - function full(file: string) { - if (process.platform !== "win32") return file - const shell = Filesystem.windowsPath(file) - if (path.win32.dirname(shell) !== ".") { - if (shell.startsWith("/") && name(shell) === "bash") return gitbash() || shell - return shell - } - return which(shell) || shell - } - - function pick() { - const pwsh = which("pwsh.exe") - if (pwsh) return pwsh - const powershell = which("powershell.exe") - if (powershell) return powershell - } - - function select(file: string | undefined, opts?: { acceptable?: boolean }) { - if (file && (!opts?.acceptable || !BLACKLIST.has(name(file)))) return full(file) - if (process.platform === "win32") { - const shell = pick() - if (shell) return shell - } - return fallback() - } - - export function gitbash() { - if (process.platform !== "win32") return - if (Flag.OPENCODE_GIT_BASH_PATH) return Flag.OPENCODE_GIT_BASH_PATH - const git = which("git") - if (!git) return - const file = path.join(git, "..", "..", "bin", "bash.exe") - if (Filesystem.stat(file)?.size) return file - } - - function fallback() { - if (process.platform === "win32") { - const file = gitbash() - if (file) return file - return process.env.COMSPEC || "cmd.exe" - } - if (process.platform === "darwin") return "/bin/zsh" - const bash = which("bash") - if (bash) return bash - return "/bin/sh" - } - - export function name(file: string) { - if (process.platform === 
"win32") return path.win32.parse(Filesystem.windowsPath(file)).name.toLowerCase() - return path.basename(file).toLowerCase() - } - - export function login(file: string) { - return LOGIN.has(name(file)) - } - - export function posix(file: string) { - return POSIX.has(name(file)) - } - - export const preferred = lazy(() => select(process.env.SHELL)) - - export const acceptable = lazy(() => select(process.env.SHELL, { acceptable: true })) } + +function full(file: string) { + if (process.platform !== "win32") return file + const shell = Filesystem.windowsPath(file) + if (path.win32.dirname(shell) !== ".") { + if (shell.startsWith("/") && name(shell) === "bash") return gitbash() || shell + return shell + } + return which(shell) || shell +} + +function pick() { + const pwsh = which("pwsh.exe") + if (pwsh) return pwsh + const powershell = which("powershell.exe") + if (powershell) return powershell +} + +function select(file: string | undefined, opts?: { acceptable?: boolean }) { + if (file && (!opts?.acceptable || !BLACKLIST.has(name(file)))) return full(file) + if (process.platform === "win32") { + const shell = pick() + if (shell) return shell + } + return fallback() +} + +export function gitbash() { + if (process.platform !== "win32") return + if (Flag.OPENCODE_GIT_BASH_PATH) return Flag.OPENCODE_GIT_BASH_PATH + const git = which("git") + if (!git) return + const file = path.join(git, "..", "..", "bin", "bash.exe") + if (Filesystem.stat(file)?.size) return file +} + +function fallback() { + if (process.platform === "win32") { + const file = gitbash() + if (file) return file + return process.env.COMSPEC || "cmd.exe" + } + if (process.platform === "darwin") return "/bin/zsh" + const bash = which("bash") + if (bash) return bash + return "/bin/sh" +} + +export function name(file: string) { + if (process.platform === "win32") return path.win32.parse(Filesystem.windowsPath(file)).name.toLowerCase() + return path.basename(file).toLowerCase() +} + +export function login(file: 
string) { + return LOGIN.has(name(file)) +} + +export function posix(file: string) { + return POSIX.has(name(file)) +} + +export const preferred = lazy(() => select(process.env.SHELL)) + +export const acceptable = lazy(() => select(process.env.SHELL, { acceptable: true })) + +export * as Shell from "./shell" From 39342b0e759a265045a2b49f34af8ae5da773a8c Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 20:28:08 -0400 Subject: [PATCH 090/201] tui: fix Windows terminal suspend and input undo keybindings On Windows, native terminals don't support POSIX suspend (ctrl+z), so we now assign ctrl+z to input undo instead of terminal suspend. Terminal suspend is disabled on Windows to avoid conflicts with the undo functionality. --- .../src/cli/cmd/tui/config/tui-migrate.ts | 1 - .../opencode/src/cli/cmd/tui/config/tui.ts | 336 +++++++++--------- packages/opencode/src/config/config.ts | 1 - packages/opencode/src/config/keybinds.ts | 14 +- 4 files changed, 183 insertions(+), 169 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts b/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts index 3ce5c4b739..9323dd979a 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts @@ -26,7 +26,6 @@ const TuiLegacy = z interface MigrateInput { cwd: string directories: string[] - custom?: string } /** diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index d264273bca..6e5296db87 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -17,197 +17,203 @@ import { InstallationLocal, InstallationVersion } from "@/installation/version" import { makeRuntime } from "@/cli/effect/runtime" import { Filesystem, Log } from "@/util" -const log = Log.create({ service: "tui.config" }) +export namespace TuiConfig { + const log = Log.create({ service: "tui.config" }) 
-export const Info = TuiInfo + export const Info = TuiInfo -type Acc = { - result: Info -} + type Acc = { + result: Info + } -type State = { - config: Info - deps: Array> -} + type State = { + config: Info + deps: Array> + } -export type Info = z.output & { - // Internal resolved plugin list used by runtime loading. - plugin_origins?: ConfigPlugin.Origin[] -} + export type Info = z.output & { + // Internal resolved plugin list used by runtime loading. + plugin_origins?: ConfigPlugin.Origin[] + } -export interface Interface { - readonly get: () => Effect.Effect - readonly waitForDependencies: () => Effect.Effect -} + export interface Interface { + readonly get: () => Effect.Effect + readonly waitForDependencies: () => Effect.Effect + } -export class Service extends Context.Service()("@opencode/TuiConfig") {} + export class Service extends Context.Service()("@opencode/TuiConfig") {} -function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { - if (Filesystem.contains(ctx.directory, file)) return "local" - // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" - return "global" -} + function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { + if (Filesystem.contains(ctx.directory, file)) return "local" + // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" + return "global" + } -function customPath() { - return Flag.OPENCODE_TUI_CONFIG -} + function normalize(raw: Record) { + const data = { ...raw } + if (!("tui" in data)) return data + if (!isRecord(data.tui)) { + delete data.tui + return data + } -function normalize(raw: Record) { - const data = { ...raw } - if (!("tui" in data)) return data - if (!isRecord(data.tui)) { + const tui = data.tui delete data.tui - return data - } - - const tui = data.tui - delete data.tui - return { - ...tui, - ...data, - } -} - -async function resolvePlugins(config: Info, configFilepath: string) { - if (!config.plugin) 
return config - for (let i = 0; i < config.plugin.length; i++) { - config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) - } - return config -} - -async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { - const data = await loadFile(file) - acc.result = mergeDeep(acc.result, data) - if (!data.plugin?.length) return - - const scope = pluginScope(file, ctx) - const plugins = ConfigPlugin.deduplicatePluginOrigins([ - ...(acc.result.plugin_origins ?? []), - ...data.plugin.map((spec) => ({ spec, scope, source: file })), - ]) - acc.result.plugin = plugins.map((item) => item.spec) - acc.result.plugin_origins = plugins -} - -async function loadState(ctx: { directory: string }) { - let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) - const directories = await ConfigPaths.directories(ctx.directory) - const custom = customPath() - await migrateTuiConfig({ directories, custom, cwd: ctx.directory }) - // Re-compute after migration since migrateTuiConfig may have created new tui.json files - projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) - - const acc: Acc = { - result: {}, - } - - for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { - await mergeFile(acc, file, ctx) - } - - if (custom) { - await mergeFile(acc, custom, ctx) - log.debug("loaded custom tui config", { path: custom }) - } - - for (const file of projectFiles) { - await mergeFile(acc, file, ctx) - } - - const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) - - for (const dir of dirs) { - if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue - for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { - await mergeFile(acc, file, ctx) + return { + ...tui, + ...data, } } - const keybinds = { ...(acc.result.keybinds ?? 
{}) } - if (process.platform === "win32") { - // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. - keybinds.terminal_suspend = "none" - keybinds.input_undo ??= unique([ - "ctrl+z", - ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), - ]).join(",") + async function resolvePlugins(config: Info, configFilepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) + } + return config } - acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) - return { - config: acc.result, - dirs: acc.result.plugin?.length ? dirs : [], + async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { + const data = await loadFile(file) + acc.result = mergeDeep(acc.result, data) + if (!data.plugin?.length) return + + const scope = pluginScope(file, ctx) + const plugins = ConfigPlugin.deduplicatePluginOrigins([ + ...(acc.result.plugin_origins ?? []), + ...data.plugin.map((spec) => ({ spec, scope, source: file })), + ]) + acc.result.plugin = plugins.map((item) => item.spec) + acc.result.plugin_origins = plugins } -} -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const directory = yield* CurrentWorkingDirectory - const npm = yield* Npm.Service - const data = yield* Effect.promise(() => loadState({ directory })) - const deps = yield* Effect.forEach( - data.dirs, - (dir) => - npm - .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], - }) - .pipe(Effect.forkScoped), - { - concurrency: "unbounded", - }, - ) + async function loadState(ctx: { directory: string }) { + // Every config dir we may read from: global config dir, any `.opencode` + // folders between cwd and home, and OPENCODE_CONFIG_DIR. 
+ const directories = await ConfigPaths.directories(ctx.directory) + // One-time migration: extract tui keys (theme/keybinds/tui) from existing + // opencode.json files into sibling tui.json files. + await migrateTuiConfig({ directories, cwd: ctx.directory }) - const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) + const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG + ? [] + : await ConfigPaths.projectFiles("tui", ctx.directory) - const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => - Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), - ) - return Service.of({ get, waitForDependencies }) - }).pipe(Effect.withSpan("TuiConfig.layer")), -) + const acc: Acc = { + result: {}, + } -export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) + // 1. Global tui config (lowest precedence). + for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { + await mergeFile(acc, file, ctx) + } -const { runPromise } = makeRuntime(Service, defaultLayer) + // 2. Explicit OPENCODE_TUI_CONFIG override, if set. + if (Flag.OPENCODE_TUI_CONFIG) { + await mergeFile(acc, Flag.OPENCODE_TUI_CONFIG, ctx) + log.debug("loaded custom tui config", { path: Flag.OPENCODE_TUI_CONFIG }) + } -export async function waitForDependencies() { - await runPromise((svc) => svc.waitForDependencies()) -} + // 3. Project tui files, applied root-first so the closest file wins. + for (const file of projectFiles) { + await mergeFile(acc, file, ctx) + } -export async function get() { - return runPromise((svc) => svc.get()) -} + // 4. `.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while + // walking up the tree. Also returned below so callers can install plugin + // dependencies from each location. 
+ const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) -async function loadFile(filepath: string): Promise { - const text = await ConfigPaths.readFile(filepath) - if (!text) return {} - return load(text, filepath).catch((error) => { - log.warn("failed to load tui config", { path: filepath, error }) - return {} - }) -} + for (const dir of dirs) { + if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue + for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { + await mergeFile(acc, file, ctx) + } + } -async function load(text: string, configFilepath: string): Promise { - return ConfigParse.load(Info, text, { - type: "path", - path: configFilepath, - missing: "empty", - normalize: (data) => { - if (!isRecord(data)) return {} + const keybinds = { ...(acc.result.keybinds ?? {}) } + if (process.platform === "win32") { + // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. + keybinds.terminal_suspend = "none" + keybinds.input_undo ??= unique([ + "ctrl+z", + ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), + ]).join(",") + } + acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) - // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json - // (mirroring the old opencode.json shape) still get their settings applied. - return normalize(data) - }, - }) - .then((data) => resolvePlugins(data, configFilepath)) - .catch((error) => { - log.warn("invalid tui config", { path: configFilepath, error }) + return { + config: acc.result, + dirs: acc.result.plugin?.length ? 
dirs : [], + } + } + + export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const directory = yield* CurrentWorkingDirectory + const npm = yield* Npm.Service + const data = yield* Effect.promise(() => loadState({ directory })) + const deps = yield* Effect.forEach( + data.dirs, + (dir) => + npm + .install(dir, { + add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], + }) + .pipe(Effect.forkScoped), + { + concurrency: "unbounded", + }, + ) + + const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) + + const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => + Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), + ) + return Service.of({ get, waitForDependencies }) + }).pipe(Effect.withSpan("TuiConfig.layer")), + ) + + export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) + + const { runPromise } = makeRuntime(Service, defaultLayer) + + export async function waitForDependencies() { + await runPromise((svc) => svc.waitForDependencies()) + } + + export async function get() { + return runPromise((svc) => svc.get()) + } + + async function loadFile(filepath: string): Promise { + const text = await ConfigPaths.readFile(filepath) + if (!text) return {} + return load(text, filepath).catch((error) => { + log.warn("failed to load tui config", { path: filepath, error }) return {} }) -} + } -export * as TuiConfig from "./tui" + async function load(text: string, configFilepath: string): Promise { + return ConfigParse.load(Info, text, { + type: "path", + path: configFilepath, + missing: "empty", + normalize: (data) => { + if (!isRecord(data)) return {} + + // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json + // (mirroring the old opencode.json shape) still get their settings applied. 
+ return normalize(data) + }, + }) + .then((data) => resolvePlugins(data, configFilepath)) + .catch((error) => { + log.warn("invalid tui config", { path: configFilepath, error }) + return {} + }) + } +} diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 3cbc539600..adccb6353b 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -19,7 +19,6 @@ import { GlobalBus } from "@/bus/global" import { Event } from "../server/event" import { Account } from "@/account" import { isRecord } from "@/util/record" -import { InvalidError, JsonError } from "./error" import type { ConsoleState } from "./console-state" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { InstanceState } from "@/effect" diff --git a/packages/opencode/src/config/keybinds.ts b/packages/opencode/src/config/keybinds.ts index cb146b7cae..8a22289d2a 100644 --- a/packages/opencode/src/config/keybinds.ts +++ b/packages/opencode/src/config/keybinds.ts @@ -106,7 +106,12 @@ export const Keybinds = z input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"), input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"), input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"), - input_undo: z.string().optional().default("ctrl+-,super+z").describe("Undo in input"), + input_undo: z + .string() + .optional() + // On Windows prepend ctrl+z since terminal_suspend releases the binding. + .default(process.platform === "win32" ? 
"ctrl+z,ctrl+-,super+z" : "ctrl+-,super+z") + .describe("Undo in input"), input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"), input_word_forward: z .string() @@ -144,7 +149,12 @@ export const Keybinds = z session_child_cycle: z.string().optional().default("right").describe("Go to next child session"), session_child_cycle_reverse: z.string().optional().default("left").describe("Go to previous child session"), session_parent: z.string().optional().default("up").describe("Go to parent session"), - terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"), + terminal_suspend: z + .string() + .optional() + .default("ctrl+z") + .transform((v) => (process.platform === "win32" ? "none" : v)) + .describe("Suspend terminal"), terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"), tips_toggle: z.string().optional().default("h").describe("Toggle tips on home screen"), plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"), From d6af5a686cd45dc68504283645de990bbeaf2005 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Thu, 16 Apr 2026 20:46:40 -0400 Subject: [PATCH 091/201] tui: convert TuiConfig namespace to ES module exports --- .../opencode/src/cli/cmd/tui/config/tui.ts | 372 +++++++++--------- 1 file changed, 185 insertions(+), 187 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index 6e5296db87..b55cf3b83f 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -1,3 +1,5 @@ +export * as TuiConfig from "./tui" + import z from "zod" import { mergeDeep, unique } from "remeda" import { Context, Effect, Fiber, Layer } from "effect" @@ -17,203 +19,199 @@ import { InstallationLocal, InstallationVersion } from "@/installation/version" import { makeRuntime } from "@/cli/effect/runtime" import { 
Filesystem, Log } from "@/util" -export namespace TuiConfig { - const log = Log.create({ service: "tui.config" }) +const log = Log.create({ service: "tui.config" }) - export const Info = TuiInfo +export const Info = TuiInfo - type Acc = { - result: Info - } +type Acc = { + result: Info +} - type State = { - config: Info - deps: Array> - } +type State = { + config: Info + deps: Array> +} - export type Info = z.output & { - // Internal resolved plugin list used by runtime loading. - plugin_origins?: ConfigPlugin.Origin[] - } +export type Info = z.output & { + // Internal resolved plugin list used by runtime loading. + plugin_origins?: ConfigPlugin.Origin[] +} - export interface Interface { - readonly get: () => Effect.Effect - readonly waitForDependencies: () => Effect.Effect - } +export interface Interface { + readonly get: () => Effect.Effect + readonly waitForDependencies: () => Effect.Effect +} - export class Service extends Context.Service()("@opencode/TuiConfig") {} +export class Service extends Context.Service()("@opencode/TuiConfig") {} - function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { - if (Filesystem.contains(ctx.directory, file)) return "local" - // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" - return "global" - } +function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope { + if (Filesystem.contains(ctx.directory, file)) return "local" + // if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local" + return "global" +} - function normalize(raw: Record) { - const data = { ...raw } - if (!("tui" in data)) return data - if (!isRecord(data.tui)) { - delete data.tui - return data - } - - const tui = data.tui +function normalize(raw: Record) { + const data = { ...raw } + if (!("tui" in data)) return data + if (!isRecord(data.tui)) { delete data.tui - return { - ...tui, - ...data, - } + return data } - async function 
resolvePlugins(config: Info, configFilepath: string) { - if (!config.plugin) return config - for (let i = 0; i < config.plugin.length; i++) { - config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) - } - return config - } - - async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { - const data = await loadFile(file) - acc.result = mergeDeep(acc.result, data) - if (!data.plugin?.length) return - - const scope = pluginScope(file, ctx) - const plugins = ConfigPlugin.deduplicatePluginOrigins([ - ...(acc.result.plugin_origins ?? []), - ...data.plugin.map((spec) => ({ spec, scope, source: file })), - ]) - acc.result.plugin = plugins.map((item) => item.spec) - acc.result.plugin_origins = plugins - } - - async function loadState(ctx: { directory: string }) { - // Every config dir we may read from: global config dir, any `.opencode` - // folders between cwd and home, and OPENCODE_CONFIG_DIR. - const directories = await ConfigPaths.directories(ctx.directory) - // One-time migration: extract tui keys (theme/keybinds/tui) from existing - // opencode.json files into sibling tui.json files. - await migrateTuiConfig({ directories, cwd: ctx.directory }) - - const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG - ? [] - : await ConfigPaths.projectFiles("tui", ctx.directory) - - const acc: Acc = { - result: {}, - } - - // 1. Global tui config (lowest precedence). - for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { - await mergeFile(acc, file, ctx) - } - - // 2. Explicit OPENCODE_TUI_CONFIG override, if set. - if (Flag.OPENCODE_TUI_CONFIG) { - await mergeFile(acc, Flag.OPENCODE_TUI_CONFIG, ctx) - log.debug("loaded custom tui config", { path: Flag.OPENCODE_TUI_CONFIG }) - } - - // 3. Project tui files, applied root-first so the closest file wins. - for (const file of projectFiles) { - await mergeFile(acc, file, ctx) - } - - // 4. 
`.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while - // walking up the tree. Also returned below so callers can install plugin - // dependencies from each location. - const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) - - for (const dir of dirs) { - if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue - for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { - await mergeFile(acc, file, ctx) - } - } - - const keybinds = { ...(acc.result.keybinds ?? {}) } - if (process.platform === "win32") { - // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. - keybinds.terminal_suspend = "none" - keybinds.input_undo ??= unique([ - "ctrl+z", - ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), - ]).join(",") - } - acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) - - return { - config: acc.result, - dirs: acc.result.plugin?.length ? dirs : [], - } - } - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const directory = yield* CurrentWorkingDirectory - const npm = yield* Npm.Service - const data = yield* Effect.promise(() => loadState({ directory })) - const deps = yield* Effect.forEach( - data.dirs, - (dir) => - npm - .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? 
"" : "@" + InstallationVersion)], - }) - .pipe(Effect.forkScoped), - { - concurrency: "unbounded", - }, - ) - - const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) - - const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => - Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), - ) - return Service.of({ get, waitForDependencies }) - }).pipe(Effect.withSpan("TuiConfig.layer")), - ) - - export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) - - const { runPromise } = makeRuntime(Service, defaultLayer) - - export async function waitForDependencies() { - await runPromise((svc) => svc.waitForDependencies()) - } - - export async function get() { - return runPromise((svc) => svc.get()) - } - - async function loadFile(filepath: string): Promise { - const text = await ConfigPaths.readFile(filepath) - if (!text) return {} - return load(text, filepath).catch((error) => { - log.warn("failed to load tui config", { path: filepath, error }) - return {} - }) - } - - async function load(text: string, configFilepath: string): Promise { - return ConfigParse.load(Info, text, { - type: "path", - path: configFilepath, - missing: "empty", - normalize: (data) => { - if (!isRecord(data)) return {} - - // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json - // (mirroring the old opencode.json shape) still get their settings applied. 
- return normalize(data) - }, - }) - .then((data) => resolvePlugins(data, configFilepath)) - .catch((error) => { - log.warn("invalid tui config", { path: configFilepath, error }) - return {} - }) + const tui = data.tui + delete data.tui + return { + ...tui, + ...data, } } + +async function resolvePlugins(config: Info, configFilepath: string) { + if (!config.plugin) return config + for (let i = 0; i < config.plugin.length; i++) { + config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath) + } + return config +} + +async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { + const data = await loadFile(file) + acc.result = mergeDeep(acc.result, data) + if (!data.plugin?.length) return + + const scope = pluginScope(file, ctx) + const plugins = ConfigPlugin.deduplicatePluginOrigins([ + ...(acc.result.plugin_origins ?? []), + ...data.plugin.map((spec) => ({ spec, scope, source: file })), + ]) + acc.result.plugin = plugins.map((item) => item.spec) + acc.result.plugin_origins = plugins +} + +async function loadState(ctx: { directory: string }) { + // Every config dir we may read from: global config dir, any `.opencode` + // folders between cwd and home, and OPENCODE_CONFIG_DIR. + const directories = await ConfigPaths.directories(ctx.directory) + // One-time migration: extract tui keys (theme/keybinds/tui) from existing + // opencode.json files into sibling tui.json files. + await migrateTuiConfig({ directories, cwd: ctx.directory }) + + const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) + + const acc: Acc = { + result: {}, + } + + // 1. Global tui config (lowest precedence). + for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { + await mergeFile(acc, file, ctx) + } + + // 2. Explicit OPENCODE_TUI_CONFIG override, if set. 
+ if (Flag.OPENCODE_TUI_CONFIG) { + await mergeFile(acc, Flag.OPENCODE_TUI_CONFIG, ctx) + log.debug("loaded custom tui config", { path: Flag.OPENCODE_TUI_CONFIG }) + } + + // 3. Project tui files, applied root-first so the closest file wins. + for (const file of projectFiles) { + await mergeFile(acc, file, ctx) + } + + // 4. `.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while + // walking up the tree. Also returned below so callers can install plugin + // dependencies from each location. + const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) + + for (const dir of dirs) { + if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue + for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { + await mergeFile(acc, file, ctx) + } + } + + const keybinds = { ...(acc.result.keybinds ?? {}) } + if (process.platform === "win32") { + // Native Windows terminals do not support POSIX suspend, so prefer prompt undo. + keybinds.terminal_suspend = "none" + keybinds.input_undo ??= unique([ + "ctrl+z", + ...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","), + ]).join(",") + } + acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds) + + return { + config: acc.result, + dirs: acc.result.plugin?.length ? dirs : [], + } +} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const directory = yield* CurrentWorkingDirectory + const npm = yield* Npm.Service + const data = yield* Effect.promise(() => loadState({ directory })) + const deps = yield* Effect.forEach( + data.dirs, + (dir) => + npm + .install(dir, { + add: ["@opencode-ai/plugin" + (InstallationLocal ? 
"" : "@" + InstallationVersion)], + }) + .pipe(Effect.forkScoped), + { + concurrency: "unbounded", + }, + ) + + const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config)) + + const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() => + Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid), + ) + return Service.of({ get, waitForDependencies }) + }).pipe(Effect.withSpan("TuiConfig.layer")), +) + +export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) + +const { runPromise } = makeRuntime(Service, defaultLayer) + +export async function waitForDependencies() { + await runPromise((svc) => svc.waitForDependencies()) +} + +export async function get() { + return runPromise((svc) => svc.get()) +} + +async function loadFile(filepath: string): Promise { + const text = await ConfigPaths.readFile(filepath) + if (!text) return {} + return load(text, filepath).catch((error) => { + log.warn("failed to load tui config", { path: filepath, error }) + return {} + }) +} + +async function load(text: string, configFilepath: string): Promise { + return ConfigParse.load(Info, text, { + type: "path", + path: configFilepath, + missing: "empty", + normalize: (data) => { + if (!isRecord(data)) return {} + + // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json + // (mirroring the old opencode.json shape) still get their settings applied. 
+ return normalize(data) + }, + }) + .then((data) => resolvePlugins(data, configFilepath)) + .catch((error) => { + log.warn("invalid tui config", { path: configFilepath, error }) + return {} + }) +} From 51d8219c46f902b90cee716f3b8475e163e21e7c Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:49:39 -0400 Subject: [PATCH 092/201] refactor: unwrap session/ tier-2 namespaces + self-reexport (#22973) --- packages/opencode/src/session/compaction.ts | 666 ++-- packages/opencode/src/session/instruction.ts | 318 +- packages/opencode/src/session/llm.ts | 824 ++--- packages/opencode/src/session/message-v2.ts | 1908 +++++------ packages/opencode/src/session/message.ts | 346 +- packages/opencode/src/session/processor.ts | 1158 +++---- packages/opencode/src/session/prompt.ts | 3174 +++++++++--------- packages/opencode/src/session/retry.ts | 232 +- packages/opencode/src/session/revert.ts | 290 +- packages/opencode/src/session/run-state.ts | 198 +- packages/opencode/src/session/status.ts | 156 +- packages/opencode/src/session/summary.ts | 274 +- packages/opencode/src/session/system.ts | 126 +- packages/opencode/src/session/todo.ts | 148 +- 14 files changed, 4909 insertions(+), 4909 deletions(-) diff --git a/packages/opencode/src/session/compaction.ts b/packages/opencode/src/session/compaction.ts index 3ef6977547..212f5fdbab 100644 --- a/packages/opencode/src/session/compaction.ts +++ b/packages/opencode/src/session/compaction.ts @@ -17,173 +17,172 @@ import { Effect, Layer, Context } from "effect" import { InstanceState } from "@/effect" import { isOverflow as overflow } from "./overflow" -export namespace SessionCompaction { - const log = Log.create({ service: "session.compaction" }) +const log = Log.create({ service: "session.compaction" }) - export const Event = { - Compacted: BusEvent.define( - "session.compacted", - z.object({ - sessionID: SessionID.zod, - }), - ), - } +export const Event = { + Compacted: BusEvent.define( + "session.compacted", + 
z.object({ + sessionID: SessionID.zod, + }), + ), +} - export const PRUNE_MINIMUM = 20_000 - export const PRUNE_PROTECT = 40_000 - const PRUNE_PROTECTED_TOOLS = ["skill"] +export const PRUNE_MINIMUM = 20_000 +export const PRUNE_PROTECT = 40_000 +const PRUNE_PROTECTED_TOOLS = ["skill"] - export interface Interface { - readonly isOverflow: (input: { +export interface Interface { + readonly isOverflow: (input: { + tokens: MessageV2.Assistant["tokens"] + model: Provider.Model + }) => Effect.Effect + readonly prune: (input: { sessionID: SessionID }) => Effect.Effect + readonly process: (input: { + parentID: MessageID + messages: MessageV2.WithParts[] + sessionID: SessionID + auto: boolean + overflow?: boolean + }) => Effect.Effect<"continue" | "stop"> + readonly create: (input: { + sessionID: SessionID + agent: string + model: { providerID: ProviderID; modelID: ModelID } + auto: boolean + overflow?: boolean + }) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SessionCompaction") {} + +export const layer: Layer.Layer< + Service, + never, + | Bus.Service + | Config.Service + | Session.Service + | Agent.Service + | Plugin.Service + | SessionProcessor.Service + | Provider.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + const config = yield* Config.Service + const session = yield* Session.Service + const agents = yield* Agent.Service + const plugin = yield* Plugin.Service + const processors = yield* SessionProcessor.Service + const provider = yield* Provider.Service + + const isOverflow = Effect.fn("SessionCompaction.isOverflow")(function* (input: { tokens: MessageV2.Assistant["tokens"] model: Provider.Model - }) => Effect.Effect - readonly prune: (input: { sessionID: SessionID }) => Effect.Effect - readonly process: (input: { + }) { + return overflow({ cfg: yield* config.get(), tokens: input.tokens, model: input.model }) + }) + + // goes backwards through parts until there are 
PRUNE_PROTECT tokens worth of tool + // calls, then erases output of older tool calls to free context space + const prune = Effect.fn("SessionCompaction.prune")(function* (input: { sessionID: SessionID }) { + const cfg = yield* config.get() + if (cfg.compaction?.prune === false) return + log.info("pruning") + + const msgs = yield* session + .messages({ sessionID: input.sessionID }) + .pipe(Effect.catchIf(NotFoundError.isInstance, () => Effect.succeed(undefined))) + if (!msgs) return + + let total = 0 + let pruned = 0 + const toPrune: MessageV2.ToolPart[] = [] + let turns = 0 + + loop: for (let msgIndex = msgs.length - 1; msgIndex >= 0; msgIndex--) { + const msg = msgs[msgIndex] + if (msg.info.role === "user") turns++ + if (turns < 2) continue + if (msg.info.role === "assistant" && msg.info.summary) break loop + for (let partIndex = msg.parts.length - 1; partIndex >= 0; partIndex--) { + const part = msg.parts[partIndex] + if (part.type === "tool") + if (part.state.status === "completed") { + if (PRUNE_PROTECTED_TOOLS.includes(part.tool)) continue + if (part.state.time.compacted) break loop + const estimate = Token.estimate(part.state.output) + total += estimate + if (total > PRUNE_PROTECT) { + pruned += estimate + toPrune.push(part) + } + } + } + } + + log.info("found", { pruned, total }) + if (pruned > PRUNE_MINIMUM) { + for (const part of toPrune) { + if (part.state.status === "completed") { + part.state.time.compacted = Date.now() + yield* session.updatePart(part) + } + } + log.info("pruned", { count: toPrune.length }) + } + }) + + const processCompaction = Effect.fn("SessionCompaction.process")(function* (input: { parentID: MessageID messages: MessageV2.WithParts[] sessionID: SessionID auto: boolean overflow?: boolean - }) => Effect.Effect<"continue" | "stop"> - readonly create: (input: { - sessionID: SessionID - agent: string - model: { providerID: ProviderID; modelID: ModelID } - auto: boolean - overflow?: boolean - }) => Effect.Effect - } + }) { + const 
parent = input.messages.findLast((m) => m.info.id === input.parentID) + if (!parent || parent.info.role !== "user") { + throw new Error(`Compaction parent must be a user message: ${input.parentID}`) + } + const userMessage = parent.info - export class Service extends Context.Service()("@opencode/SessionCompaction") {} - - export const layer: Layer.Layer< - Service, - never, - | Bus.Service - | Config.Service - | Session.Service - | Agent.Service - | Plugin.Service - | SessionProcessor.Service - | Provider.Service - > = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const config = yield* Config.Service - const session = yield* Session.Service - const agents = yield* Agent.Service - const plugin = yield* Plugin.Service - const processors = yield* SessionProcessor.Service - const provider = yield* Provider.Service - - const isOverflow = Effect.fn("SessionCompaction.isOverflow")(function* (input: { - tokens: MessageV2.Assistant["tokens"] - model: Provider.Model - }) { - return overflow({ cfg: yield* config.get(), tokens: input.tokens, model: input.model }) - }) - - // goes backwards through parts until there are PRUNE_PROTECT tokens worth of tool - // calls, then erases output of older tool calls to free context space - const prune = Effect.fn("SessionCompaction.prune")(function* (input: { sessionID: SessionID }) { - const cfg = yield* config.get() - if (cfg.compaction?.prune === false) return - log.info("pruning") - - const msgs = yield* session - .messages({ sessionID: input.sessionID }) - .pipe(Effect.catchIf(NotFoundError.isInstance, () => Effect.succeed(undefined))) - if (!msgs) return - - let total = 0 - let pruned = 0 - const toPrune: MessageV2.ToolPart[] = [] - let turns = 0 - - loop: for (let msgIndex = msgs.length - 1; msgIndex >= 0; msgIndex--) { - const msg = msgs[msgIndex] - if (msg.info.role === "user") turns++ - if (turns < 2) continue - if (msg.info.role === "assistant" && msg.info.summary) break loop - for (let 
partIndex = msg.parts.length - 1; partIndex >= 0; partIndex--) { - const part = msg.parts[partIndex] - if (part.type === "tool") - if (part.state.status === "completed") { - if (PRUNE_PROTECTED_TOOLS.includes(part.tool)) continue - if (part.state.time.compacted) break loop - const estimate = Token.estimate(part.state.output) - total += estimate - if (total > PRUNE_PROTECT) { - pruned += estimate - toPrune.push(part) - } - } + let messages = input.messages + let replay: + | { + info: MessageV2.User + parts: MessageV2.Part[] + } + | undefined + if (input.overflow) { + const idx = input.messages.findIndex((m) => m.info.id === input.parentID) + for (let i = idx - 1; i >= 0; i--) { + const msg = input.messages[i] + if (msg.info.role === "user" && !msg.parts.some((p) => p.type === "compaction")) { + replay = { info: msg.info, parts: msg.parts } + messages = input.messages.slice(0, i) + break } } - - log.info("found", { pruned, total }) - if (pruned > PRUNE_MINIMUM) { - for (const part of toPrune) { - if (part.state.status === "completed") { - part.state.time.compacted = Date.now() - yield* session.updatePart(part) - } - } - log.info("pruned", { count: toPrune.length }) + const hasContent = + replay && messages.some((m) => m.info.role === "user" && !m.parts.some((p) => p.type === "compaction")) + if (!hasContent) { + replay = undefined + messages = input.messages } - }) + } - const processCompaction = Effect.fn("SessionCompaction.process")(function* (input: { - parentID: MessageID - messages: MessageV2.WithParts[] - sessionID: SessionID - auto: boolean - overflow?: boolean - }) { - const parent = input.messages.findLast((m) => m.info.id === input.parentID) - if (!parent || parent.info.role !== "user") { - throw new Error(`Compaction parent must be a user message: ${input.parentID}`) - } - const userMessage = parent.info - - let messages = input.messages - let replay: - | { - info: MessageV2.User - parts: MessageV2.Part[] - } - | undefined - if (input.overflow) { - const 
idx = input.messages.findIndex((m) => m.info.id === input.parentID) - for (let i = idx - 1; i >= 0; i--) { - const msg = input.messages[i] - if (msg.info.role === "user" && !msg.parts.some((p) => p.type === "compaction")) { - replay = { info: msg.info, parts: msg.parts } - messages = input.messages.slice(0, i) - break - } - } - const hasContent = - replay && messages.some((m) => m.info.role === "user" && !m.parts.some((p) => p.type === "compaction")) - if (!hasContent) { - replay = undefined - messages = input.messages - } - } - - const agent = yield* agents.get("compaction") - const model = agent.model - ? yield* provider.getModel(agent.model.providerID, agent.model.modelID) - : yield* provider.getModel(userMessage.model.providerID, userMessage.model.modelID) - // Allow plugins to inject context or replace compaction prompt. - const compacting = yield* plugin.trigger( - "experimental.session.compacting", - { sessionID: input.sessionID }, - { context: [], prompt: undefined }, - ) - const defaultPrompt = `Provide a detailed prompt for continuing our conversation above. + const agent = yield* agents.get("compaction") + const model = agent.model + ? yield* provider.getModel(agent.model.providerID, agent.model.modelID) + : yield* provider.getModel(userMessage.model.providerID, userMessage.model.modelID) + // Allow plugins to inject context or replace compaction prompt. + const compacting = yield* plugin.trigger( + "experimental.session.compacting", + { sessionID: input.sessionID }, + { context: [], prompt: undefined }, + ) + const defaultPrompt = `Provide a detailed prompt for continuing our conversation above. Focus on information that would be helpful for continuing the conversation, including what we did, what we're doing, which files we're working on, and what we're going to do next. The summary that you construct will be used so that another agent can read it and continue the work. Do not call any tools. Respond only with the summary text. 
@@ -213,200 +212,201 @@ When constructing the summary, try to stick to this template: [Construct a structured list of relevant files that have been read, edited, or created that pertain to the task at hand. If all the files in a directory are relevant, include the path to the directory.] ---` - const prompt = compacting.prompt ?? [defaultPrompt, ...compacting.context].join("\n\n") - const msgs = structuredClone(messages) - yield* plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs }) - const modelMessages = yield* MessageV2.toModelMessagesEffect(msgs, model, { stripMedia: true }) - const ctx = yield* InstanceState.context - const msg: MessageV2.Assistant = { - id: MessageID.ascending(), - role: "assistant", - parentID: input.parentID, - sessionID: input.sessionID, - mode: "compaction", - agent: "compaction", - variant: userMessage.model.variant, - summary: true, - path: { - cwd: ctx.directory, - root: ctx.worktree, + const prompt = compacting.prompt ?? [defaultPrompt, ...compacting.context].join("\n\n") + const msgs = structuredClone(messages) + yield* plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs }) + const modelMessages = yield* MessageV2.toModelMessagesEffect(msgs, model, { stripMedia: true }) + const ctx = yield* InstanceState.context + const msg: MessageV2.Assistant = { + id: MessageID.ascending(), + role: "assistant", + parentID: input.parentID, + sessionID: input.sessionID, + mode: "compaction", + agent: "compaction", + variant: userMessage.model.variant, + summary: true, + path: { + cwd: ctx.directory, + root: ctx.worktree, + }, + cost: 0, + tokens: { + output: 0, + input: 0, + reasoning: 0, + cache: { read: 0, write: 0 }, + }, + modelID: model.id, + providerID: model.providerID, + time: { + created: Date.now(), + }, + } + yield* session.updateMessage(msg) + const processor = yield* processors.create({ + assistantMessage: msg, + sessionID: input.sessionID, + model, + }) + const result = yield* 
processor.process({ + user: userMessage, + agent, + sessionID: input.sessionID, + tools: {}, + system: [], + messages: [ + ...modelMessages, + { + role: "user", + content: [{ type: "text", text: prompt }], }, - cost: 0, - tokens: { - output: 0, - input: 0, - reasoning: 0, - cache: { read: 0, write: 0 }, - }, - modelID: model.id, - providerID: model.providerID, - time: { - created: Date.now(), - }, - } - yield* session.updateMessage(msg) - const processor = yield* processors.create({ - assistantMessage: msg, - sessionID: input.sessionID, - model, - }) - const result = yield* processor.process({ - user: userMessage, - agent, - sessionID: input.sessionID, - tools: {}, - system: [], - messages: [ - ...modelMessages, - { - role: "user", - content: [{ type: "text", text: prompt }], - }, - ], - model, - }) + ], + model, + }) - if (result === "compact") { - processor.message.error = new MessageV2.ContextOverflowError({ - message: replay - ? "Conversation history too large to compact - exceeds model context limit" - : "Session too large to compact - context exceeds model limit even after stripping media", - }).toObject() - processor.message.finish = "error" - yield* session.updateMessage(processor.message) - return "stop" + if (result === "compact") { + processor.message.error = new MessageV2.ContextOverflowError({ + message: replay + ? 
"Conversation history too large to compact - exceeds model context limit" + : "Session too large to compact - context exceeds model limit even after stripping media", + }).toObject() + processor.message.finish = "error" + yield* session.updateMessage(processor.message) + return "stop" + } + + if (result === "continue" && input.auto) { + if (replay) { + const original = replay.info + const replayMsg = yield* session.updateMessage({ + id: MessageID.ascending(), + role: "user", + sessionID: input.sessionID, + time: { created: Date.now() }, + agent: original.agent, + model: original.model, + format: original.format, + tools: original.tools, + system: original.system, + }) + for (const part of replay.parts) { + if (part.type === "compaction") continue + const replayPart = + part.type === "file" && MessageV2.isMedia(part.mime) + ? { type: "text" as const, text: `[Attached ${part.mime}: ${part.filename ?? "file"}]` } + : part + yield* session.updatePart({ + ...replayPart, + id: PartID.ascending(), + messageID: replayMsg.id, + sessionID: input.sessionID, + }) + } } - if (result === "continue" && input.auto) { - if (replay) { - const original = replay.info - const replayMsg = yield* session.updateMessage({ + if (!replay) { + const info = yield* provider.getProvider(userMessage.model.providerID) + if ( + (yield* plugin.trigger( + "experimental.compaction.autocontinue", + { + sessionID: input.sessionID, + agent: userMessage.agent, + model: yield* provider.getModel(userMessage.model.providerID, userMessage.model.modelID), + provider: { + source: info.source, + info, + options: info.options, + }, + message: userMessage, + overflow: input.overflow === true, + }, + { enabled: true }, + )).enabled + ) { + const continueMsg = yield* session.updateMessage({ id: MessageID.ascending(), role: "user", sessionID: input.sessionID, time: { created: Date.now() }, - agent: original.agent, - model: original.model, - format: original.format, - tools: original.tools, - system: original.system, 
+ agent: userMessage.agent, + model: userMessage.model, + }) + const text = + (input.overflow + ? "The previous request exceeded the provider's size limit due to large media attachments. The conversation was compacted and media files were removed from context. If the user was asking about attached images or files, explain that the attachments were too large to process and suggest they try again with smaller or fewer files.\n\n" + : "") + + "Continue if you have next steps, or stop and ask for clarification if you are unsure how to proceed." + yield* session.updatePart({ + id: PartID.ascending(), + messageID: continueMsg.id, + sessionID: input.sessionID, + type: "text", + // Internal marker for auto-compaction followups so provider plugins + // can distinguish them from manual post-compaction user prompts. + // This is not a stable plugin contract and may change or disappear. + metadata: { compaction_continue: true }, + synthetic: true, + text, + time: { + start: Date.now(), + end: Date.now(), + }, }) - for (const part of replay.parts) { - if (part.type === "compaction") continue - const replayPart = - part.type === "file" && MessageV2.isMedia(part.mime) - ? { type: "text" as const, text: `[Attached ${part.mime}: ${part.filename ?? 
"file"}]` } - : part - yield* session.updatePart({ - ...replayPart, - id: PartID.ascending(), - messageID: replayMsg.id, - sessionID: input.sessionID, - }) - } - } - - if (!replay) { - const info = yield* provider.getProvider(userMessage.model.providerID) - if ( - (yield* plugin.trigger( - "experimental.compaction.autocontinue", - { - sessionID: input.sessionID, - agent: userMessage.agent, - model: yield* provider.getModel(userMessage.model.providerID, userMessage.model.modelID), - provider: { - source: info.source, - info, - options: info.options, - }, - message: userMessage, - overflow: input.overflow === true, - }, - { enabled: true }, - )).enabled - ) { - const continueMsg = yield* session.updateMessage({ - id: MessageID.ascending(), - role: "user", - sessionID: input.sessionID, - time: { created: Date.now() }, - agent: userMessage.agent, - model: userMessage.model, - }) - const text = - (input.overflow - ? "The previous request exceeded the provider's size limit due to large media attachments. The conversation was compacted and media files were removed from context. If the user was asking about attached images or files, explain that the attachments were too large to process and suggest they try again with smaller or fewer files.\n\n" - : "") + - "Continue if you have next steps, or stop and ask for clarification if you are unsure how to proceed." - yield* session.updatePart({ - id: PartID.ascending(), - messageID: continueMsg.id, - sessionID: input.sessionID, - type: "text", - // Internal marker for auto-compaction followups so provider plugins - // can distinguish them from manual post-compaction user prompts. - // This is not a stable plugin contract and may change or disappear. 
- metadata: { compaction_continue: true }, - synthetic: true, - text, - time: { - start: Date.now(), - end: Date.now(), - }, - }) - } } } + } - if (processor.message.error) return "stop" - if (result === "continue") yield* bus.publish(Event.Compacted, { sessionID: input.sessionID }) - return result + if (processor.message.error) return "stop" + if (result === "continue") yield* bus.publish(Event.Compacted, { sessionID: input.sessionID }) + return result + }) + + const create = Effect.fn("SessionCompaction.create")(function* (input: { + sessionID: SessionID + agent: string + model: { providerID: ProviderID; modelID: ModelID } + auto: boolean + overflow?: boolean + }) { + const msg = yield* session.updateMessage({ + id: MessageID.ascending(), + role: "user", + model: input.model, + sessionID: input.sessionID, + agent: input.agent, + time: { created: Date.now() }, }) - - const create = Effect.fn("SessionCompaction.create")(function* (input: { - sessionID: SessionID - agent: string - model: { providerID: ProviderID; modelID: ModelID } - auto: boolean - overflow?: boolean - }) { - const msg = yield* session.updateMessage({ - id: MessageID.ascending(), - role: "user", - model: input.model, - sessionID: input.sessionID, - agent: input.agent, - time: { created: Date.now() }, - }) - yield* session.updatePart({ - id: PartID.ascending(), - messageID: msg.id, - sessionID: msg.sessionID, - type: "compaction", - auto: input.auto, - overflow: input.overflow, - }) + yield* session.updatePart({ + id: PartID.ascending(), + messageID: msg.id, + sessionID: msg.sessionID, + type: "compaction", + auto: input.auto, + overflow: input.overflow, }) + }) - return Service.of({ - isOverflow, - prune, - process: processCompaction, - create, - }) - }), - ) + return Service.of({ + isOverflow, + prune, + process: processCompaction, + create, + }) + }), +) - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(Provider.defaultLayer), - Layer.provide(Session.defaultLayer), - 
Layer.provide(SessionProcessor.defaultLayer), - Layer.provide(Agent.defaultLayer), - Layer.provide(Plugin.defaultLayer), - Layer.provide(Bus.layer), - Layer.provide(Config.defaultLayer), - ), - ) -} +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(Provider.defaultLayer), + Layer.provide(Session.defaultLayer), + Layer.provide(SessionProcessor.defaultLayer), + Layer.provide(Agent.defaultLayer), + Layer.provide(Plugin.defaultLayer), + Layer.provide(Bus.layer), + Layer.provide(Config.defaultLayer), + ), +) + +export * as SessionCompaction from "./compaction" diff --git a/packages/opencode/src/session/instruction.ts b/packages/opencode/src/session/instruction.ts index cd2050adf5..768f352d93 100644 --- a/packages/opencode/src/session/instruction.ts +++ b/packages/opencode/src/session/instruction.ts @@ -50,194 +50,194 @@ function extract(messages: MessageV2.WithParts[]) { return paths } -export namespace Instruction { - export interface Interface { - readonly clear: (messageID: MessageID) => Effect.Effect - readonly systemPaths: () => Effect.Effect, AppFileSystem.Error> - readonly system: () => Effect.Effect - readonly find: (dir: string) => Effect.Effect - readonly resolve: ( - messages: MessageV2.WithParts[], - filepath: string, - messageID: MessageID, - ) => Effect.Effect<{ filepath: string; content: string }[], AppFileSystem.Error> - } +export interface Interface { + readonly clear: (messageID: MessageID) => Effect.Effect + readonly systemPaths: () => Effect.Effect, AppFileSystem.Error> + readonly system: () => Effect.Effect + readonly find: (dir: string) => Effect.Effect + readonly resolve: ( + messages: MessageV2.WithParts[], + filepath: string, + messageID: MessageID, + ) => Effect.Effect<{ filepath: string; content: string }[], AppFileSystem.Error> +} - export class Service extends Context.Service()("@opencode/Instruction") {} +export class Service extends Context.Service()("@opencode/Instruction") {} - export const layer: Layer.Layer 
= - Layer.effect( - Service, - Effect.gen(function* () { - const cfg = yield* Config.Service - const fs = yield* AppFileSystem.Service - const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient)) +export const layer: Layer.Layer = + Layer.effect( + Service, + Effect.gen(function* () { + const cfg = yield* Config.Service + const fs = yield* AppFileSystem.Service + const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient)) - const state = yield* InstanceState.make( - Effect.fn("Instruction.state")(() => - Effect.succeed({ - // Track which instruction files have already been attached for a given assistant message. - claims: new Map>(), - }), - ), - ) + const state = yield* InstanceState.make( + Effect.fn("Instruction.state")(() => + Effect.succeed({ + // Track which instruction files have already been attached for a given assistant message. + claims: new Map>(), + }), + ), + ) - const relative = Effect.fnUntraced(function* (instruction: string) { - if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { - return yield* fs - .globUp(instruction, Instance.directory, Instance.worktree) - .pipe(Effect.catch(() => Effect.succeed([] as string[]))) - } - if (!Flag.OPENCODE_CONFIG_DIR) { - log.warn( - `Skipping relative instruction "${instruction}" - no OPENCODE_CONFIG_DIR set while project config is disabled`, - ) - return [] - } + const relative = Effect.fnUntraced(function* (instruction: string) { + if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { return yield* fs - .globUp(instruction, Flag.OPENCODE_CONFIG_DIR, Flag.OPENCODE_CONFIG_DIR) + .globUp(instruction, Instance.directory, Instance.worktree) .pipe(Effect.catch(() => Effect.succeed([] as string[]))) - }) - - const read = Effect.fnUntraced(function* (filepath: string) { - return yield* fs.readFileString(filepath).pipe(Effect.catch(() => Effect.succeed(""))) - }) - - const fetch = Effect.fnUntraced(function* (url: string) { - const res = yield* 
http.execute(HttpClientRequest.get(url)).pipe( - Effect.timeout(5000), - Effect.catch(() => Effect.succeed(null)), + } + if (!Flag.OPENCODE_CONFIG_DIR) { + log.warn( + `Skipping relative instruction "${instruction}" - no OPENCODE_CONFIG_DIR set while project config is disabled`, ) - if (!res) return "" - const body = yield* res.arrayBuffer.pipe(Effect.catch(() => Effect.succeed(new ArrayBuffer(0)))) - return new TextDecoder().decode(body) - }) + return [] + } + return yield* fs + .globUp(instruction, Flag.OPENCODE_CONFIG_DIR, Flag.OPENCODE_CONFIG_DIR) + .pipe(Effect.catch(() => Effect.succeed([] as string[]))) + }) - const clear = Effect.fn("Instruction.clear")(function* (messageID: MessageID) { - const s = yield* InstanceState.get(state) - s.claims.delete(messageID) - }) + const read = Effect.fnUntraced(function* (filepath: string) { + return yield* fs.readFileString(filepath).pipe(Effect.catch(() => Effect.succeed(""))) + }) - const systemPaths = Effect.fn("Instruction.systemPaths")(function* () { - const config = yield* cfg.get() - const paths = new Set() + const fetch = Effect.fnUntraced(function* (url: string) { + const res = yield* http.execute(HttpClientRequest.get(url)).pipe( + Effect.timeout(5000), + Effect.catch(() => Effect.succeed(null)), + ) + if (!res) return "" + const body = yield* res.arrayBuffer.pipe(Effect.catch(() => Effect.succeed(new ArrayBuffer(0)))) + return new TextDecoder().decode(body) + }) - // The first project-level match wins so we don't stack AGENTS.md/CLAUDE.md from every ancestor. 
- if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { - for (const file of FILES) { - const matches = yield* fs.findUp(file, Instance.directory, Instance.worktree) - if (matches.length > 0) { - matches.forEach((item) => paths.add(path.resolve(item))) - break - } - } - } + const clear = Effect.fn("Instruction.clear")(function* (messageID: MessageID) { + const s = yield* InstanceState.get(state) + s.claims.delete(messageID) + }) - for (const file of globalFiles()) { - if (yield* fs.existsSafe(file)) { - paths.add(path.resolve(file)) + const systemPaths = Effect.fn("Instruction.systemPaths")(function* () { + const config = yield* cfg.get() + const paths = new Set() + + // The first project-level match wins so we don't stack AGENTS.md/CLAUDE.md from every ancestor. + if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { + for (const file of FILES) { + const matches = yield* fs.findUp(file, Instance.directory, Instance.worktree) + if (matches.length > 0) { + matches.forEach((item) => paths.add(path.resolve(item))) break } } + } - if (config.instructions) { - for (const raw of config.instructions) { - if (raw.startsWith("https://") || raw.startsWith("http://")) continue - const instruction = raw.startsWith("~/") ? path.join(os.homedir(), raw.slice(2)) : raw - const matches = yield* ( - path.isAbsolute(instruction) - ? fs.glob(path.basename(instruction), { - cwd: path.dirname(instruction), - absolute: true, - include: "file", - }) - : relative(instruction) - ).pipe(Effect.catch(() => Effect.succeed([] as string[]))) - matches.forEach((item) => paths.add(path.resolve(item))) - } + for (const file of globalFiles()) { + if (yield* fs.existsSafe(file)) { + paths.add(path.resolve(file)) + break } + } - return paths - }) - - const system = Effect.fn("Instruction.system")(function* () { - const config = yield* cfg.get() - const paths = yield* systemPaths() - const urls = (config.instructions ?? 
[]).filter( - (item) => item.startsWith("https://") || item.startsWith("http://"), - ) - - const files = yield* Effect.forEach(Array.from(paths), read, { concurrency: 8 }) - const remote = yield* Effect.forEach(urls, fetch, { concurrency: 4 }) - - return [ - ...Array.from(paths).flatMap((item, i) => (files[i] ? [`Instructions from: ${item}\n${files[i]}`] : [])), - ...urls.flatMap((item, i) => (remote[i] ? [`Instructions from: ${item}\n${remote[i]}`] : [])), - ] - }) - - const find = Effect.fn("Instruction.find")(function* (dir: string) { - for (const file of FILES) { - const filepath = path.resolve(path.join(dir, file)) - if (yield* fs.existsSafe(filepath)) return filepath + if (config.instructions) { + for (const raw of config.instructions) { + if (raw.startsWith("https://") || raw.startsWith("http://")) continue + const instruction = raw.startsWith("~/") ? path.join(os.homedir(), raw.slice(2)) : raw + const matches = yield* ( + path.isAbsolute(instruction) + ? fs.glob(path.basename(instruction), { + cwd: path.dirname(instruction), + absolute: true, + include: "file", + }) + : relative(instruction) + ).pipe(Effect.catch(() => Effect.succeed([] as string[]))) + matches.forEach((item) => paths.add(path.resolve(item))) } - }) + } - const resolve = Effect.fn("Instruction.resolve")(function* ( - messages: MessageV2.WithParts[], - filepath: string, - messageID: MessageID, - ) { - const sys = yield* systemPaths() - const already = extract(messages) - const results: { filepath: string; content: string }[] = [] - const s = yield* InstanceState.get(state) + return paths + }) - const target = path.resolve(filepath) - const root = path.resolve(Instance.directory) - let current = path.dirname(target) + const system = Effect.fn("Instruction.system")(function* () { + const config = yield* cfg.get() + const paths = yield* systemPaths() + const urls = (config.instructions ?? 
[]).filter( + (item) => item.startsWith("https://") || item.startsWith("http://"), + ) - // Walk upward from the file being read and attach nearby instruction files once per message. - while (current.startsWith(root) && current !== root) { - const found = yield* find(current) - if (!found || found === target || sys.has(found) || already.has(found)) { - current = path.dirname(current) - continue - } + const files = yield* Effect.forEach(Array.from(paths), read, { concurrency: 8 }) + const remote = yield* Effect.forEach(urls, fetch, { concurrency: 4 }) - let set = s.claims.get(messageID) - if (!set) { - set = new Set() - s.claims.set(messageID, set) - } - if (set.has(found)) { - current = path.dirname(current) - continue - } + return [ + ...Array.from(paths).flatMap((item, i) => (files[i] ? [`Instructions from: ${item}\n${files[i]}`] : [])), + ...urls.flatMap((item, i) => (remote[i] ? [`Instructions from: ${item}\n${remote[i]}`] : [])), + ] + }) - set.add(found) - const content = yield* read(found) - if (content) { - results.push({ filepath: found, content: `Instructions from: ${found}\n${content}` }) - } + const find = Effect.fn("Instruction.find")(function* (dir: string) { + for (const file of FILES) { + const filepath = path.resolve(path.join(dir, file)) + if (yield* fs.existsSafe(filepath)) return filepath + } + }) + const resolve = Effect.fn("Instruction.resolve")(function* ( + messages: MessageV2.WithParts[], + filepath: string, + messageID: MessageID, + ) { + const sys = yield* systemPaths() + const already = extract(messages) + const results: { filepath: string; content: string }[] = [] + const s = yield* InstanceState.get(state) + + const target = path.resolve(filepath) + const root = path.resolve(Instance.directory) + let current = path.dirname(target) + + // Walk upward from the file being read and attach nearby instruction files once per message. 
+ while (current.startsWith(root) && current !== root) { + const found = yield* find(current) + if (!found || found === target || sys.has(found) || already.has(found)) { current = path.dirname(current) + continue } - return results - }) + let set = s.claims.get(messageID) + if (!set) { + set = new Set() + s.claims.set(messageID, set) + } + if (set.has(found)) { + current = path.dirname(current) + continue + } - return Service.of({ clear, systemPaths, system, find, resolve }) - }), - ) + set.add(found) + const content = yield* read(found) + if (content) { + results.push({ filepath: found, content: `Instructions from: ${found}\n${content}` }) + } - export const defaultLayer = layer.pipe( - Layer.provide(Config.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(FetchHttpClient.layer), + current = path.dirname(current) + } + + return results + }) + + return Service.of({ clear, systemPaths, system, find, resolve }) + }), ) - export function loaded(messages: MessageV2.WithParts[]) { - return extract(messages) - } +export const defaultLayer = layer.pipe( + Layer.provide(Config.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(FetchHttpClient.layer), +) + +export function loaded(messages: MessageV2.WithParts[]) { + return extract(messages) } + +export * as Instruction from "./instruction" diff --git a/packages/opencode/src/session/llm.ts b/packages/opencode/src/session/llm.ts index d38c29765a..b66e99fc82 100644 --- a/packages/opencode/src/session/llm.ts +++ b/packages/opencode/src/session/llm.ts @@ -25,429 +25,429 @@ import { EffectBridge } from "@/effect" import * as Option from "effect/Option" import * as OtelTracer from "@effect/opentelemetry/Tracer" -export namespace LLM { - const log = Log.create({ service: "llm" }) - export const OUTPUT_TOKEN_MAX = ProviderTransform.OUTPUT_TOKEN_MAX - type Result = Awaited> +const log = Log.create({ service: "llm" }) +export const OUTPUT_TOKEN_MAX = ProviderTransform.OUTPUT_TOKEN_MAX 
+type Result = Awaited> - export type StreamInput = { - user: MessageV2.User - sessionID: string - parentSessionID?: string - model: Provider.Model - agent: Agent.Info - permission?: Permission.Ruleset - system: string[] - messages: ModelMessage[] - small?: boolean - tools: Record - retries?: number - toolChoice?: "auto" | "required" | "none" - } +export type StreamInput = { + user: MessageV2.User + sessionID: string + parentSessionID?: string + model: Provider.Model + agent: Agent.Info + permission?: Permission.Ruleset + system: string[] + messages: ModelMessage[] + small?: boolean + tools: Record + retries?: number + toolChoice?: "auto" | "required" | "none" +} - export type StreamRequest = StreamInput & { - abort: AbortSignal - } +export type StreamRequest = StreamInput & { + abort: AbortSignal +} - export type Event = Result["fullStream"] extends AsyncIterable ? T : never +export type Event = Result["fullStream"] extends AsyncIterable ? T : never - export interface Interface { - readonly stream: (input: StreamInput) => Stream.Stream - } +export interface Interface { + readonly stream: (input: StreamInput) => Stream.Stream +} - export class Service extends Context.Service()("@opencode/LLM") {} +export class Service extends Context.Service()("@opencode/LLM") {} - const live: Layer.Layer< - Service, - never, - Auth.Service | Config.Service | Provider.Service | Plugin.Service | Permission.Service - > = Layer.effect( - Service, - Effect.gen(function* () { - const auth = yield* Auth.Service - const config = yield* Config.Service - const provider = yield* Provider.Service - const plugin = yield* Plugin.Service - const perm = yield* Permission.Service +const live: Layer.Layer< + Service, + never, + Auth.Service | Config.Service | Provider.Service | Plugin.Service | Permission.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const auth = yield* Auth.Service + const config = yield* Config.Service + const provider = yield* Provider.Service + const 
plugin = yield* Plugin.Service + const perm = yield* Permission.Service - const run = Effect.fn("LLM.run")(function* (input: StreamRequest) { - const l = log - .clone() - .tag("providerID", input.model.providerID) - .tag("modelID", input.model.id) - .tag("sessionID", input.sessionID) - .tag("small", (input.small ?? false).toString()) - .tag("agent", input.agent.name) - .tag("mode", input.agent.mode) - l.info("stream", { - modelID: input.model.id, - providerID: input.model.providerID, - }) - - const [language, cfg, item, info] = yield* Effect.all( - [ - provider.getLanguage(input.model), - config.get(), - provider.getProvider(input.model.providerID), - auth.get(input.model.providerID), - ], - { concurrency: "unbounded" }, - ) - - // TODO: move this to a proper hook - const isOpenaiOauth = item.id === "openai" && info?.type === "oauth" - - const system: string[] = [] - system.push( - [ - // use agent prompt otherwise provider prompt - ...(input.agent.prompt ? [input.agent.prompt] : SystemPrompt.provider(input.model)), - // any custom prompt passed into this call - ...input.system, - // any custom prompt from last user message - ...(input.user.system ? [input.user.system] : []), - ] - .filter((x) => x) - .join("\n"), - ) - - const header = system[0] - yield* plugin.trigger( - "experimental.chat.system.transform", - { sessionID: input.sessionID, model: input.model }, - { system }, - ) - // rejoin to maintain 2-part structure for caching if header unchanged - if (system.length > 2 && system[0] === header) { - const rest = system.slice(1) - system.length = 0 - system.push(header, rest.join("\n")) - } - - const variant = - !input.small && input.model.variants && input.user.model.variant - ? input.model.variants[input.user.model.variant] - : {} - const base = input.small - ? 
ProviderTransform.smallOptions(input.model) - : ProviderTransform.options({ - model: input.model, - sessionID: input.sessionID, - providerOptions: item.options, - }) - const options: Record = pipe( - base, - mergeDeep(input.model.options), - mergeDeep(input.agent.options), - mergeDeep(variant), - ) - if (isOpenaiOauth) { - options.instructions = system.join("\n") - } - - const isWorkflow = language instanceof GitLabWorkflowLanguageModel - const messages = isOpenaiOauth - ? input.messages - : isWorkflow - ? input.messages - : [ - ...system.map( - (x): ModelMessage => ({ - role: "system", - content: x, - }), - ), - ...input.messages, - ] - - const params = yield* plugin.trigger( - "chat.params", - { - sessionID: input.sessionID, - agent: input.agent.name, - model: input.model, - provider: item, - message: input.user, - }, - { - temperature: input.model.capabilities.temperature - ? (input.agent.temperature ?? ProviderTransform.temperature(input.model)) - : undefined, - topP: input.agent.topP ?? ProviderTransform.topP(input.model), - topK: ProviderTransform.topK(input.model), - maxOutputTokens: ProviderTransform.maxOutputTokens(input.model), - options, - }, - ) - - const { headers } = yield* plugin.trigger( - "chat.headers", - { - sessionID: input.sessionID, - agent: input.agent.name, - model: input.model, - provider: item, - message: input.user, - }, - { - headers: {}, - }, - ) - - const tools = resolveTools(input) - - // LiteLLM and some Anthropic proxies require the tools parameter to be present - // when message history contains tool calls, even if no tools are being used. - // Add a dummy tool that is never called to satisfy this validation. - // This is enabled for: - // 1. Providers with "litellm" in their ID or API ID (auto-detected) - // 2. 
Providers with explicit "litellmProxy: true" option (opt-in for custom gateways) - const isLiteLLMProxy = - item.options?.["litellmProxy"] === true || - input.model.providerID.toLowerCase().includes("litellm") || - input.model.api.id.toLowerCase().includes("litellm") - - // LiteLLM/Bedrock rejects requests where the message history contains tool - // calls but no tools param is present. When there are no active tools (e.g. - // during compaction), inject a stub tool to satisfy the validation requirement. - // The stub description explicitly tells the model not to call it. - if ( - (isLiteLLMProxy || input.model.providerID.includes("github-copilot")) && - Object.keys(tools).length === 0 && - hasToolCalls(input.messages) - ) { - tools["_noop"] = tool({ - description: "Do not call this tool. It exists only for API compatibility and must never be invoked.", - inputSchema: jsonSchema({ - type: "object", - properties: { - reason: { type: "string", description: "Unused" }, - }, - }), - execute: async () => ({ output: "", title: "", metadata: {} }), - }) - } - - // Wire up toolExecutor for DWS workflow models so that tool calls - // from the workflow service are executed via opencode's tool system - // and results sent back over the WebSocket. 
- if (language instanceof GitLabWorkflowLanguageModel) { - const workflowModel = language as GitLabWorkflowLanguageModel & { - sessionID?: string - sessionPreapprovedTools?: string[] - approvalHandler?: (approvalTools: { name: string; args: string }[]) => Promise<{ approved: boolean }> - } - workflowModel.sessionID = input.sessionID - workflowModel.systemPrompt = system.join("\n") - workflowModel.toolExecutor = async (toolName, argsJson, _requestID) => { - const t = tools[toolName] - if (!t || !t.execute) { - return { result: "", error: `Unknown tool: ${toolName}` } - } - try { - const result = await t.execute!(JSON.parse(argsJson), { - toolCallId: _requestID, - messages: input.messages, - abortSignal: input.abort, - }) - const output = typeof result === "string" ? result : (result?.output ?? JSON.stringify(result)) - return { - result: output, - metadata: typeof result === "object" ? result?.metadata : undefined, - title: typeof result === "object" ? result?.title : undefined, - } - } catch (e: any) { - return { result: "", error: e.message ?? String(e) } - } - } - - const ruleset = Permission.merge(input.agent.permission ?? [], input.permission ?? 
[]) - workflowModel.sessionPreapprovedTools = Object.keys(tools).filter((name) => { - const match = ruleset.findLast((rule) => Wildcard.match(name, rule.permission)) - return !match || match.action !== "ask" - }) - - const bridge = yield* EffectBridge.make() - const approvedToolsForSession = new Set() - workflowModel.approvalHandler = Instance.bind(async (approvalTools) => { - const uniqueNames = [...new Set(approvalTools.map((t: { name: string }) => t.name))] as string[] - // Auto-approve tools that were already approved in this session - // (prevents infinite approval loops for server-side MCP tools) - if (uniqueNames.every((name) => approvedToolsForSession.has(name))) { - return { approved: true } - } - - const id = PermissionID.ascending() - let unsub: (() => void) | undefined - try { - unsub = Bus.subscribe(Permission.Event.Replied, (evt) => { - if (evt.properties.requestID === id) void evt.properties.reply - }) - const toolPatterns = approvalTools.map((t: { name: string; args: string }) => { - try { - const parsed = JSON.parse(t.args) as Record - const title = (parsed?.title ?? parsed?.name ?? "") as string - return title ? `${t.name}: ${title}` : t.name - } catch { - return t.name - } - }) - const uniquePatterns = [...new Set(toolPatterns)] as string[] - await bridge.promise( - perm.ask({ - id, - sessionID: SessionID.make(input.sessionID), - permission: "workflow_tool_approval", - patterns: uniquePatterns, - metadata: { tools: approvalTools }, - always: uniquePatterns, - ruleset: [], - }), - ) - for (const name of uniqueNames) approvedToolsForSession.add(name) - workflowModel.sessionPreapprovedTools = [...(workflowModel.sessionPreapprovedTools ?? []), ...uniqueNames] - return { approved: true } - } catch { - return { approved: false } - } finally { - unsub?.() - } - }) - } - - const tracer = cfg.experimental?.openTelemetry - ? 
Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer)) - : undefined - - return streamText({ - onError(error) { - l.error("stream error", { - error, - }) - }, - async experimental_repairToolCall(failed) { - const lower = failed.toolCall.toolName.toLowerCase() - if (lower !== failed.toolCall.toolName && tools[lower]) { - l.info("repairing tool call", { - tool: failed.toolCall.toolName, - repaired: lower, - }) - return { - ...failed.toolCall, - toolName: lower, - } - } - return { - ...failed.toolCall, - input: JSON.stringify({ - tool: failed.toolCall.toolName, - error: failed.error.message, - }), - toolName: "invalid", - } - }, - temperature: params.temperature, - topP: params.topP, - topK: params.topK, - providerOptions: ProviderTransform.providerOptions(input.model, params.options), - activeTools: Object.keys(tools).filter((x) => x !== "invalid"), - tools, - toolChoice: input.toolChoice, - maxOutputTokens: params.maxOutputTokens, - abortSignal: input.abort, - headers: { - ...(input.model.providerID.startsWith("opencode") - ? { - "x-opencode-project": Instance.project.id, - "x-opencode-session": input.sessionID, - "x-opencode-request": input.user.id, - "x-opencode-client": Flag.OPENCODE_CLIENT, - } - : { - "x-session-affinity": input.sessionID, - ...(input.parentSessionID ? { "x-parent-session-id": input.parentSessionID } : {}), - "User-Agent": `opencode/${InstallationVersion}`, - }), - ...input.model.headers, - ...headers, - }, - maxRetries: input.retries ?? 0, - messages, - model: wrapLanguageModel({ - model: language, - middleware: [ - { - specificationVersion: "v3" as const, - async transformParams(args) { - if (args.type === "stream") { - // @ts-expect-error - args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options) - } - return args.params - }, - }, - ], - }), - experimental_telemetry: { - isEnabled: cfg.experimental?.openTelemetry, - functionId: "session.llm", - tracer, - metadata: { - userId: cfg.username ?? 
"unknown", - sessionId: input.sessionID, - }, - }, - }) + const run = Effect.fn("LLM.run")(function* (input: StreamRequest) { + const l = log + .clone() + .tag("providerID", input.model.providerID) + .tag("modelID", input.model.id) + .tag("sessionID", input.sessionID) + .tag("small", (input.small ?? false).toString()) + .tag("agent", input.agent.name) + .tag("mode", input.agent.mode) + l.info("stream", { + modelID: input.model.id, + providerID: input.model.providerID, }) - const stream: Interface["stream"] = (input) => - Stream.scoped( - Stream.unwrap( - Effect.gen(function* () { - const ctrl = yield* Effect.acquireRelease( - Effect.sync(() => new AbortController()), - (ctrl) => Effect.sync(() => ctrl.abort()), - ) + const [language, cfg, item, info] = yield* Effect.all( + [ + provider.getLanguage(input.model), + config.get(), + provider.getProvider(input.model.providerID), + auth.get(input.model.providerID), + ], + { concurrency: "unbounded" }, + ) - const result = yield* run({ ...input, abort: ctrl.signal }) + // TODO: move this to a proper hook + const isOpenaiOauth = item.id === "openai" && info?.type === "oauth" - return Stream.fromAsyncIterable(result.fullStream, (e) => (e instanceof Error ? e : new Error(String(e)))) - }), - ), - ) + const system: string[] = [] + system.push( + [ + // use agent prompt otherwise provider prompt + ...(input.agent.prompt ? [input.agent.prompt] : SystemPrompt.provider(input.model)), + // any custom prompt passed into this call + ...input.system, + // any custom prompt from last user message + ...(input.user.system ? 
[input.user.system] : []), + ] + .filter((x) => x) + .join("\n"), + ) - return Service.of({ stream }) - }), - ) - - export const layer = live.pipe(Layer.provide(Permission.defaultLayer)) - - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(Auth.defaultLayer), - Layer.provide(Config.defaultLayer), - Layer.provide(Provider.defaultLayer), - Layer.provide(Plugin.defaultLayer), - ), - ) - - function resolveTools(input: Pick) { - const disabled = Permission.disabled( - Object.keys(input.tools), - Permission.merge(input.agent.permission, input.permission ?? []), - ) - return Record.filter(input.tools, (_, k) => input.user.tools?.[k] !== false && !disabled.has(k)) - } - - // Check if messages contain any tool-call content - // Used to determine if a dummy tool should be added for LiteLLM proxy compatibility - export function hasToolCalls(messages: ModelMessage[]): boolean { - for (const msg of messages) { - if (!Array.isArray(msg.content)) continue - for (const part of msg.content) { - if (part.type === "tool-call" || part.type === "tool-result") return true + const header = system[0] + yield* plugin.trigger( + "experimental.chat.system.transform", + { sessionID: input.sessionID, model: input.model }, + { system }, + ) + // rejoin to maintain 2-part structure for caching if header unchanged + if (system.length > 2 && system[0] === header) { + const rest = system.slice(1) + system.length = 0 + system.push(header, rest.join("\n")) } - } - return false - } + + const variant = + !input.small && input.model.variants && input.user.model.variant + ? input.model.variants[input.user.model.variant] + : {} + const base = input.small + ? 
ProviderTransform.smallOptions(input.model) + : ProviderTransform.options({ + model: input.model, + sessionID: input.sessionID, + providerOptions: item.options, + }) + const options: Record = pipe( + base, + mergeDeep(input.model.options), + mergeDeep(input.agent.options), + mergeDeep(variant), + ) + if (isOpenaiOauth) { + options.instructions = system.join("\n") + } + + const isWorkflow = language instanceof GitLabWorkflowLanguageModel + const messages = isOpenaiOauth + ? input.messages + : isWorkflow + ? input.messages + : [ + ...system.map( + (x): ModelMessage => ({ + role: "system", + content: x, + }), + ), + ...input.messages, + ] + + const params = yield* plugin.trigger( + "chat.params", + { + sessionID: input.sessionID, + agent: input.agent.name, + model: input.model, + provider: item, + message: input.user, + }, + { + temperature: input.model.capabilities.temperature + ? (input.agent.temperature ?? ProviderTransform.temperature(input.model)) + : undefined, + topP: input.agent.topP ?? ProviderTransform.topP(input.model), + topK: ProviderTransform.topK(input.model), + maxOutputTokens: ProviderTransform.maxOutputTokens(input.model), + options, + }, + ) + + const { headers } = yield* plugin.trigger( + "chat.headers", + { + sessionID: input.sessionID, + agent: input.agent.name, + model: input.model, + provider: item, + message: input.user, + }, + { + headers: {}, + }, + ) + + const tools = resolveTools(input) + + // LiteLLM and some Anthropic proxies require the tools parameter to be present + // when message history contains tool calls, even if no tools are being used. + // Add a dummy tool that is never called to satisfy this validation. + // This is enabled for: + // 1. Providers with "litellm" in their ID or API ID (auto-detected) + // 2. 
Providers with explicit "litellmProxy: true" option (opt-in for custom gateways) + const isLiteLLMProxy = + item.options?.["litellmProxy"] === true || + input.model.providerID.toLowerCase().includes("litellm") || + input.model.api.id.toLowerCase().includes("litellm") + + // LiteLLM/Bedrock rejects requests where the message history contains tool + // calls but no tools param is present. When there are no active tools (e.g. + // during compaction), inject a stub tool to satisfy the validation requirement. + // The stub description explicitly tells the model not to call it. + if ( + (isLiteLLMProxy || input.model.providerID.includes("github-copilot")) && + Object.keys(tools).length === 0 && + hasToolCalls(input.messages) + ) { + tools["_noop"] = tool({ + description: "Do not call this tool. It exists only for API compatibility and must never be invoked.", + inputSchema: jsonSchema({ + type: "object", + properties: { + reason: { type: "string", description: "Unused" }, + }, + }), + execute: async () => ({ output: "", title: "", metadata: {} }), + }) + } + + // Wire up toolExecutor for DWS workflow models so that tool calls + // from the workflow service are executed via opencode's tool system + // and results sent back over the WebSocket. 
+ if (language instanceof GitLabWorkflowLanguageModel) { + const workflowModel = language as GitLabWorkflowLanguageModel & { + sessionID?: string + sessionPreapprovedTools?: string[] + approvalHandler?: (approvalTools: { name: string; args: string }[]) => Promise<{ approved: boolean }> + } + workflowModel.sessionID = input.sessionID + workflowModel.systemPrompt = system.join("\n") + workflowModel.toolExecutor = async (toolName, argsJson, _requestID) => { + const t = tools[toolName] + if (!t || !t.execute) { + return { result: "", error: `Unknown tool: ${toolName}` } + } + try { + const result = await t.execute!(JSON.parse(argsJson), { + toolCallId: _requestID, + messages: input.messages, + abortSignal: input.abort, + }) + const output = typeof result === "string" ? result : (result?.output ?? JSON.stringify(result)) + return { + result: output, + metadata: typeof result === "object" ? result?.metadata : undefined, + title: typeof result === "object" ? result?.title : undefined, + } + } catch (e: any) { + return { result: "", error: e.message ?? String(e) } + } + } + + const ruleset = Permission.merge(input.agent.permission ?? [], input.permission ?? 
[]) + workflowModel.sessionPreapprovedTools = Object.keys(tools).filter((name) => { + const match = ruleset.findLast((rule) => Wildcard.match(name, rule.permission)) + return !match || match.action !== "ask" + }) + + const bridge = yield* EffectBridge.make() + const approvedToolsForSession = new Set() + workflowModel.approvalHandler = Instance.bind(async (approvalTools) => { + const uniqueNames = [...new Set(approvalTools.map((t: { name: string }) => t.name))] as string[] + // Auto-approve tools that were already approved in this session + // (prevents infinite approval loops for server-side MCP tools) + if (uniqueNames.every((name) => approvedToolsForSession.has(name))) { + return { approved: true } + } + + const id = PermissionID.ascending() + let unsub: (() => void) | undefined + try { + unsub = Bus.subscribe(Permission.Event.Replied, (evt) => { + if (evt.properties.requestID === id) void evt.properties.reply + }) + const toolPatterns = approvalTools.map((t: { name: string; args: string }) => { + try { + const parsed = JSON.parse(t.args) as Record + const title = (parsed?.title ?? parsed?.name ?? "") as string + return title ? `${t.name}: ${title}` : t.name + } catch { + return t.name + } + }) + const uniquePatterns = [...new Set(toolPatterns)] as string[] + await bridge.promise( + perm.ask({ + id, + sessionID: SessionID.make(input.sessionID), + permission: "workflow_tool_approval", + patterns: uniquePatterns, + metadata: { tools: approvalTools }, + always: uniquePatterns, + ruleset: [], + }), + ) + for (const name of uniqueNames) approvedToolsForSession.add(name) + workflowModel.sessionPreapprovedTools = [...(workflowModel.sessionPreapprovedTools ?? []), ...uniqueNames] + return { approved: true } + } catch { + return { approved: false } + } finally { + unsub?.() + } + }) + } + + const tracer = cfg.experimental?.openTelemetry + ? 
Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer)) + : undefined + + return streamText({ + onError(error) { + l.error("stream error", { + error, + }) + }, + async experimental_repairToolCall(failed) { + const lower = failed.toolCall.toolName.toLowerCase() + if (lower !== failed.toolCall.toolName && tools[lower]) { + l.info("repairing tool call", { + tool: failed.toolCall.toolName, + repaired: lower, + }) + return { + ...failed.toolCall, + toolName: lower, + } + } + return { + ...failed.toolCall, + input: JSON.stringify({ + tool: failed.toolCall.toolName, + error: failed.error.message, + }), + toolName: "invalid", + } + }, + temperature: params.temperature, + topP: params.topP, + topK: params.topK, + providerOptions: ProviderTransform.providerOptions(input.model, params.options), + activeTools: Object.keys(tools).filter((x) => x !== "invalid"), + tools, + toolChoice: input.toolChoice, + maxOutputTokens: params.maxOutputTokens, + abortSignal: input.abort, + headers: { + ...(input.model.providerID.startsWith("opencode") + ? { + "x-opencode-project": Instance.project.id, + "x-opencode-session": input.sessionID, + "x-opencode-request": input.user.id, + "x-opencode-client": Flag.OPENCODE_CLIENT, + } + : { + "x-session-affinity": input.sessionID, + ...(input.parentSessionID ? { "x-parent-session-id": input.parentSessionID } : {}), + "User-Agent": `opencode/${InstallationVersion}`, + }), + ...input.model.headers, + ...headers, + }, + maxRetries: input.retries ?? 0, + messages, + model: wrapLanguageModel({ + model: language, + middleware: [ + { + specificationVersion: "v3" as const, + async transformParams(args) { + if (args.type === "stream") { + // @ts-expect-error + args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options) + } + return args.params + }, + }, + ], + }), + experimental_telemetry: { + isEnabled: cfg.experimental?.openTelemetry, + functionId: "session.llm", + tracer, + metadata: { + userId: cfg.username ?? 
"unknown", + sessionId: input.sessionID, + }, + }, + }) + }) + + const stream: Interface["stream"] = (input) => + Stream.scoped( + Stream.unwrap( + Effect.gen(function* () { + const ctrl = yield* Effect.acquireRelease( + Effect.sync(() => new AbortController()), + (ctrl) => Effect.sync(() => ctrl.abort()), + ) + + const result = yield* run({ ...input, abort: ctrl.signal }) + + return Stream.fromAsyncIterable(result.fullStream, (e) => (e instanceof Error ? e : new Error(String(e)))) + }), + ), + ) + + return Service.of({ stream }) + }), +) + +export const layer = live.pipe(Layer.provide(Permission.defaultLayer)) + +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(Auth.defaultLayer), + Layer.provide(Config.defaultLayer), + Layer.provide(Provider.defaultLayer), + Layer.provide(Plugin.defaultLayer), + ), +) + +function resolveTools(input: Pick) { + const disabled = Permission.disabled( + Object.keys(input.tools), + Permission.merge(input.agent.permission, input.permission ?? 
[]), + ) + return Record.filter(input.tools, (_, k) => input.user.tools?.[k] !== false && !disabled.has(k)) } + +// Check if messages contain any tool-call content +// Used to determine if a dummy tool should be added for LiteLLM proxy compatibility +export function hasToolCalls(messages: ModelMessage[]): boolean { + for (const msg of messages) { + if (!Array.isArray(msg.content)) continue + for (const part of msg.content) { + if (part.type === "tool-call" || part.type === "tool-result") return true + } + } + return false +} + +export * as LLM from "./llm" diff --git a/packages/opencode/src/session/message-v2.ts b/packages/opencode/src/session/message-v2.ts index f5ba74826d..5e7e008401 100644 --- a/packages/opencode/src/session/message-v2.ts +++ b/packages/opencode/src/session/message-v2.ts @@ -24,726 +24,738 @@ interface FetchDecompressionError extends Error { path: string } -export namespace MessageV2 { - export const SYNTHETIC_ATTACHMENT_PROMPT = "Attached image(s) from tool result:" +export const SYNTHETIC_ATTACHMENT_PROMPT = "Attached image(s) from tool result:" - export function isMedia(mime: string) { - return mime.startsWith("image/") || mime === "application/pdf" - } +export function isMedia(mime: string) { + return mime.startsWith("image/") || mime === "application/pdf" +} - export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) - export const AbortedError = NamedError.create("MessageAbortedError", z.object({ message: z.string() })) - export const StructuredOutputError = NamedError.create( - "StructuredOutputError", - z.object({ - message: z.string(), - retries: z.number(), - }), - ) - export const AuthError = NamedError.create( - "ProviderAuthError", - z.object({ - providerID: z.string(), - message: z.string(), - }), - ) - export const APIError = NamedError.create( - "APIError", - z.object({ - message: z.string(), - statusCode: z.number().optional(), - isRetryable: z.boolean(), - responseHeaders: z.record(z.string(), 
z.string()).optional(), - responseBody: z.string().optional(), - metadata: z.record(z.string(), z.string()).optional(), - }), - ) - export type APIError = z.infer - export const ContextOverflowError = NamedError.create( - "ContextOverflowError", - z.object({ message: z.string(), responseBody: z.string().optional() }), - ) +export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) +export const AbortedError = NamedError.create("MessageAbortedError", z.object({ message: z.string() })) +export const StructuredOutputError = NamedError.create( + "StructuredOutputError", + z.object({ + message: z.string(), + retries: z.number(), + }), +) +export const AuthError = NamedError.create( + "ProviderAuthError", + z.object({ + providerID: z.string(), + message: z.string(), + }), +) +export const APIError = NamedError.create( + "APIError", + z.object({ + message: z.string(), + statusCode: z.number().optional(), + isRetryable: z.boolean(), + responseHeaders: z.record(z.string(), z.string()).optional(), + responseBody: z.string().optional(), + metadata: z.record(z.string(), z.string()).optional(), + }), +) +export type APIError = z.infer +export const ContextOverflowError = NamedError.create( + "ContextOverflowError", + z.object({ message: z.string(), responseBody: z.string().optional() }), +) - export const OutputFormatText = z - .object({ - type: z.literal("text"), - }) - .meta({ - ref: "OutputFormatText", - }) - - export const OutputFormatJsonSchema = z - .object({ - type: z.literal("json_schema"), - schema: z.record(z.string(), z.any()).meta({ ref: "JSONSchema" }), - retryCount: z.number().int().min(0).default(2), - }) - .meta({ - ref: "OutputFormatJsonSchema", - }) - - export const Format = z.discriminatedUnion("type", [OutputFormatText, OutputFormatJsonSchema]).meta({ - ref: "OutputFormat", - }) - export type OutputFormat = z.infer - - const PartBase = z.object({ - id: PartID.zod, - sessionID: SessionID.zod, - messageID: MessageID.zod, - }) 
- - export const SnapshotPart = PartBase.extend({ - type: z.literal("snapshot"), - snapshot: z.string(), - }).meta({ - ref: "SnapshotPart", - }) - export type SnapshotPart = z.infer - - export const PatchPart = PartBase.extend({ - type: z.literal("patch"), - hash: z.string(), - files: z.string().array(), - }).meta({ - ref: "PatchPart", - }) - export type PatchPart = z.infer - - export const TextPart = PartBase.extend({ +export const OutputFormatText = z + .object({ type: z.literal("text"), - text: z.string(), - synthetic: z.boolean().optional(), - ignored: z.boolean().optional(), - time: z - .object({ - start: z.number(), - end: z.number().optional(), - }) - .optional(), - metadata: z.record(z.string(), z.any()).optional(), - }).meta({ - ref: "TextPart", }) - export type TextPart = z.infer + .meta({ + ref: "OutputFormatText", + }) - export const ReasoningPart = PartBase.extend({ - type: z.literal("reasoning"), - text: z.string(), +export const OutputFormatJsonSchema = z + .object({ + type: z.literal("json_schema"), + schema: z.record(z.string(), z.any()).meta({ ref: "JSONSchema" }), + retryCount: z.number().int().min(0).default(2), + }) + .meta({ + ref: "OutputFormatJsonSchema", + }) + +export const Format = z.discriminatedUnion("type", [OutputFormatText, OutputFormatJsonSchema]).meta({ + ref: "OutputFormat", +}) +export type OutputFormat = z.infer + +const PartBase = z.object({ + id: PartID.zod, + sessionID: SessionID.zod, + messageID: MessageID.zod, +}) + +export const SnapshotPart = PartBase.extend({ + type: z.literal("snapshot"), + snapshot: z.string(), +}).meta({ + ref: "SnapshotPart", +}) +export type SnapshotPart = z.infer + +export const PatchPart = PartBase.extend({ + type: z.literal("patch"), + hash: z.string(), + files: z.string().array(), +}).meta({ + ref: "PatchPart", +}) +export type PatchPart = z.infer + +export const TextPart = PartBase.extend({ + type: z.literal("text"), + text: z.string(), + synthetic: z.boolean().optional(), + ignored: 
z.boolean().optional(), + time: z + .object({ + start: z.number(), + end: z.number().optional(), + }) + .optional(), + metadata: z.record(z.string(), z.any()).optional(), +}).meta({ + ref: "TextPart", +}) +export type TextPart = z.infer + +export const ReasoningPart = PartBase.extend({ + type: z.literal("reasoning"), + text: z.string(), + metadata: z.record(z.string(), z.any()).optional(), + time: z.object({ + start: z.number(), + end: z.number().optional(), + }), +}).meta({ + ref: "ReasoningPart", +}) +export type ReasoningPart = z.infer + +const FilePartSourceBase = z.object({ + text: z + .object({ + value: z.string(), + start: z.number().int(), + end: z.number().int(), + }) + .meta({ + ref: "FilePartSourceText", + }), +}) + +export const FileSource = FilePartSourceBase.extend({ + type: z.literal("file"), + path: z.string(), +}).meta({ + ref: "FileSource", +}) + +export const SymbolSource = FilePartSourceBase.extend({ + type: z.literal("symbol"), + path: z.string(), + range: LSP.Range, + name: z.string(), + kind: z.number().int(), +}).meta({ + ref: "SymbolSource", +}) + +export const ResourceSource = FilePartSourceBase.extend({ + type: z.literal("resource"), + clientName: z.string(), + uri: z.string(), +}).meta({ + ref: "ResourceSource", +}) + +export const FilePartSource = z.discriminatedUnion("type", [FileSource, SymbolSource, ResourceSource]).meta({ + ref: "FilePartSource", +}) + +export const FilePart = PartBase.extend({ + type: z.literal("file"), + mime: z.string(), + filename: z.string().optional(), + url: z.string(), + source: FilePartSource.optional(), +}).meta({ + ref: "FilePart", +}) +export type FilePart = z.infer + +export const AgentPart = PartBase.extend({ + type: z.literal("agent"), + name: z.string(), + source: z + .object({ + value: z.string(), + start: z.number().int(), + end: z.number().int(), + }) + .optional(), +}).meta({ + ref: "AgentPart", +}) +export type AgentPart = z.infer + +export const CompactionPart = PartBase.extend({ + type: 
z.literal("compaction"), + auto: z.boolean(), + overflow: z.boolean().optional(), +}).meta({ + ref: "CompactionPart", +}) +export type CompactionPart = z.infer + +export const SubtaskPart = PartBase.extend({ + type: z.literal("subtask"), + prompt: z.string(), + description: z.string(), + agent: z.string(), + model: z + .object({ + providerID: ProviderID.zod, + modelID: ModelID.zod, + }) + .optional(), + command: z.string().optional(), +}).meta({ + ref: "SubtaskPart", +}) +export type SubtaskPart = z.infer + +export const RetryPart = PartBase.extend({ + type: z.literal("retry"), + attempt: z.number(), + error: APIError.Schema, + time: z.object({ + created: z.number(), + }), +}).meta({ + ref: "RetryPart", +}) +export type RetryPart = z.infer + +export const StepStartPart = PartBase.extend({ + type: z.literal("step-start"), + snapshot: z.string().optional(), +}).meta({ + ref: "StepStartPart", +}) +export type StepStartPart = z.infer + +export const StepFinishPart = PartBase.extend({ + type: z.literal("step-finish"), + reason: z.string(), + snapshot: z.string().optional(), + cost: z.number(), + tokens: z.object({ + total: z.number().optional(), + input: z.number(), + output: z.number(), + reasoning: z.number(), + cache: z.object({ + read: z.number(), + write: z.number(), + }), + }), +}).meta({ + ref: "StepFinishPart", +}) +export type StepFinishPart = z.infer + +export const ToolStatePending = z + .object({ + status: z.literal("pending"), + input: z.record(z.string(), z.any()), + raw: z.string(), + }) + .meta({ + ref: "ToolStatePending", + }) + +export type ToolStatePending = z.infer + +export const ToolStateRunning = z + .object({ + status: z.literal("running"), + input: z.record(z.string(), z.any()), + title: z.string().optional(), metadata: z.record(z.string(), z.any()).optional(), time: z.object({ start: z.number(), - end: z.number().optional(), }), - }).meta({ - ref: "ReasoningPart", }) - export type ReasoningPart = z.infer - - const FilePartSourceBase = 
z.object({ - text: z - .object({ - value: z.string(), - start: z.number().int(), - end: z.number().int(), - }) - .meta({ - ref: "FilePartSourceText", - }), + .meta({ + ref: "ToolStateRunning", }) +export type ToolStateRunning = z.infer - export const FileSource = FilePartSourceBase.extend({ - type: z.literal("file"), - path: z.string(), - }).meta({ - ref: "FileSource", - }) - - export const SymbolSource = FilePartSourceBase.extend({ - type: z.literal("symbol"), - path: z.string(), - range: LSP.Range, - name: z.string(), - kind: z.number().int(), - }).meta({ - ref: "SymbolSource", - }) - - export const ResourceSource = FilePartSourceBase.extend({ - type: z.literal("resource"), - clientName: z.string(), - uri: z.string(), - }).meta({ - ref: "ResourceSource", - }) - - export const FilePartSource = z.discriminatedUnion("type", [FileSource, SymbolSource, ResourceSource]).meta({ - ref: "FilePartSource", - }) - - export const FilePart = PartBase.extend({ - type: z.literal("file"), - mime: z.string(), - filename: z.string().optional(), - url: z.string(), - source: FilePartSource.optional(), - }).meta({ - ref: "FilePart", - }) - export type FilePart = z.infer - - export const AgentPart = PartBase.extend({ - type: z.literal("agent"), - name: z.string(), - source: z - .object({ - value: z.string(), - start: z.number().int(), - end: z.number().int(), - }) - .optional(), - }).meta({ - ref: "AgentPart", - }) - export type AgentPart = z.infer - - export const CompactionPart = PartBase.extend({ - type: z.literal("compaction"), - auto: z.boolean(), - overflow: z.boolean().optional(), - }).meta({ - ref: "CompactionPart", - }) - export type CompactionPart = z.infer - - export const SubtaskPart = PartBase.extend({ - type: z.literal("subtask"), - prompt: z.string(), - description: z.string(), - agent: z.string(), - model: z - .object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, - }) - .optional(), - command: z.string().optional(), - }).meta({ - ref: "SubtaskPart", - }) - 
export type SubtaskPart = z.infer - - export const RetryPart = PartBase.extend({ - type: z.literal("retry"), - attempt: z.number(), - error: APIError.Schema, +export const ToolStateCompleted = z + .object({ + status: z.literal("completed"), + input: z.record(z.string(), z.any()), + output: z.string(), + title: z.string(), + metadata: z.record(z.string(), z.any()), time: z.object({ - created: z.number(), + start: z.number(), + end: z.number(), + compacted: z.number().optional(), }), - }).meta({ - ref: "RetryPart", + attachments: FilePart.array().optional(), }) - export type RetryPart = z.infer - - export const StepStartPart = PartBase.extend({ - type: z.literal("step-start"), - snapshot: z.string().optional(), - }).meta({ - ref: "StepStartPart", + .meta({ + ref: "ToolStateCompleted", }) - export type StepStartPart = z.infer +export type ToolStateCompleted = z.infer - export const StepFinishPart = PartBase.extend({ - type: z.literal("step-finish"), - reason: z.string(), - snapshot: z.string().optional(), - cost: z.number(), - tokens: z.object({ - total: z.number().optional(), - input: z.number(), - output: z.number(), - reasoning: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), - }), - }).meta({ - ref: "StepFinishPart", - }) - export type StepFinishPart = z.infer - - export const ToolStatePending = z - .object({ - status: z.literal("pending"), - input: z.record(z.string(), z.any()), - raw: z.string(), - }) - .meta({ - ref: "ToolStatePending", - }) - - export type ToolStatePending = z.infer - - export const ToolStateRunning = z - .object({ - status: z.literal("running"), - input: z.record(z.string(), z.any()), - title: z.string().optional(), - metadata: z.record(z.string(), z.any()).optional(), - time: z.object({ - start: z.number(), - }), - }) - .meta({ - ref: "ToolStateRunning", - }) - export type ToolStateRunning = z.infer - - export const ToolStateCompleted = z - .object({ - status: z.literal("completed"), - input: 
z.record(z.string(), z.any()), - output: z.string(), - title: z.string(), - metadata: z.record(z.string(), z.any()), - time: z.object({ - start: z.number(), - end: z.number(), - compacted: z.number().optional(), - }), - attachments: FilePart.array().optional(), - }) - .meta({ - ref: "ToolStateCompleted", - }) - export type ToolStateCompleted = z.infer - - export const ToolStateError = z - .object({ - status: z.literal("error"), - input: z.record(z.string(), z.any()), - error: z.string(), - metadata: z.record(z.string(), z.any()).optional(), - time: z.object({ - start: z.number(), - end: z.number(), - }), - }) - .meta({ - ref: "ToolStateError", - }) - export type ToolStateError = z.infer - - export const ToolState = z - .discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]) - .meta({ - ref: "ToolState", - }) - - export const ToolPart = PartBase.extend({ - type: z.literal("tool"), - callID: z.string(), - tool: z.string(), - state: ToolState, +export const ToolStateError = z + .object({ + status: z.literal("error"), + input: z.record(z.string(), z.any()), + error: z.string(), metadata: z.record(z.string(), z.any()).optional(), - }).meta({ - ref: "ToolPart", - }) - export type ToolPart = z.infer - - const Base = z.object({ - id: MessageID.zod, - sessionID: SessionID.zod, - }) - - export const User = Base.extend({ - role: z.literal("user"), time: z.object({ - created: z.number(), + start: z.number(), + end: z.number(), }), - format: Format.optional(), - summary: z - .object({ - title: z.string().optional(), - body: z.string().optional(), - diffs: Snapshot.FileDiff.array(), - }) - .optional(), - agent: z.string(), - model: z.object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, - variant: z.string().optional(), - }), - system: z.string().optional(), - tools: z.record(z.string(), z.boolean()).optional(), - }).meta({ - ref: "UserMessage", }) - export type User = z.infer + .meta({ + ref: "ToolStateError", + }) 
+export type ToolStateError = z.infer - export const Part = z - .discriminatedUnion("type", [ - TextPart, - SubtaskPart, - ReasoningPart, - FilePart, - ToolPart, - StepStartPart, - StepFinishPart, - SnapshotPart, - PatchPart, - AgentPart, - RetryPart, - CompactionPart, - ]) - .meta({ - ref: "Part", +export const ToolState = z + .discriminatedUnion("status", [ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]) + .meta({ + ref: "ToolState", + }) + +export const ToolPart = PartBase.extend({ + type: z.literal("tool"), + callID: z.string(), + tool: z.string(), + state: ToolState, + metadata: z.record(z.string(), z.any()).optional(), +}).meta({ + ref: "ToolPart", +}) +export type ToolPart = z.infer + +const Base = z.object({ + id: MessageID.zod, + sessionID: SessionID.zod, +}) + +export const User = Base.extend({ + role: z.literal("user"), + time: z.object({ + created: z.number(), + }), + format: Format.optional(), + summary: z + .object({ + title: z.string().optional(), + body: z.string().optional(), + diffs: Snapshot.FileDiff.array(), }) - export type Part = z.infer - - export const Assistant = Base.extend({ - role: z.literal("assistant"), - time: z.object({ - created: z.number(), - completed: z.number().optional(), - }), - error: z - .discriminatedUnion("name", [ - AuthError.Schema, - NamedError.Unknown.Schema, - OutputLengthError.Schema, - AbortedError.Schema, - StructuredOutputError.Schema, - ContextOverflowError.Schema, - APIError.Schema, - ]) - .optional(), - parentID: MessageID.zod, - modelID: ModelID.zod, + .optional(), + agent: z.string(), + model: z.object({ providerID: ProviderID.zod, - /** - * @deprecated - */ - mode: z.string(), - agent: z.string(), - path: z.object({ - cwd: z.string(), - root: z.string(), - }), - summary: z.boolean().optional(), - cost: z.number(), - tokens: z.object({ - total: z.number().optional(), - input: z.number(), - output: z.number(), - reasoning: z.number(), - cache: z.object({ - read: z.number(), - write: 
z.number(), - }), - }), - structured: z.any().optional(), + modelID: ModelID.zod, variant: z.string().optional(), - finish: z.string().optional(), - }).meta({ - ref: "AssistantMessage", + }), + system: z.string().optional(), + tools: z.record(z.string(), z.boolean()).optional(), +}).meta({ + ref: "UserMessage", +}) +export type User = z.infer + +export const Part = z + .discriminatedUnion("type", [ + TextPart, + SubtaskPart, + ReasoningPart, + FilePart, + ToolPart, + StepStartPart, + StepFinishPart, + SnapshotPart, + PatchPart, + AgentPart, + RetryPart, + CompactionPart, + ]) + .meta({ + ref: "Part", }) - export type Assistant = z.infer +export type Part = z.infer - export const Info = z.discriminatedUnion("role", [User, Assistant]).meta({ - ref: "Message", - }) - export type Info = z.infer - - export const Event = { - Updated: SyncEvent.define({ - type: "message.updated", - version: 1, - aggregate: "sessionID", - schema: z.object({ - sessionID: SessionID.zod, - info: Info, - }), +export const Assistant = Base.extend({ + role: z.literal("assistant"), + time: z.object({ + created: z.number(), + completed: z.number().optional(), + }), + error: z + .discriminatedUnion("name", [ + AuthError.Schema, + NamedError.Unknown.Schema, + OutputLengthError.Schema, + AbortedError.Schema, + StructuredOutputError.Schema, + ContextOverflowError.Schema, + APIError.Schema, + ]) + .optional(), + parentID: MessageID.zod, + modelID: ModelID.zod, + providerID: ProviderID.zod, + /** + * @deprecated + */ + mode: z.string(), + agent: z.string(), + path: z.object({ + cwd: z.string(), + root: z.string(), + }), + summary: z.boolean().optional(), + cost: z.number(), + tokens: z.object({ + total: z.number().optional(), + input: z.number(), + output: z.number(), + reasoning: z.number(), + cache: z.object({ + read: z.number(), + write: z.number(), }), - Removed: SyncEvent.define({ - type: "message.removed", - version: 1, - aggregate: "sessionID", - schema: z.object({ - sessionID: SessionID.zod, - 
messageID: MessageID.zod, - }), + }), + structured: z.any().optional(), + variant: z.string().optional(), + finish: z.string().optional(), +}).meta({ + ref: "AssistantMessage", +}) +export type Assistant = z.infer + +export const Info = z.discriminatedUnion("role", [User, Assistant]).meta({ + ref: "Message", +}) +export type Info = z.infer + +export const Event = { + Updated: SyncEvent.define({ + type: "message.updated", + version: 1, + aggregate: "sessionID", + schema: z.object({ + sessionID: SessionID.zod, + info: Info, }), - PartUpdated: SyncEvent.define({ - type: "message.part.updated", - version: 1, - aggregate: "sessionID", - schema: z.object({ - sessionID: SessionID.zod, - part: Part, - time: z.number(), - }), + }), + Removed: SyncEvent.define({ + type: "message.removed", + version: 1, + aggregate: "sessionID", + schema: z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod, }), - PartDelta: BusEvent.define( - "message.part.delta", - z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod, - partID: PartID.zod, - field: z.string(), - delta: z.string(), - }), - ), - PartRemoved: SyncEvent.define({ - type: "message.part.removed", - version: 1, - aggregate: "sessionID", - schema: z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod, - partID: PartID.zod, - }), + }), + PartUpdated: SyncEvent.define({ + type: "message.part.updated", + version: 1, + aggregate: "sessionID", + schema: z.object({ + sessionID: SessionID.zod, + part: Part, + time: z.number(), }), - } + }), + PartDelta: BusEvent.define( + "message.part.delta", + z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, + field: z.string(), + delta: z.string(), + }), + ), + PartRemoved: SyncEvent.define({ + type: "message.part.removed", + version: 1, + aggregate: "sessionID", + schema: z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, + }), + }), +} - export const WithParts = z.object({ - info: Info, 
- parts: z.array(Part), - }) - export type WithParts = z.infer +export const WithParts = z.object({ + info: Info, + parts: z.array(Part), +}) +export type WithParts = z.infer - const Cursor = z.object({ - id: MessageID.zod, - time: z.number(), - }) - type Cursor = z.infer +const Cursor = z.object({ + id: MessageID.zod, + time: z.number(), +}) +type Cursor = z.infer - export const cursor = { - encode(input: Cursor) { - return Buffer.from(JSON.stringify(input)).toString("base64url") - }, - decode(input: string) { - return Cursor.parse(JSON.parse(Buffer.from(input, "base64url").toString("utf8"))) - }, - } +export const cursor = { + encode(input: Cursor) { + return Buffer.from(JSON.stringify(input)).toString("base64url") + }, + decode(input: string) { + return Cursor.parse(JSON.parse(Buffer.from(input, "base64url").toString("utf8"))) + }, +} - const info = (row: typeof MessageTable.$inferSelect) => - ({ - ...row.data, - id: row.id, - sessionID: row.session_id, - }) as MessageV2.Info +const info = (row: typeof MessageTable.$inferSelect) => + ({ + ...row.data, + id: row.id, + sessionID: row.session_id, + }) as Info - const part = (row: typeof PartTable.$inferSelect) => - ({ - ...row.data, - id: row.id, - sessionID: row.session_id, - messageID: row.message_id, - }) as MessageV2.Part +const part = (row: typeof PartTable.$inferSelect) => + ({ + ...row.data, + id: row.id, + sessionID: row.session_id, + messageID: row.message_id, + }) as Part - const older = (row: Cursor) => - or( - lt(MessageTable.time_created, row.time), - and(eq(MessageTable.time_created, row.time), lt(MessageTable.id, row.id)), +const older = (row: Cursor) => + or( + lt(MessageTable.time_created, row.time), + and(eq(MessageTable.time_created, row.time), lt(MessageTable.id, row.id)), + ) + +function hydrate(rows: (typeof MessageTable.$inferSelect)[]) { + const ids = rows.map((row) => row.id) + const partByMessage = new Map() + if (ids.length > 0) { + const partRows = Database.use((db) => + db + .select() + 
.from(PartTable) + .where(inArray(PartTable.message_id, ids)) + .orderBy(PartTable.message_id, PartTable.id) + .all(), ) + for (const row of partRows) { + const next = part(row) + const list = partByMessage.get(row.message_id) + if (list) list.push(next) + else partByMessage.set(row.message_id, [next]) + } + } - function hydrate(rows: (typeof MessageTable.$inferSelect)[]) { - const ids = rows.map((row) => row.id) - const partByMessage = new Map() - if (ids.length > 0) { - const partRows = Database.use((db) => - db - .select() - .from(PartTable) - .where(inArray(PartTable.message_id, ids)) - .orderBy(PartTable.message_id, PartTable.id) - .all(), - ) - for (const row of partRows) { - const next = part(row) - const list = partByMessage.get(row.message_id) - if (list) list.push(next) - else partByMessage.set(row.message_id, [next]) + return rows.map((row) => ({ + info: info(row), + parts: partByMessage.get(row.id) ?? [], + })) +} + +function providerMeta(metadata: Record | undefined) { + if (!metadata) return undefined + const { providerExecuted: _, ...rest } = metadata + return Object.keys(rest).length > 0 ? rest : undefined +} + +export const toModelMessagesEffect = Effect.fnUntraced(function* ( + input: WithParts[], + model: Provider.Model, + options?: { stripMedia?: boolean }, +) { + const result: UIMessage[] = [] + const toolNames = new Set() + // Track media from tool results that need to be injected as user messages + // for providers that don't support media in tool results. + // + // OpenAI-compatible APIs only support string content in tool results, so we need + // to extract media and inject as user messages. Other SDKs (anthropic, google, + // bedrock) handle type: "content" with media parts natively. + // + // Only apply this workaround if the model actually supports image input - + // otherwise there's no point extracting images. 
+ const supportsMediaInToolResults = (() => { + if (model.api.npm === "@ai-sdk/anthropic") return true + if (model.api.npm === "@ai-sdk/openai") return true + if (model.api.npm === "@ai-sdk/amazon-bedrock") return true + if (model.api.npm === "@ai-sdk/google-vertex/anthropic") return true + if (model.api.npm === "@ai-sdk/google") { + const id = model.api.id.toLowerCase() + return id.includes("gemini-3") && !id.includes("gemini-2") + } + return false + })() + + const toModelOutput = (options: { toolCallId: string; input: unknown; output: unknown }) => { + const output = options.output + if (typeof output === "string") { + return { type: "text", value: output } + } + + if (typeof output === "object") { + const outputObject = output as { + text: string + attachments?: Array<{ mime: string; url: string }> + } + const attachments = (outputObject.attachments ?? []).filter((attachment) => { + return attachment.url.startsWith("data:") && attachment.url.includes(",") + }) + + return { + type: "content", + value: [ + { type: "text", text: outputObject.text }, + ...attachments.map((attachment) => ({ + type: "media", + mediaType: attachment.mime, + data: iife(() => { + const commaIndex = attachment.url.indexOf(",") + return commaIndex === -1 ? attachment.url : attachment.url.slice(commaIndex + 1) + }), + })), + ], } } - return rows.map((row) => ({ - info: info(row), - parts: partByMessage.get(row.id) ?? [], - })) + return { type: "json", value: output as never } } - function providerMeta(metadata: Record | undefined) { - if (!metadata) return undefined - const { providerExecuted: _, ...rest } = metadata - return Object.keys(rest).length > 0 ? 
rest : undefined - } + for (const msg of input) { + if (msg.parts.length === 0) continue - export const toModelMessagesEffect = Effect.fnUntraced(function* ( - input: WithParts[], - model: Provider.Model, - options?: { stripMedia?: boolean }, - ) { - const result: UIMessage[] = [] - const toolNames = new Set() - // Track media from tool results that need to be injected as user messages - // for providers that don't support media in tool results. - // - // OpenAI-compatible APIs only support string content in tool results, so we need - // to extract media and inject as user messages. Other SDKs (anthropic, google, - // bedrock) handle type: "content" with media parts natively. - // - // Only apply this workaround if the model actually supports image input - - // otherwise there's no point extracting images. - const supportsMediaInToolResults = (() => { - if (model.api.npm === "@ai-sdk/anthropic") return true - if (model.api.npm === "@ai-sdk/openai") return true - if (model.api.npm === "@ai-sdk/amazon-bedrock") return true - if (model.api.npm === "@ai-sdk/google-vertex/anthropic") return true - if (model.api.npm === "@ai-sdk/google") { - const id = model.api.id.toLowerCase() - return id.includes("gemini-3") && !id.includes("gemini-2") + if (msg.info.role === "user") { + const userMessage: UIMessage = { + id: msg.info.id, + role: "user", + parts: [], } - return false - })() - - const toModelOutput = (options: { toolCallId: string; input: unknown; output: unknown }) => { - const output = options.output - if (typeof output === "string") { - return { type: "text", value: output } - } - - if (typeof output === "object") { - const outputObject = output as { - text: string - attachments?: Array<{ mime: string; url: string }> - } - const attachments = (outputObject.attachments ?? 
[]).filter((attachment) => { - return attachment.url.startsWith("data:") && attachment.url.includes(",") - }) - - return { - type: "content", - value: [ - { type: "text", text: outputObject.text }, - ...attachments.map((attachment) => ({ - type: "media", - mediaType: attachment.mime, - data: iife(() => { - const commaIndex = attachment.url.indexOf(",") - return commaIndex === -1 ? attachment.url : attachment.url.slice(commaIndex + 1) - }), - })), - ], - } - } - - return { type: "json", value: output as never } - } - - for (const msg of input) { - if (msg.parts.length === 0) continue - - if (msg.info.role === "user") { - const userMessage: UIMessage = { - id: msg.info.id, - role: "user", - parts: [], - } - result.push(userMessage) - for (const part of msg.parts) { - if (part.type === "text" && !part.ignored) + result.push(userMessage) + for (const part of msg.parts) { + if (part.type === "text" && !part.ignored) + userMessage.parts.push({ + type: "text", + text: part.text, + }) + // text/plain and directory files are converted into text parts, ignore them + if (part.type === "file" && part.mime !== "text/plain" && part.mime !== "application/x-directory") { + if (options?.stripMedia && isMedia(part.mime)) { userMessage.parts.push({ type: "text", - text: part.text, + text: `[Attached ${part.mime}: ${part.filename ?? "file"}]`, }) - // text/plain and directory files are converted into text parts, ignore them - if (part.type === "file" && part.mime !== "text/plain" && part.mime !== "application/x-directory") { - if (options?.stripMedia && isMedia(part.mime)) { - userMessage.parts.push({ - type: "text", - text: `[Attached ${part.mime}: ${part.filename ?? 
"file"}]`, - }) - } else { - userMessage.parts.push({ - type: "file", - url: part.url, - mediaType: part.mime, - filename: part.filename, - }) + } else { + userMessage.parts.push({ + type: "file", + url: part.url, + mediaType: part.mime, + filename: part.filename, + }) + } + } + + if (part.type === "compaction") { + userMessage.parts.push({ + type: "text", + text: "What did we do so far?", + }) + } + if (part.type === "subtask") { + userMessage.parts.push({ + type: "text", + text: "The following tool was executed by the user", + }) + } + } + } + + if (msg.info.role === "assistant") { + const differentModel = `${model.providerID}/${model.id}` !== `${msg.info.providerID}/${msg.info.modelID}` + const media: Array<{ mime: string; url: string }> = [] + + if ( + msg.info.error && + !( + AbortedError.isInstance(msg.info.error) && + msg.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning") + ) + ) { + continue + } + const assistantMessage: UIMessage = { + id: msg.info.id, + role: "assistant", + parts: [], + } + for (const part of msg.parts) { + if (part.type === "text") + assistantMessage.parts.push({ + type: "text", + text: part.text, + ...(differentModel ? {} : { providerMetadata: part.metadata }), + }) + if (part.type === "step-start") + assistantMessage.parts.push({ + type: "step-start", + }) + if (part.type === "tool") { + toolNames.add(part.tool) + if (part.state.status === "completed") { + const outputText = part.state.time.compacted ? "[Old tool result content cleared]" : part.state.output + const attachments = part.state.time.compacted || options?.stripMedia ? [] : (part.state.attachments ?? 
[]) + + // For providers that don't support media in tool results, extract media files + // (images, PDFs) to be sent as a separate user message + const mediaAttachments = attachments.filter((a) => isMedia(a.mime)) + const nonMediaAttachments = attachments.filter((a) => !isMedia(a.mime)) + if (!supportsMediaInToolResults && mediaAttachments.length > 0) { + media.push(...mediaAttachments) } - } + const finalAttachments = supportsMediaInToolResults ? attachments : nonMediaAttachments - if (part.type === "compaction") { - userMessage.parts.push({ - type: "text", - text: "What did we do so far?", - }) - } - if (part.type === "subtask") { - userMessage.parts.push({ - type: "text", - text: "The following tool was executed by the user", - }) - } - } - } + const output = + finalAttachments.length > 0 + ? { + text: outputText, + attachments: finalAttachments, + } + : outputText - if (msg.info.role === "assistant") { - const differentModel = `${model.providerID}/${model.id}` !== `${msg.info.providerID}/${msg.info.modelID}` - const media: Array<{ mime: string; url: string }> = [] - - if ( - msg.info.error && - !( - MessageV2.AbortedError.isInstance(msg.info.error) && - msg.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning") - ) - ) { - continue - } - const assistantMessage: UIMessage = { - id: msg.info.id, - role: "assistant", - parts: [], - } - for (const part of msg.parts) { - if (part.type === "text") assistantMessage.parts.push({ - type: "text", - text: part.text, - ...(differentModel ? {} : { providerMetadata: part.metadata }), + type: ("tool-" + part.tool) as `tool-${string}`, + state: "output-available", + toolCallId: part.callID, + input: part.state.input, + output, + ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), + ...(differentModel ? 
{} : { callProviderMetadata: providerMeta(part.metadata) }), }) - if (part.type === "step-start") - assistantMessage.parts.push({ - type: "step-start", - }) - if (part.type === "tool") { - toolNames.add(part.tool) - if (part.state.status === "completed") { - const outputText = part.state.time.compacted ? "[Old tool result content cleared]" : part.state.output - const attachments = part.state.time.compacted || options?.stripMedia ? [] : (part.state.attachments ?? []) - - // For providers that don't support media in tool results, extract media files - // (images, PDFs) to be sent as a separate user message - const mediaAttachments = attachments.filter((a) => isMedia(a.mime)) - const nonMediaAttachments = attachments.filter((a) => !isMedia(a.mime)) - if (!supportsMediaInToolResults && mediaAttachments.length > 0) { - media.push(...mediaAttachments) - } - const finalAttachments = supportsMediaInToolResults ? attachments : nonMediaAttachments - - const output = - finalAttachments.length > 0 - ? { - text: outputText, - attachments: finalAttachments, - } - : outputText - + } + if (part.state.status === "error") { + const output = part.state.metadata?.interrupted === true ? part.state.metadata.output : undefined + if (typeof output === "string") { assistantMessage.parts.push({ type: ("tool-" + part.tool) as `tool-${string}`, state: "output-available", @@ -753,305 +765,293 @@ export namespace MessageV2 { ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), }) - } - if (part.state.status === "error") { - const output = part.state.metadata?.interrupted === true ? part.state.metadata.output : undefined - if (typeof output === "string") { - assistantMessage.parts.push({ - type: ("tool-" + part.tool) as `tool-${string}`, - state: "output-available", - toolCallId: part.callID, - input: part.state.input, - output, - ...(part.metadata?.providerExecuted ? 
{ providerExecuted: true } : {}), - ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), - }) - } else { - assistantMessage.parts.push({ - type: ("tool-" + part.tool) as `tool-${string}`, - state: "output-error", - toolCallId: part.callID, - input: part.state.input, - errorText: part.state.error, - ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), - ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), - }) - } - } - // Handle pending/running tool calls to prevent dangling tool_use blocks - // Anthropic/Claude APIs require every tool_use to have a corresponding tool_result - if (part.state.status === "pending" || part.state.status === "running") + } else { assistantMessage.parts.push({ type: ("tool-" + part.tool) as `tool-${string}`, state: "output-error", toolCallId: part.callID, input: part.state.input, - errorText: "[Tool execution was interrupted]", + errorText: part.state.error, ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), }) - } - if (part.type === "reasoning") { - assistantMessage.parts.push({ - type: "reasoning", - text: part.text, - ...(differentModel ? 
{} : { providerMetadata: part.metadata }), - }) - } - } - if (assistantMessage.parts.length > 0) { - result.push(assistantMessage) - // Inject pending media as a user message for providers that don't support - // media (images, PDFs) in tool results - if (media.length > 0) { - result.push({ - id: MessageID.ascending(), - role: "user", - parts: [ - { - type: "text" as const, - text: SYNTHETIC_ATTACHMENT_PROMPT, - }, - ...media.map((attachment) => ({ - type: "file" as const, - url: attachment.url, - mediaType: attachment.mime, - })), - ], - }) - } - } - } - } - - const tools = Object.fromEntries(Array.from(toolNames).map((toolName) => [toolName, { toModelOutput }])) - - return yield* Effect.promise(() => - convertToModelMessages( - result.filter((msg) => msg.parts.some((part) => part.type !== "step-start")), - { - //@ts-expect-error (convertToModelMessages expects a ToolSet but only actually needs tools[name]?.toModelOutput) - tools, - }, - ), - ) - }) - - export function toModelMessages( - input: WithParts[], - model: Provider.Model, - options?: { stripMedia?: boolean }, - ): Promise { - return Effect.runPromise(toModelMessagesEffect(input, model, options).pipe(Effect.provide(EffectLogger.layer))) - } - - export function page(input: { sessionID: SessionID; limit: number; before?: string }) { - const before = input.before ? cursor.decode(input.before) : undefined - const where = before - ? 
and(eq(MessageTable.session_id, input.sessionID), older(before)) - : eq(MessageTable.session_id, input.sessionID) - const rows = Database.use((db) => - db - .select() - .from(MessageTable) - .where(where) - .orderBy(desc(MessageTable.time_created), desc(MessageTable.id)) - .limit(input.limit + 1) - .all(), - ) - if (rows.length === 0) { - const row = Database.use((db) => - db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.id, input.sessionID)).get(), - ) - if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` }) - return { - items: [] as MessageV2.WithParts[], - more: false, - } - } - - const more = rows.length > input.limit - const slice = more ? rows.slice(0, input.limit) : rows - const items = hydrate(slice) - items.reverse() - const tail = slice.at(-1) - return { - items, - more, - cursor: more && tail ? cursor.encode({ id: tail.id, time: tail.time_created }) : undefined, - } - } - - export function* stream(sessionID: SessionID) { - const size = 50 - let before: string | undefined - while (true) { - const next = page({ sessionID, limit: size, before }) - if (next.items.length === 0) break - for (let i = next.items.length - 1; i >= 0; i--) { - yield next.items[i] - } - if (!next.more || !next.cursor) break - before = next.cursor - } - } - - export function parts(message_id: MessageID) { - const rows = Database.use((db) => - db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(), - ) - return rows.map( - (row) => - ({ - ...row.data, - id: row.id, - sessionID: row.session_id, - messageID: row.message_id, - }) as MessageV2.Part, - ) - } - - export function get(input: { sessionID: SessionID; messageID: MessageID }): WithParts { - const row = Database.use((db) => - db - .select() - .from(MessageTable) - .where(and(eq(MessageTable.id, input.messageID), eq(MessageTable.session_id, input.sessionID))) - .get(), - ) - if (!row) throw new NotFoundError({ message: `Message 
not found: ${input.messageID}` }) - return { - info: info(row), - parts: parts(input.messageID), - } - } - - export function filterCompacted(msgs: Iterable) { - const result = [] as MessageV2.WithParts[] - const completed = new Set() - for (const msg of msgs) { - result.push(msg) - if ( - msg.info.role === "user" && - completed.has(msg.info.id) && - msg.parts.some((part) => part.type === "compaction") - ) - break - if (msg.info.role === "assistant" && msg.info.summary && msg.info.finish && !msg.info.error) - completed.add(msg.info.parentID) - } - result.reverse() - return result - } - - export const filterCompactedEffect = Effect.fnUntraced(function* (sessionID: SessionID) { - return filterCompacted(stream(sessionID)) - }) - - export function fromError( - e: unknown, - ctx: { providerID: ProviderID; aborted?: boolean }, - ): NonNullable { - switch (true) { - case e instanceof DOMException && e.name === "AbortError": - return new MessageV2.AbortedError( - { message: e.message }, - { - cause: e, - }, - ).toObject() - case MessageV2.OutputLengthError.isInstance(e): - return e - case LoadAPIKeyError.isInstance(e): - return new MessageV2.AuthError( - { - providerID: ctx.providerID, - message: e.message, - }, - { cause: e }, - ).toObject() - case (e as SystemError)?.code === "ECONNRESET": - return new MessageV2.APIError( - { - message: "Connection reset by server", - isRetryable: true, - metadata: { - code: (e as SystemError).code ?? "", - syscall: (e as SystemError).syscall ?? "", - message: (e as SystemError).message ?? 
"", - }, - }, - { cause: e }, - ).toObject() - case e instanceof Error && (e as FetchDecompressionError).code === "ZlibError": - if (ctx.aborted) { - return new MessageV2.AbortedError({ message: e.message }, { cause: e }).toObject() - } - return new MessageV2.APIError( - { - message: "Response decompression failed", - isRetryable: true, - metadata: { - code: (e as FetchDecompressionError).code, - message: e.message, - }, - }, - { cause: e }, - ).toObject() - case APICallError.isInstance(e): - const parsed = ProviderError.parseAPICallError({ - providerID: ctx.providerID, - error: e, - }) - if (parsed.type === "context_overflow") { - return new MessageV2.ContextOverflowError( - { - message: parsed.message, - responseBody: parsed.responseBody, - }, - { cause: e }, - ).toObject() - } - - return new MessageV2.APIError( - { - message: parsed.message, - statusCode: parsed.statusCode, - isRetryable: parsed.isRetryable, - responseHeaders: parsed.responseHeaders, - responseBody: parsed.responseBody, - metadata: parsed.metadata, - }, - { cause: e }, - ).toObject() - case e instanceof Error: - return new NamedError.Unknown({ message: errorMessage(e) }, { cause: e }).toObject() - default: - try { - const parsed = ProviderError.parseStreamError(e) - if (parsed) { - if (parsed.type === "context_overflow") { - return new MessageV2.ContextOverflowError( - { - message: parsed.message, - responseBody: parsed.responseBody, - }, - { cause: e }, - ).toObject() } - return new MessageV2.APIError( - { - message: parsed.message, - isRetryable: parsed.isRetryable, - responseBody: parsed.responseBody, - }, - { - cause: e, - }, - ).toObject() } - } catch {} - return new NamedError.Unknown({ message: JSON.stringify(e) }, { cause: e }).toObject() + // Handle pending/running tool calls to prevent dangling tool_use blocks + // Anthropic/Claude APIs require every tool_use to have a corresponding tool_result + if (part.state.status === "pending" || part.state.status === "running") + 
assistantMessage.parts.push({ + type: ("tool-" + part.tool) as `tool-${string}`, + state: "output-error", + toolCallId: part.callID, + input: part.state.input, + errorText: "[Tool execution was interrupted]", + ...(part.metadata?.providerExecuted ? { providerExecuted: true } : {}), + ...(differentModel ? {} : { callProviderMetadata: providerMeta(part.metadata) }), + }) + } + if (part.type === "reasoning") { + assistantMessage.parts.push({ + type: "reasoning", + text: part.text, + ...(differentModel ? {} : { providerMetadata: part.metadata }), + }) + } + } + if (assistantMessage.parts.length > 0) { + result.push(assistantMessage) + // Inject pending media as a user message for providers that don't support + // media (images, PDFs) in tool results + if (media.length > 0) { + result.push({ + id: MessageID.ascending(), + role: "user", + parts: [ + { + type: "text" as const, + text: SYNTHETIC_ATTACHMENT_PROMPT, + }, + ...media.map((attachment) => ({ + type: "file" as const, + url: attachment.url, + mediaType: attachment.mime, + })), + ], + }) + } + } } } + + const tools = Object.fromEntries(Array.from(toolNames).map((toolName) => [toolName, { toModelOutput }])) + + return yield* Effect.promise(() => + convertToModelMessages( + result.filter((msg) => msg.parts.some((part) => part.type !== "step-start")), + { + //@ts-expect-error (convertToModelMessages expects a ToolSet but only actually needs tools[name]?.toModelOutput) + tools, + }, + ), + ) +}) + +export function toModelMessages( + input: WithParts[], + model: Provider.Model, + options?: { stripMedia?: boolean }, +): Promise { + return Effect.runPromise(toModelMessagesEffect(input, model, options).pipe(Effect.provide(EffectLogger.layer))) +} + +export function page(input: { sessionID: SessionID; limit: number; before?: string }) { + const before = input.before ? cursor.decode(input.before) : undefined + const where = before + ? 
and(eq(MessageTable.session_id, input.sessionID), older(before)) + : eq(MessageTable.session_id, input.sessionID) + const rows = Database.use((db) => + db + .select() + .from(MessageTable) + .where(where) + .orderBy(desc(MessageTable.time_created), desc(MessageTable.id)) + .limit(input.limit + 1) + .all(), + ) + if (rows.length === 0) { + const row = Database.use((db) => + db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.id, input.sessionID)).get(), + ) + if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` }) + return { + items: [] as WithParts[], + more: false, + } + } + + const more = rows.length > input.limit + const slice = more ? rows.slice(0, input.limit) : rows + const items = hydrate(slice) + items.reverse() + const tail = slice.at(-1) + return { + items, + more, + cursor: more && tail ? cursor.encode({ id: tail.id, time: tail.time_created }) : undefined, + } } + +export function* stream(sessionID: SessionID) { + const size = 50 + let before: string | undefined + while (true) { + const next = page({ sessionID, limit: size, before }) + if (next.items.length === 0) break + for (let i = next.items.length - 1; i >= 0; i--) { + yield next.items[i] + } + if (!next.more || !next.cursor) break + before = next.cursor + } +} + +export function parts(message_id: MessageID) { + const rows = Database.use((db) => + db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(), + ) + return rows.map( + (row) => + ({ + ...row.data, + id: row.id, + sessionID: row.session_id, + messageID: row.message_id, + }) as Part, + ) +} + +export function get(input: { sessionID: SessionID; messageID: MessageID }): WithParts { + const row = Database.use((db) => + db + .select() + .from(MessageTable) + .where(and(eq(MessageTable.id, input.messageID), eq(MessageTable.session_id, input.sessionID))) + .get(), + ) + if (!row) throw new NotFoundError({ message: `Message not found: 
${input.messageID}` }) + return { + info: info(row), + parts: parts(input.messageID), + } +} + +export function filterCompacted(msgs: Iterable) { + const result = [] as WithParts[] + const completed = new Set() + for (const msg of msgs) { + result.push(msg) + if ( + msg.info.role === "user" && + completed.has(msg.info.id) && + msg.parts.some((part) => part.type === "compaction") + ) + break + if (msg.info.role === "assistant" && msg.info.summary && msg.info.finish && !msg.info.error) + completed.add(msg.info.parentID) + } + result.reverse() + return result +} + +export const filterCompactedEffect = Effect.fnUntraced(function* (sessionID: SessionID) { + return filterCompacted(stream(sessionID)) +}) + +export function fromError( + e: unknown, + ctx: { providerID: ProviderID; aborted?: boolean }, +): NonNullable { + switch (true) { + case e instanceof DOMException && e.name === "AbortError": + return new AbortedError( + { message: e.message }, + { + cause: e, + }, + ).toObject() + case OutputLengthError.isInstance(e): + return e + case LoadAPIKeyError.isInstance(e): + return new AuthError( + { + providerID: ctx.providerID, + message: e.message, + }, + { cause: e }, + ).toObject() + case (e as SystemError)?.code === "ECONNRESET": + return new APIError( + { + message: "Connection reset by server", + isRetryable: true, + metadata: { + code: (e as SystemError).code ?? "", + syscall: (e as SystemError).syscall ?? "", + message: (e as SystemError).message ?? 
"", + }, + }, + { cause: e }, + ).toObject() + case e instanceof Error && (e as FetchDecompressionError).code === "ZlibError": + if (ctx.aborted) { + return new AbortedError({ message: e.message }, { cause: e }).toObject() + } + return new APIError( + { + message: "Response decompression failed", + isRetryable: true, + metadata: { + code: (e as FetchDecompressionError).code, + message: e.message, + }, + }, + { cause: e }, + ).toObject() + case APICallError.isInstance(e): + const parsed = ProviderError.parseAPICallError({ + providerID: ctx.providerID, + error: e, + }) + if (parsed.type === "context_overflow") { + return new ContextOverflowError( + { + message: parsed.message, + responseBody: parsed.responseBody, + }, + { cause: e }, + ).toObject() + } + + return new APIError( + { + message: parsed.message, + statusCode: parsed.statusCode, + isRetryable: parsed.isRetryable, + responseHeaders: parsed.responseHeaders, + responseBody: parsed.responseBody, + metadata: parsed.metadata, + }, + { cause: e }, + ).toObject() + case e instanceof Error: + return new NamedError.Unknown({ message: errorMessage(e) }, { cause: e }).toObject() + default: + try { + const parsed = ProviderError.parseStreamError(e) + if (parsed) { + if (parsed.type === "context_overflow") { + return new ContextOverflowError( + { + message: parsed.message, + responseBody: parsed.responseBody, + }, + { cause: e }, + ).toObject() + } + return new APIError( + { + message: parsed.message, + isRetryable: parsed.isRetryable, + responseBody: parsed.responseBody, + }, + { + cause: e, + }, + ).toObject() + } + } catch {} + return new NamedError.Unknown({ message: JSON.stringify(e) }, { cause: e }).toObject() + } +} + +export * as MessageV2 from "./message-v2" diff --git a/packages/opencode/src/session/message.ts b/packages/opencode/src/session/message.ts index 396034825a..ced04b8e9d 100644 --- a/packages/opencode/src/session/message.ts +++ b/packages/opencode/src/session/message.ts @@ -3,189 +3,189 @@ import { 
SessionID } from "./schema" import { ModelID, ProviderID } from "../provider/schema" import { NamedError } from "@opencode-ai/shared/util/error" -export namespace Message { - export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) - export const AuthError = NamedError.create( - "ProviderAuthError", - z.object({ - providerID: z.string(), - message: z.string(), - }), - ) +export const OutputLengthError = NamedError.create("MessageOutputLengthError", z.object({})) +export const AuthError = NamedError.create( + "ProviderAuthError", + z.object({ + providerID: z.string(), + message: z.string(), + }), +) - export const ToolCall = z - .object({ - state: z.literal("call"), - step: z.number().optional(), - toolCallId: z.string(), - toolName: z.string(), - args: z.custom>(), - }) - .meta({ - ref: "ToolCall", - }) - export type ToolCall = z.infer - - export const ToolPartialCall = z - .object({ - state: z.literal("partial-call"), - step: z.number().optional(), - toolCallId: z.string(), - toolName: z.string(), - args: z.custom>(), - }) - .meta({ - ref: "ToolPartialCall", - }) - export type ToolPartialCall = z.infer - - export const ToolResult = z - .object({ - state: z.literal("result"), - step: z.number().optional(), - toolCallId: z.string(), - toolName: z.string(), - args: z.custom>(), - result: z.string(), - }) - .meta({ - ref: "ToolResult", - }) - export type ToolResult = z.infer - - export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).meta({ - ref: "ToolInvocation", +export const ToolCall = z + .object({ + state: z.literal("call"), + step: z.number().optional(), + toolCallId: z.string(), + toolName: z.string(), + args: z.custom>(), }) - export type ToolInvocation = z.infer + .meta({ + ref: "ToolCall", + }) +export type ToolCall = z.infer - export const TextPart = z - .object({ - type: z.literal("text"), - text: z.string(), - }) - .meta({ - ref: "TextPart", - }) - export type TextPart = 
z.infer +export const ToolPartialCall = z + .object({ + state: z.literal("partial-call"), + step: z.number().optional(), + toolCallId: z.string(), + toolName: z.string(), + args: z.custom>(), + }) + .meta({ + ref: "ToolPartialCall", + }) +export type ToolPartialCall = z.infer - export const ReasoningPart = z - .object({ - type: z.literal("reasoning"), - text: z.string(), - providerMetadata: z.record(z.string(), z.any()).optional(), - }) - .meta({ - ref: "ReasoningPart", - }) - export type ReasoningPart = z.infer +export const ToolResult = z + .object({ + state: z.literal("result"), + step: z.number().optional(), + toolCallId: z.string(), + toolName: z.string(), + args: z.custom>(), + result: z.string(), + }) + .meta({ + ref: "ToolResult", + }) +export type ToolResult = z.infer - export const ToolInvocationPart = z - .object({ - type: z.literal("tool-invocation"), - toolInvocation: ToolInvocation, - }) - .meta({ - ref: "ToolInvocationPart", - }) - export type ToolInvocationPart = z.infer +export const ToolInvocation = z.discriminatedUnion("state", [ToolCall, ToolPartialCall, ToolResult]).meta({ + ref: "ToolInvocation", +}) +export type ToolInvocation = z.infer - export const SourceUrlPart = z - .object({ - type: z.literal("source-url"), - sourceId: z.string(), - url: z.string(), - title: z.string().optional(), - providerMetadata: z.record(z.string(), z.any()).optional(), - }) - .meta({ - ref: "SourceUrlPart", - }) - export type SourceUrlPart = z.infer +export const TextPart = z + .object({ + type: z.literal("text"), + text: z.string(), + }) + .meta({ + ref: "TextPart", + }) +export type TextPart = z.infer - export const FilePart = z - .object({ - type: z.literal("file"), - mediaType: z.string(), - filename: z.string().optional(), - url: z.string(), - }) - .meta({ - ref: "FilePart", - }) - export type FilePart = z.infer +export const ReasoningPart = z + .object({ + type: z.literal("reasoning"), + text: z.string(), + providerMetadata: z.record(z.string(), 
z.any()).optional(), + }) + .meta({ + ref: "ReasoningPart", + }) +export type ReasoningPart = z.infer - export const StepStartPart = z - .object({ - type: z.literal("step-start"), - }) - .meta({ - ref: "StepStartPart", - }) - export type StepStartPart = z.infer +export const ToolInvocationPart = z + .object({ + type: z.literal("tool-invocation"), + toolInvocation: ToolInvocation, + }) + .meta({ + ref: "ToolInvocationPart", + }) +export type ToolInvocationPart = z.infer - export const MessagePart = z - .discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart]) - .meta({ - ref: "MessagePart", - }) - export type MessagePart = z.infer +export const SourceUrlPart = z + .object({ + type: z.literal("source-url"), + sourceId: z.string(), + url: z.string(), + title: z.string().optional(), + providerMetadata: z.record(z.string(), z.any()).optional(), + }) + .meta({ + ref: "SourceUrlPart", + }) +export type SourceUrlPart = z.infer - export const Info = z - .object({ - id: z.string(), - role: z.enum(["user", "assistant"]), - parts: z.array(MessagePart), - metadata: z - .object({ - time: z.object({ - created: z.number(), - completed: z.number().optional(), - }), - error: z - .discriminatedUnion("name", [AuthError.Schema, NamedError.Unknown.Schema, OutputLengthError.Schema]) - .optional(), - sessionID: SessionID.zod, - tool: z.record( - z.string(), - z - .object({ - title: z.string(), - snapshot: z.string().optional(), - time: z.object({ - start: z.number(), - end: z.number(), - }), - }) - .catchall(z.any()), - ), - assistant: z +export const FilePart = z + .object({ + type: z.literal("file"), + mediaType: z.string(), + filename: z.string().optional(), + url: z.string(), + }) + .meta({ + ref: "FilePart", + }) +export type FilePart = z.infer + +export const StepStartPart = z + .object({ + type: z.literal("step-start"), + }) + .meta({ + ref: "StepStartPart", + }) +export type StepStartPart = z.infer + +export const 
MessagePart = z + .discriminatedUnion("type", [TextPart, ReasoningPart, ToolInvocationPart, SourceUrlPart, FilePart, StepStartPart]) + .meta({ + ref: "MessagePart", + }) +export type MessagePart = z.infer + +export const Info = z + .object({ + id: z.string(), + role: z.enum(["user", "assistant"]), + parts: z.array(MessagePart), + metadata: z + .object({ + time: z.object({ + created: z.number(), + completed: z.number().optional(), + }), + error: z + .discriminatedUnion("name", [AuthError.Schema, NamedError.Unknown.Schema, OutputLengthError.Schema]) + .optional(), + sessionID: SessionID.zod, + tool: z.record( + z.string(), + z .object({ - system: z.string().array(), - modelID: ModelID.zod, - providerID: ProviderID.zod, - path: z.object({ - cwd: z.string(), - root: z.string(), - }), - cost: z.number(), - summary: z.boolean().optional(), - tokens: z.object({ - input: z.number(), - output: z.number(), - reasoning: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), + title: z.string(), + snapshot: z.string().optional(), + time: z.object({ + start: z.number(), + end: z.number(), }), }) - .optional(), - snapshot: z.string().optional(), - }) - .meta({ ref: "MessageMetadata" }), - }) - .meta({ - ref: "Message", - }) - export type Info = z.infer -} + .catchall(z.any()), + ), + assistant: z + .object({ + system: z.string().array(), + modelID: ModelID.zod, + providerID: ProviderID.zod, + path: z.object({ + cwd: z.string(), + root: z.string(), + }), + cost: z.number(), + summary: z.boolean().optional(), + tokens: z.object({ + input: z.number(), + output: z.number(), + reasoning: z.number(), + cache: z.object({ + read: z.number(), + write: z.number(), + }), + }), + }) + .optional(), + snapshot: z.string().optional(), + }) + .meta({ ref: "MessageMetadata" }), + }) + .meta({ + ref: "Message", + }) +export type Info = z.infer + +export * as Message from "./message" diff --git a/packages/opencode/src/session/processor.ts 
b/packages/opencode/src/session/processor.ts index 415639fbe5..820c61aa91 100644 --- a/packages/opencode/src/session/processor.ts +++ b/packages/opencode/src/session/processor.ts @@ -21,599 +21,599 @@ import { errorMessage } from "@/util/error" import { Log } from "@/util" import { isRecord } from "@/util/record" -export namespace SessionProcessor { - const DOOM_LOOP_THRESHOLD = 3 - const log = Log.create({ service: "session.processor" }) +const DOOM_LOOP_THRESHOLD = 3 +const log = Log.create({ service: "session.processor" }) - export type Result = "compact" | "stop" | "continue" +export type Result = "compact" | "stop" | "continue" - export type Event = LLM.Event +export type Event = LLM.Event - export interface Handle { - readonly message: MessageV2.Assistant - readonly updateToolCall: ( - toolCallID: string, - update: (part: MessageV2.ToolPart) => MessageV2.ToolPart, - ) => Effect.Effect - readonly completeToolCall: ( - toolCallID: string, - output: { - title: string - metadata: Record - output: string - attachments?: MessageV2.FilePart[] - }, - ) => Effect.Effect - readonly process: (streamInput: LLM.StreamInput) => Effect.Effect - } +export interface Handle { + readonly message: MessageV2.Assistant + readonly updateToolCall: ( + toolCallID: string, + update: (part: MessageV2.ToolPart) => MessageV2.ToolPart, + ) => Effect.Effect + readonly completeToolCall: ( + toolCallID: string, + output: { + title: string + metadata: Record + output: string + attachments?: MessageV2.FilePart[] + }, + ) => Effect.Effect + readonly process: (streamInput: LLM.StreamInput) => Effect.Effect +} - type Input = { - assistantMessage: MessageV2.Assistant - sessionID: SessionID - model: Provider.Model - } +type Input = { + assistantMessage: MessageV2.Assistant + sessionID: SessionID + model: Provider.Model +} - export interface Interface { - readonly create: (input: Input) => Effect.Effect - } +export interface Interface { + readonly create: (input: Input) => Effect.Effect +} - type 
ToolCall = { - partID: MessageV2.ToolPart["id"] - messageID: MessageV2.ToolPart["messageID"] - sessionID: MessageV2.ToolPart["sessionID"] - done: Deferred.Deferred - } +type ToolCall = { + partID: MessageV2.ToolPart["id"] + messageID: MessageV2.ToolPart["messageID"] + sessionID: MessageV2.ToolPart["sessionID"] + done: Deferred.Deferred +} - interface ProcessorContext extends Input { - toolcalls: Record - shouldBreak: boolean - snapshot: string | undefined - blocked: boolean - needsCompaction: boolean - currentText: MessageV2.TextPart | undefined - reasoningMap: Record - } +interface ProcessorContext extends Input { + toolcalls: Record + shouldBreak: boolean + snapshot: string | undefined + blocked: boolean + needsCompaction: boolean + currentText: MessageV2.TextPart | undefined + reasoningMap: Record +} - type StreamEvent = Event +type StreamEvent = Event - export class Service extends Context.Service()("@opencode/SessionProcessor") {} +export class Service extends Context.Service()("@opencode/SessionProcessor") {} - export const layer: Layer.Layer< - Service, - never, - | Session.Service - | Config.Service - | Bus.Service - | Snapshot.Service - | Agent.Service - | LLM.Service - | Permission.Service - | Plugin.Service - | SessionSummary.Service - | SessionStatus.Service - > = Layer.effect( - Service, - Effect.gen(function* () { - const session = yield* Session.Service - const config = yield* Config.Service - const bus = yield* Bus.Service - const snapshot = yield* Snapshot.Service - const agents = yield* Agent.Service - const llm = yield* LLM.Service - const permission = yield* Permission.Service - const plugin = yield* Plugin.Service - const summary = yield* SessionSummary.Service - const scope = yield* Scope.Scope - const status = yield* SessionStatus.Service +export const layer: Layer.Layer< + Service, + never, + | Session.Service + | Config.Service + | Bus.Service + | Snapshot.Service + | Agent.Service + | LLM.Service + | Permission.Service + | Plugin.Service + 
| SessionSummary.Service + | SessionStatus.Service +> = Layer.effect( + Service, + Effect.gen(function* () { + const session = yield* Session.Service + const config = yield* Config.Service + const bus = yield* Bus.Service + const snapshot = yield* Snapshot.Service + const agents = yield* Agent.Service + const llm = yield* LLM.Service + const permission = yield* Permission.Service + const plugin = yield* Plugin.Service + const summary = yield* SessionSummary.Service + const scope = yield* Scope.Scope + const status = yield* SessionStatus.Service - const create = Effect.fn("SessionProcessor.create")(function* (input: Input) { - // Pre-capture snapshot before the LLM stream starts. The AI SDK - // may execute tools internally before emitting start-step events, - // so capturing inside the event handler can be too late. - const initialSnapshot = yield* snapshot.track() - const ctx: ProcessorContext = { - assistantMessage: input.assistantMessage, - sessionID: input.sessionID, - model: input.model, - toolcalls: {}, - shouldBreak: false, - snapshot: initialSnapshot, - blocked: false, - needsCompaction: false, - currentText: undefined, - reasoningMap: {}, - } - let aborted = false - const slog = log.clone().tag("sessionID", input.sessionID).tag("messageID", input.assistantMessage.id) + const create = Effect.fn("SessionProcessor.create")(function* (input: Input) { + // Pre-capture snapshot before the LLM stream starts. The AI SDK + // may execute tools internally before emitting start-step events, + // so capturing inside the event handler can be too late. 
+ const initialSnapshot = yield* snapshot.track() + const ctx: ProcessorContext = { + assistantMessage: input.assistantMessage, + sessionID: input.sessionID, + model: input.model, + toolcalls: {}, + shouldBreak: false, + snapshot: initialSnapshot, + blocked: false, + needsCompaction: false, + currentText: undefined, + reasoningMap: {}, + } + let aborted = false + const slog = log.clone().tag("sessionID", input.sessionID).tag("messageID", input.assistantMessage.id) - const parse = (e: unknown) => - MessageV2.fromError(e, { - providerID: input.model.providerID, - aborted, - }) - - const settleToolCall = Effect.fn("SessionProcessor.settleToolCall")(function* (toolCallID: string) { - const done = ctx.toolcalls[toolCallID]?.done - delete ctx.toolcalls[toolCallID] - if (done) yield* Deferred.succeed(done, undefined).pipe(Effect.ignore) + const parse = (e: unknown) => + MessageV2.fromError(e, { + providerID: input.model.providerID, + aborted, }) - const readToolCall = Effect.fn("SessionProcessor.readToolCall")(function* (toolCallID: string) { - const call = ctx.toolcalls[toolCallID] - if (!call) return - const part = yield* session.getPart({ - partID: call.partID, - messageID: call.messageID, - sessionID: call.sessionID, - }) - if (!part || part.type !== "tool") { - delete ctx.toolcalls[toolCallID] - return - } - return { call, part } - }) - - const updateToolCall = Effect.fn("SessionProcessor.updateToolCall")(function* ( - toolCallID: string, - update: (part: MessageV2.ToolPart) => MessageV2.ToolPart, - ) { - const match = yield* readToolCall(toolCallID) - if (!match) return - const part = yield* session.updatePart(update(match.part)) - ctx.toolcalls[toolCallID] = { - ...match.call, - partID: part.id, - messageID: part.messageID, - sessionID: part.sessionID, - } - return part - }) - - const completeToolCall = Effect.fn("SessionProcessor.completeToolCall")(function* ( - toolCallID: string, - output: { - title: string - metadata: Record - output: string - attachments?: 
MessageV2.FilePart[] - }, - ) { - const match = yield* readToolCall(toolCallID) - if (!match || match.part.state.status !== "running") return - yield* session.updatePart({ - ...match.part, - state: { - status: "completed", - input: match.part.state.input, - output: output.output, - metadata: output.metadata, - title: output.title, - time: { start: match.part.state.time.start, end: Date.now() }, - attachments: output.attachments, - }, - }) - yield* settleToolCall(toolCallID) - }) - - const failToolCall = Effect.fn("SessionProcessor.failToolCall")(function* (toolCallID: string, error: unknown) { - const match = yield* readToolCall(toolCallID) - if (!match || match.part.state.status !== "running") return false - yield* session.updatePart({ - ...match.part, - state: { - status: "error", - input: match.part.state.input, - error: errorMessage(error), - time: { start: match.part.state.time.start, end: Date.now() }, - }, - }) - if (error instanceof Permission.RejectedError || error instanceof Question.RejectedError) { - ctx.blocked = ctx.shouldBreak - } - yield* settleToolCall(toolCallID) - return true - }) - - const handleEvent = Effect.fn("SessionProcessor.handleEvent")(function* (value: StreamEvent) { - switch (value.type) { - case "start": - yield* status.set(ctx.sessionID, { type: "busy" }) - return - - case "reasoning-start": - if (value.id in ctx.reasoningMap) return - ctx.reasoningMap[value.id] = { - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.assistantMessage.sessionID, - type: "reasoning", - text: "", - time: { start: Date.now() }, - metadata: value.providerMetadata, - } - yield* session.updatePart(ctx.reasoningMap[value.id]) - return - - case "reasoning-delta": - if (!(value.id in ctx.reasoningMap)) return - ctx.reasoningMap[value.id].text += value.text - if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata - yield* session.updatePartDelta({ - sessionID: ctx.reasoningMap[value.id].sessionID, 
- messageID: ctx.reasoningMap[value.id].messageID, - partID: ctx.reasoningMap[value.id].id, - field: "text", - delta: value.text, - }) - return - - case "reasoning-end": - if (!(value.id in ctx.reasoningMap)) return - // oxlint-disable-next-line no-self-assign -- reactivity trigger - ctx.reasoningMap[value.id].text = ctx.reasoningMap[value.id].text - ctx.reasoningMap[value.id].time = { ...ctx.reasoningMap[value.id].time, end: Date.now() } - if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata - yield* session.updatePart(ctx.reasoningMap[value.id]) - delete ctx.reasoningMap[value.id] - return - - case "tool-input-start": - if (ctx.assistantMessage.summary) { - throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`) - } - const part = yield* session.updatePart({ - id: ctx.toolcalls[value.id]?.partID ?? PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.assistantMessage.sessionID, - type: "tool", - tool: value.toolName, - callID: value.id, - state: { status: "pending", input: {}, raw: "" }, - metadata: value.providerExecuted ? { providerExecuted: true } : undefined, - } satisfies MessageV2.ToolPart) - ctx.toolcalls[value.id] = { - done: yield* Deferred.make(), - partID: part.id, - messageID: part.messageID, - sessionID: part.sessionID, - } - return - - case "tool-input-delta": - return - - case "tool-input-end": - return - - case "tool-call": { - if (ctx.assistantMessage.summary) { - throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`) - } - yield* updateToolCall(value.toolCallId, (match) => ({ - ...match, - tool: value.toolName, - state: { - ...match.state, - status: "running", - input: value.input, - time: { start: Date.now() }, - }, - metadata: match.metadata?.providerExecuted - ? 
{ ...value.providerMetadata, providerExecuted: true } - : value.providerMetadata, - })) - - const parts = MessageV2.parts(ctx.assistantMessage.id) - const recentParts = parts.slice(-DOOM_LOOP_THRESHOLD) - - if ( - recentParts.length !== DOOM_LOOP_THRESHOLD || - !recentParts.every( - (part) => - part.type === "tool" && - part.tool === value.toolName && - part.state.status !== "pending" && - JSON.stringify(part.state.input) === JSON.stringify(value.input), - ) - ) { - return - } - - const agent = yield* agents.get(ctx.assistantMessage.agent) - yield* permission.ask({ - permission: "doom_loop", - patterns: [value.toolName], - sessionID: ctx.assistantMessage.sessionID, - metadata: { tool: value.toolName, input: value.input }, - always: [value.toolName], - ruleset: agent.permission, - }) - return - } - - case "tool-result": { - yield* completeToolCall(value.toolCallId, value.output) - return - } - - case "tool-error": { - yield* failToolCall(value.toolCallId, value.error) - return - } - - case "error": - throw value.error - - case "start-step": - if (!ctx.snapshot) ctx.snapshot = yield* snapshot.track() - yield* session.updatePart({ - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.sessionID, - snapshot: ctx.snapshot, - type: "step-start", - }) - return - - case "finish-step": { - const usage = Session.getUsage({ - model: ctx.model, - usage: value.usage, - metadata: value.providerMetadata, - }) - ctx.assistantMessage.finish = value.finishReason - ctx.assistantMessage.cost += usage.cost - ctx.assistantMessage.tokens = usage.tokens - yield* session.updatePart({ - id: PartID.ascending(), - reason: value.finishReason, - snapshot: yield* snapshot.track(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.assistantMessage.sessionID, - type: "step-finish", - tokens: usage.tokens, - cost: usage.cost, - }) - yield* session.updateMessage(ctx.assistantMessage) - if (ctx.snapshot) { - const patch = yield* snapshot.patch(ctx.snapshot) - if 
(patch.files.length) { - yield* session.updatePart({ - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.sessionID, - type: "patch", - hash: patch.hash, - files: patch.files, - }) - } - ctx.snapshot = undefined - } - yield* summary - .summarize({ - sessionID: ctx.sessionID, - messageID: ctx.assistantMessage.parentID, - }) - .pipe(Effect.ignore, Effect.forkIn(scope)) - if ( - !ctx.assistantMessage.summary && - isOverflow({ cfg: yield* config.get(), tokens: usage.tokens, model: ctx.model }) - ) { - ctx.needsCompaction = true - } - return - } - - case "text-start": - ctx.currentText = { - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.assistantMessage.sessionID, - type: "text", - text: "", - time: { start: Date.now() }, - metadata: value.providerMetadata, - } - yield* session.updatePart(ctx.currentText) - return - - case "text-delta": - if (!ctx.currentText) return - ctx.currentText.text += value.text - if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata - yield* session.updatePartDelta({ - sessionID: ctx.currentText.sessionID, - messageID: ctx.currentText.messageID, - partID: ctx.currentText.id, - field: "text", - delta: value.text, - }) - return - - case "text-end": - if (!ctx.currentText) return - // oxlint-disable-next-line no-self-assign -- reactivity trigger - ctx.currentText.text = ctx.currentText.text - ctx.currentText.text = (yield* plugin.trigger( - "experimental.text.complete", - { - sessionID: ctx.sessionID, - messageID: ctx.assistantMessage.id, - partID: ctx.currentText.id, - }, - { text: ctx.currentText.text }, - )).text - { - const end = Date.now() - ctx.currentText.time = { start: ctx.currentText.time?.start ?? 
end, end } - } - if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata - yield* session.updatePart(ctx.currentText) - ctx.currentText = undefined - return - - case "finish": - return - - default: - slog.info("unhandled", { event: value.type, value }) - return - } - }) - - const cleanup = Effect.fn("SessionProcessor.cleanup")(function* () { - if (ctx.snapshot) { - const patch = yield* snapshot.patch(ctx.snapshot) - if (patch.files.length) { - yield* session.updatePart({ - id: PartID.ascending(), - messageID: ctx.assistantMessage.id, - sessionID: ctx.sessionID, - type: "patch", - hash: patch.hash, - files: patch.files, - }) - } - ctx.snapshot = undefined - } - - if (ctx.currentText) { - const end = Date.now() - ctx.currentText.time = { start: ctx.currentText.time?.start ?? end, end } - yield* session.updatePart(ctx.currentText) - ctx.currentText = undefined - } - - for (const part of Object.values(ctx.reasoningMap)) { - const end = Date.now() - yield* session.updatePart({ - ...part, - time: { start: part.time.start ?? end, end }, - }) - } - ctx.reasoningMap = {} - - yield* Effect.forEach( - Object.values(ctx.toolcalls), - (call) => Deferred.await(call.done).pipe(Effect.timeout("250 millis"), Effect.ignore), - { concurrency: "unbounded" }, - ) - - for (const toolCallID of Object.keys(ctx.toolcalls)) { - const match = yield* readToolCall(toolCallID) - if (!match) continue - const part = match.part - const end = Date.now() - const metadata = "metadata" in part.state && isRecord(part.state.metadata) ? part.state.metadata : {} - yield* session.updatePart({ - ...part, - state: { - ...part.state, - status: "error", - error: "Tool execution aborted", - metadata: { ...metadata, interrupted: true }, - time: { start: "time" in part.state ? 
part.state.time.start : end, end }, - }, - }) - } - ctx.toolcalls = {} - ctx.assistantMessage.time.completed = Date.now() - yield* session.updateMessage(ctx.assistantMessage) - }) - - const halt = Effect.fn("SessionProcessor.halt")(function* (e: unknown) { - slog.error("process", { error: errorMessage(e), stack: e instanceof Error ? e.stack : undefined }) - const error = parse(e) - if (MessageV2.ContextOverflowError.isInstance(error)) { - ctx.needsCompaction = true - yield* bus.publish(Session.Event.Error, { sessionID: ctx.sessionID, error }) - return - } - ctx.assistantMessage.error = error - yield* bus.publish(Session.Event.Error, { - sessionID: ctx.assistantMessage.sessionID, - error: ctx.assistantMessage.error, - }) - yield* status.set(ctx.sessionID, { type: "idle" }) - }) - - const process = Effect.fn("SessionProcessor.process")(function* (streamInput: LLM.StreamInput) { - slog.info("process") - ctx.needsCompaction = false - ctx.shouldBreak = (yield* config.get()).experimental?.continue_loop_on_deny !== true - - return yield* Effect.gen(function* () { - yield* Effect.gen(function* () { - ctx.currentText = undefined - ctx.reasoningMap = {} - const stream = llm.stream(streamInput) - - yield* stream.pipe( - Stream.tap((event) => handleEvent(event)), - Stream.takeUntil(() => ctx.needsCompaction), - Stream.runDrain, - ) - }).pipe( - Effect.onInterrupt(() => - Effect.gen(function* () { - aborted = true - if (!ctx.assistantMessage.error) { - yield* halt(new DOMException("Aborted", "AbortError")) - } - }), - ), - Effect.catchCauseIf( - (cause) => !Cause.hasInterruptsOnly(cause), - (cause) => Effect.fail(Cause.squash(cause)), - ), - Effect.retry( - SessionRetry.policy({ - parse, - set: (info) => - status.set(ctx.sessionID, { - type: "retry", - attempt: info.attempt, - message: info.message, - next: info.next, - }), - }), - ), - Effect.catch(halt), - Effect.ensuring(cleanup()), - ) - - if (ctx.needsCompaction) return "compact" - if (ctx.blocked || 
ctx.assistantMessage.error) return "stop" - return "continue" - }) - }) - - return { - get message() { - return ctx.assistantMessage - }, - updateToolCall, - completeToolCall, - process, - } satisfies Handle + const settleToolCall = Effect.fn("SessionProcessor.settleToolCall")(function* (toolCallID: string) { + const done = ctx.toolcalls[toolCallID]?.done + delete ctx.toolcalls[toolCallID] + if (done) yield* Deferred.succeed(done, undefined).pipe(Effect.ignore) }) - return Service.of({ create }) - }), - ) + const readToolCall = Effect.fn("SessionProcessor.readToolCall")(function* (toolCallID: string) { + const call = ctx.toolcalls[toolCallID] + if (!call) return + const part = yield* session.getPart({ + partID: call.partID, + messageID: call.messageID, + sessionID: call.sessionID, + }) + if (!part || part.type !== "tool") { + delete ctx.toolcalls[toolCallID] + return + } + return { call, part } + }) - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(Session.defaultLayer), - Layer.provide(Snapshot.defaultLayer), - Layer.provide(Agent.defaultLayer), - Layer.provide(LLM.defaultLayer), - Layer.provide(Permission.defaultLayer), - Layer.provide(Plugin.defaultLayer), - Layer.provide(SessionSummary.defaultLayer), - Layer.provide(SessionStatus.defaultLayer), - Layer.provide(Bus.layer), - Layer.provide(Config.defaultLayer), - ), - ) -} + const updateToolCall = Effect.fn("SessionProcessor.updateToolCall")(function* ( + toolCallID: string, + update: (part: MessageV2.ToolPart) => MessageV2.ToolPart, + ) { + const match = yield* readToolCall(toolCallID) + if (!match) return + const part = yield* session.updatePart(update(match.part)) + ctx.toolcalls[toolCallID] = { + ...match.call, + partID: part.id, + messageID: part.messageID, + sessionID: part.sessionID, + } + return part + }) + + const completeToolCall = Effect.fn("SessionProcessor.completeToolCall")(function* ( + toolCallID: string, + output: { + title: string + metadata: Record + output: string 
+ attachments?: MessageV2.FilePart[] + }, + ) { + const match = yield* readToolCall(toolCallID) + if (!match || match.part.state.status !== "running") return + yield* session.updatePart({ + ...match.part, + state: { + status: "completed", + input: match.part.state.input, + output: output.output, + metadata: output.metadata, + title: output.title, + time: { start: match.part.state.time.start, end: Date.now() }, + attachments: output.attachments, + }, + }) + yield* settleToolCall(toolCallID) + }) + + const failToolCall = Effect.fn("SessionProcessor.failToolCall")(function* (toolCallID: string, error: unknown) { + const match = yield* readToolCall(toolCallID) + if (!match || match.part.state.status !== "running") return false + yield* session.updatePart({ + ...match.part, + state: { + status: "error", + input: match.part.state.input, + error: errorMessage(error), + time: { start: match.part.state.time.start, end: Date.now() }, + }, + }) + if (error instanceof Permission.RejectedError || error instanceof Question.RejectedError) { + ctx.blocked = ctx.shouldBreak + } + yield* settleToolCall(toolCallID) + return true + }) + + const handleEvent = Effect.fn("SessionProcessor.handleEvent")(function* (value: StreamEvent) { + switch (value.type) { + case "start": + yield* status.set(ctx.sessionID, { type: "busy" }) + return + + case "reasoning-start": + if (value.id in ctx.reasoningMap) return + ctx.reasoningMap[value.id] = { + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.assistantMessage.sessionID, + type: "reasoning", + text: "", + time: { start: Date.now() }, + metadata: value.providerMetadata, + } + yield* session.updatePart(ctx.reasoningMap[value.id]) + return + + case "reasoning-delta": + if (!(value.id in ctx.reasoningMap)) return + ctx.reasoningMap[value.id].text += value.text + if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata + yield* session.updatePartDelta({ + sessionID: 
ctx.reasoningMap[value.id].sessionID, + messageID: ctx.reasoningMap[value.id].messageID, + partID: ctx.reasoningMap[value.id].id, + field: "text", + delta: value.text, + }) + return + + case "reasoning-end": + if (!(value.id in ctx.reasoningMap)) return + // oxlint-disable-next-line no-self-assign -- reactivity trigger + ctx.reasoningMap[value.id].text = ctx.reasoningMap[value.id].text + ctx.reasoningMap[value.id].time = { ...ctx.reasoningMap[value.id].time, end: Date.now() } + if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata + yield* session.updatePart(ctx.reasoningMap[value.id]) + delete ctx.reasoningMap[value.id] + return + + case "tool-input-start": + if (ctx.assistantMessage.summary) { + throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`) + } + const part = yield* session.updatePart({ + id: ctx.toolcalls[value.id]?.partID ?? PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.assistantMessage.sessionID, + type: "tool", + tool: value.toolName, + callID: value.id, + state: { status: "pending", input: {}, raw: "" }, + metadata: value.providerExecuted ? { providerExecuted: true } : undefined, + } satisfies MessageV2.ToolPart) + ctx.toolcalls[value.id] = { + done: yield* Deferred.make(), + partID: part.id, + messageID: part.messageID, + sessionID: part.sessionID, + } + return + + case "tool-input-delta": + return + + case "tool-input-end": + return + + case "tool-call": { + if (ctx.assistantMessage.summary) { + throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`) + } + yield* updateToolCall(value.toolCallId, (match) => ({ + ...match, + tool: value.toolName, + state: { + ...match.state, + status: "running", + input: value.input, + time: { start: Date.now() }, + }, + metadata: match.metadata?.providerExecuted + ? 
{ ...value.providerMetadata, providerExecuted: true } + : value.providerMetadata, + })) + + const parts = MessageV2.parts(ctx.assistantMessage.id) + const recentParts = parts.slice(-DOOM_LOOP_THRESHOLD) + + if ( + recentParts.length !== DOOM_LOOP_THRESHOLD || + !recentParts.every( + (part) => + part.type === "tool" && + part.tool === value.toolName && + part.state.status !== "pending" && + JSON.stringify(part.state.input) === JSON.stringify(value.input), + ) + ) { + return + } + + const agent = yield* agents.get(ctx.assistantMessage.agent) + yield* permission.ask({ + permission: "doom_loop", + patterns: [value.toolName], + sessionID: ctx.assistantMessage.sessionID, + metadata: { tool: value.toolName, input: value.input }, + always: [value.toolName], + ruleset: agent.permission, + }) + return + } + + case "tool-result": { + yield* completeToolCall(value.toolCallId, value.output) + return + } + + case "tool-error": { + yield* failToolCall(value.toolCallId, value.error) + return + } + + case "error": + throw value.error + + case "start-step": + if (!ctx.snapshot) ctx.snapshot = yield* snapshot.track() + yield* session.updatePart({ + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.sessionID, + snapshot: ctx.snapshot, + type: "step-start", + }) + return + + case "finish-step": { + const usage = Session.getUsage({ + model: ctx.model, + usage: value.usage, + metadata: value.providerMetadata, + }) + ctx.assistantMessage.finish = value.finishReason + ctx.assistantMessage.cost += usage.cost + ctx.assistantMessage.tokens = usage.tokens + yield* session.updatePart({ + id: PartID.ascending(), + reason: value.finishReason, + snapshot: yield* snapshot.track(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.assistantMessage.sessionID, + type: "step-finish", + tokens: usage.tokens, + cost: usage.cost, + }) + yield* session.updateMessage(ctx.assistantMessage) + if (ctx.snapshot) { + const patch = yield* snapshot.patch(ctx.snapshot) + if 
(patch.files.length) { + yield* session.updatePart({ + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.sessionID, + type: "patch", + hash: patch.hash, + files: patch.files, + }) + } + ctx.snapshot = undefined + } + yield* summary + .summarize({ + sessionID: ctx.sessionID, + messageID: ctx.assistantMessage.parentID, + }) + .pipe(Effect.ignore, Effect.forkIn(scope)) + if ( + !ctx.assistantMessage.summary && + isOverflow({ cfg: yield* config.get(), tokens: usage.tokens, model: ctx.model }) + ) { + ctx.needsCompaction = true + } + return + } + + case "text-start": + ctx.currentText = { + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.assistantMessage.sessionID, + type: "text", + text: "", + time: { start: Date.now() }, + metadata: value.providerMetadata, + } + yield* session.updatePart(ctx.currentText) + return + + case "text-delta": + if (!ctx.currentText) return + ctx.currentText.text += value.text + if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata + yield* session.updatePartDelta({ + sessionID: ctx.currentText.sessionID, + messageID: ctx.currentText.messageID, + partID: ctx.currentText.id, + field: "text", + delta: value.text, + }) + return + + case "text-end": + if (!ctx.currentText) return + // oxlint-disable-next-line no-self-assign -- reactivity trigger + ctx.currentText.text = ctx.currentText.text + ctx.currentText.text = (yield* plugin.trigger( + "experimental.text.complete", + { + sessionID: ctx.sessionID, + messageID: ctx.assistantMessage.id, + partID: ctx.currentText.id, + }, + { text: ctx.currentText.text }, + )).text + { + const end = Date.now() + ctx.currentText.time = { start: ctx.currentText.time?.start ?? 
end, end } + } + if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata + yield* session.updatePart(ctx.currentText) + ctx.currentText = undefined + return + + case "finish": + return + + default: + slog.info("unhandled", { event: value.type, value }) + return + } + }) + + const cleanup = Effect.fn("SessionProcessor.cleanup")(function* () { + if (ctx.snapshot) { + const patch = yield* snapshot.patch(ctx.snapshot) + if (patch.files.length) { + yield* session.updatePart({ + id: PartID.ascending(), + messageID: ctx.assistantMessage.id, + sessionID: ctx.sessionID, + type: "patch", + hash: patch.hash, + files: patch.files, + }) + } + ctx.snapshot = undefined + } + + if (ctx.currentText) { + const end = Date.now() + ctx.currentText.time = { start: ctx.currentText.time?.start ?? end, end } + yield* session.updatePart(ctx.currentText) + ctx.currentText = undefined + } + + for (const part of Object.values(ctx.reasoningMap)) { + const end = Date.now() + yield* session.updatePart({ + ...part, + time: { start: part.time.start ?? end, end }, + }) + } + ctx.reasoningMap = {} + + yield* Effect.forEach( + Object.values(ctx.toolcalls), + (call) => Deferred.await(call.done).pipe(Effect.timeout("250 millis"), Effect.ignore), + { concurrency: "unbounded" }, + ) + + for (const toolCallID of Object.keys(ctx.toolcalls)) { + const match = yield* readToolCall(toolCallID) + if (!match) continue + const part = match.part + const end = Date.now() + const metadata = "metadata" in part.state && isRecord(part.state.metadata) ? part.state.metadata : {} + yield* session.updatePart({ + ...part, + state: { + ...part.state, + status: "error", + error: "Tool execution aborted", + metadata: { ...metadata, interrupted: true }, + time: { start: "time" in part.state ? 
part.state.time.start : end, end }, + }, + }) + } + ctx.toolcalls = {} + ctx.assistantMessage.time.completed = Date.now() + yield* session.updateMessage(ctx.assistantMessage) + }) + + const halt = Effect.fn("SessionProcessor.halt")(function* (e: unknown) { + slog.error("process", { error: errorMessage(e), stack: e instanceof Error ? e.stack : undefined }) + const error = parse(e) + if (MessageV2.ContextOverflowError.isInstance(error)) { + ctx.needsCompaction = true + yield* bus.publish(Session.Event.Error, { sessionID: ctx.sessionID, error }) + return + } + ctx.assistantMessage.error = error + yield* bus.publish(Session.Event.Error, { + sessionID: ctx.assistantMessage.sessionID, + error: ctx.assistantMessage.error, + }) + yield* status.set(ctx.sessionID, { type: "idle" }) + }) + + const process = Effect.fn("SessionProcessor.process")(function* (streamInput: LLM.StreamInput) { + slog.info("process") + ctx.needsCompaction = false + ctx.shouldBreak = (yield* config.get()).experimental?.continue_loop_on_deny !== true + + return yield* Effect.gen(function* () { + yield* Effect.gen(function* () { + ctx.currentText = undefined + ctx.reasoningMap = {} + const stream = llm.stream(streamInput) + + yield* stream.pipe( + Stream.tap((event) => handleEvent(event)), + Stream.takeUntil(() => ctx.needsCompaction), + Stream.runDrain, + ) + }).pipe( + Effect.onInterrupt(() => + Effect.gen(function* () { + aborted = true + if (!ctx.assistantMessage.error) { + yield* halt(new DOMException("Aborted", "AbortError")) + } + }), + ), + Effect.catchCauseIf( + (cause) => !Cause.hasInterruptsOnly(cause), + (cause) => Effect.fail(Cause.squash(cause)), + ), + Effect.retry( + SessionRetry.policy({ + parse, + set: (info) => + status.set(ctx.sessionID, { + type: "retry", + attempt: info.attempt, + message: info.message, + next: info.next, + }), + }), + ), + Effect.catch(halt), + Effect.ensuring(cleanup()), + ) + + if (ctx.needsCompaction) return "compact" + if (ctx.blocked || 
ctx.assistantMessage.error) return "stop" + return "continue" + }) + }) + + return { + get message() { + return ctx.assistantMessage + }, + updateToolCall, + completeToolCall, + process, + } satisfies Handle + }) + + return Service.of({ create }) + }), +) + +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(Session.defaultLayer), + Layer.provide(Snapshot.defaultLayer), + Layer.provide(Agent.defaultLayer), + Layer.provide(LLM.defaultLayer), + Layer.provide(Permission.defaultLayer), + Layer.provide(Plugin.defaultLayer), + Layer.provide(SessionSummary.defaultLayer), + Layer.provide(SessionStatus.defaultLayer), + Layer.provide(Bus.layer), + Layer.provide(Config.defaultLayer), + ), +) + +export * as SessionProcessor from "./processor" diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 004ee19abe..14fdf30780 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -64,221 +64,220 @@ IMPORTANT: const STRUCTURED_OUTPUT_SYSTEM_PROMPT = `IMPORTANT: The user has requested structured output. You MUST use the StructuredOutput tool to provide your final response. 
Do NOT respond with plain text - you MUST call the StructuredOutput tool with your answer formatted according to the schema.` -export namespace SessionPrompt { - const log = Log.create({ service: "session.prompt" }) - const elog = EffectLogger.create({ service: "session.prompt" }) +const log = Log.create({ service: "session.prompt" }) +const elog = EffectLogger.create({ service: "session.prompt" }) - export interface Interface { - readonly cancel: (sessionID: SessionID) => Effect.Effect - readonly prompt: (input: PromptInput) => Effect.Effect - readonly loop: (input: z.infer) => Effect.Effect - readonly shell: (input: ShellInput) => Effect.Effect - readonly command: (input: CommandInput) => Effect.Effect - readonly resolvePromptParts: (template: string) => Effect.Effect - } +export interface Interface { + readonly cancel: (sessionID: SessionID) => Effect.Effect + readonly prompt: (input: PromptInput) => Effect.Effect + readonly loop: (input: z.infer) => Effect.Effect + readonly shell: (input: ShellInput) => Effect.Effect + readonly command: (input: CommandInput) => Effect.Effect + readonly resolvePromptParts: (template: string) => Effect.Effect +} - export class Service extends Context.Service()("@opencode/SessionPrompt") {} +export class Service extends Context.Service()("@opencode/SessionPrompt") {} - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - const status = yield* SessionStatus.Service - const sessions = yield* Session.Service - const agents = yield* Agent.Service - const provider = yield* Provider.Service - const processor = yield* SessionProcessor.Service - const compaction = yield* SessionCompaction.Service - const plugin = yield* Plugin.Service - const commands = yield* Command.Service - const permission = yield* Permission.Service - const fsys = yield* AppFileSystem.Service - const mcp = yield* MCP.Service - const lsp = yield* LSP.Service - const filetime = yield* FileTime.Service - const 
registry = yield* ToolRegistry.Service - const truncate = yield* Truncate.Service - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner - const scope = yield* Scope.Scope - const instruction = yield* Instruction.Service - const state = yield* SessionRunState.Service - const revert = yield* SessionRevert.Service - const summary = yield* SessionSummary.Service - const sys = yield* SystemPrompt.Service - const llm = yield* LLM.Service - const runner = Effect.fn("SessionPrompt.runner")(function* () { - return yield* EffectBridge.make() - }) - const ops = Effect.fn("SessionPrompt.ops")(function* () { - const run = yield* runner() - return { - cancel: (sessionID: SessionID) => run.fork(cancel(sessionID)), - resolvePromptParts: (template: string) => resolvePromptParts(template), - prompt: (input: PromptInput) => prompt(input), - } satisfies TaskPromptOps - }) +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + const status = yield* SessionStatus.Service + const sessions = yield* Session.Service + const agents = yield* Agent.Service + const provider = yield* Provider.Service + const processor = yield* SessionProcessor.Service + const compaction = yield* SessionCompaction.Service + const plugin = yield* Plugin.Service + const commands = yield* Command.Service + const permission = yield* Permission.Service + const fsys = yield* AppFileSystem.Service + const mcp = yield* MCP.Service + const lsp = yield* LSP.Service + const filetime = yield* FileTime.Service + const registry = yield* ToolRegistry.Service + const truncate = yield* Truncate.Service + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner + const scope = yield* Scope.Scope + const instruction = yield* Instruction.Service + const state = yield* SessionRunState.Service + const revert = yield* SessionRevert.Service + const summary = yield* SessionSummary.Service + const sys = yield* SystemPrompt.Service + const llm = yield* LLM.Service + 
const runner = Effect.fn("SessionPrompt.runner")(function* () { + return yield* EffectBridge.make() + }) + const ops = Effect.fn("SessionPrompt.ops")(function* () { + const run = yield* runner() + return { + cancel: (sessionID: SessionID) => run.fork(cancel(sessionID)), + resolvePromptParts: (template: string) => resolvePromptParts(template), + prompt: (input: PromptInput) => prompt(input), + } satisfies TaskPromptOps + }) - const cancel = Effect.fn("SessionPrompt.cancel")(function* (sessionID: SessionID) { - yield* elog.info("cancel", { sessionID }) - yield* state.cancel(sessionID) - }) + const cancel = Effect.fn("SessionPrompt.cancel")(function* (sessionID: SessionID) { + yield* elog.info("cancel", { sessionID }) + yield* state.cancel(sessionID) + }) - const resolvePromptParts = Effect.fn("SessionPrompt.resolvePromptParts")(function* (template: string) { - const ctx = yield* InstanceState.context - const parts: PromptInput["parts"] = [{ type: "text", text: template }] - const files = ConfigMarkdown.files(template) - const seen = new Set() - yield* Effect.forEach( - files, - Effect.fnUntraced(function* (match) { - const name = match[1] - if (seen.has(name)) return - seen.add(name) - const filepath = name.startsWith("~/") - ? path.join(os.homedir(), name.slice(2)) - : path.resolve(ctx.worktree, name) + const resolvePromptParts = Effect.fn("SessionPrompt.resolvePromptParts")(function* (template: string) { + const ctx = yield* InstanceState.context + const parts: PromptInput["parts"] = [{ type: "text", text: template }] + const files = ConfigMarkdown.files(template) + const seen = new Set() + yield* Effect.forEach( + files, + Effect.fnUntraced(function* (match) { + const name = match[1] + if (seen.has(name)) return + seen.add(name) + const filepath = name.startsWith("~/") + ? 
path.join(os.homedir(), name.slice(2)) + : path.resolve(ctx.worktree, name) - const info = yield* fsys.stat(filepath).pipe(Effect.option) - if (Option.isNone(info)) { - const found = yield* agents.get(name) - if (found) parts.push({ type: "agent", name: found.name }) - return - } - const stat = info.value - parts.push({ - type: "file", - url: pathToFileURL(filepath).href, - filename: name, - mime: stat.type === "Directory" ? "application/x-directory" : "text/plain", - }) - }), - { concurrency: "unbounded", discard: true }, - ) - return parts - }) - - const title = Effect.fn("SessionPrompt.ensureTitle")(function* (input: { - session: Session.Info - history: MessageV2.WithParts[] - providerID: ProviderID - modelID: ModelID - }) { - if (input.session.parentID) return - if (!Session.isDefaultTitle(input.session.title)) return - - const real = (m: MessageV2.WithParts) => - m.info.role === "user" && !m.parts.every((p) => "synthetic" in p && p.synthetic) - const idx = input.history.findIndex(real) - if (idx === -1) return - if (input.history.filter(real).length !== 1) return - - const context = input.history.slice(0, idx + 1) - const firstUser = context[idx] - if (!firstUser || firstUser.info.role !== "user") return - const firstInfo = firstUser.info - - const subtasks = firstUser.parts.filter((p): p is MessageV2.SubtaskPart => p.type === "subtask") - const onlySubtasks = subtasks.length > 0 && firstUser.parts.every((p) => p.type === "subtask") - - const ag = yield* agents.get("title") - if (!ag) return - const mdl = ag.model - ? yield* provider.getModel(ag.model.providerID, ag.model.modelID) - : ((yield* provider.getSmallModel(input.providerID)) ?? - (yield* provider.getModel(input.providerID, input.modelID))) - const msgs = onlySubtasks - ? 
[{ role: "user" as const, content: subtasks.map((p) => p.prompt).join("\n") }] - : yield* MessageV2.toModelMessagesEffect(context, mdl) - const text = yield* llm - .stream({ - agent: ag, - user: firstInfo, - system: [], - small: true, - tools: {}, - model: mdl, - sessionID: input.session.id, - retries: 2, - messages: [{ role: "user", content: "Generate a title for this conversation:\n" }, ...msgs], + const info = yield* fsys.stat(filepath).pipe(Effect.option) + if (Option.isNone(info)) { + const found = yield* agents.get(name) + if (found) parts.push({ type: "agent", name: found.name }) + return + } + const stat = info.value + parts.push({ + type: "file", + url: pathToFileURL(filepath).href, + filename: name, + mime: stat.type === "Directory" ? "application/x-directory" : "text/plain", }) - .pipe( - Stream.filter((e): e is Extract => e.type === "text-delta"), - Stream.map((e) => e.text), - Stream.mkString, - Effect.orDie, - ) - const cleaned = text - .replace(/[\s\S]*?<\/think>\s*/g, "") - .split("\n") - .map((line) => line.trim()) - .find((line) => line.length > 0) - if (!cleaned) return - const t = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." 
: cleaned - yield* sessions - .setTitle({ sessionID: input.session.id, title: t }) - .pipe(Effect.catchCause((cause) => elog.error("failed to generate title", { error: Cause.squash(cause) }))) - }) + }), + { concurrency: "unbounded", discard: true }, + ) + return parts + }) - const insertReminders = Effect.fn("SessionPrompt.insertReminders")(function* (input: { - messages: MessageV2.WithParts[] - agent: Agent.Info - session: Session.Info - }) { - const userMessage = input.messages.findLast((msg) => msg.info.role === "user") - if (!userMessage) return input.messages + const title = Effect.fn("SessionPrompt.ensureTitle")(function* (input: { + session: Session.Info + history: MessageV2.WithParts[] + providerID: ProviderID + modelID: ModelID + }) { + if (input.session.parentID) return + if (!Session.isDefaultTitle(input.session.title)) return - if (!Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE) { - if (input.agent.name === "plan") { - userMessage.parts.push({ - id: PartID.ascending(), - messageID: userMessage.info.id, - sessionID: userMessage.info.sessionID, - type: "text", - text: PROMPT_PLAN, - synthetic: true, - }) - } - const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.agent === "plan") - if (wasPlan && input.agent.name === "build") { - userMessage.parts.push({ - id: PartID.ascending(), - messageID: userMessage.info.id, - sessionID: userMessage.info.sessionID, - type: "text", - text: BUILD_SWITCH, - synthetic: true, - }) - } - return input.messages - } + const real = (m: MessageV2.WithParts) => + m.info.role === "user" && !m.parts.every((p) => "synthetic" in p && p.synthetic) + const idx = input.history.findIndex(real) + if (idx === -1) return + if (input.history.filter(real).length !== 1) return - const assistantMessage = input.messages.findLast((msg) => msg.info.role === "assistant") - if (input.agent.name !== "plan" && assistantMessage?.info.agent === "plan") { - const plan = Session.plan(input.session) - if (!(yield* 
fsys.existsSafe(plan))) return input.messages - const part = yield* sessions.updatePart({ + const context = input.history.slice(0, idx + 1) + const firstUser = context[idx] + if (!firstUser || firstUser.info.role !== "user") return + const firstInfo = firstUser.info + + const subtasks = firstUser.parts.filter((p): p is MessageV2.SubtaskPart => p.type === "subtask") + const onlySubtasks = subtasks.length > 0 && firstUser.parts.every((p) => p.type === "subtask") + + const ag = yield* agents.get("title") + if (!ag) return + const mdl = ag.model + ? yield* provider.getModel(ag.model.providerID, ag.model.modelID) + : ((yield* provider.getSmallModel(input.providerID)) ?? + (yield* provider.getModel(input.providerID, input.modelID))) + const msgs = onlySubtasks + ? [{ role: "user" as const, content: subtasks.map((p) => p.prompt).join("\n") }] + : yield* MessageV2.toModelMessagesEffect(context, mdl) + const text = yield* llm + .stream({ + agent: ag, + user: firstInfo, + system: [], + small: true, + tools: {}, + model: mdl, + sessionID: input.session.id, + retries: 2, + messages: [{ role: "user", content: "Generate a title for this conversation:\n" }, ...msgs], + }) + .pipe( + Stream.filter((e): e is Extract => e.type === "text-delta"), + Stream.map((e) => e.text), + Stream.mkString, + Effect.orDie, + ) + const cleaned = text + .replace(/[\s\S]*?<\/think>\s*/g, "") + .split("\n") + .map((line) => line.trim()) + .find((line) => line.length > 0) + if (!cleaned) return + const t = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." 
: cleaned + yield* sessions + .setTitle({ sessionID: input.session.id, title: t }) + .pipe(Effect.catchCause((cause) => elog.error("failed to generate title", { error: Cause.squash(cause) }))) + }) + + const insertReminders = Effect.fn("SessionPrompt.insertReminders")(function* (input: { + messages: MessageV2.WithParts[] + agent: Agent.Info + session: Session.Info + }) { + const userMessage = input.messages.findLast((msg) => msg.info.role === "user") + if (!userMessage) return input.messages + + if (!Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE) { + if (input.agent.name === "plan") { + userMessage.parts.push({ id: PartID.ascending(), messageID: userMessage.info.id, sessionID: userMessage.info.sessionID, type: "text", - text: `${BUILD_SWITCH}\n\nA plan file exists at ${plan}. You should execute on the plan defined within it`, + text: PROMPT_PLAN, synthetic: true, }) - userMessage.parts.push(part) - return input.messages } + const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.agent === "plan") + if (wasPlan && input.agent.name === "build") { + userMessage.parts.push({ + id: PartID.ascending(), + messageID: userMessage.info.id, + sessionID: userMessage.info.sessionID, + type: "text", + text: BUILD_SWITCH, + synthetic: true, + }) + } + return input.messages + } - if (input.agent.name !== "plan" || assistantMessage?.info.agent === "plan") return input.messages - + const assistantMessage = input.messages.findLast((msg) => msg.info.role === "assistant") + if (input.agent.name !== "plan" && assistantMessage?.info.agent === "plan") { const plan = Session.plan(input.session) - const exists = yield* fsys.existsSafe(plan) - if (!exists) yield* fsys.ensureDir(path.dirname(plan)).pipe(Effect.catch(Effect.die)) + if (!(yield* fsys.existsSafe(plan))) return input.messages const part = yield* sessions.updatePart({ id: PartID.ascending(), messageID: userMessage.info.id, sessionID: userMessage.info.sessionID, type: "text", - text: ` + text: 
`${BUILD_SWITCH}\n\nA plan file exists at ${plan}. You should execute on the plan defined within it`, + synthetic: true, + }) + userMessage.parts.push(part) + return input.messages + } + + if (input.agent.name !== "plan" || assistantMessage?.info.agent === "plan") return input.messages + + const plan = Session.plan(input.session) + const exists = yield* fsys.existsSafe(plan) + if (!exists) yield* fsys.ensureDir(path.dirname(plan)).pipe(Effect.catch(Effect.die)) + const part = yield* sessions.updatePart({ + id: PartID.ascending(), + messageID: userMessage.info.id, + sessionID: userMessage.info.sessionID, + type: "text", + text: ` Plan mode is active. The user indicated that they do not want you to execute yet -- you MUST NOT make any edits (with the exception of the plan file mentioned below), run any non-readonly tools (including changing configs or making commits), or otherwise make any changes to the system. This supersedes any other instructions you have received. ## Plan File Info: @@ -293,10 +292,10 @@ Goal: Gain a comprehensive understanding of the user's request by reading throug 1. Focus on understanding the user's request and the code associated with their request 2. **Launch up to 3 explore agents IN PARALLEL** (single message, multiple tool calls) to efficiently explore the codebase. - - Use 1 agent when the task is isolated to known files, the user provided specific file paths, or you're making a small targeted change. - - Use multiple agents when: the scope is uncertain, multiple areas of the codebase are involved, or you need to understand existing patterns before planning. - - Quality over quantity - 3 agents maximum, but you should try to use the minimum number of agents necessary (usually just 1) - - If using multiple agents: Provide each agent with a specific search focus or area to explore. 
Example: One agent searches for existing implementations, another explores related components, a third investigates testing patterns + - Use 1 agent when the task is isolated to known files, the user provided specific file paths, or you're making a small targeted change. + - Use multiple agents when: the scope is uncertain, multiple areas of the codebase are involved, or you need to understand existing patterns before planning. + - Quality over quantity - 3 agents maximum, but you should try to use the minimum number of agents necessary (usually just 1) + - If using multiple agents: Provide each agent with a specific search focus or area to explore. Example: One agent searches for existing implementations, another explores related components, a third investigates testing patterns 3. After exploring the code, use the question tool to clarify ambiguities in the user request up front. @@ -348,1507 +347,1508 @@ This is critical - your turn should only end with either asking the user a quest NOTE: At any point in time through this workflow you should feel free to ask the user questions or clarifications. Don't make large assumptions about user intent. The goal is to present a well researched plan to the user, and tie any loose ends before implementation begins. 
`, - synthetic: true, - }) - userMessage.parts.push(part) - return input.messages + synthetic: true, + }) + userMessage.parts.push(part) + return input.messages + }) + + const resolveTools = Effect.fn("SessionPrompt.resolveTools")(function* (input: { + agent: Agent.Info + model: Provider.Model + session: Session.Info + tools?: Record + processor: Pick + bypassAgentCheck: boolean + messages: MessageV2.WithParts[] + }) { + using _ = log.time("resolveTools") + const tools: Record = {} + const run = yield* runner() + const promptOps = yield* ops() + + const context = (args: any, options: ToolExecutionOptions): Tool.Context => ({ + sessionID: input.session.id, + abort: options.abortSignal!, + messageID: input.processor.message.id, + callID: options.toolCallId, + extra: { model: input.model, bypassAgentCheck: input.bypassAgentCheck, promptOps }, + agent: input.agent.name, + messages: input.messages, + metadata: (val) => + input.processor.updateToolCall(options.toolCallId, (match) => { + if (!["running", "pending"].includes(match.state.status)) return match + return { + ...match, + state: { + title: val.title, + metadata: val.metadata, + status: "running", + input: args, + time: { start: Date.now() }, + }, + } + }), + ask: (req) => + permission + .ask({ + ...req, + sessionID: input.session.id, + tool: { messageID: input.processor.message.id, callID: options.toolCallId }, + ruleset: Permission.merge(input.agent.permission, input.session.permission ?? 
[]), + }) + .pipe(Effect.orDie), }) - const resolveTools = Effect.fn("SessionPrompt.resolveTools")(function* (input: { - agent: Agent.Info - model: Provider.Model - session: Session.Info - tools?: Record - processor: Pick - bypassAgentCheck: boolean - messages: MessageV2.WithParts[] - }) { - using _ = log.time("resolveTools") - const tools: Record = {} - const run = yield* runner() - const promptOps = yield* ops() - - const context = (args: any, options: ToolExecutionOptions): Tool.Context => ({ - sessionID: input.session.id, - abort: options.abortSignal!, - messageID: input.processor.message.id, - callID: options.toolCallId, - extra: { model: input.model, bypassAgentCheck: input.bypassAgentCheck, promptOps }, - agent: input.agent.name, - messages: input.messages, - metadata: (val) => - input.processor.updateToolCall(options.toolCallId, (match) => { - if (!["running", "pending"].includes(match.state.status)) return match - return { - ...match, - state: { - title: val.title, - metadata: val.metadata, - status: "running", - input: args, - time: { start: Date.now() }, - }, - } - }), - ask: (req) => - permission - .ask({ - ...req, - sessionID: input.session.id, - tool: { messageID: input.processor.message.id, callID: options.toolCallId }, - ruleset: Permission.merge(input.agent.permission, input.session.permission ?? 
[]), - }) - .pipe(Effect.orDie), - }) - - for (const item of yield* registry.tools({ - modelID: ModelID.make(input.model.api.id), - providerID: input.model.providerID, - agent: input.agent, - })) { - const schema = ProviderTransform.schema(input.model, z.toJSONSchema(item.parameters)) - tools[item.id] = tool({ - description: item.description, - inputSchema: jsonSchema(schema), - execute(args, options) { - return run.promise( - Effect.gen(function* () { - const ctx = context(args, options) - yield* plugin.trigger( - "tool.execute.before", - { tool: item.id, sessionID: ctx.sessionID, callID: ctx.callID }, - { args }, - ) - const result = yield* item.execute(args, ctx) - const output = { - ...result, - attachments: result.attachments?.map((attachment) => ({ - ...attachment, - id: PartID.ascending(), - sessionID: ctx.sessionID, - messageID: input.processor.message.id, - })), - } - yield* plugin.trigger( - "tool.execute.after", - { tool: item.id, sessionID: ctx.sessionID, callID: ctx.callID, args }, - output, - ) - if (options.abortSignal?.aborted) { - yield* input.processor.completeToolCall(options.toolCallId, output) - } - return output - }), - ) - }, - }) - } - - for (const [key, item] of Object.entries(yield* mcp.tools())) { - const execute = item.execute - if (!execute) continue - - const schema = yield* Effect.promise(() => Promise.resolve(asSchema(item.inputSchema).jsonSchema)) - const transformed = ProviderTransform.schema(input.model, schema) - item.inputSchema = jsonSchema(transformed) - item.execute = (args, opts) => - run.promise( + for (const item of yield* registry.tools({ + modelID: ModelID.make(input.model.api.id), + providerID: input.model.providerID, + agent: input.agent, + })) { + const schema = ProviderTransform.schema(input.model, z.toJSONSchema(item.parameters)) + tools[item.id] = tool({ + description: item.description, + inputSchema: jsonSchema(schema), + execute(args, options) { + return run.promise( Effect.gen(function* () { - const ctx = 
context(args, opts) + const ctx = context(args, options) yield* plugin.trigger( "tool.execute.before", - { tool: key, sessionID: ctx.sessionID, callID: opts.toolCallId }, + { tool: item.id, sessionID: ctx.sessionID, callID: ctx.callID }, { args }, ) - yield* ctx.ask({ permission: key, metadata: {}, patterns: ["*"], always: ["*"] }) - const result: Awaited>> = yield* Effect.promise(() => - execute(args, opts), - ) - yield* plugin.trigger( - "tool.execute.after", - { tool: key, sessionID: ctx.sessionID, callID: opts.toolCallId, args }, - result, - ) - - const textParts: string[] = [] - const attachments: Omit[] = [] - for (const contentItem of result.content) { - if (contentItem.type === "text") textParts.push(contentItem.text) - else if (contentItem.type === "image") { - attachments.push({ - type: "file", - mime: contentItem.mimeType, - url: `data:${contentItem.mimeType};base64,${contentItem.data}`, - }) - } else if (contentItem.type === "resource") { - const { resource } = contentItem - if (resource.text) textParts.push(resource.text) - if (resource.blob) { - attachments.push({ - type: "file", - mime: resource.mimeType ?? "application/octet-stream", - url: `data:${resource.mimeType ?? 
"application/octet-stream"};base64,${resource.blob}`, - filename: resource.uri, - }) - } - } - } - - const truncated = yield* truncate.output(textParts.join("\n\n"), {}, input.agent) - const metadata = { - ...result.metadata, - truncated: truncated.truncated, - ...(truncated.truncated && { outputPath: truncated.outputPath }), - } - + const result = yield* item.execute(args, ctx) const output = { - title: "", - metadata, - output: truncated.content, - attachments: attachments.map((attachment) => ({ + ...result, + attachments: result.attachments?.map((attachment) => ({ ...attachment, id: PartID.ascending(), sessionID: ctx.sessionID, messageID: input.processor.message.id, })), - content: result.content, } - if (opts.abortSignal?.aborted) { - yield* input.processor.completeToolCall(opts.toolCallId, output) + yield* plugin.trigger( + "tool.execute.after", + { tool: item.id, sessionID: ctx.sessionID, callID: ctx.callID, args }, + output, + ) + if (options.abortSignal?.aborted) { + yield* input.processor.completeToolCall(options.toolCallId, output) } return output }), ) - tools[key] = item - } - - return tools - }) - - const handleSubtask = Effect.fn("SessionPrompt.handleSubtask")(function* (input: { - task: MessageV2.SubtaskPart - model: Provider.Model - lastUser: MessageV2.User - sessionID: SessionID - session: Session.Info - msgs: MessageV2.WithParts[] - }) { - const { task, model, lastUser, sessionID, session, msgs } = input - const ctx = yield* InstanceState.context - const promptOps = yield* ops() - const { task: taskTool } = yield* registry.named() - const taskModel = task.model ? 
yield* getModel(task.model.providerID, task.model.modelID, sessionID) : model - const assistantMessage: MessageV2.Assistant = yield* sessions.updateMessage({ - id: MessageID.ascending(), - role: "assistant", - parentID: lastUser.id, - sessionID, - mode: task.agent, - agent: task.agent, - variant: lastUser.model.variant, - path: { cwd: ctx.directory, root: ctx.worktree }, - cost: 0, - tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, - modelID: taskModel.id, - providerID: taskModel.providerID, - time: { created: Date.now() }, - }) - let part: MessageV2.ToolPart = yield* sessions.updatePart({ - id: PartID.ascending(), - messageID: assistantMessage.id, - sessionID: assistantMessage.sessionID, - type: "tool", - callID: ulid(), - tool: TaskTool.id, - state: { - status: "running", - input: { - prompt: task.prompt, - description: task.description, - subagent_type: task.agent, - command: task.command, - }, - time: { start: Date.now() }, }, }) - const taskArgs = { - prompt: task.prompt, - description: task.description, - subagent_type: task.agent, - command: task.command, - } - yield* plugin.trigger( - "tool.execute.before", - { tool: TaskTool.id, sessionID, callID: part.id }, - { args: taskArgs }, - ) + } - const taskAgent = yield* agents.get(task.agent) - if (!taskAgent) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? 
` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${task.agent}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID, error: error.toObject() }) - throw error - } + for (const [key, item] of Object.entries(yield* mcp.tools())) { + const execute = item.execute + if (!execute) continue - let error: Error | undefined - const taskAbort = new AbortController() - const result = yield* taskTool - .execute(taskArgs, { - agent: task.agent, - messageID: assistantMessage.id, - sessionID, - abort: taskAbort.signal, - callID: part.callID, - extra: { bypassAgentCheck: true, promptOps }, - messages: msgs, - metadata: (val: { title?: string; metadata?: Record }) => - Effect.gen(function* () { - part = yield* sessions.updatePart({ - ...part, - type: "tool", - state: { ...part.state, ...val }, - } satisfies MessageV2.ToolPart) - }), - ask: (req: any) => - permission - .ask({ - ...req, - sessionID, - ruleset: Permission.merge(taskAgent.permission, session.permission ?? []), - }) - .pipe(Effect.orDie), - }) - .pipe( - Effect.catchCause((cause) => { - const defect = Cause.squash(cause) - error = defect instanceof Error ? 
defect : new Error(String(defect)) - log.error("subtask execution failed", { error, agent: task.agent, description: task.description }) - return Effect.void - }), - Effect.onInterrupt(() => - Effect.gen(function* () { - taskAbort.abort() - assistantMessage.finish = "tool-calls" - assistantMessage.time.completed = Date.now() - yield* sessions.updateMessage(assistantMessage) - if (part.state.status === "running") { - yield* sessions.updatePart({ - ...part, - state: { - status: "error", - error: "Cancelled", - time: { start: part.state.time.start, end: Date.now() }, - metadata: part.state.metadata, - input: part.state.input, - }, - } satisfies MessageV2.ToolPart) + const schema = yield* Effect.promise(() => Promise.resolve(asSchema(item.inputSchema).jsonSchema)) + const transformed = ProviderTransform.schema(input.model, schema) + item.inputSchema = jsonSchema(transformed) + item.execute = (args, opts) => + run.promise( + Effect.gen(function* () { + const ctx = context(args, opts) + yield* plugin.trigger( + "tool.execute.before", + { tool: key, sessionID: ctx.sessionID, callID: opts.toolCallId }, + { args }, + ) + yield* ctx.ask({ permission: key, metadata: {}, patterns: ["*"], always: ["*"] }) + const result: Awaited>> = yield* Effect.promise(() => + execute(args, opts), + ) + yield* plugin.trigger( + "tool.execute.after", + { tool: key, sessionID: ctx.sessionID, callID: opts.toolCallId, args }, + result, + ) + + const textParts: string[] = [] + const attachments: Omit[] = [] + for (const contentItem of result.content) { + if (contentItem.type === "text") textParts.push(contentItem.text) + else if (contentItem.type === "image") { + attachments.push({ + type: "file", + mime: contentItem.mimeType, + url: `data:${contentItem.mimeType};base64,${contentItem.data}`, + }) + } else if (contentItem.type === "resource") { + const { resource } = contentItem + if (resource.text) textParts.push(resource.text) + if (resource.blob) { + attachments.push({ + type: "file", + mime: 
resource.mimeType ?? "application/octet-stream", + url: `data:${resource.mimeType ?? "application/octet-stream"};base64,${resource.blob}`, + filename: resource.uri, + }) + } } - }), - ), + } + + const truncated = yield* truncate.output(textParts.join("\n\n"), {}, input.agent) + const metadata = { + ...result.metadata, + truncated: truncated.truncated, + ...(truncated.truncated && { outputPath: truncated.outputPath }), + } + + const output = { + title: "", + metadata, + output: truncated.content, + attachments: attachments.map((attachment) => ({ + ...attachment, + id: PartID.ascending(), + sessionID: ctx.sessionID, + messageID: input.processor.message.id, + })), + content: result.content, + } + if (opts.abortSignal?.aborted) { + yield* input.processor.completeToolCall(opts.toolCallId, output) + } + return output + }), ) + tools[key] = item + } - const attachments = result?.attachments?.map((attachment) => ({ - ...attachment, - id: PartID.ascending(), - sessionID, + return tools + }) + + const handleSubtask = Effect.fn("SessionPrompt.handleSubtask")(function* (input: { + task: MessageV2.SubtaskPart + model: Provider.Model + lastUser: MessageV2.User + sessionID: SessionID + session: Session.Info + msgs: MessageV2.WithParts[] + }) { + const { task, model, lastUser, sessionID, session, msgs } = input + const ctx = yield* InstanceState.context + const promptOps = yield* ops() + const { task: taskTool } = yield* registry.named() + const taskModel = task.model ? 
yield* getModel(task.model.providerID, task.model.modelID, sessionID) : model + const assistantMessage: MessageV2.Assistant = yield* sessions.updateMessage({ + id: MessageID.ascending(), + role: "assistant", + parentID: lastUser.id, + sessionID, + mode: task.agent, + agent: task.agent, + variant: lastUser.model.variant, + path: { cwd: ctx.directory, root: ctx.worktree }, + cost: 0, + tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, + modelID: taskModel.id, + providerID: taskModel.providerID, + time: { created: Date.now() }, + }) + let part: MessageV2.ToolPart = yield* sessions.updatePart({ + id: PartID.ascending(), + messageID: assistantMessage.id, + sessionID: assistantMessage.sessionID, + type: "tool", + callID: ulid(), + tool: TaskTool.id, + state: { + status: "running", + input: { + prompt: task.prompt, + description: task.description, + subagent_type: task.agent, + command: task.command, + }, + time: { start: Date.now() }, + }, + }) + const taskArgs = { + prompt: task.prompt, + description: task.description, + subagent_type: task.agent, + command: task.command, + } + yield* plugin.trigger( + "tool.execute.before", + { tool: TaskTool.id, sessionID, callID: part.id }, + { args: taskArgs }, + ) + + const taskAgent = yield* agents.get(task.agent) + if (!taskAgent) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? 
` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${task.agent}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID, error: error.toObject() }) + throw error + } + + let error: Error | undefined + const taskAbort = new AbortController() + const result = yield* taskTool + .execute(taskArgs, { + agent: task.agent, messageID: assistantMessage.id, - })) - - yield* plugin.trigger( - "tool.execute.after", - { tool: TaskTool.id, sessionID, callID: part.id, args: taskArgs }, - result, + sessionID, + abort: taskAbort.signal, + callID: part.callID, + extra: { bypassAgentCheck: true, promptOps }, + messages: msgs, + metadata: (val: { title?: string; metadata?: Record }) => + Effect.gen(function* () { + part = yield* sessions.updatePart({ + ...part, + type: "tool", + state: { ...part.state, ...val }, + } satisfies MessageV2.ToolPart) + }), + ask: (req: any) => + permission + .ask({ + ...req, + sessionID, + ruleset: Permission.merge(taskAgent.permission, session.permission ?? []), + }) + .pipe(Effect.orDie), + }) + .pipe( + Effect.catchCause((cause) => { + const defect = Cause.squash(cause) + error = defect instanceof Error ? 
defect : new Error(String(defect)) + log.error("subtask execution failed", { error, agent: task.agent, description: task.description }) + return Effect.void + }), + Effect.onInterrupt(() => + Effect.gen(function* () { + taskAbort.abort() + assistantMessage.finish = "tool-calls" + assistantMessage.time.completed = Date.now() + yield* sessions.updateMessage(assistantMessage) + if (part.state.status === "running") { + yield* sessions.updatePart({ + ...part, + state: { + status: "error", + error: "Cancelled", + time: { start: part.state.time.start, end: Date.now() }, + metadata: part.state.metadata, + input: part.state.input, + }, + } satisfies MessageV2.ToolPart) + } + }), + ), ) - assistantMessage.finish = "tool-calls" - assistantMessage.time.completed = Date.now() - yield* sessions.updateMessage(assistantMessage) + const attachments = result?.attachments?.map((attachment) => ({ + ...attachment, + id: PartID.ascending(), + sessionID, + messageID: assistantMessage.id, + })) - if (result && part.state.status === "running") { - yield* sessions.updatePart({ - ...part, - state: { - status: "completed", - input: part.state.input, - title: result.title, - metadata: result.metadata, - output: result.output, - attachments, - time: { ...part.state.time, end: Date.now() }, - }, - } satisfies MessageV2.ToolPart) - } + yield* plugin.trigger( + "tool.execute.after", + { tool: TaskTool.id, sessionID, callID: part.id, args: taskArgs }, + result, + ) - if (!result) { - yield* sessions.updatePart({ - ...part, - state: { - status: "error", - error: error ? `Tool execution failed: ${error.message}` : "Tool execution failed", - time: { - start: part.state.status === "running" ? part.state.time.start : Date.now(), - end: Date.now(), - }, - metadata: part.state.status === "pending" ? 
undefined : part.state.metadata, - input: part.state.input, - }, - } satisfies MessageV2.ToolPart) - } + assistantMessage.finish = "tool-calls" + assistantMessage.time.completed = Date.now() + yield* sessions.updateMessage(assistantMessage) - if (!task.command) return - - const summaryUserMsg: MessageV2.User = { - id: MessageID.ascending(), - sessionID, - role: "user", - time: { created: Date.now() }, - agent: lastUser.agent, - model: lastUser.model, - } - yield* sessions.updateMessage(summaryUserMsg) + if (result && part.state.status === "running") { yield* sessions.updatePart({ - id: PartID.ascending(), - messageID: summaryUserMsg.id, - sessionID, - type: "text", - text: "Summarize the task tool output above and continue with your task.", - synthetic: true, - } satisfies MessageV2.TextPart) + ...part, + state: { + status: "completed", + input: part.state.input, + title: result.title, + metadata: result.metadata, + output: result.output, + attachments, + time: { ...part.state.time, end: Date.now() }, + }, + } satisfies MessageV2.ToolPart) + } + + if (!result) { + yield* sessions.updatePart({ + ...part, + state: { + status: "error", + error: error ? `Tool execution failed: ${error.message}` : "Tool execution failed", + time: { + start: part.state.status === "running" ? part.state.time.start : Date.now(), + end: Date.now(), + }, + metadata: part.state.status === "pending" ? 
undefined : part.state.metadata, + input: part.state.input, + }, + } satisfies MessageV2.ToolPart) + } + + if (!task.command) return + + const summaryUserMsg: MessageV2.User = { + id: MessageID.ascending(), + sessionID, + role: "user", + time: { created: Date.now() }, + agent: lastUser.agent, + model: lastUser.model, + } + yield* sessions.updateMessage(summaryUserMsg) + yield* sessions.updatePart({ + id: PartID.ascending(), + messageID: summaryUserMsg.id, + sessionID, + type: "text", + text: "Summarize the task tool output above and continue with your task.", + synthetic: true, + } satisfies MessageV2.TextPart) + }) + + const shellImpl = Effect.fn("SessionPrompt.shellImpl")(function* (input: ShellInput) { + const ctx = yield* InstanceState.context + const run = yield* runner() + const session = yield* sessions.get(input.sessionID) + if (session.revert) { + yield* revert.cleanup(session) + } + const agent = yield* agents.get(input.agent) + if (!agent) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${input.agent}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) + throw error + } + const model = input.model ?? agent.model ?? (yield* lastModel(input.sessionID)) + const userMsg: MessageV2.User = { + id: input.messageID ?? 
MessageID.ascending(), + sessionID: input.sessionID, + time: { created: Date.now() }, + role: "user", + agent: input.agent, + model: { providerID: model.providerID, modelID: model.modelID }, + } + yield* sessions.updateMessage(userMsg) + const userPart: MessageV2.Part = { + type: "text", + id: PartID.ascending(), + messageID: userMsg.id, + sessionID: input.sessionID, + text: "The following tool was executed by the user", + synthetic: true, + } + yield* sessions.updatePart(userPart) + + const msg: MessageV2.Assistant = { + id: MessageID.ascending(), + sessionID: input.sessionID, + parentID: userMsg.id, + mode: input.agent, + agent: input.agent, + cost: 0, + path: { cwd: ctx.directory, root: ctx.worktree }, + time: { created: Date.now() }, + role: "assistant", + tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, + modelID: model.modelID, + providerID: model.providerID, + } + yield* sessions.updateMessage(msg) + const part: MessageV2.ToolPart = { + type: "tool", + id: PartID.ascending(), + messageID: msg.id, + sessionID: input.sessionID, + tool: "bash", + callID: ulid(), + state: { + status: "running", + time: { start: Date.now() }, + input: { command: input.command }, + }, + } + yield* sessions.updatePart(part) + + const sh = Shell.preferred() + const shellName = ( + process.platform === "win32" ? 
path.win32.basename(sh, ".exe") : path.basename(sh) + ).toLowerCase() + const invocations: Record = { + nu: { args: ["-c", input.command] }, + fish: { args: ["-c", input.command] }, + zsh: { + args: [ + "-l", + "-c", + ` + __oc_cwd=$PWD + [[ -f ~/.zshenv ]] && source ~/.zshenv >/dev/null 2>&1 || true + [[ -f "\${ZDOTDIR:-$HOME}/.zshrc" ]] && source "\${ZDOTDIR:-$HOME}/.zshrc" >/dev/null 2>&1 || true + cd "$__oc_cwd" + eval ${JSON.stringify(input.command)} + `, + ], + }, + bash: { + args: [ + "-l", + "-c", + ` + __oc_cwd=$PWD + shopt -s expand_aliases + [[ -f ~/.bashrc ]] && source ~/.bashrc >/dev/null 2>&1 || true + cd "$__oc_cwd" + eval ${JSON.stringify(input.command)} + `, + ], + }, + cmd: { args: ["/c", input.command] }, + powershell: { args: ["-NoProfile", "-Command", input.command] }, + pwsh: { args: ["-NoProfile", "-Command", input.command] }, + "": { args: ["-c", input.command] }, + } + + const args = (invocations[shellName] ?? invocations[""]).args + const cwd = ctx.directory + const shellEnv = yield* plugin.trigger( + "shell.env", + { cwd, sessionID: input.sessionID, callID: part.callID }, + { env: {} }, + ) + + const cmd = ChildProcess.make(sh, args, { + cwd, + extendEnv: true, + env: { ...shellEnv.env, TERM: "dumb" }, + stdin: "ignore", + forceKillAfter: "3 seconds", }) - const shellImpl = Effect.fn("SessionPrompt.shellImpl")(function* (input: ShellInput) { - const ctx = yield* InstanceState.context - const run = yield* runner() - const session = yield* sessions.get(input.sessionID) - if (session.revert) { - yield* revert.cleanup(session) - } - const agent = yield* agents.get(input.agent) - if (!agent) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? 
` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${input.agent}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) - throw error - } - const model = input.model ?? agent.model ?? (yield* lastModel(input.sessionID)) - const userMsg: MessageV2.User = { - id: input.messageID ?? MessageID.ascending(), - sessionID: input.sessionID, - time: { created: Date.now() }, - role: "user", - agent: input.agent, - model: { providerID: model.providerID, modelID: model.modelID }, - } - yield* sessions.updateMessage(userMsg) - const userPart: MessageV2.Part = { - type: "text", - id: PartID.ascending(), - messageID: userMsg.id, - sessionID: input.sessionID, - text: "The following tool was executed by the user", - synthetic: true, - } - yield* sessions.updatePart(userPart) + let output = "" + let aborted = false - const msg: MessageV2.Assistant = { - id: MessageID.ascending(), - sessionID: input.sessionID, - parentID: userMsg.id, - mode: input.agent, - agent: input.agent, - cost: 0, - path: { cwd: ctx.directory, root: ctx.worktree }, - time: { created: Date.now() }, - role: "assistant", - tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, - modelID: model.modelID, - providerID: model.providerID, - } - yield* sessions.updateMessage(msg) - const part: MessageV2.ToolPart = { - type: "tool", - id: PartID.ascending(), - messageID: msg.id, - sessionID: input.sessionID, - tool: "bash", - callID: ulid(), - state: { - status: "running", - time: { start: Date.now() }, - input: { command: input.command }, - }, - } - yield* sessions.updatePart(part) - - const sh = Shell.preferred() - const shellName = ( - process.platform === "win32" ? 
path.win32.basename(sh, ".exe") : path.basename(sh) - ).toLowerCase() - const invocations: Record = { - nu: { args: ["-c", input.command] }, - fish: { args: ["-c", input.command] }, - zsh: { - args: [ - "-l", - "-c", - ` - __oc_cwd=$PWD - [[ -f ~/.zshenv ]] && source ~/.zshenv >/dev/null 2>&1 || true - [[ -f "\${ZDOTDIR:-$HOME}/.zshrc" ]] && source "\${ZDOTDIR:-$HOME}/.zshrc" >/dev/null 2>&1 || true - cd "$__oc_cwd" - eval ${JSON.stringify(input.command)} - `, - ], - }, - bash: { - args: [ - "-l", - "-c", - ` - __oc_cwd=$PWD - shopt -s expand_aliases - [[ -f ~/.bashrc ]] && source ~/.bashrc >/dev/null 2>&1 || true - cd "$__oc_cwd" - eval ${JSON.stringify(input.command)} - `, - ], - }, - cmd: { args: ["/c", input.command] }, - powershell: { args: ["-NoProfile", "-Command", input.command] }, - pwsh: { args: ["-NoProfile", "-Command", input.command] }, - "": { args: ["-c", input.command] }, - } - - const args = (invocations[shellName] ?? invocations[""]).args - const cwd = ctx.directory - const shellEnv = yield* plugin.trigger( - "shell.env", - { cwd, sessionID: input.sessionID, callID: part.callID }, - { env: {} }, - ) - - const cmd = ChildProcess.make(sh, args, { - cwd, - extendEnv: true, - env: { ...shellEnv.env, TERM: "dumb" }, - stdin: "ignore", - forceKillAfter: "3 seconds", - }) - - let output = "" - let aborted = false - - const finish = Effect.uninterruptible( - Effect.gen(function* () { - if (aborted) { - output += "\n\n" + ["", "User aborted the command", ""].join("\n") - } - if (!msg.time.completed) { - msg.time.completed = Date.now() - yield* sessions.updateMessage(msg) + const finish = Effect.uninterruptible( + Effect.gen(function* () { + if (aborted) { + output += "\n\n" + ["", "User aborted the command", ""].join("\n") + } + if (!msg.time.completed) { + msg.time.completed = Date.now() + yield* sessions.updateMessage(msg) + } + if (part.state.status === "running") { + part.state = { + status: "completed", + time: { ...part.state.time, end: Date.now() }, 
+ input: part.state.input, + title: "", + metadata: { output, description: "" }, + output, } + yield* sessions.updatePart(part) + } + }), + ) + + const exit = yield* Effect.gen(function* () { + const handle = yield* spawner.spawn(cmd) + yield* Stream.runForEach(Stream.decodeText(handle.all), (chunk) => + Effect.sync(() => { + output += chunk if (part.state.status === "running") { - part.state = { - status: "completed", - time: { ...part.state.time, end: Date.now() }, - input: part.state.input, - title: "", - metadata: { output, description: "" }, - output, - } - yield* sessions.updatePart(part) + part.state.metadata = { output, description: "" } + void run.fork(sessions.updatePart(part)) } }), ) + yield* handle.exitCode + }).pipe( + Effect.scoped, + Effect.onInterrupt(() => + Effect.sync(() => { + aborted = true + }), + ), + Effect.orDie, + Effect.ensuring(finish), + Effect.exit, + ) - const exit = yield* Effect.gen(function* () { - const handle = yield* spawner.spawn(cmd) - yield* Stream.runForEach(Stream.decodeText(handle.all), (chunk) => - Effect.sync(() => { - output += chunk - if (part.state.status === "running") { - part.state.metadata = { output, description: "" } - void run.fork(sessions.updatePart(part)) - } - }), - ) - yield* handle.exitCode - }).pipe( - Effect.scoped, - Effect.onInterrupt(() => - Effect.sync(() => { - aborted = true - }), - ), - Effect.orDie, - Effect.ensuring(finish), - Effect.exit, - ) - - if (Exit.isFailure(exit) && !Cause.hasInterruptsOnly(exit.cause)) { - return yield* Effect.failCause(exit.cause) - } - - return { info: msg, parts: [part] } - }) - - const getModel = Effect.fn("SessionPrompt.getModel")(function* ( - providerID: ProviderID, - modelID: ModelID, - sessionID: SessionID, - ) { - const exit = yield* provider.getModel(providerID, modelID).pipe(Effect.exit) - if (Exit.isSuccess(exit)) return exit.value - const err = Cause.squash(exit.cause) - if (Provider.ModelNotFoundError.isInstance(err)) { - const hint = 
err.data.suggestions?.length ? ` Did you mean: ${err.data.suggestions.join(", ")}?` : "" - yield* bus.publish(Session.Event.Error, { - sessionID, - error: new NamedError.Unknown({ - message: `Model not found: ${err.data.providerID}/${err.data.modelID}.${hint}`, - }).toObject(), - }) - } + if (Exit.isFailure(exit) && !Cause.hasInterruptsOnly(exit.cause)) { return yield* Effect.failCause(exit.cause) - }) + } - const lastModel = Effect.fnUntraced(function* (sessionID: SessionID) { - const match = yield* sessions.findMessage(sessionID, (m) => m.info.role === "user" && !!m.info.model) - if (Option.isSome(match) && match.value.info.role === "user") return match.value.info.model - return yield* provider.defaultModel() - }) + return { info: msg, parts: [part] } + }) - const createUserMessage = Effect.fn("SessionPrompt.createUserMessage")(function* (input: PromptInput) { - const agentName = input.agent || (yield* agents.defaultAgent()) - const ag = yield* agents.get(agentName) - if (!ag) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${agentName}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) - throw error - } - - const model = input.model ?? ag.model ?? (yield* lastModel(input.sessionID)) - const same = ag.model && model.providerID === ag.model.providerID && model.modelID === ag.model.modelID - const full = - !input.variant && ag.variant && same - ? yield* provider.getModel(model.providerID, model.modelID).pipe(Effect.catchDefect(() => Effect.void)) - : undefined - const variant = input.variant ?? (ag.variant && full?.variants?.[ag.variant] ? ag.variant : undefined) - - const info: MessageV2.User = { - id: input.messageID ?? 
MessageID.ascending(), - role: "user", - sessionID: input.sessionID, - time: { created: Date.now() }, - tools: input.tools, - agent: ag.name, - model: { - providerID: model.providerID, - modelID: model.modelID, - variant, - }, - system: input.system, - format: input.format, - } - - yield* Effect.addFinalizer(() => instruction.clear(info.id)) - - type Draft = T extends MessageV2.Part ? Omit & { id?: string } : never - const assign = (part: Draft): MessageV2.Part => ({ - ...part, - id: part.id ? PartID.make(part.id) : PartID.ascending(), + const getModel = Effect.fn("SessionPrompt.getModel")(function* ( + providerID: ProviderID, + modelID: ModelID, + sessionID: SessionID, + ) { + const exit = yield* provider.getModel(providerID, modelID).pipe(Effect.exit) + if (Exit.isSuccess(exit)) return exit.value + const err = Cause.squash(exit.cause) + if (Provider.ModelNotFoundError.isInstance(err)) { + const hint = err.data.suggestions?.length ? ` Did you mean: ${err.data.suggestions.join(", ")}?` : "" + yield* bus.publish(Session.Event.Error, { + sessionID, + error: new NamedError.Unknown({ + message: `Model not found: ${err.data.providerID}/${err.data.modelID}.${hint}`, + }).toObject(), }) + } + return yield* Effect.failCause(exit.cause) + }) - const resolvePart: (part: PromptInput["parts"][number]) => Effect.Effect[]> = Effect.fn( - "SessionPrompt.resolveUserPart", - )(function* (part) { - if (part.type === "file") { - if (part.source?.type === "resource") { - const { clientName, uri } = part.source - log.info("mcp resource", { clientName, uri, mime: part.mime }) - const pieces: Draft[] = [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Reading MCP resource: ${part.filename} (${uri})`, - }, - ] - const exit = yield* mcp.readResource(clientName, uri).pipe(Effect.exit) - if (Exit.isSuccess(exit)) { - const content = exit.value - if (!content) throw new Error(`Resource not found: ${clientName}/${uri}`) - const items = 
Array.isArray(content.contents) ? content.contents : [content.contents] - for (const c of items) { - if ("text" in c && c.text) { - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: c.text, - }) - } else if ("blob" in c && c.blob) { - const mime = "mimeType" in c ? c.mimeType : part.mime - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `[Binary content: ${mime}]`, - }) - } + const lastModel = Effect.fnUntraced(function* (sessionID: SessionID) { + const match = yield* sessions.findMessage(sessionID, (m) => m.info.role === "user" && !!m.info.model) + if (Option.isSome(match) && match.value.info.role === "user") return match.value.info.model + return yield* provider.defaultModel() + }) + + const createUserMessage = Effect.fn("SessionPrompt.createUserMessage")(function* (input: PromptInput) { + const agentName = input.agent || (yield* agents.defaultAgent()) + const ag = yield* agents.get(agentName) + if (!ag) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${agentName}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) + throw error + } + + const model = input.model ?? ag.model ?? (yield* lastModel(input.sessionID)) + const same = ag.model && model.providerID === ag.model.providerID && model.modelID === ag.model.modelID + const full = + !input.variant && ag.variant && same + ? yield* provider.getModel(model.providerID, model.modelID).pipe(Effect.catchDefect(() => Effect.void)) + : undefined + const variant = input.variant ?? (ag.variant && full?.variants?.[ag.variant] ? ag.variant : undefined) + + const info: MessageV2.User = { + id: input.messageID ?? 
MessageID.ascending(), + role: "user", + sessionID: input.sessionID, + time: { created: Date.now() }, + tools: input.tools, + agent: ag.name, + model: { + providerID: model.providerID, + modelID: model.modelID, + variant, + }, + system: input.system, + format: input.format, + } + + yield* Effect.addFinalizer(() => instruction.clear(info.id)) + + type Draft = T extends MessageV2.Part ? Omit & { id?: string } : never + const assign = (part: Draft): MessageV2.Part => ({ + ...part, + id: part.id ? PartID.make(part.id) : PartID.ascending(), + }) + + const resolvePart: (part: PromptInput["parts"][number]) => Effect.Effect[]> = Effect.fn( + "SessionPrompt.resolveUserPart", + )(function* (part) { + if (part.type === "file") { + if (part.source?.type === "resource") { + const { clientName, uri } = part.source + log.info("mcp resource", { clientName, uri, mime: part.mime }) + const pieces: Draft[] = [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Reading MCP resource: ${part.filename} (${uri})`, + }, + ] + const exit = yield* mcp.readResource(clientName, uri).pipe(Effect.exit) + if (Exit.isSuccess(exit)) { + const content = exit.value + if (!content) throw new Error(`Resource not found: ${clientName}/${uri}`) + const items = Array.isArray(content.contents) ? content.contents : [content.contents] + for (const c of items) { + if ("text" in c && c.text) { + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: c.text, + }) + } else if ("blob" in c && c.blob) { + const mime = "mimeType" in c ? 
c.mimeType : part.mime + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `[Binary content: ${mime}]`, + }) } - pieces.push({ ...part, messageID: info.id, sessionID: input.sessionID }) - } else { - const error = Cause.squash(exit.cause) - log.error("failed to read MCP resource", { error, clientName, uri }) - const message = error instanceof Error ? error.message : String(error) - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Failed to read MCP resource ${part.filename}: ${message}`, - }) } - return pieces + pieces.push({ ...part, messageID: info.id, sessionID: input.sessionID }) + } else { + const error = Cause.squash(exit.cause) + log.error("failed to read MCP resource", { error, clientName, uri }) + const message = error instanceof Error ? error.message : String(error) + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Failed to read MCP resource ${part.filename}: ${message}`, + }) } - const url = new URL(part.url) - switch (url.protocol) { - case "data:": - if (part.mime === "text/plain") { - return [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Called the Read tool with the following input: ${JSON.stringify({ filePath: part.filename })}`, - }, - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: decodeDataUrl(part.url), - }, - { ...part, messageID: info.id, sessionID: input.sessionID }, - ] - } - break - case "file:": { - log.info("file", { mime: part.mime }) - const filepath = fileURLToPath(part.url) - if (yield* fsys.isDir(filepath)) part.mime = "application/x-directory" - - const { read } = yield* registry.named() - const execRead = (args: Parameters[0], extra?: Tool.Context["extra"]) => { - const controller = new AbortController() - return read - .execute(args, { - sessionID: 
input.sessionID, - abort: controller.signal, - agent: input.agent!, - messageID: info.id, - extra: { bypassCwdCheck: true, ...extra }, - messages: [], - metadata: () => Effect.void, - ask: () => Effect.void, - }) - .pipe(Effect.onInterrupt(() => Effect.sync(() => controller.abort()))) - } - - if (part.mime === "text/plain") { - let offset: number | undefined - let limit: number | undefined - const range = { start: url.searchParams.get("start"), end: url.searchParams.get("end") } - if (range.start != null) { - const filePathURI = part.url.split("?")[0] - let start = parseInt(range.start) - let end = range.end ? parseInt(range.end) : undefined - if (start === end) { - const symbols = yield* lsp - .documentSymbol(filePathURI) - .pipe(Effect.catch(() => Effect.succeed([]))) - for (const symbol of symbols) { - let r: LSP.Range | undefined - if ("range" in symbol) r = symbol.range - else if ("location" in symbol) r = symbol.location.range - if (r?.start?.line && r?.start?.line === start) { - start = r.start.line - end = r?.end?.line ?? start - break - } - } - } - offset = Math.max(start, 1) - if (end) limit = end - (offset - 1) - } - const args = { filePath: filepath, offset, limit } - const pieces: Draft[] = [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Called the Read tool with the following input: ${JSON.stringify(args)}`, - }, - ] - const exit = yield* provider.getModel(info.model.providerID, info.model.modelID).pipe( - Effect.flatMap((mdl) => execRead(args, { model: mdl })), - Effect.exit, - ) - if (Exit.isSuccess(exit)) { - const result = exit.value - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: result.output, - }) - if (result.attachments?.length) { - pieces.push( - ...result.attachments.map((a) => ({ - ...a, - synthetic: true, - filename: a.filename ?? 
part.filename, - messageID: info.id, - sessionID: input.sessionID, - })), - ) - } else { - pieces.push({ ...part, messageID: info.id, sessionID: input.sessionID }) - } - } else { - const error = Cause.squash(exit.cause) - log.error("failed to read file", { error }) - const message = error instanceof Error ? error.message : String(error) - yield* bus.publish(Session.Event.Error, { - sessionID: input.sessionID, - error: new NamedError.Unknown({ message }).toObject(), - }) - pieces.push({ - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Read tool failed to read ${filepath} with the following error: ${message}`, - }) - } - return pieces - } - - if (part.mime === "application/x-directory") { - const args = { filePath: filepath } - const exit = yield* execRead(args).pipe(Effect.exit) - if (Exit.isFailure(exit)) { - const error = Cause.squash(exit.cause) - log.error("failed to read directory", { error }) - const message = error instanceof Error ? error.message : String(error) - yield* bus.publish(Session.Event.Error, { - sessionID: input.sessionID, - error: new NamedError.Unknown({ message }).toObject(), - }) - return [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Read tool failed to read ${filepath} with the following error: ${message}`, - }, - ] - } - return [ - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: `Called the Read tool with the following input: ${JSON.stringify(args)}`, - }, - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: exit.value.output, - }, - { ...part, messageID: info.id, sessionID: input.sessionID }, - ] - } - - yield* filetime.read(input.sessionID, filepath) + return pieces + } + const url = new URL(part.url) + switch (url.protocol) { + case "data:": + if (part.mime === "text/plain") { return [ { messageID: info.id, sessionID: input.sessionID, type: 
"text", synthetic: true, - text: `Called the Read tool with the following input: {"filePath":"${filepath}"}`, + text: `Called the Read tool with the following input: ${JSON.stringify({ filePath: part.filename })}`, }, { - id: part.id, messageID: info.id, sessionID: input.sessionID, - type: "file", - url: - `data:${part.mime};base64,` + - Buffer.from(yield* fsys.readFile(filepath).pipe(Effect.catch(Effect.die))).toString("base64"), - mime: part.mime, - filename: part.filename!, - source: part.source, + type: "text", + synthetic: true, + text: decodeDataUrl(part.url), }, + { ...part, messageID: info.id, sessionID: input.sessionID }, ] } + break + case "file:": { + log.info("file", { mime: part.mime }) + const filepath = fileURLToPath(part.url) + if (yield* fsys.isDir(filepath)) part.mime = "application/x-directory" + + const { read } = yield* registry.named() + const execRead = (args: Parameters[0], extra?: Tool.Context["extra"]) => { + const controller = new AbortController() + return read + .execute(args, { + sessionID: input.sessionID, + abort: controller.signal, + agent: input.agent!, + messageID: info.id, + extra: { bypassCwdCheck: true, ...extra }, + messages: [], + metadata: () => Effect.void, + ask: () => Effect.void, + }) + .pipe(Effect.onInterrupt(() => Effect.sync(() => controller.abort()))) + } + + if (part.mime === "text/plain") { + let offset: number | undefined + let limit: number | undefined + const range = { start: url.searchParams.get("start"), end: url.searchParams.get("end") } + if (range.start != null) { + const filePathURI = part.url.split("?")[0] + let start = parseInt(range.start) + let end = range.end ? 
parseInt(range.end) : undefined + if (start === end) { + const symbols = yield* lsp + .documentSymbol(filePathURI) + .pipe(Effect.catch(() => Effect.succeed([]))) + for (const symbol of symbols) { + let r: LSP.Range | undefined + if ("range" in symbol) r = symbol.range + else if ("location" in symbol) r = symbol.location.range + if (r?.start?.line && r?.start?.line === start) { + start = r.start.line + end = r?.end?.line ?? start + break + } + } + } + offset = Math.max(start, 1) + if (end) limit = end - (offset - 1) + } + const args = { filePath: filepath, offset, limit } + const pieces: Draft[] = [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Called the Read tool with the following input: ${JSON.stringify(args)}`, + }, + ] + const exit = yield* provider.getModel(info.model.providerID, info.model.modelID).pipe( + Effect.flatMap((mdl) => execRead(args, { model: mdl })), + Effect.exit, + ) + if (Exit.isSuccess(exit)) { + const result = exit.value + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: result.output, + }) + if (result.attachments?.length) { + pieces.push( + ...result.attachments.map((a) => ({ + ...a, + synthetic: true, + filename: a.filename ?? part.filename, + messageID: info.id, + sessionID: input.sessionID, + })), + ) + } else { + pieces.push({ ...part, messageID: info.id, sessionID: input.sessionID }) + } + } else { + const error = Cause.squash(exit.cause) + log.error("failed to read file", { error }) + const message = error instanceof Error ? 
error.message : String(error) + yield* bus.publish(Session.Event.Error, { + sessionID: input.sessionID, + error: new NamedError.Unknown({ message }).toObject(), + }) + pieces.push({ + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Read tool failed to read ${filepath} with the following error: ${message}`, + }) + } + return pieces + } + + if (part.mime === "application/x-directory") { + const args = { filePath: filepath } + const exit = yield* execRead(args).pipe(Effect.exit) + if (Exit.isFailure(exit)) { + const error = Cause.squash(exit.cause) + log.error("failed to read directory", { error }) + const message = error instanceof Error ? error.message : String(error) + yield* bus.publish(Session.Event.Error, { + sessionID: input.sessionID, + error: new NamedError.Unknown({ message }).toObject(), + }) + return [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Read tool failed to read ${filepath} with the following error: ${message}`, + }, + ] + } + return [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Called the Read tool with the following input: ${JSON.stringify(args)}`, + }, + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: exit.value.output, + }, + { ...part, messageID: info.id, sessionID: input.sessionID }, + ] + } + + yield* filetime.read(input.sessionID, filepath) + return [ + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: `Called the Read tool with the following input: {"filePath":"${filepath}"}`, + }, + { + id: part.id, + messageID: info.id, + sessionID: input.sessionID, + type: "file", + url: + `data:${part.mime};base64,` + + Buffer.from(yield* fsys.readFile(filepath).pipe(Effect.catch(Effect.die))).toString("base64"), + mime: part.mime, + filename: part.filename!, + source: part.source, + }, + ] } } - - if 
(part.type === "agent") { - const perm = Permission.evaluate("task", part.name, ag.permission) - const hint = perm.action === "deny" ? " . Invoked by user; guaranteed to exist." : "" - return [ - { ...part, messageID: info.id, sessionID: input.sessionID }, - { - messageID: info.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: - " Use the above message and context to generate a prompt and call the task tool with subagent: " + - part.name + - hint, - }, - ] - } - - return [{ ...part, messageID: info.id, sessionID: input.sessionID }] - }) - - const parts = yield* Effect.forEach(input.parts, resolvePart, { concurrency: "unbounded" }).pipe( - Effect.map((x) => x.flat().map(assign)), - ) - - yield* plugin.trigger( - "chat.message", - { - sessionID: input.sessionID, - agent: input.agent, - model: input.model, - messageID: input.messageID, - variant: input.variant, - }, - { message: info, parts }, - ) - - const parsed = MessageV2.Info.safeParse(info) - if (!parsed.success) { - log.error("invalid user message before save", { - sessionID: input.sessionID, - messageID: info.id, - agent: info.agent, - model: info.model, - issues: parsed.error.issues, - }) } - parts.forEach((part, index) => { - const p = MessageV2.Part.safeParse(part) - if (p.success) return - log.error("invalid user part before save", { - sessionID: input.sessionID, - messageID: info.id, - partID: part.id, - partType: part.type, - index, - issues: p.error.issues, - part, - }) - }) - yield* sessions.updateMessage(info) - for (const part of parts) yield* sessions.updatePart(part) + if (part.type === "agent") { + const perm = Permission.evaluate("task", part.name, ag.permission) + const hint = perm.action === "deny" ? " . Invoked by user; guaranteed to exist." 
: "" + return [ + { ...part, messageID: info.id, sessionID: input.sessionID }, + { + messageID: info.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text: + " Use the above message and context to generate a prompt and call the task tool with subagent: " + + part.name + + hint, + }, + ] + } - return { info, parts } - }, Effect.scoped) - - const prompt: (input: PromptInput) => Effect.Effect = Effect.fn("SessionPrompt.prompt")( - function* (input: PromptInput) { - const session = yield* sessions.get(input.sessionID) - yield* revert.cleanup(session) - const message = yield* createUserMessage(input) - yield* sessions.touch(input.sessionID) - - const permissions: Permission.Ruleset = [] - for (const [t, enabled] of Object.entries(input.tools ?? {})) { - permissions.push({ permission: t, action: enabled ? "allow" : "deny", pattern: "*" }) - } - if (permissions.length > 0) { - session.permission = permissions - yield* sessions.setPermission({ sessionID: session.id, permission: permissions }) - } - - if (input.noReply === true) return message - return yield* loop({ sessionID: input.sessionID }) - }, - ) - - const lastAssistant = Effect.fnUntraced(function* (sessionID: SessionID) { - const match = yield* sessions.findMessage(sessionID, (m) => m.info.role !== "user") - if (Option.isSome(match)) return match.value - const msgs = yield* sessions.messages({ sessionID, limit: 1 }) - if (msgs.length > 0) return msgs[0] - throw new Error("Impossible") + return [{ ...part, messageID: info.id, sessionID: input.sessionID }] }) - const runLoop: (sessionID: SessionID) => Effect.Effect = Effect.fn("SessionPrompt.run")( - function* (sessionID: SessionID) { - const ctx = yield* InstanceState.context - const slog = elog.with({ sessionID }) - let structured: unknown | undefined - let step = 0 - const session = yield* sessions.get(sessionID) + const parts = yield* Effect.forEach(input.parts, resolvePart, { concurrency: "unbounded" }).pipe( + Effect.map((x) => 
x.flat().map(assign)), + ) - while (true) { - yield* status.set(sessionID, { type: "busy" }) - yield* slog.info("loop", { step }) + yield* plugin.trigger( + "chat.message", + { + sessionID: input.sessionID, + agent: input.agent, + model: input.model, + messageID: input.messageID, + variant: input.variant, + }, + { message: info, parts }, + ) - let msgs = yield* MessageV2.filterCompactedEffect(sessionID) + const parsed = MessageV2.Info.safeParse(info) + if (!parsed.success) { + log.error("invalid user message before save", { + sessionID: input.sessionID, + messageID: info.id, + agent: info.agent, + model: info.model, + issues: parsed.error.issues, + }) + } + parts.forEach((part, index) => { + const p = MessageV2.Part.safeParse(part) + if (p.success) return + log.error("invalid user part before save", { + sessionID: input.sessionID, + messageID: info.id, + partID: part.id, + partType: part.type, + index, + issues: p.error.issues, + part, + }) + }) - let lastUser: MessageV2.User | undefined - let lastAssistant: MessageV2.Assistant | undefined - let lastFinished: MessageV2.Assistant | undefined - let tasks: (MessageV2.CompactionPart | MessageV2.SubtaskPart)[] = [] - for (let i = msgs.length - 1; i >= 0; i--) { - const msg = msgs[i] - if (!lastUser && msg.info.role === "user") lastUser = msg.info - if (!lastAssistant && msg.info.role === "assistant") lastAssistant = msg.info - if (!lastFinished && msg.info.role === "assistant" && msg.info.finish) lastFinished = msg.info - if (lastUser && lastFinished) break - const task = msg.parts.filter((part) => part.type === "compaction" || part.type === "subtask") - if (task && !lastFinished) tasks.push(...task) - } + yield* sessions.updateMessage(info) + for (const part of parts) yield* sessions.updatePart(part) - if (!lastUser) throw new Error("No user message found in stream. 
This should never happen.") + return { info, parts } + }, Effect.scoped) - const lastAssistantMsg = msgs.findLast( - (msg) => msg.info.role === "assistant" && msg.info.id === lastAssistant?.id, - ) - // Some providers return "stop" even when the assistant message contains tool calls. - // Keep the loop running so tool results can be sent back to the model. - // Skip provider-executed tool parts — those were fully handled within the - // provider's stream (e.g. DWS Agent Platform) and don't need a re-loop. - const hasToolCalls = - lastAssistantMsg?.parts.some((part) => part.type === "tool" && !part.metadata?.providerExecuted) ?? false + const prompt: (input: PromptInput) => Effect.Effect = Effect.fn("SessionPrompt.prompt")( + function* (input: PromptInput) { + const session = yield* sessions.get(input.sessionID) + yield* revert.cleanup(session) + const message = yield* createUserMessage(input) + yield* sessions.touch(input.sessionID) - if ( - lastAssistant?.finish && - !["tool-calls"].includes(lastAssistant.finish) && - !hasToolCalls && - lastUser.id < lastAssistant.id - ) { - yield* slog.info("exiting loop") - break - } + const permissions: Permission.Ruleset = [] + for (const [t, enabled] of Object.entries(input.tools ?? {})) { + permissions.push({ permission: t, action: enabled ? 
"allow" : "deny", pattern: "*" }) + } + if (permissions.length > 0) { + session.permission = permissions + yield* sessions.setPermission({ sessionID: session.id, permission: permissions }) + } - step++ - if (step === 1) - yield* title({ - session, - modelID: lastUser.model.modelID, - providerID: lastUser.model.providerID, - history: msgs, - }).pipe(Effect.ignore, Effect.forkIn(scope)) + if (input.noReply === true) return message + return yield* loop({ sessionID: input.sessionID }) + }, + ) - const model = yield* getModel(lastUser.model.providerID, lastUser.model.modelID, sessionID) - const task = tasks.pop() + const lastAssistant = Effect.fnUntraced(function* (sessionID: SessionID) { + const match = yield* sessions.findMessage(sessionID, (m) => m.info.role !== "user") + if (Option.isSome(match)) return match.value + const msgs = yield* sessions.messages({ sessionID, limit: 1 }) + if (msgs.length > 0) return msgs[0] + throw new Error("Impossible") + }) - if (task?.type === "subtask") { - yield* handleSubtask({ task, model, lastUser, sessionID, session, msgs }) - continue - } + const runLoop: (sessionID: SessionID) => Effect.Effect = Effect.fn("SessionPrompt.run")( + function* (sessionID: SessionID) { + const ctx = yield* InstanceState.context + const slog = elog.with({ sessionID }) + let structured: unknown | undefined + let step = 0 + const session = yield* sessions.get(sessionID) - if (task?.type === "compaction") { - const result = yield* compaction.process({ - messages: msgs, - parentID: lastUser.id, - sessionID, - auto: task.auto, - overflow: task.overflow, - }) - if (result === "stop") break - continue - } + while (true) { + yield* status.set(sessionID, { type: "busy" }) + yield* slog.info("loop", { step }) - if ( - lastFinished && - lastFinished.summary !== true && - (yield* compaction.isOverflow({ tokens: lastFinished.tokens, model })) - ) { - yield* compaction.create({ sessionID, agent: lastUser.agent, model: lastUser.model, auto: true }) - continue - } + 
let msgs = yield* MessageV2.filterCompactedEffect(sessionID) - const agent = yield* agents.get(lastUser.agent) - if (!agent) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${lastUser.agent}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID, error: error.toObject() }) - throw error - } - const maxSteps = agent.steps ?? Infinity - const isLastStep = step >= maxSteps - msgs = yield* insertReminders({ messages: msgs, agent, session }) + let lastUser: MessageV2.User | undefined + let lastAssistant: MessageV2.Assistant | undefined + let lastFinished: MessageV2.Assistant | undefined + let tasks: (MessageV2.CompactionPart | MessageV2.SubtaskPart)[] = [] + for (let i = msgs.length - 1; i >= 0; i--) { + const msg = msgs[i] + if (!lastUser && msg.info.role === "user") lastUser = msg.info + if (!lastAssistant && msg.info.role === "assistant") lastAssistant = msg.info + if (!lastFinished && msg.info.role === "assistant" && msg.info.finish) lastFinished = msg.info + if (lastUser && lastFinished) break + const task = msg.parts.filter((part) => part.type === "compaction" || part.type === "subtask") + if (task && !lastFinished) tasks.push(...task) + } - const msg: MessageV2.Assistant = { - id: MessageID.ascending(), - parentID: lastUser.id, - role: "assistant", - mode: agent.name, - agent: agent.name, - variant: lastUser.model.variant, - path: { cwd: ctx.directory, root: ctx.worktree }, - cost: 0, - tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, - modelID: model.id, - providerID: model.providerID, - time: { created: Date.now() }, - sessionID, - } - yield* sessions.updateMessage(msg) - const handle = yield* processor.create({ - assistantMessage: msg, - sessionID, - model, - }) + if (!lastUser) throw new Error("No user message found in stream. 
This should never happen.") - const outcome: "break" | "continue" = yield* Effect.gen(function* () { - const lastUserMsg = msgs.findLast((m) => m.info.role === "user") - const bypassAgentCheck = lastUserMsg?.parts.some((p) => p.type === "agent") ?? false + const lastAssistantMsg = msgs.findLast( + (msg) => msg.info.role === "assistant" && msg.info.id === lastAssistant?.id, + ) + // Some providers return "stop" even when the assistant message contains tool calls. + // Keep the loop running so tool results can be sent back to the model. + // Skip provider-executed tool parts — those were fully handled within the + // provider's stream (e.g. DWS Agent Platform) and don't need a re-loop. + const hasToolCalls = + lastAssistantMsg?.parts.some((part) => part.type === "tool" && !part.metadata?.providerExecuted) ?? false - const tools = yield* resolveTools({ - agent, - session, - model, - tools: lastUser.tools, - processor: handle, - bypassAgentCheck, - messages: msgs, - }) + if ( + lastAssistant?.finish && + !["tool-calls"].includes(lastAssistant.finish) && + !hasToolCalls && + lastUser.id < lastAssistant.id + ) { + yield* slog.info("exiting loop") + break + } - if (lastUser.format?.type === "json_schema") { - tools["StructuredOutput"] = createStructuredOutputTool({ - schema: lastUser.format.schema, - onSuccess(output) { - structured = output - }, - }) - } + step++ + if (step === 1) + yield* title({ + session, + modelID: lastUser.model.modelID, + providerID: lastUser.model.providerID, + history: msgs, + }).pipe(Effect.ignore, Effect.forkIn(scope)) - if (step === 1) - yield* summary - .summarize({ sessionID, messageID: lastUser.id }) - .pipe(Effect.ignore, Effect.forkIn(scope)) + const model = yield* getModel(lastUser.model.providerID, lastUser.model.modelID, sessionID) + const task = tasks.pop() - if (step > 1 && lastFinished) { - for (const m of msgs) { - if (m.info.role !== "user" || m.info.id <= lastFinished.id) continue - for (const p of m.parts) { - if (p.type !== 
"text" || p.ignored || p.synthetic) continue - if (!p.text.trim()) continue - p.text = [ - "", - "The user sent the following message:", - p.text, - "", - "Please address this message and continue with your tasks.", - "", - ].join("\n") - } - } - } - - yield* plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs }) - - const [skills, env, instructions, modelMsgs] = yield* Effect.all([ - sys.skills(agent), - Effect.sync(() => sys.environment(model)), - instruction.system().pipe(Effect.orDie), - MessageV2.toModelMessagesEffect(msgs, model), - ]) - const system = [...env, ...(skills ? [skills] : []), ...instructions] - const format = lastUser.format ?? { type: "text" as const } - if (format.type === "json_schema") system.push(STRUCTURED_OUTPUT_SYSTEM_PROMPT) - const result = yield* handle.process({ - user: lastUser, - agent, - permission: session.permission, - sessionID, - parentSessionID: session.parentID, - system, - messages: [...modelMsgs, ...(isLastStep ? [{ role: "assistant" as const, content: MAX_STEPS }] : [])], - tools, - model, - toolChoice: format.type === "json_schema" ? "required" : undefined, - }) - - if (structured !== undefined) { - handle.message.structured = structured - handle.message.finish = handle.message.finish ?? 
"stop" - yield* sessions.updateMessage(handle.message) - return "break" as const - } - - const finished = handle.message.finish && !["tool-calls", "unknown"].includes(handle.message.finish) - if (finished && !handle.message.error) { - if (format.type === "json_schema") { - handle.message.error = new MessageV2.StructuredOutputError({ - message: "Model did not produce structured output", - retries: 0, - }).toObject() - yield* sessions.updateMessage(handle.message) - return "break" as const - } - } - - if (result === "stop") return "break" as const - if (result === "compact") { - yield* compaction.create({ - sessionID, - agent: lastUser.agent, - model: lastUser.model, - auto: true, - overflow: !handle.message.finish, - }) - } - return "continue" as const - }).pipe(Effect.ensuring(instruction.clear(handle.message.id))) - if (outcome === "break") break + if (task?.type === "subtask") { + yield* handleSubtask({ task, model, lastUser, sessionID, session, msgs }) continue } - yield* compaction.prune({ sessionID }).pipe(Effect.ignore, Effect.forkIn(scope)) - return yield* lastAssistant(sessionID) - }, - ) - - const loop: (input: z.infer) => Effect.Effect = Effect.fn( - "SessionPrompt.loop", - )(function* (input: z.infer) { - return yield* state.ensureRunning(input.sessionID, lastAssistant(input.sessionID), runLoop(input.sessionID)) - }) - - const shell: (input: ShellInput) => Effect.Effect = Effect.fn("SessionPrompt.shell")( - function* (input: ShellInput) { - return yield* state.startShell(input.sessionID, lastAssistant(input.sessionID), shellImpl(input)) - }, - ) - - const command = Effect.fn("SessionPrompt.command")(function* (input: CommandInput) { - yield* elog.info("command", { sessionID: input.sessionID, command: input.command, agent: input.agent }) - const cmd = yield* commands.get(input.command) - if (!cmd) { - const available = (yield* commands.list()).map((c) => c.name) - const hint = available.length ? 
` Available commands: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Command not found: "${input.command}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) - throw error - } - const agentName = cmd.agent ?? input.agent ?? (yield* agents.defaultAgent()) - - const raw = input.arguments.match(argsRegex) ?? [] - const args = raw.map((arg) => arg.replace(quoteTrimRegex, "")) - const templateCommand = yield* Effect.promise(async () => cmd.template) - - const placeholders = templateCommand.match(placeholderRegex) ?? [] - let last = 0 - for (const item of placeholders) { - const value = Number(item.slice(1)) - if (value > last) last = value - } - - const withArgs = templateCommand.replaceAll(placeholderRegex, (_, index) => { - const position = Number(index) - const argIndex = position - 1 - if (argIndex >= args.length) return "" - if (position === last) return args.slice(argIndex).join(" ") - return args[argIndex] - }) - const usesArgumentsPlaceholder = templateCommand.includes("$ARGUMENTS") - let template = withArgs.replaceAll("$ARGUMENTS", input.arguments) - - if (placeholders.length === 0 && !usesArgumentsPlaceholder && input.arguments.trim()) { - template = template + "\n\n" + input.arguments - } - - const shellMatches = ConfigMarkdown.shell(template) - if (shellMatches.length > 0) { - const sh = Shell.preferred() - const results = yield* Effect.promise(() => - Promise.all( - shellMatches.map(async ([, cmd]) => (await Process.text([cmd], { shell: sh, nothrow: true })).text), - ), - ) - let index = 0 - template = template.replace(bashRegex, () => results[index++]) - } - template = template.trim() - - const taskModel = yield* Effect.gen(function* () { - if (cmd.model) return Provider.parseModel(cmd.model) - if (cmd.agent) { - const cmdAgent = yield* agents.get(cmd.agent) - if (cmdAgent?.model) return cmdAgent.model + if (task?.type === "compaction") { + const result = yield* 
compaction.process({ + messages: msgs, + parentID: lastUser.id, + sessionID, + auto: task.auto, + overflow: task.overflow, + }) + if (result === "stop") break + continue } - if (input.model) return Provider.parseModel(input.model) - return yield* lastModel(input.sessionID) - }) - yield* getModel(taskModel.providerID, taskModel.modelID, input.sessionID) + if ( + lastFinished && + lastFinished.summary !== true && + (yield* compaction.isOverflow({ tokens: lastFinished.tokens, model })) + ) { + yield* compaction.create({ sessionID, agent: lastUser.agent, model: lastUser.model, auto: true }) + continue + } - const agent = yield* agents.get(agentName) - if (!agent) { - const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) - const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" - const error = new NamedError.Unknown({ message: `Agent not found: "${agentName}".${hint}` }) - yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) - throw error + const agent = yield* agents.get(lastUser.agent) + if (!agent) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${lastUser.agent}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID, error: error.toObject() }) + throw error + } + const maxSteps = agent.steps ?? 
Infinity + const isLastStep = step >= maxSteps + msgs = yield* insertReminders({ messages: msgs, agent, session }) + + const msg: MessageV2.Assistant = { + id: MessageID.ascending(), + parentID: lastUser.id, + role: "assistant", + mode: agent.name, + agent: agent.name, + variant: lastUser.model.variant, + path: { cwd: ctx.directory, root: ctx.worktree }, + cost: 0, + tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } }, + modelID: model.id, + providerID: model.providerID, + time: { created: Date.now() }, + sessionID, + } + yield* sessions.updateMessage(msg) + const handle = yield* processor.create({ + assistantMessage: msg, + sessionID, + model, + }) + + const outcome: "break" | "continue" = yield* Effect.gen(function* () { + const lastUserMsg = msgs.findLast((m) => m.info.role === "user") + const bypassAgentCheck = lastUserMsg?.parts.some((p) => p.type === "agent") ?? false + + const tools = yield* resolveTools({ + agent, + session, + model, + tools: lastUser.tools, + processor: handle, + bypassAgentCheck, + messages: msgs, + }) + + if (lastUser.format?.type === "json_schema") { + tools["StructuredOutput"] = createStructuredOutputTool({ + schema: lastUser.format.schema, + onSuccess(output) { + structured = output + }, + }) + } + + if (step === 1) + yield* summary + .summarize({ sessionID, messageID: lastUser.id }) + .pipe(Effect.ignore, Effect.forkIn(scope)) + + if (step > 1 && lastFinished) { + for (const m of msgs) { + if (m.info.role !== "user" || m.info.id <= lastFinished.id) continue + for (const p of m.parts) { + if (p.type !== "text" || p.ignored || p.synthetic) continue + if (!p.text.trim()) continue + p.text = [ + "", + "The user sent the following message:", + p.text, + "", + "Please address this message and continue with your tasks.", + "", + ].join("\n") + } + } + } + + yield* plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs }) + + const [skills, env, instructions, modelMsgs] = yield* Effect.all([ + 
sys.skills(agent), + Effect.sync(() => sys.environment(model)), + instruction.system().pipe(Effect.orDie), + MessageV2.toModelMessagesEffect(msgs, model), + ]) + const system = [...env, ...(skills ? [skills] : []), ...instructions] + const format = lastUser.format ?? { type: "text" as const } + if (format.type === "json_schema") system.push(STRUCTURED_OUTPUT_SYSTEM_PROMPT) + const result = yield* handle.process({ + user: lastUser, + agent, + permission: session.permission, + sessionID, + parentSessionID: session.parentID, + system, + messages: [...modelMsgs, ...(isLastStep ? [{ role: "assistant" as const, content: MAX_STEPS }] : [])], + tools, + model, + toolChoice: format.type === "json_schema" ? "required" : undefined, + }) + + if (structured !== undefined) { + handle.message.structured = structured + handle.message.finish = handle.message.finish ?? "stop" + yield* sessions.updateMessage(handle.message) + return "break" as const + } + + const finished = handle.message.finish && !["tool-calls", "unknown"].includes(handle.message.finish) + if (finished && !handle.message.error) { + if (format.type === "json_schema") { + handle.message.error = new MessageV2.StructuredOutputError({ + message: "Model did not produce structured output", + retries: 0, + }).toObject() + yield* sessions.updateMessage(handle.message) + return "break" as const + } + } + + if (result === "stop") return "break" as const + if (result === "compact") { + yield* compaction.create({ + sessionID, + agent: lastUser.agent, + model: lastUser.model, + auto: true, + overflow: !handle.message.finish, + }) + } + return "continue" as const + }).pipe(Effect.ensuring(instruction.clear(handle.message.id))) + if (outcome === "break") break + continue } - const templateParts = yield* resolvePromptParts(template) - const isSubtask = (agent.mode === "subagent" && cmd.subtask !== false) || cmd.subtask === true - const parts = isSubtask - ? 
[ - { - type: "subtask" as const, - agent: agent.name, - description: cmd.description ?? "", - command: input.command, - model: { providerID: taskModel.providerID, modelID: taskModel.modelID }, - prompt: templateParts.find((y) => y.type === "text")?.text ?? "", - }, - ] - : [...templateParts, ...(input.parts ?? [])] + yield* compaction.prune({ sessionID }).pipe(Effect.ignore, Effect.forkIn(scope)) + return yield* lastAssistant(sessionID) + }, + ) - const userAgent = isSubtask ? (input.agent ?? (yield* agents.defaultAgent())) : agentName - const userModel = isSubtask - ? input.model - ? Provider.parseModel(input.model) - : yield* lastModel(input.sessionID) - : taskModel + const loop: (input: z.infer) => Effect.Effect = Effect.fn( + "SessionPrompt.loop", + )(function* (input: z.infer) { + return yield* state.ensureRunning(input.sessionID, lastAssistant(input.sessionID), runLoop(input.sessionID)) + }) - yield* plugin.trigger( - "command.execute.before", - { command: input.command, sessionID: input.sessionID, arguments: input.arguments }, - { parts }, + const shell: (input: ShellInput) => Effect.Effect = Effect.fn("SessionPrompt.shell")( + function* (input: ShellInput) { + return yield* state.startShell(input.sessionID, lastAssistant(input.sessionID), shellImpl(input)) + }, + ) + + const command = Effect.fn("SessionPrompt.command")(function* (input: CommandInput) { + yield* elog.info("command", { sessionID: input.sessionID, command: input.command, agent: input.agent }) + const cmd = yield* commands.get(input.command) + if (!cmd) { + const available = (yield* commands.list()).map((c) => c.name) + const hint = available.length ? ` Available commands: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Command not found: "${input.command}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) + throw error + } + const agentName = cmd.agent ?? input.agent ?? 
(yield* agents.defaultAgent()) + + const raw = input.arguments.match(argsRegex) ?? [] + const args = raw.map((arg) => arg.replace(quoteTrimRegex, "")) + const templateCommand = yield* Effect.promise(async () => cmd.template) + + const placeholders = templateCommand.match(placeholderRegex) ?? [] + let last = 0 + for (const item of placeholders) { + const value = Number(item.slice(1)) + if (value > last) last = value + } + + const withArgs = templateCommand.replaceAll(placeholderRegex, (_, index) => { + const position = Number(index) + const argIndex = position - 1 + if (argIndex >= args.length) return "" + if (position === last) return args.slice(argIndex).join(" ") + return args[argIndex] + }) + const usesArgumentsPlaceholder = templateCommand.includes("$ARGUMENTS") + let template = withArgs.replaceAll("$ARGUMENTS", input.arguments) + + if (placeholders.length === 0 && !usesArgumentsPlaceholder && input.arguments.trim()) { + template = template + "\n\n" + input.arguments + } + + const shellMatches = ConfigMarkdown.shell(template) + if (shellMatches.length > 0) { + const sh = Shell.preferred() + const results = yield* Effect.promise(() => + Promise.all( + shellMatches.map(async ([, cmd]) => (await Process.text([cmd], { shell: sh, nothrow: true })).text), + ), ) + let index = 0 + template = template.replace(bashRegex, () => results[index++]) + } + template = template.trim() - const result = yield* prompt({ - sessionID: input.sessionID, - messageID: input.messageID, - model: userModel, - agent: userAgent, - parts, - variant: input.variant, - }) - yield* bus.publish(Command.Event.Executed, { - name: input.command, - sessionID: input.sessionID, - arguments: input.arguments, - messageID: result.info.id, - }) - return result + const taskModel = yield* Effect.gen(function* () { + if (cmd.model) return Provider.parseModel(cmd.model) + if (cmd.agent) { + const cmdAgent = yield* agents.get(cmd.agent) + if (cmdAgent?.model) return cmdAgent.model + } + if (input.model) return 
Provider.parseModel(input.model) + return yield* lastModel(input.sessionID) }) - return Service.of({ - cancel, - prompt, - loop, - shell, - command, - resolvePromptParts, - }) - }), - ) + yield* getModel(taskModel.providerID, taskModel.modelID, input.sessionID) - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(SessionRunState.defaultLayer), - Layer.provide(SessionStatus.defaultLayer), - Layer.provide(SessionCompaction.defaultLayer), - Layer.provide(SessionProcessor.defaultLayer), - Layer.provide(Command.defaultLayer), - Layer.provide(Permission.defaultLayer), - Layer.provide(MCP.defaultLayer), - Layer.provide(LSP.defaultLayer), - Layer.provide(FileTime.defaultLayer), - Layer.provide(ToolRegistry.defaultLayer), - Layer.provide(Truncate.defaultLayer), - Layer.provide(Provider.defaultLayer), - Layer.provide(Instruction.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(Plugin.defaultLayer), - Layer.provide(Session.defaultLayer), - Layer.provide(SessionRevert.defaultLayer), - Layer.provide(SessionSummary.defaultLayer), - Layer.provide( - Layer.mergeAll( - Agent.defaultLayer, - SystemPrompt.defaultLayer, - LLM.defaultLayer, - Bus.layer, - CrossSpawnSpawner.defaultLayer, - ), + const agent = yield* agents.get(agentName) + if (!agent) { + const available = (yield* agents.list()).filter((a) => !a.hidden).map((a) => a.name) + const hint = available.length ? ` Available agents: ${available.join(", ")}` : "" + const error = new NamedError.Unknown({ message: `Agent not found: "${agentName}".${hint}` }) + yield* bus.publish(Session.Event.Error, { sessionID: input.sessionID, error: error.toObject() }) + throw error + } + + const templateParts = yield* resolvePromptParts(template) + const isSubtask = (agent.mode === "subagent" && cmd.subtask !== false) || cmd.subtask === true + const parts = isSubtask + ? [ + { + type: "subtask" as const, + agent: agent.name, + description: cmd.description ?? 
"", + command: input.command, + model: { providerID: taskModel.providerID, modelID: taskModel.modelID }, + prompt: templateParts.find((y) => y.type === "text")?.text ?? "", + }, + ] + : [...templateParts, ...(input.parts ?? [])] + + const userAgent = isSubtask ? (input.agent ?? (yield* agents.defaultAgent())) : agentName + const userModel = isSubtask + ? input.model + ? Provider.parseModel(input.model) + : yield* lastModel(input.sessionID) + : taskModel + + yield* plugin.trigger( + "command.execute.before", + { command: input.command, sessionID: input.sessionID, arguments: input.arguments }, + { parts }, + ) + + const result = yield* prompt({ + sessionID: input.sessionID, + messageID: input.messageID, + model: userModel, + agent: userAgent, + parts, + variant: input.variant, + }) + yield* bus.publish(Command.Event.Executed, { + name: input.command, + sessionID: input.sessionID, + arguments: input.arguments, + messageID: result.info.id, + }) + return result + }) + + return Service.of({ + cancel, + prompt, + loop, + shell, + command, + resolvePromptParts, + }) + }), +) + +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(SessionRunState.defaultLayer), + Layer.provide(SessionStatus.defaultLayer), + Layer.provide(SessionCompaction.defaultLayer), + Layer.provide(SessionProcessor.defaultLayer), + Layer.provide(Command.defaultLayer), + Layer.provide(Permission.defaultLayer), + Layer.provide(MCP.defaultLayer), + Layer.provide(LSP.defaultLayer), + Layer.provide(FileTime.defaultLayer), + Layer.provide(ToolRegistry.defaultLayer), + Layer.provide(Truncate.defaultLayer), + Layer.provide(Provider.defaultLayer), + Layer.provide(Instruction.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), + Layer.provide(Plugin.defaultLayer), + Layer.provide(Session.defaultLayer), + Layer.provide(SessionRevert.defaultLayer), + Layer.provide(SessionSummary.defaultLayer), + Layer.provide( + Layer.mergeAll( + Agent.defaultLayer, + SystemPrompt.defaultLayer, + 
LLM.defaultLayer, + Bus.layer, + CrossSpawnSpawner.defaultLayer, ), ), - ) - export const PromptInput = z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod.optional(), - model: z - .object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, + ), +) +export const PromptInput = z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod.optional(), + model: z + .object({ + providerID: ProviderID.zod, + modelID: ModelID.zod, + }) + .optional(), + agent: z.string().optional(), + noReply: z.boolean().optional(), + tools: z + .record(z.string(), z.boolean()) + .optional() + .describe( + "@deprecated tools and permissions have been merged, you can set permissions on the session itself now", + ), + format: MessageV2.Format.optional(), + system: z.string().optional(), + variant: z.string().optional(), + parts: z.array( + z.discriminatedUnion("type", [ + MessageV2.TextPart.omit({ + messageID: true, + sessionID: true, }) - .optional(), - agent: z.string().optional(), - noReply: z.boolean().optional(), - tools: z - .record(z.string(), z.boolean()) - .optional() - .describe( - "@deprecated tools and permissions have been merged, you can set permissions on the session itself now", - ), - format: MessageV2.Format.optional(), - system: z.string().optional(), - variant: z.string().optional(), - parts: z.array( - z.discriminatedUnion("type", [ - MessageV2.TextPart.omit({ - messageID: true, - sessionID: true, + .partial({ + id: true, }) - .partial({ - id: true, - }) - .meta({ - ref: "TextPartInput", - }), + .meta({ + ref: "TextPartInput", + }), + MessageV2.FilePart.omit({ + messageID: true, + sessionID: true, + }) + .partial({ + id: true, + }) + .meta({ + ref: "FilePartInput", + }), + MessageV2.AgentPart.omit({ + messageID: true, + sessionID: true, + }) + .partial({ + id: true, + }) + .meta({ + ref: "AgentPartInput", + }), + MessageV2.SubtaskPart.omit({ + messageID: true, + sessionID: true, + }) + .partial({ + id: true, + }) + .meta({ + ref: 
"SubtaskPartInput", + }), + ]), + ), +}) +export type PromptInput = z.infer + +export const LoopInput = z.object({ + sessionID: SessionID.zod, +}) + +export const ShellInput = z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod.optional(), + agent: z.string(), + model: z + .object({ + providerID: ProviderID.zod, + modelID: ModelID.zod, + }) + .optional(), + command: z.string(), +}) +export type ShellInput = z.infer + +export const CommandInput = z.object({ + messageID: MessageID.zod.optional(), + sessionID: SessionID.zod, + agent: z.string().optional(), + model: z.string().optional(), + arguments: z.string(), + command: z.string(), + variant: z.string().optional(), + parts: z + .array( + z.discriminatedUnion("type", [ MessageV2.FilePart.omit({ messageID: true, sessionID: true, - }) - .partial({ - id: true, - }) - .meta({ - ref: "FilePartInput", - }), - MessageV2.AgentPart.omit({ - messageID: true, - sessionID: true, - }) - .partial({ - id: true, - }) - .meta({ - ref: "AgentPartInput", - }), - MessageV2.SubtaskPart.omit({ - messageID: true, - sessionID: true, - }) - .partial({ - id: true, - }) - .meta({ - ref: "SubtaskPartInput", - }), + }).partial({ + id: true, + }), ]), - ), + ) + .optional(), +}) +export type CommandInput = z.infer + +/** @internal Exported for testing */ +export function createStructuredOutputTool(input: { + schema: Record + onSuccess: (output: unknown) => void +}): AITool { + // Remove $schema property if present (not needed for tool input) + const { $schema: _, ...toolSchema } = input.schema + + return tool({ + description: STRUCTURED_OUTPUT_DESCRIPTION, + inputSchema: jsonSchema(toolSchema as JSONSchema7), + async execute(args) { + // AI SDK validates args against inputSchema before calling execute() + input.onSuccess(args) + return { + output: "Structured output captured successfully.", + title: "Structured Output", + metadata: { valid: true }, + } + }, + toModelOutput({ output }) { + return { + type: "text", + value: 
output.output, + } + }, }) - export type PromptInput = z.infer - - export const LoopInput = z.object({ - sessionID: SessionID.zod, - }) - - export const ShellInput = z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod.optional(), - agent: z.string(), - model: z - .object({ - providerID: ProviderID.zod, - modelID: ModelID.zod, - }) - .optional(), - command: z.string(), - }) - export type ShellInput = z.infer - - export const CommandInput = z.object({ - messageID: MessageID.zod.optional(), - sessionID: SessionID.zod, - agent: z.string().optional(), - model: z.string().optional(), - arguments: z.string(), - command: z.string(), - variant: z.string().optional(), - parts: z - .array( - z.discriminatedUnion("type", [ - MessageV2.FilePart.omit({ - messageID: true, - sessionID: true, - }).partial({ - id: true, - }), - ]), - ) - .optional(), - }) - export type CommandInput = z.infer - - /** @internal Exported for testing */ - export function createStructuredOutputTool(input: { - schema: Record - onSuccess: (output: unknown) => void - }): AITool { - // Remove $schema property if present (not needed for tool input) - const { $schema: _, ...toolSchema } = input.schema - - return tool({ - description: STRUCTURED_OUTPUT_DESCRIPTION, - inputSchema: jsonSchema(toolSchema as JSONSchema7), - async execute(args) { - // AI SDK validates args against inputSchema before calling execute() - input.onSuccess(args) - return { - output: "Structured output captured successfully.", - title: "Structured Output", - metadata: { valid: true }, - } - }, - toModelOutput({ output }) { - return { - type: "text", - value: output.output, - } - }, - }) - } - const bashRegex = /!`([^`]+)`/g - // Match [Image N] as single token, quoted strings, or non-space sequences - const argsRegex = /(?:\[Image\s+\d+\]|"[^"]*"|'[^']*'|[^\s"']+)/gi - const placeholderRegex = /\$(\d+)/g - const quoteTrimRegex = /^["']|["']$/g } +const bashRegex = /!`([^`]+)`/g +// Match [Image N] as single token, quoted 
strings, or non-space sequences +const argsRegex = /(?:\[Image\s+\d+\]|"[^"]*"|'[^']*'|[^\s"']+)/gi +const placeholderRegex = /\$(\d+)/g +const quoteTrimRegex = /^["']|["']$/g + +export * as SessionPrompt from "./prompt" diff --git a/packages/opencode/src/session/retry.ts b/packages/opencode/src/session/retry.ts index 6aad55f3f8..12fd4d345d 100644 --- a/packages/opencode/src/session/retry.ts +++ b/packages/opencode/src/session/retry.ts @@ -3,123 +3,123 @@ import { Cause, Clock, Duration, Effect, Schedule } from "effect" import { MessageV2 } from "./message-v2" import { iife } from "@/util/iife" -export namespace SessionRetry { - export type Err = ReturnType +export type Err = ReturnType - // This exported message is shared with the TUI upsell detector. Matching on a - // literal error string kind of sucks, but it is the simplest for now. - export const GO_UPSELL_MESSAGE = "Free usage exceeded, subscribe to Go https://opencode.ai/go" +// This exported message is shared with the TUI upsell detector. Matching on a +// literal error string kind of sucks, but it is the simplest for now. 
+export const GO_UPSELL_MESSAGE = "Free usage exceeded, subscribe to Go https://opencode.ai/go" - export const RETRY_INITIAL_DELAY = 2000 - export const RETRY_BACKOFF_FACTOR = 2 - export const RETRY_MAX_DELAY_NO_HEADERS = 30_000 // 30 seconds - export const RETRY_MAX_DELAY = 2_147_483_647 // max 32-bit signed integer for setTimeout +export const RETRY_INITIAL_DELAY = 2000 +export const RETRY_BACKOFF_FACTOR = 2 +export const RETRY_MAX_DELAY_NO_HEADERS = 30_000 // 30 seconds +export const RETRY_MAX_DELAY = 2_147_483_647 // max 32-bit signed integer for setTimeout - function cap(ms: number) { - return Math.min(ms, RETRY_MAX_DELAY) - } - - export function delay(attempt: number, error?: MessageV2.APIError) { - if (error) { - const headers = error.data.responseHeaders - if (headers) { - const retryAfterMs = headers["retry-after-ms"] - if (retryAfterMs) { - const parsedMs = Number.parseFloat(retryAfterMs) - if (!Number.isNaN(parsedMs)) { - return cap(parsedMs) - } - } - - const retryAfter = headers["retry-after"] - if (retryAfter) { - const parsedSeconds = Number.parseFloat(retryAfter) - if (!Number.isNaN(parsedSeconds)) { - // convert seconds to milliseconds - return cap(Math.ceil(parsedSeconds * 1000)) - } - // Try parsing as HTTP date format - const parsed = Date.parse(retryAfter) - Date.now() - if (!Number.isNaN(parsed) && parsed > 0) { - return cap(Math.ceil(parsed)) - } - } - - return cap(RETRY_INITIAL_DELAY * Math.pow(RETRY_BACKOFF_FACTOR, attempt - 1)) - } - } - - return cap(Math.min(RETRY_INITIAL_DELAY * Math.pow(RETRY_BACKOFF_FACTOR, attempt - 1), RETRY_MAX_DELAY_NO_HEADERS)) - } - - export function retryable(error: Err) { - // context overflow errors should not be retried - if (MessageV2.ContextOverflowError.isInstance(error)) return undefined - if (MessageV2.APIError.isInstance(error)) { - const status = error.data.statusCode - // 5xx errors are transient server failures and should always be retried, - // even when the provider SDK doesn't explicitly mark them 
as retryable. - if (!error.data.isRetryable && !(status !== undefined && status >= 500)) return undefined - if (error.data.responseBody?.includes("FreeUsageLimitError")) return GO_UPSELL_MESSAGE - return error.data.message.includes("Overloaded") ? "Provider is overloaded" : error.data.message - } - - // Check for rate limit patterns in plain text error messages - const msg = error.data?.message - if (typeof msg === "string") { - const lower = msg.toLowerCase() - if ( - lower.includes("rate increased too quickly") || - lower.includes("rate limit") || - lower.includes("too many requests") - ) { - return msg - } - } - - const json = iife(() => { - try { - if (typeof error.data?.message === "string") { - const parsed = JSON.parse(error.data.message) - return parsed - } - - return JSON.parse(error.data.message) - } catch { - return undefined - } - }) - if (!json || typeof json !== "object") return undefined - const code = typeof json.code === "string" ? json.code : "" - - if (json.type === "error" && json.error?.type === "too_many_requests") { - return "Too Many Requests" - } - if (code.includes("exhausted") || code.includes("unavailable")) { - return "Provider is overloaded" - } - if (json.type === "error" && typeof json.error?.code === "string" && json.error.code.includes("rate_limit")) { - return "Rate Limited" - } - return undefined - } - - export function policy(opts: { - parse: (error: unknown) => Err - set: (input: { attempt: number; message: string; next: number }) => Effect.Effect - }) { - return Schedule.fromStepWithMetadata( - Effect.succeed((meta: Schedule.InputMetadata) => { - const error = opts.parse(meta.input) - const message = retryable(error) - if (!message) return Cause.done(meta.attempt) - return Effect.gen(function* () { - const wait = delay(meta.attempt, MessageV2.APIError.isInstance(error) ? 
error : undefined) - const now = yield* Clock.currentTimeMillis - yield* opts.set({ attempt: meta.attempt, message, next: now + wait }) - return [meta.attempt, Duration.millis(wait)] as [number, Duration.Duration] - }) - }), - ) - } +function cap(ms: number) { + return Math.min(ms, RETRY_MAX_DELAY) } + +export function delay(attempt: number, error?: MessageV2.APIError) { + if (error) { + const headers = error.data.responseHeaders + if (headers) { + const retryAfterMs = headers["retry-after-ms"] + if (retryAfterMs) { + const parsedMs = Number.parseFloat(retryAfterMs) + if (!Number.isNaN(parsedMs)) { + return cap(parsedMs) + } + } + + const retryAfter = headers["retry-after"] + if (retryAfter) { + const parsedSeconds = Number.parseFloat(retryAfter) + if (!Number.isNaN(parsedSeconds)) { + // convert seconds to milliseconds + return cap(Math.ceil(parsedSeconds * 1000)) + } + // Try parsing as HTTP date format + const parsed = Date.parse(retryAfter) - Date.now() + if (!Number.isNaN(parsed) && parsed > 0) { + return cap(Math.ceil(parsed)) + } + } + + return cap(RETRY_INITIAL_DELAY * Math.pow(RETRY_BACKOFF_FACTOR, attempt - 1)) + } + } + + return cap(Math.min(RETRY_INITIAL_DELAY * Math.pow(RETRY_BACKOFF_FACTOR, attempt - 1), RETRY_MAX_DELAY_NO_HEADERS)) +} + +export function retryable(error: Err) { + // context overflow errors should not be retried + if (MessageV2.ContextOverflowError.isInstance(error)) return undefined + if (MessageV2.APIError.isInstance(error)) { + const status = error.data.statusCode + // 5xx errors are transient server failures and should always be retried, + // even when the provider SDK doesn't explicitly mark them as retryable. + if (!error.data.isRetryable && !(status !== undefined && status >= 500)) return undefined + if (error.data.responseBody?.includes("FreeUsageLimitError")) return GO_UPSELL_MESSAGE + return error.data.message.includes("Overloaded") ? 
"Provider is overloaded" : error.data.message + } + + // Check for rate limit patterns in plain text error messages + const msg = error.data?.message + if (typeof msg === "string") { + const lower = msg.toLowerCase() + if ( + lower.includes("rate increased too quickly") || + lower.includes("rate limit") || + lower.includes("too many requests") + ) { + return msg + } + } + + const json = iife(() => { + try { + if (typeof error.data?.message === "string") { + const parsed = JSON.parse(error.data.message) + return parsed + } + + return JSON.parse(error.data.message) + } catch { + return undefined + } + }) + if (!json || typeof json !== "object") return undefined + const code = typeof json.code === "string" ? json.code : "" + + if (json.type === "error" && json.error?.type === "too_many_requests") { + return "Too Many Requests" + } + if (code.includes("exhausted") || code.includes("unavailable")) { + return "Provider is overloaded" + } + if (json.type === "error" && typeof json.error?.code === "string" && json.error.code.includes("rate_limit")) { + return "Rate Limited" + } + return undefined +} + +export function policy(opts: { + parse: (error: unknown) => Err + set: (input: { attempt: number; message: string; next: number }) => Effect.Effect +}) { + return Schedule.fromStepWithMetadata( + Effect.succeed((meta: Schedule.InputMetadata) => { + const error = opts.parse(meta.input) + const message = retryable(error) + if (!message) return Cause.done(meta.attempt) + return Effect.gen(function* () { + const wait = delay(meta.attempt, MessageV2.APIError.isInstance(error) ? 
error : undefined) + const now = yield* Clock.currentTimeMillis + yield* opts.set({ attempt: meta.attempt, message, next: now + wait }) + return [meta.attempt, Duration.millis(wait)] as [number, Duration.Duration] + }) + }), + ) +} + +export * as SessionRetry from "./retry" diff --git a/packages/opencode/src/session/revert.ts b/packages/opencode/src/session/revert.ts index f09ccf24ad..c7e5220f12 100644 --- a/packages/opencode/src/session/revert.ts +++ b/packages/opencode/src/session/revert.ts @@ -11,151 +11,151 @@ import { SessionID, MessageID, PartID } from "./schema" import { SessionRunState } from "./run-state" import { SessionSummary } from "./summary" -export namespace SessionRevert { - const log = Log.create({ service: "session.revert" }) +const log = Log.create({ service: "session.revert" }) - export const RevertInput = z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod, - partID: PartID.zod.optional(), - }) - export type RevertInput = z.infer +export const RevertInput = z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod.optional(), +}) +export type RevertInput = z.infer - export interface Interface { - readonly revert: (input: RevertInput) => Effect.Effect - readonly unrevert: (input: { sessionID: SessionID }) => Effect.Effect - readonly cleanup: (session: Session.Info) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionRevert") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const sessions = yield* Session.Service - const snap = yield* Snapshot.Service - const storage = yield* Storage.Service - const bus = yield* Bus.Service - const summary = yield* SessionSummary.Service - const state = yield* SessionRunState.Service - - const revert = Effect.fn("SessionRevert.revert")(function* (input: RevertInput) { - yield* state.assertNotBusy(input.sessionID) - const all = yield* sessions.messages({ sessionID: input.sessionID }) - let lastUser: 
MessageV2.User | undefined - const session = yield* sessions.get(input.sessionID) - - let rev: Session.Info["revert"] - const patches: Snapshot.Patch[] = [] - for (const msg of all) { - if (msg.info.role === "user") lastUser = msg.info - const remaining = [] - for (const part of msg.parts) { - if (rev) { - if (part.type === "patch") patches.push(part) - continue - } - - if (!rev) { - if ((msg.info.id === input.messageID && !input.partID) || part.id === input.partID) { - const partID = remaining.some((item) => ["text", "tool"].includes(item.type)) ? input.partID : undefined - rev = { - messageID: !partID && lastUser ? lastUser.id : msg.info.id, - partID, - } - } - remaining.push(part) - } - } - } - - if (!rev) return session - - rev.snapshot = session.revert?.snapshot ?? (yield* snap.track()) - if (session.revert?.snapshot) yield* snap.restore(session.revert.snapshot) - yield* snap.revert(patches) - if (rev.snapshot) rev.diff = yield* snap.diff(rev.snapshot as string) - const range = all.filter((msg) => msg.info.id >= rev!.messageID) - const diffs = yield* summary.computeDiff({ messages: range }) - yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore) - yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs }) - yield* sessions.setRevert({ - sessionID: input.sessionID, - revert: rev, - summary: { - additions: diffs.reduce((sum, x) => sum + x.additions, 0), - deletions: diffs.reduce((sum, x) => sum + x.deletions, 0), - files: diffs.length, - }, - }) - return yield* sessions.get(input.sessionID) - }) - - const unrevert = Effect.fn("SessionRevert.unrevert")(function* (input: { sessionID: SessionID }) { - log.info("unreverting", input) - yield* state.assertNotBusy(input.sessionID) - const session = yield* sessions.get(input.sessionID) - if (!session.revert) return session - if (session.revert.snapshot) yield* snap.restore(session.revert!.snapshot!) 
- yield* sessions.clearRevert(input.sessionID) - return yield* sessions.get(input.sessionID) - }) - - const cleanup = Effect.fn("SessionRevert.cleanup")(function* (session: Session.Info) { - if (!session.revert) return - const sessionID = session.id - const msgs = yield* sessions.messages({ sessionID }) - const messageID = session.revert.messageID - const remove = [] as MessageV2.WithParts[] - let target: MessageV2.WithParts | undefined - for (const msg of msgs) { - if (msg.info.id < messageID) continue - if (msg.info.id > messageID) { - remove.push(msg) - continue - } - if (session.revert.partID) { - target = msg - continue - } - remove.push(msg) - } - for (const msg of remove) { - SyncEvent.run(MessageV2.Event.Removed, { - sessionID, - messageID: msg.info.id, - }) - } - if (session.revert.partID && target) { - const partID = session.revert.partID - const idx = target.parts.findIndex((part) => part.id === partID) - if (idx >= 0) { - const removeParts = target.parts.slice(idx) - target.parts = target.parts.slice(0, idx) - for (const part of removeParts) { - SyncEvent.run(MessageV2.Event.PartRemoved, { - sessionID, - messageID: target.info.id, - partID: part.id, - }) - } - } - } - yield* sessions.clearRevert(sessionID) - }) - - return Service.of({ revert, unrevert, cleanup }) - }), - ) - - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(SessionRunState.defaultLayer), - Layer.provide(Session.defaultLayer), - Layer.provide(Snapshot.defaultLayer), - Layer.provide(Storage.defaultLayer), - Layer.provide(Bus.layer), - Layer.provide(SessionSummary.defaultLayer), - ), - ) +export interface Interface { + readonly revert: (input: RevertInput) => Effect.Effect + readonly unrevert: (input: { sessionID: SessionID }) => Effect.Effect + readonly cleanup: (session: Session.Info) => Effect.Effect } + +export class Service extends Context.Service()("@opencode/SessionRevert") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { 
+ const sessions = yield* Session.Service + const snap = yield* Snapshot.Service + const storage = yield* Storage.Service + const bus = yield* Bus.Service + const summary = yield* SessionSummary.Service + const state = yield* SessionRunState.Service + + const revert = Effect.fn("SessionRevert.revert")(function* (input: RevertInput) { + yield* state.assertNotBusy(input.sessionID) + const all = yield* sessions.messages({ sessionID: input.sessionID }) + let lastUser: MessageV2.User | undefined + const session = yield* sessions.get(input.sessionID) + + let rev: Session.Info["revert"] + const patches: Snapshot.Patch[] = [] + for (const msg of all) { + if (msg.info.role === "user") lastUser = msg.info + const remaining = [] + for (const part of msg.parts) { + if (rev) { + if (part.type === "patch") patches.push(part) + continue + } + + if (!rev) { + if ((msg.info.id === input.messageID && !input.partID) || part.id === input.partID) { + const partID = remaining.some((item) => ["text", "tool"].includes(item.type)) ? input.partID : undefined + rev = { + messageID: !partID && lastUser ? lastUser.id : msg.info.id, + partID, + } + } + remaining.push(part) + } + } + } + + if (!rev) return session + + rev.snapshot = session.revert?.snapshot ?? 
(yield* snap.track()) + if (session.revert?.snapshot) yield* snap.restore(session.revert.snapshot) + yield* snap.revert(patches) + if (rev.snapshot) rev.diff = yield* snap.diff(rev.snapshot as string) + const range = all.filter((msg) => msg.info.id >= rev!.messageID) + const diffs = yield* summary.computeDiff({ messages: range }) + yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore) + yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs }) + yield* sessions.setRevert({ + sessionID: input.sessionID, + revert: rev, + summary: { + additions: diffs.reduce((sum, x) => sum + x.additions, 0), + deletions: diffs.reduce((sum, x) => sum + x.deletions, 0), + files: diffs.length, + }, + }) + return yield* sessions.get(input.sessionID) + }) + + const unrevert = Effect.fn("SessionRevert.unrevert")(function* (input: { sessionID: SessionID }) { + log.info("unreverting", input) + yield* state.assertNotBusy(input.sessionID) + const session = yield* sessions.get(input.sessionID) + if (!session.revert) return session + if (session.revert.snapshot) yield* snap.restore(session.revert!.snapshot!) 
+ yield* sessions.clearRevert(input.sessionID) + return yield* sessions.get(input.sessionID) + }) + + const cleanup = Effect.fn("SessionRevert.cleanup")(function* (session: Session.Info) { + if (!session.revert) return + const sessionID = session.id + const msgs = yield* sessions.messages({ sessionID }) + const messageID = session.revert.messageID + const remove = [] as MessageV2.WithParts[] + let target: MessageV2.WithParts | undefined + for (const msg of msgs) { + if (msg.info.id < messageID) continue + if (msg.info.id > messageID) { + remove.push(msg) + continue + } + if (session.revert.partID) { + target = msg + continue + } + remove.push(msg) + } + for (const msg of remove) { + SyncEvent.run(MessageV2.Event.Removed, { + sessionID, + messageID: msg.info.id, + }) + } + if (session.revert.partID && target) { + const partID = session.revert.partID + const idx = target.parts.findIndex((part) => part.id === partID) + if (idx >= 0) { + const removeParts = target.parts.slice(idx) + target.parts = target.parts.slice(0, idx) + for (const part of removeParts) { + SyncEvent.run(MessageV2.Event.PartRemoved, { + sessionID, + messageID: target.info.id, + partID: part.id, + }) + } + } + } + yield* sessions.clearRevert(sessionID) + }) + + return Service.of({ revert, unrevert, cleanup }) + }), +) + +export const defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(SessionRunState.defaultLayer), + Layer.provide(Session.defaultLayer), + Layer.provide(Snapshot.defaultLayer), + Layer.provide(Storage.defaultLayer), + Layer.provide(Bus.layer), + Layer.provide(SessionSummary.defaultLayer), + ), +) + +export * as SessionRevert from "./revert" diff --git a/packages/opencode/src/session/run-state.ts b/packages/opencode/src/session/run-state.ts index a18e0b5732..7a106f8a4c 100644 --- a/packages/opencode/src/session/run-state.ts +++ b/packages/opencode/src/session/run-state.ts @@ -6,103 +6,103 @@ import { MessageV2 } from "./message-v2" import { SessionID } from "./schema" 
import { SessionStatus } from "./status" -export namespace SessionRunState { - export interface Interface { - readonly assertNotBusy: (sessionID: SessionID) => Effect.Effect - readonly cancel: (sessionID: SessionID) => Effect.Effect - readonly ensureRunning: ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - work: Effect.Effect, - ) => Effect.Effect - readonly startShell: ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - work: Effect.Effect, - ) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionRunState") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const status = yield* SessionStatus.Service - - const state = yield* InstanceState.make( - Effect.fn("SessionRunState.state")(function* () { - const scope = yield* Scope.Scope - const runners = new Map>() - yield* Effect.addFinalizer( - Effect.fnUntraced(function* () { - yield* Effect.forEach(runners.values(), (runner) => runner.cancel, { - concurrency: "unbounded", - discard: true, - }) - runners.clear() - }), - ) - return { runners, scope } - }), - ) - - const runner = Effect.fn("SessionRunState.runner")(function* ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - ) { - const data = yield* InstanceState.get(state) - const existing = data.runners.get(sessionID) - if (existing) return existing - const next = Runner.make(data.scope, { - onIdle: Effect.gen(function* () { - data.runners.delete(sessionID) - yield* status.set(sessionID, { type: "idle" }) - }), - onBusy: status.set(sessionID, { type: "busy" }), - onInterrupt, - busy: () => { - throw new Session.BusyError(sessionID) - }, - }) - data.runners.set(sessionID, next) - return next - }) - - const assertNotBusy = Effect.fn("SessionRunState.assertNotBusy")(function* (sessionID: SessionID) { - const data = yield* InstanceState.get(state) - const existing = data.runners.get(sessionID) - if (existing?.busy) throw new Session.BusyError(sessionID) - }) - - const cancel = 
Effect.fn("SessionRunState.cancel")(function* (sessionID: SessionID) { - const data = yield* InstanceState.get(state) - const existing = data.runners.get(sessionID) - if (!existing || !existing.busy) { - yield* status.set(sessionID, { type: "idle" }) - return - } - yield* existing.cancel - }) - - const ensureRunning = Effect.fn("SessionRunState.ensureRunning")(function* ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - work: Effect.Effect, - ) { - return yield* (yield* runner(sessionID, onInterrupt)).ensureRunning(work) - }) - - const startShell = Effect.fn("SessionRunState.startShell")(function* ( - sessionID: SessionID, - onInterrupt: Effect.Effect, - work: Effect.Effect, - ) { - return yield* (yield* runner(sessionID, onInterrupt)).startShell(work) - }) - - return Service.of({ assertNotBusy, cancel, ensureRunning, startShell }) - }), - ) - - export const defaultLayer = layer.pipe(Layer.provide(SessionStatus.defaultLayer)) +export interface Interface { + readonly assertNotBusy: (sessionID: SessionID) => Effect.Effect + readonly cancel: (sessionID: SessionID) => Effect.Effect + readonly ensureRunning: ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + work: Effect.Effect, + ) => Effect.Effect + readonly startShell: ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + work: Effect.Effect, + ) => Effect.Effect } + +export class Service extends Context.Service()("@opencode/SessionRunState") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const status = yield* SessionStatus.Service + + const state = yield* InstanceState.make( + Effect.fn("SessionRunState.state")(function* () { + const scope = yield* Scope.Scope + const runners = new Map>() + yield* Effect.addFinalizer( + Effect.fnUntraced(function* () { + yield* Effect.forEach(runners.values(), (runner) => runner.cancel, { + concurrency: "unbounded", + discard: true, + }) + runners.clear() + }), + ) + return { runners, scope } + }), + ) + + const runner = 
Effect.fn("SessionRunState.runner")(function* ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + ) { + const data = yield* InstanceState.get(state) + const existing = data.runners.get(sessionID) + if (existing) return existing + const next = Runner.make(data.scope, { + onIdle: Effect.gen(function* () { + data.runners.delete(sessionID) + yield* status.set(sessionID, { type: "idle" }) + }), + onBusy: status.set(sessionID, { type: "busy" }), + onInterrupt, + busy: () => { + throw new Session.BusyError(sessionID) + }, + }) + data.runners.set(sessionID, next) + return next + }) + + const assertNotBusy = Effect.fn("SessionRunState.assertNotBusy")(function* (sessionID: SessionID) { + const data = yield* InstanceState.get(state) + const existing = data.runners.get(sessionID) + if (existing?.busy) throw new Session.BusyError(sessionID) + }) + + const cancel = Effect.fn("SessionRunState.cancel")(function* (sessionID: SessionID) { + const data = yield* InstanceState.get(state) + const existing = data.runners.get(sessionID) + if (!existing || !existing.busy) { + yield* status.set(sessionID, { type: "idle" }) + return + } + yield* existing.cancel + }) + + const ensureRunning = Effect.fn("SessionRunState.ensureRunning")(function* ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + work: Effect.Effect, + ) { + return yield* (yield* runner(sessionID, onInterrupt)).ensureRunning(work) + }) + + const startShell = Effect.fn("SessionRunState.startShell")(function* ( + sessionID: SessionID, + onInterrupt: Effect.Effect, + work: Effect.Effect, + ) { + return yield* (yield* runner(sessionID, onInterrupt)).startShell(work) + }) + + return Service.of({ assertNotBusy, cancel, ensureRunning, startShell }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(SessionStatus.defaultLayer)) + +export * as SessionRunState from "./run-state" diff --git a/packages/opencode/src/session/status.ts b/packages/opencode/src/session/status.ts index f0d4e6cf79..7f46c70a8a 100644 --- 
a/packages/opencode/src/session/status.ts +++ b/packages/opencode/src/session/status.ts @@ -5,84 +5,84 @@ import { SessionID } from "./schema" import { Effect, Layer, Context } from "effect" import z from "zod" -export namespace SessionStatus { - export const Info = z - .union([ - z.object({ - type: z.literal("idle"), - }), - z.object({ - type: z.literal("retry"), - attempt: z.number(), - message: z.string(), - next: z.number(), - }), - z.object({ - type: z.literal("busy"), - }), - ]) - .meta({ - ref: "SessionStatus", - }) - export type Info = z.infer - - export const Event = { - Status: BusEvent.define( - "session.status", - z.object({ - sessionID: SessionID.zod, - status: Info, - }), - ), - // deprecated - Idle: BusEvent.define( - "session.idle", - z.object({ - sessionID: SessionID.zod, - }), - ), - } - - export interface Interface { - readonly get: (sessionID: SessionID) => Effect.Effect - readonly list: () => Effect.Effect> - readonly set: (sessionID: SessionID, status: Info) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionStatus") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - - const state = yield* InstanceState.make( - Effect.fn("SessionStatus.state")(() => Effect.succeed(new Map())), - ) - - const get = Effect.fn("SessionStatus.get")(function* (sessionID: SessionID) { - const data = yield* InstanceState.get(state) - return data.get(sessionID) ?? 
{ type: "idle" as const } - }) - - const list = Effect.fn("SessionStatus.list")(function* () { - return new Map(yield* InstanceState.get(state)) - }) - - const set = Effect.fn("SessionStatus.set")(function* (sessionID: SessionID, status: Info) { - const data = yield* InstanceState.get(state) - yield* bus.publish(Event.Status, { sessionID, status }) - if (status.type === "idle") { - yield* bus.publish(Event.Idle, { sessionID }) - data.delete(sessionID) - return - } - data.set(sessionID, status) - }) - - return Service.of({ get, list, set }) +export const Info = z + .union([ + z.object({ + type: z.literal("idle"), }), - ) + z.object({ + type: z.literal("retry"), + attempt: z.number(), + message: z.string(), + next: z.number(), + }), + z.object({ + type: z.literal("busy"), + }), + ]) + .meta({ + ref: "SessionStatus", + }) +export type Info = z.infer - export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) +export const Event = { + Status: BusEvent.define( + "session.status", + z.object({ + sessionID: SessionID.zod, + status: Info, + }), + ), + // deprecated + Idle: BusEvent.define( + "session.idle", + z.object({ + sessionID: SessionID.zod, + }), + ), } + +export interface Interface { + readonly get: (sessionID: SessionID) => Effect.Effect + readonly list: () => Effect.Effect> + readonly set: (sessionID: SessionID, status: Info) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SessionStatus") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + + const state = yield* InstanceState.make( + Effect.fn("SessionStatus.state")(() => Effect.succeed(new Map())), + ) + + const get = Effect.fn("SessionStatus.get")(function* (sessionID: SessionID) { + const data = yield* InstanceState.get(state) + return data.get(sessionID) ?? 
{ type: "idle" as const } + }) + + const list = Effect.fn("SessionStatus.list")(function* () { + return new Map(yield* InstanceState.get(state)) + }) + + const set = Effect.fn("SessionStatus.set")(function* (sessionID: SessionID, status: Info) { + const data = yield* InstanceState.get(state) + yield* bus.publish(Event.Status, { sessionID, status }) + if (status.type === "idle") { + yield* bus.publish(Event.Idle, { sessionID }) + data.delete(sessionID) + return + } + data.set(sessionID, status) + }) + + return Service.of({ get, list, set }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) + +export * as SessionStatus from "./status" diff --git a/packages/opencode/src/session/summary.ts b/packages/opencode/src/session/summary.ts index 9f8e70f162..2be08f3f43 100644 --- a/packages/opencode/src/session/summary.ts +++ b/packages/opencode/src/session/summary.ts @@ -7,159 +7,159 @@ import * as Session from "./session" import { MessageV2 } from "./message-v2" import { SessionID, MessageID } from "./schema" -export namespace SessionSummary { - function unquoteGitPath(input: string) { - if (!input.startsWith('"')) return input - if (!input.endsWith('"')) return input - const body = input.slice(1, -1) - const bytes: number[] = [] +function unquoteGitPath(input: string) { + if (!input.startsWith('"')) return input + if (!input.endsWith('"')) return input + const body = input.slice(1, -1) + const bytes: number[] = [] - for (let i = 0; i < body.length; i++) { - const char = body[i]! - if (char !== "\\") { - bytes.push(char.charCodeAt(0)) - continue - } - - const next = body[i + 1] - if (!next) { - bytes.push("\\".charCodeAt(0)) - continue - } - - if (next >= "0" && next <= "7") { - const chunk = body.slice(i + 1, i + 4) - const match = chunk.match(/^[0-7]{1,3}/) - if (!match) { - bytes.push(next.charCodeAt(0)) - i++ - continue - } - bytes.push(parseInt(match[0], 8)) - i += match[0].length - continue - } - - const escaped = - next === "n" - ? 
"\n" - : next === "r" - ? "\r" - : next === "t" - ? "\t" - : next === "b" - ? "\b" - : next === "f" - ? "\f" - : next === "v" - ? "\v" - : next === "\\" || next === '"' - ? next - : undefined - - bytes.push((escaped ?? next).charCodeAt(0)) - i++ + for (let i = 0; i < body.length; i++) { + const char = body[i]! + if (char !== "\\") { + bytes.push(char.charCodeAt(0)) + continue } - return Buffer.from(bytes).toString() + const next = body[i + 1] + if (!next) { + bytes.push("\\".charCodeAt(0)) + continue + } + + if (next >= "0" && next <= "7") { + const chunk = body.slice(i + 1, i + 4) + const match = chunk.match(/^[0-7]{1,3}/) + if (!match) { + bytes.push(next.charCodeAt(0)) + i++ + continue + } + bytes.push(parseInt(match[0], 8)) + i += match[0].length + continue + } + + const escaped = + next === "n" + ? "\n" + : next === "r" + ? "\r" + : next === "t" + ? "\t" + : next === "b" + ? "\b" + : next === "f" + ? "\f" + : next === "v" + ? "\v" + : next === "\\" || next === '"' + ? next + : undefined + + bytes.push((escaped ?? 
next).charCodeAt(0)) + i++ } - export interface Interface { - readonly summarize: (input: { sessionID: SessionID; messageID: MessageID }) => Effect.Effect - readonly diff: (input: { sessionID: SessionID; messageID?: MessageID }) => Effect.Effect - readonly computeDiff: (input: { messages: MessageV2.WithParts[] }) => Effect.Effect - } + return Buffer.from(bytes).toString() +} - export class Service extends Context.Service()("@opencode/SessionSummary") {} +export interface Interface { + readonly summarize: (input: { sessionID: SessionID; messageID: MessageID }) => Effect.Effect + readonly diff: (input: { sessionID: SessionID; messageID?: MessageID }) => Effect.Effect + readonly computeDiff: (input: { messages: MessageV2.WithParts[] }) => Effect.Effect +} - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const sessions = yield* Session.Service - const snapshot = yield* Snapshot.Service - const storage = yield* Storage.Service - const bus = yield* Bus.Service +export class Service extends Context.Service()("@opencode/SessionSummary") {} - const computeDiff = Effect.fn("SessionSummary.computeDiff")(function* (input: { - messages: MessageV2.WithParts[] - }) { - let from: string | undefined - let to: string | undefined - for (const item of input.messages) { - if (!from) { - for (const part of item.parts) { - if (part.type === "step-start" && part.snapshot) { - from = part.snapshot - break - } +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const sessions = yield* Session.Service + const snapshot = yield* Snapshot.Service + const storage = yield* Storage.Service + const bus = yield* Bus.Service + + const computeDiff = Effect.fn("SessionSummary.computeDiff")(function* (input: { + messages: MessageV2.WithParts[] + }) { + let from: string | undefined + let to: string | undefined + for (const item of input.messages) { + if (!from) { + for (const part of item.parts) { + if (part.type === "step-start" && part.snapshot) { + 
from = part.snapshot + break } } - for (const part of item.parts) { - if (part.type === "step-finish" && part.snapshot) to = part.snapshot - } } - if (from && to) return yield* snapshot.diffFull(from, to) - return [] + for (const part of item.parts) { + if (part.type === "step-finish" && part.snapshot) to = part.snapshot + } + } + if (from && to) return yield* snapshot.diffFull(from, to) + return [] + }) + + const summarize = Effect.fn("SessionSummary.summarize")(function* (input: { + sessionID: SessionID + messageID: MessageID + }) { + const all = yield* sessions.messages({ sessionID: input.sessionID }) + if (!all.length) return + + const diffs = yield* computeDiff({ messages: all }) + yield* sessions.setSummary({ + sessionID: input.sessionID, + summary: { + additions: diffs.reduce((sum, x) => sum + x.additions, 0), + deletions: diffs.reduce((sum, x) => sum + x.deletions, 0), + files: diffs.length, + }, }) + yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore) + yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs }) - const summarize = Effect.fn("SessionSummary.summarize")(function* (input: { - sessionID: SessionID - messageID: MessageID - }) { - const all = yield* sessions.messages({ sessionID: input.sessionID }) - if (!all.length) return + const messages = all.filter( + (m) => m.info.id === input.messageID || (m.info.role === "assistant" && m.info.parentID === input.messageID), + ) + const target = messages.find((m) => m.info.id === input.messageID) + if (!target || target.info.role !== "user") return + const msgDiffs = yield* computeDiff({ messages }) + target.info.summary = { ...target.info.summary, diffs: msgDiffs } + yield* sessions.updateMessage(target.info) + }) - const diffs = yield* computeDiff({ messages: all }) - yield* sessions.setSummary({ - sessionID: input.sessionID, - summary: { - additions: diffs.reduce((sum, x) => sum + x.additions, 0), - deletions: diffs.reduce((sum, x) => sum + 
x.deletions, 0), - files: diffs.length, - }, - }) - yield* storage.write(["session_diff", input.sessionID], diffs).pipe(Effect.ignore) - yield* bus.publish(Session.Event.Diff, { sessionID: input.sessionID, diff: diffs }) - - const messages = all.filter( - (m) => m.info.id === input.messageID || (m.info.role === "assistant" && m.info.parentID === input.messageID), - ) - const target = messages.find((m) => m.info.id === input.messageID) - if (!target || target.info.role !== "user") return - const msgDiffs = yield* computeDiff({ messages }) - target.info.summary = { ...target.info.summary, diffs: msgDiffs } - yield* sessions.updateMessage(target.info) + const diff = Effect.fn("SessionSummary.diff")(function* (input: { sessionID: SessionID; messageID?: MessageID }) { + const diffs = yield* storage + .read(["session_diff", input.sessionID]) + .pipe(Effect.catch(() => Effect.succeed([] as Snapshot.FileDiff[]))) + const next = diffs.map((item) => { + const file = unquoteGitPath(item.file) + if (file === item.file) return item + return { ...item, file } }) + const changed = next.some((item, i) => item.file !== diffs[i]?.file) + if (changed) yield* storage.write(["session_diff", input.sessionID], next).pipe(Effect.ignore) + return next + }) - const diff = Effect.fn("SessionSummary.diff")(function* (input: { sessionID: SessionID; messageID?: MessageID }) { - const diffs = yield* storage - .read(["session_diff", input.sessionID]) - .pipe(Effect.catch(() => Effect.succeed([] as Snapshot.FileDiff[]))) - const next = diffs.map((item) => { - const file = unquoteGitPath(item.file) - if (file === item.file) return item - return { ...item, file } - }) - const changed = next.some((item, i) => item.file !== diffs[i]?.file) - if (changed) yield* storage.write(["session_diff", input.sessionID], next).pipe(Effect.ignore) - return next - }) + return Service.of({ summarize, diff, computeDiff }) + }), +) - return Service.of({ summarize, diff, computeDiff }) - }), - ) +export const 
defaultLayer = Layer.suspend(() => + layer.pipe( + Layer.provide(Session.defaultLayer), + Layer.provide(Snapshot.defaultLayer), + Layer.provide(Storage.defaultLayer), + Layer.provide(Bus.layer), + ), +) - export const defaultLayer = Layer.suspend(() => - layer.pipe( - Layer.provide(Session.defaultLayer), - Layer.provide(Snapshot.defaultLayer), - Layer.provide(Storage.defaultLayer), - Layer.provide(Bus.layer), - ), - ) +export const DiffInput = z.object({ + sessionID: SessionID.zod, + messageID: MessageID.zod.optional(), +}) - export const DiffInput = z.object({ - sessionID: SessionID.zod, - messageID: MessageID.zod.optional(), - }) -} +export * as SessionSummary from "./summary" diff --git a/packages/opencode/src/session/system.ts b/packages/opencode/src/session/system.ts index 952ff5b04b..ec60f6eef7 100644 --- a/packages/opencode/src/session/system.ts +++ b/packages/opencode/src/session/system.ts @@ -16,69 +16,69 @@ import type { Agent } from "@/agent/agent" import { Permission } from "@/permission" import { Skill } from "@/skill" -export namespace SystemPrompt { - export function provider(model: Provider.Model) { - if (model.api.id.includes("gpt-4") || model.api.id.includes("o1") || model.api.id.includes("o3")) - return [PROMPT_BEAST] - if (model.api.id.includes("gpt")) { - if (model.api.id.includes("codex")) { - return [PROMPT_CODEX] - } - return [PROMPT_GPT] +export function provider(model: Provider.Model) { + if (model.api.id.includes("gpt-4") || model.api.id.includes("o1") || model.api.id.includes("o3")) + return [PROMPT_BEAST] + if (model.api.id.includes("gpt")) { + if (model.api.id.includes("codex")) { + return [PROMPT_CODEX] } - if (model.api.id.includes("gemini-")) return [PROMPT_GEMINI] - if (model.api.id.includes("claude")) return [PROMPT_ANTHROPIC] - if (model.api.id.toLowerCase().includes("trinity")) return [PROMPT_TRINITY] - if (model.api.id.toLowerCase().includes("kimi")) return [PROMPT_KIMI] - return [PROMPT_DEFAULT] + return [PROMPT_GPT] } - - 
export interface Interface { - readonly environment: (model: Provider.Model) => string[] - readonly skills: (agent: Agent.Info) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SystemPrompt") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const skill = yield* Skill.Service - - return Service.of({ - environment(model) { - const project = Instance.project - return [ - [ - `You are powered by the model named ${model.api.id}. The exact model ID is ${model.providerID}/${model.api.id}`, - `Here is some useful information about the environment you are running in:`, - ``, - ` Working directory: ${Instance.directory}`, - ` Workspace root folder: ${Instance.worktree}`, - ` Is directory a git repo: ${project.vcs === "git" ? "yes" : "no"}`, - ` Platform: ${process.platform}`, - ` Today's date: ${new Date().toDateString()}`, - ``, - ].join("\n"), - ] - }, - - skills: Effect.fn("SystemPrompt.skills")(function* (agent: Agent.Info) { - if (Permission.disabled(["skill"], agent.permission).has("skill")) return - - const list = yield* skill.available(agent) - - return [ - "Skills provide specialized instructions and workflows for specific tasks.", - "Use the skill tool to load a skill when a task matches its description.", - // the agents seem to ingest the information about skills a bit better if we present a more verbose - // version of them here and a less verbose version in tool description, rather than vice versa. 
- Skill.fmt(list, { verbose: true }), - ].join("\n") - }), - }) - }), - ) - - export const defaultLayer = layer.pipe(Layer.provide(Skill.defaultLayer)) + if (model.api.id.includes("gemini-")) return [PROMPT_GEMINI] + if (model.api.id.includes("claude")) return [PROMPT_ANTHROPIC] + if (model.api.id.toLowerCase().includes("trinity")) return [PROMPT_TRINITY] + if (model.api.id.toLowerCase().includes("kimi")) return [PROMPT_KIMI] + return [PROMPT_DEFAULT] } + +export interface Interface { + readonly environment: (model: Provider.Model) => string[] + readonly skills: (agent: Agent.Info) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SystemPrompt") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const skill = yield* Skill.Service + + return Service.of({ + environment(model) { + const project = Instance.project + return [ + [ + `You are powered by the model named ${model.api.id}. The exact model ID is ${model.providerID}/${model.api.id}`, + `Here is some useful information about the environment you are running in:`, + ``, + ` Working directory: ${Instance.directory}`, + ` Workspace root folder: ${Instance.worktree}`, + ` Is directory a git repo: ${project.vcs === "git" ? "yes" : "no"}`, + ` Platform: ${process.platform}`, + ` Today's date: ${new Date().toDateString()}`, + ``, + ].join("\n"), + ] + }, + + skills: Effect.fn("SystemPrompt.skills")(function* (agent: Agent.Info) { + if (Permission.disabled(["skill"], agent.permission).has("skill")) return + + const list = yield* skill.available(agent) + + return [ + "Skills provide specialized instructions and workflows for specific tasks.", + "Use the skill tool to load a skill when a task matches its description.", + // the agents seem to ingest the information about skills a bit better if we present a more verbose + // version of them here and a less verbose version in tool description, rather than vice versa. 
+ Skill.fmt(list, { verbose: true }), + ].join("\n") + }), + }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Skill.defaultLayer)) + +export * as SystemPrompt from "./system" diff --git a/packages/opencode/src/session/todo.ts b/packages/opencode/src/session/todo.ts index eec2bb3a30..5523fdc86a 100644 --- a/packages/opencode/src/session/todo.ts +++ b/packages/opencode/src/session/todo.ts @@ -6,80 +6,80 @@ import z from "zod" import { Database, eq, asc } from "../storage" import { TodoTable } from "./session.sql" -export namespace Todo { - export const Info = z - .object({ - content: z.string().describe("Brief description of the task"), - status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"), - priority: z.string().describe("Priority level of the task: high, medium, low"), - }) - .meta({ ref: "Todo" }) - export type Info = z.infer +export const Info = z + .object({ + content: z.string().describe("Brief description of the task"), + status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"), + priority: z.string().describe("Priority level of the task: high, medium, low"), + }) + .meta({ ref: "Todo" }) +export type Info = z.infer - export const Event = { - Updated: BusEvent.define( - "todo.updated", - z.object({ - sessionID: SessionID.zod, - todos: z.array(Info), - }), - ), - } - - export interface Interface { - readonly update: (input: { sessionID: SessionID; todos: Info[] }) => Effect.Effect - readonly get: (sessionID: SessionID) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionTodo") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const bus = yield* Bus.Service - - const update = Effect.fn("Todo.update")(function* (input: { sessionID: SessionID; todos: Info[] }) { - yield* Effect.sync(() => - Database.transaction((db) => { - db.delete(TodoTable).where(eq(TodoTable.session_id, 
input.sessionID)).run() - if (input.todos.length === 0) return - db.insert(TodoTable) - .values( - input.todos.map((todo, position) => ({ - session_id: input.sessionID, - content: todo.content, - status: todo.status, - priority: todo.priority, - position, - })), - ) - .run() - }), - ) - yield* bus.publish(Event.Updated, input) - }) - - const get = Effect.fn("Todo.get")(function* (sessionID: SessionID) { - const rows = yield* Effect.sync(() => - Database.use((db) => - db - .select() - .from(TodoTable) - .where(eq(TodoTable.session_id, sessionID)) - .orderBy(asc(TodoTable.position)) - .all(), - ), - ) - return rows.map((row) => ({ - content: row.content, - status: row.status, - priority: row.priority, - })) - }) - - return Service.of({ update, get }) +export const Event = { + Updated: BusEvent.define( + "todo.updated", + z.object({ + sessionID: SessionID.zod, + todos: z.array(Info), }), - ) - - export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) + ), } + +export interface Interface { + readonly update: (input: { sessionID: SessionID; todos: Info[] }) => Effect.Effect + readonly get: (sessionID: SessionID) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SessionTodo") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const bus = yield* Bus.Service + + const update = Effect.fn("Todo.update")(function* (input: { sessionID: SessionID; todos: Info[] }) { + yield* Effect.sync(() => + Database.transaction((db) => { + db.delete(TodoTable).where(eq(TodoTable.session_id, input.sessionID)).run() + if (input.todos.length === 0) return + db.insert(TodoTable) + .values( + input.todos.map((todo, position) => ({ + session_id: input.sessionID, + content: todo.content, + status: todo.status, + priority: todo.priority, + position, + })), + ) + .run() + }), + ) + yield* bus.publish(Event.Updated, input) + }) + + const get = Effect.fn("Todo.get")(function* (sessionID: SessionID) { + const rows = yield* 
Effect.sync(() => + Database.use((db) => + db + .select() + .from(TodoTable) + .where(eq(TodoTable.session_id, sessionID)) + .orderBy(asc(TodoTable.position)) + .all(), + ), + ) + return rows.map((row) => ({ + content: row.content, + status: row.status, + priority: row.priority, + })) + }) + + return Service.of({ update, get }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Bus.layer)) + +export * as Todo from "./todo" From 266fb9342238a62cf9d66437c81c91ee40e8632c Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 00:50:44 +0000 Subject: [PATCH 093/201] chore: generate --- packages/opencode/src/session/message-v2.ts | 11 ++--------- packages/opencode/src/session/prompt.ts | 12 +++--------- packages/opencode/src/session/summary.ts | 4 +--- packages/opencode/src/session/todo.ts | 7 +------ 4 files changed, 7 insertions(+), 27 deletions(-) diff --git a/packages/opencode/src/session/message-v2.ts b/packages/opencode/src/session/message-v2.ts index 5e7e008401..46686947e1 100644 --- a/packages/opencode/src/session/message-v2.ts +++ b/packages/opencode/src/session/message-v2.ts @@ -544,10 +544,7 @@ const part = (row: typeof PartTable.$inferSelect) => }) as Part const older = (row: Cursor) => - or( - lt(MessageTable.time_created, row.time), - and(eq(MessageTable.time_created, row.time), lt(MessageTable.id, row.id)), - ) + or(lt(MessageTable.time_created, row.time), and(eq(MessageTable.time_created, row.time), lt(MessageTable.id, row.id))) function hydrate(rows: (typeof MessageTable.$inferSelect)[]) { const ids = rows.map((row) => row.id) @@ -930,11 +927,7 @@ export function filterCompacted(msgs: Iterable) { const completed = new Set() for (const msg of msgs) { result.push(msg) - if ( - msg.info.role === "user" && - completed.has(msg.info.id) && - msg.parts.some((part) => part.type === "compaction") - ) + if (msg.info.role === "user" && completed.has(msg.info.id) && msg.parts.some((part) => part.type === "compaction")) break if 
(msg.info.role === "assistant" && msg.info.summary && msg.info.finish && !msg.info.error) completed.add(msg.info.parentID) diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 14fdf30780..9faa618788 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -1073,9 +1073,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the let start = parseInt(range.start) let end = range.end ? parseInt(range.end) : undefined if (start === end) { - const symbols = yield* lsp - .documentSymbol(filePathURI) - .pipe(Effect.catch(() => Effect.succeed([]))) + const symbols = yield* lsp.documentSymbol(filePathURI).pipe(Effect.catch(() => Effect.succeed([]))) for (const symbol of symbols) { let r: LSP.Range | undefined if ("range" in symbol) r = symbol.range @@ -1453,9 +1451,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the } if (step === 1) - yield* summary - .summarize({ sessionID, messageID: lastUser.id }) - .pipe(Effect.ignore, Effect.forkIn(scope)) + yield* summary.summarize({ sessionID, messageID: lastUser.id }).pipe(Effect.ignore, Effect.forkIn(scope)) if (step > 1 && lastFinished) { for (const m of msgs) { @@ -1723,9 +1719,7 @@ export const PromptInput = z.object({ tools: z .record(z.string(), z.boolean()) .optional() - .describe( - "@deprecated tools and permissions have been merged, you can set permissions on the session itself now", - ), + .describe("@deprecated tools and permissions have been merged, you can set permissions on the session itself now"), format: MessageV2.Format.optional(), system: z.string().optional(), variant: z.string().optional(), diff --git a/packages/opencode/src/session/summary.ts b/packages/opencode/src/session/summary.ts index 2be08f3f43..70b3102f6e 100644 --- a/packages/opencode/src/session/summary.ts +++ b/packages/opencode/src/session/summary.ts @@ -79,9 +79,7 @@ export const layer = 
Layer.effect( const storage = yield* Storage.Service const bus = yield* Bus.Service - const computeDiff = Effect.fn("SessionSummary.computeDiff")(function* (input: { - messages: MessageV2.WithParts[] - }) { + const computeDiff = Effect.fn("SessionSummary.computeDiff")(function* (input: { messages: MessageV2.WithParts[] }) { let from: string | undefined let to: string | undefined for (const item of input.messages) { diff --git a/packages/opencode/src/session/todo.ts b/packages/opencode/src/session/todo.ts index 5523fdc86a..4840f86a3d 100644 --- a/packages/opencode/src/session/todo.ts +++ b/packages/opencode/src/session/todo.ts @@ -61,12 +61,7 @@ export const layer = Layer.effect( const get = Effect.fn("Todo.get")(function* (sessionID: SessionID) { const rows = yield* Effect.sync(() => Database.use((db) => - db - .select() - .from(TodoTable) - .where(eq(TodoTable.session_id, sessionID)) - .orderBy(asc(TodoTable.position)) - .all(), + db.select().from(TodoTable).where(eq(TodoTable.session_id, sessionID)).orderBy(asc(TodoTable.position)).all(), ), ) return rows.map((row) => ({ From d2cb1613ace83609db6a60f3065914e322f93d02 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 20:59:42 -0400 Subject: [PATCH 094/201] refactor: unwrap SessionEntry namespace + self-reexport (#22977) --- packages/opencode/src/v2/session-entry.ts | 618 +++++++++++----------- 1 file changed, 309 insertions(+), 309 deletions(-) diff --git a/packages/opencode/src/v2/session-entry.ts b/packages/opencode/src/v2/session-entry.ts index 490f184227..140fa47d23 100644 --- a/packages/opencode/src/v2/session-entry.ts +++ b/packages/opencode/src/v2/session-entry.ts @@ -2,317 +2,317 @@ import { Schema } from "effect" import { SessionEvent } from "./session-event" import { produce } from "immer" -export namespace SessionEntry { - export const ID = SessionEvent.ID - export type ID = Schema.Schema.Type +export const ID = SessionEvent.ID +export type ID = Schema.Schema.Type - const Base = { - id: 
SessionEvent.ID, - metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), - time: Schema.Struct({ - created: Schema.DateTimeUtc, - }), - } +const Base = { + id: SessionEvent.ID, + metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), + time: Schema.Struct({ + created: Schema.DateTimeUtc, + }), +} - export class User extends Schema.Class("Session.Entry.User")({ - ...Base, - text: SessionEvent.Prompt.fields.text, - files: SessionEvent.Prompt.fields.files, - agents: SessionEvent.Prompt.fields.agents, - type: Schema.Literal("user"), - time: Schema.Struct({ - created: Schema.DateTimeUtc, - }), - }) { - static fromEvent(event: SessionEvent.Prompt) { - return new User({ - id: event.id, - type: "user", - metadata: event.metadata, - text: event.text, - files: event.files, - agents: event.agents, - time: { created: event.timestamp }, - }) - } - } - - export class Synthetic extends Schema.Class("Session.Entry.Synthetic")({ - ...SessionEvent.Synthetic.fields, - ...Base, - type: Schema.Literal("synthetic"), - }) {} - - export class ToolStatePending extends Schema.Class("Session.Entry.ToolState.Pending")({ - status: Schema.Literal("pending"), - input: Schema.String, - }) {} - - export class ToolStateRunning extends Schema.Class("Session.Entry.ToolState.Running")({ - status: Schema.Literal("running"), - input: Schema.Record(Schema.String, Schema.Unknown), - title: Schema.String.pipe(Schema.optional), - metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), - }) {} - - export class ToolStateCompleted extends Schema.Class("Session.Entry.ToolState.Completed")({ - status: Schema.Literal("completed"), - input: Schema.Record(Schema.String, Schema.Unknown), - output: Schema.String, - title: Schema.String, - metadata: Schema.Record(Schema.String, Schema.Unknown), - attachments: SessionEvent.FileAttachment.pipe(Schema.Array, Schema.optional), - }) {} - - export class ToolStateError extends 
Schema.Class("Session.Entry.ToolState.Error")({ - status: Schema.Literal("error"), - input: Schema.Record(Schema.String, Schema.Unknown), - error: Schema.String, - metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), - }) {} - - export const ToolState = Schema.Union([ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]) - export type ToolState = Schema.Schema.Type - - export class AssistantTool extends Schema.Class("Session.Entry.Assistant.Tool")({ - type: Schema.Literal("tool"), - callID: Schema.String, - name: Schema.String, - state: ToolState, - time: Schema.Struct({ - created: Schema.DateTimeUtc, - ran: Schema.DateTimeUtc.pipe(Schema.optional), - completed: Schema.DateTimeUtc.pipe(Schema.optional), - pruned: Schema.DateTimeUtc.pipe(Schema.optional), - }), - }) {} - - export class AssistantText extends Schema.Class("Session.Entry.Assistant.Text")({ - type: Schema.Literal("text"), - text: Schema.String, - }) {} - - export class AssistantReasoning extends Schema.Class("Session.Entry.Assistant.Reasoning")({ - type: Schema.Literal("reasoning"), - text: Schema.String, - }) {} - - export const AssistantContent = Schema.Union([AssistantText, AssistantReasoning, AssistantTool]) - export type AssistantContent = Schema.Schema.Type - - export class Assistant extends Schema.Class("Session.Entry.Assistant")({ - ...Base, - type: Schema.Literal("assistant"), - content: AssistantContent.pipe(Schema.Array), - cost: Schema.Number.pipe(Schema.optional), - tokens: Schema.Struct({ - input: Schema.Number, - output: Schema.Number, - reasoning: Schema.Number, - cache: Schema.Struct({ - read: Schema.Number, - write: Schema.Number, - }), - }).pipe(Schema.optional), - error: Schema.String.pipe(Schema.optional), - time: Schema.Struct({ - created: Schema.DateTimeUtc, - completed: Schema.DateTimeUtc.pipe(Schema.optional), - }), - }) {} - - export class Compaction extends Schema.Class("Session.Entry.Compaction")({ - ...SessionEvent.Compacted.fields, 
- type: Schema.Literal("compaction"), - ...Base, - }) {} - - export const Entry = Schema.Union([User, Synthetic, Assistant, Compaction]) - - export type Entry = Schema.Schema.Type - - export type Type = Entry["type"] - - export type History = { - entries: Entry[] - pending: Entry[] - } - - export function step(old: History, event: SessionEvent.Event): History { - return produce(old, (draft) => { - const lastAssistant = draft.entries.findLast((x) => x.type === "assistant") - const pendingAssistant = lastAssistant && !lastAssistant.time.completed ? lastAssistant : undefined - - switch (event.type) { - case "prompt": { - if (pendingAssistant) { - // @ts-expect-error - draft.pending.push(User.fromEvent(event)) - break - } - // @ts-expect-error - draft.entries.push(User.fromEvent(event)) - break - } - case "step.started": { - if (pendingAssistant) pendingAssistant.time.completed = event.timestamp - draft.entries.push({ - id: event.id, - type: "assistant", - time: { - created: event.timestamp, - }, - content: [], - }) - break - } - case "text.started": { - if (!pendingAssistant) break - pendingAssistant.content.push({ - type: "text", - text: "", - }) - break - } - case "text.delta": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "text") - if (match) match.text += event.delta - break - } - case "text.ended": { - break - } - case "tool.input.started": { - if (!pendingAssistant) break - pendingAssistant.content.push({ - type: "tool", - callID: event.callID, - name: event.name, - time: { - created: event.timestamp, - }, - state: { - status: "pending", - input: "", - }, - }) - break - } - case "tool.input.delta": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "tool") - // oxlint-disable-next-line no-base-to-string -- event.delta is a Schema.String (runtime string) - if (match) match.state.input += event.delta - break - } - case "tool.input.ended": { - break - } - case 
"tool.called": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "tool") - if (match) { - match.time.ran = event.timestamp - match.state = { - status: "running", - input: event.input, - } - } - break - } - case "tool.success": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "tool") - if (match && match.state.status === "running") { - match.state = { - status: "completed", - input: match.state.input, - output: event.output ?? "", - title: event.title, - metadata: event.metadata ?? {}, - // @ts-expect-error - attachments: event.attachments ?? [], - } - } - break - } - case "tool.error": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "tool") - if (match && match.state.status === "running") { - match.state = { - status: "error", - error: event.error, - input: match.state.input, - metadata: event.metadata ?? {}, - } - } - break - } - case "reasoning.started": { - if (!pendingAssistant) break - pendingAssistant.content.push({ - type: "reasoning", - text: "", - }) - break - } - case "reasoning.delta": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "reasoning") - if (match) match.text += event.delta - break - } - case "reasoning.ended": { - if (!pendingAssistant) break - const match = pendingAssistant.content.findLast((x) => x.type === "reasoning") - if (match) match.text = event.text - break - } - case "step.ended": { - if (!pendingAssistant) break - pendingAssistant.time.completed = event.timestamp - pendingAssistant.cost = event.cost - pendingAssistant.tokens = event.tokens - break - } - } +export class User extends Schema.Class("Session.Entry.User")({ + ...Base, + text: SessionEvent.Prompt.fields.text, + files: SessionEvent.Prompt.fields.files, + agents: SessionEvent.Prompt.fields.agents, + type: Schema.Literal("user"), + time: Schema.Struct({ + created: 
Schema.DateTimeUtc, + }), +}) { + static fromEvent(event: SessionEvent.Prompt) { + return new User({ + id: event.id, + type: "user", + metadata: event.metadata, + text: event.text, + files: event.files, + agents: event.agents, + time: { created: event.timestamp }, }) } - - /* - export interface Interface { - readonly decode: (row: typeof SessionEntryTable.$inferSelect) => Entry - readonly fromSession: (sessionID: SessionID) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/SessionEntry") {} - - export const layer: Layer.Layer = Layer.effect( - Service, - Effect.gen(function* () { - const decodeEntry = Schema.decodeUnknownSync(Entry) - - const decode: (typeof Service.Service)["decode"] = (row) => decodeEntry({ ...row, id: row.id, type: row.type }) - - const fromSession = Effect.fn("SessionEntry.fromSession")(function* (sessionID: SessionID) { - return Database.use((db) => - db - .select() - .from(SessionEntryTable) - .where(eq(SessionEntryTable.session_id, sessionID)) - .orderBy(SessionEntryTable.id) - .all() - .map((row) => decode(row)), - ) - }) - - return Service.of({ - decode, - fromSession, - }) - }), - ) - */ } + +export class Synthetic extends Schema.Class("Session.Entry.Synthetic")({ + ...SessionEvent.Synthetic.fields, + ...Base, + type: Schema.Literal("synthetic"), +}) {} + +export class ToolStatePending extends Schema.Class("Session.Entry.ToolState.Pending")({ + status: Schema.Literal("pending"), + input: Schema.String, +}) {} + +export class ToolStateRunning extends Schema.Class("Session.Entry.ToolState.Running")({ + status: Schema.Literal("running"), + input: Schema.Record(Schema.String, Schema.Unknown), + title: Schema.String.pipe(Schema.optional), + metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), +}) {} + +export class ToolStateCompleted extends Schema.Class("Session.Entry.ToolState.Completed")({ + status: Schema.Literal("completed"), + input: Schema.Record(Schema.String, Schema.Unknown), + 
output: Schema.String, + title: Schema.String, + metadata: Schema.Record(Schema.String, Schema.Unknown), + attachments: SessionEvent.FileAttachment.pipe(Schema.Array, Schema.optional), +}) {} + +export class ToolStateError extends Schema.Class("Session.Entry.ToolState.Error")({ + status: Schema.Literal("error"), + input: Schema.Record(Schema.String, Schema.Unknown), + error: Schema.String, + metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional), +}) {} + +export const ToolState = Schema.Union([ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]) +export type ToolState = Schema.Schema.Type + +export class AssistantTool extends Schema.Class("Session.Entry.Assistant.Tool")({ + type: Schema.Literal("tool"), + callID: Schema.String, + name: Schema.String, + state: ToolState, + time: Schema.Struct({ + created: Schema.DateTimeUtc, + ran: Schema.DateTimeUtc.pipe(Schema.optional), + completed: Schema.DateTimeUtc.pipe(Schema.optional), + pruned: Schema.DateTimeUtc.pipe(Schema.optional), + }), +}) {} + +export class AssistantText extends Schema.Class("Session.Entry.Assistant.Text")({ + type: Schema.Literal("text"), + text: Schema.String, +}) {} + +export class AssistantReasoning extends Schema.Class("Session.Entry.Assistant.Reasoning")({ + type: Schema.Literal("reasoning"), + text: Schema.String, +}) {} + +export const AssistantContent = Schema.Union([AssistantText, AssistantReasoning, AssistantTool]) +export type AssistantContent = Schema.Schema.Type + +export class Assistant extends Schema.Class("Session.Entry.Assistant")({ + ...Base, + type: Schema.Literal("assistant"), + content: AssistantContent.pipe(Schema.Array), + cost: Schema.Number.pipe(Schema.optional), + tokens: Schema.Struct({ + input: Schema.Number, + output: Schema.Number, + reasoning: Schema.Number, + cache: Schema.Struct({ + read: Schema.Number, + write: Schema.Number, + }), + }).pipe(Schema.optional), + error: Schema.String.pipe(Schema.optional), + time: 
Schema.Struct({ + created: Schema.DateTimeUtc, + completed: Schema.DateTimeUtc.pipe(Schema.optional), + }), +}) {} + +export class Compaction extends Schema.Class("Session.Entry.Compaction")({ + ...SessionEvent.Compacted.fields, + type: Schema.Literal("compaction"), + ...Base, +}) {} + +export const Entry = Schema.Union([User, Synthetic, Assistant, Compaction]) + +export type Entry = Schema.Schema.Type + +export type Type = Entry["type"] + +export type History = { + entries: Entry[] + pending: Entry[] +} + +export function step(old: History, event: SessionEvent.Event): History { + return produce(old, (draft) => { + const lastAssistant = draft.entries.findLast((x) => x.type === "assistant") + const pendingAssistant = lastAssistant && !lastAssistant.time.completed ? lastAssistant : undefined + + switch (event.type) { + case "prompt": { + if (pendingAssistant) { + // @ts-expect-error + draft.pending.push(User.fromEvent(event)) + break + } + // @ts-expect-error + draft.entries.push(User.fromEvent(event)) + break + } + case "step.started": { + if (pendingAssistant) pendingAssistant.time.completed = event.timestamp + draft.entries.push({ + id: event.id, + type: "assistant", + time: { + created: event.timestamp, + }, + content: [], + }) + break + } + case "text.started": { + if (!pendingAssistant) break + pendingAssistant.content.push({ + type: "text", + text: "", + }) + break + } + case "text.delta": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "text") + if (match) match.text += event.delta + break + } + case "text.ended": { + break + } + case "tool.input.started": { + if (!pendingAssistant) break + pendingAssistant.content.push({ + type: "tool", + callID: event.callID, + name: event.name, + time: { + created: event.timestamp, + }, + state: { + status: "pending", + input: "", + }, + }) + break + } + case "tool.input.delta": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => 
x.type === "tool") + // oxlint-disable-next-line no-base-to-string -- event.delta is a Schema.String (runtime string) + if (match) match.state.input += event.delta + break + } + case "tool.input.ended": { + break + } + case "tool.called": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "tool") + if (match) { + match.time.ran = event.timestamp + match.state = { + status: "running", + input: event.input, + } + } + break + } + case "tool.success": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "tool") + if (match && match.state.status === "running") { + match.state = { + status: "completed", + input: match.state.input, + output: event.output ?? "", + title: event.title, + metadata: event.metadata ?? {}, + // @ts-expect-error + attachments: event.attachments ?? [], + } + } + break + } + case "tool.error": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "tool") + if (match && match.state.status === "running") { + match.state = { + status: "error", + error: event.error, + input: match.state.input, + metadata: event.metadata ?? 
{}, + } + } + break + } + case "reasoning.started": { + if (!pendingAssistant) break + pendingAssistant.content.push({ + type: "reasoning", + text: "", + }) + break + } + case "reasoning.delta": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "reasoning") + if (match) match.text += event.delta + break + } + case "reasoning.ended": { + if (!pendingAssistant) break + const match = pendingAssistant.content.findLast((x) => x.type === "reasoning") + if (match) match.text = event.text + break + } + case "step.ended": { + if (!pendingAssistant) break + pendingAssistant.time.completed = event.timestamp + pendingAssistant.cost = event.cost + pendingAssistant.tokens = event.tokens + break + } + } + }) +} + +/* +export interface Interface { + readonly decode: (row: typeof SessionEntryTable.$inferSelect) => Entry + readonly fromSession: (sessionID: SessionID) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/SessionEntry") {} + +export const layer: Layer.Layer = Layer.effect( + Service, + Effect.gen(function* () { + const decodeEntry = Schema.decodeUnknownSync(Entry) + + const decode: (typeof Service.Service)["decode"] = (row) => decodeEntry({ ...row, id: row.id, type: row.type }) + + const fromSession = Effect.fn("SessionEntry.fromSession")(function* (sessionID: SessionID) { + return Database.use((db) => + db + .select() + .from(SessionEntryTable) + .where(eq(SessionEntryTable.session_id, sessionID)) + .orderBy(SessionEntryTable.id) + .all() + .map((row) => decode(row)), + ) + }) + + return Service.of({ + decode, + fromSession, + }) + }), +) +*/ + +export * as SessionEntry from "./session-entry" From 54046e0b985d8ffd5e343cadcc479570b96f8a5b Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:00:30 -0400 Subject: [PATCH 095/201] refactor: unwrap SessionV2 namespace + self-reexport (#22978) --- packages/opencode/src/v2/session.ts | 114 ++++++++++++++-------------- 1 file 
changed, 57 insertions(+), 57 deletions(-) diff --git a/packages/opencode/src/v2/session.ts b/packages/opencode/src/v2/session.ts index ce1b39031f..79a6916120 100644 --- a/packages/opencode/src/v2/session.ts +++ b/packages/opencode/src/v2/session.ts @@ -4,66 +4,66 @@ import { Struct } from "effect" import { Session } from "@/session" import { SessionID } from "@/session/schema" -export namespace SessionV2 { - export const ID = SessionID +export const ID = SessionID - export type ID = Schema.Schema.Type +export type ID = Schema.Schema.Type - export class PromptInput extends Schema.Class("Session.PromptInput")({ - ...Struct.omit(SessionEntry.User.fields, ["time", "type"]), - id: Schema.optionalKey(SessionEntry.ID), - sessionID: SessionV2.ID, - }) {} +export class PromptInput extends Schema.Class("Session.PromptInput")({ + ...Struct.omit(SessionEntry.User.fields, ["time", "type"]), + id: Schema.optionalKey(SessionEntry.ID), + sessionID: ID, +}) {} - export class CreateInput extends Schema.Class("Session.CreateInput")({ - id: Schema.optionalKey(SessionV2.ID), - }) {} +export class CreateInput extends Schema.Class("Session.CreateInput")({ + id: Schema.optionalKey(ID), +}) {} - export class Info extends Schema.Class("Session.Info")({ - id: SessionV2.ID, - model: Schema.Struct({ - id: Schema.String, - providerID: Schema.String, - modelID: Schema.String, - }).pipe(Schema.optional), - }) {} +export class Info extends Schema.Class("Session.Info")({ + id: ID, + model: Schema.Struct({ + id: Schema.String, + providerID: Schema.String, + modelID: Schema.String, + }).pipe(Schema.optional), +}) {} - export interface Interface { - fromID: (id: SessionV2.ID) => Effect.Effect - create: (input: CreateInput) => Effect.Effect - prompt: (input: PromptInput) => Effect.Effect - } - - export class Service extends Context.Service()("Session.Service") {} - - export const layer = Layer.effect(Service)( - Effect.gen(function* () { - const session = yield* Session.Service - - const create: 
Interface["create"] = Effect.fn("Session.create")(function* (_input) { - throw new Error("Not implemented") - }) - - const prompt: Interface["prompt"] = Effect.fn("Session.prompt")(function* (_input) { - throw new Error("Not implemented") - }) - - const fromID: Interface["fromID"] = Effect.fn("Session.fromID")(function* (id) { - const match = yield* session.get(id) - return fromV1(match) - }) - - return Service.of({ - create, - prompt, - fromID, - }) - }), - ) - - function fromV1(input: Session.Info): Info { - return new Info({ - id: SessionV2.ID.make(input.id), - }) - } +export interface Interface { + fromID: (id: ID) => Effect.Effect + create: (input: CreateInput) => Effect.Effect + prompt: (input: PromptInput) => Effect.Effect } + +export class Service extends Context.Service()("Session.Service") {} + +export const layer = Layer.effect(Service)( + Effect.gen(function* () { + const session = yield* Session.Service + + const create: Interface["create"] = Effect.fn("Session.create")(function* (_input) { + throw new Error("Not implemented") + }) + + const prompt: Interface["prompt"] = Effect.fn("Session.prompt")(function* (_input) { + throw new Error("Not implemented") + }) + + const fromID: Interface["fromID"] = Effect.fn("Session.fromID")(function* (id) { + const match = yield* session.get(id) + return fromV1(match) + }) + + return Service.of({ + create, + prompt, + fromID, + }) + }), +) + +function fromV1(input: Session.Info): Info { + return new Info({ + id: ID.make(input.id), + }) +} + +export * as SessionV2 from "./session" From 5022895e2b9b556275c5cd419cb32452329ada08 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:01:24 -0400 Subject: [PATCH 096/201] refactor: unwrap ExperimentalHttpApiServer namespace + self-reexport (#22979) --- .../src/server/instance/httpapi/server.ts | 202 +++++++++--------- 1 file changed, 101 insertions(+), 101 deletions(-) diff --git a/packages/opencode/src/server/instance/httpapi/server.ts 
b/packages/opencode/src/server/instance/httpapi/server.ts index 299a177f50..362d0970b9 100644 --- a/packages/opencode/src/server/instance/httpapi/server.ts +++ b/packages/opencode/src/server/instance/httpapi/server.ts @@ -25,106 +25,106 @@ const Headers = Schema.Struct({ "x-opencode-directory": Schema.optional(Schema.String), }) -export namespace ExperimentalHttpApiServer { - function decode(input: string) { - try { - return decodeURIComponent(input) - } catch { - return input - } +function decode(input: string) { + try { + return decodeURIComponent(input) + } catch { + return input } - - class Unauthorized extends Schema.TaggedErrorClass()( - "Unauthorized", - { message: Schema.String }, - { httpApiStatus: 401 }, - ) {} - - class Authorization extends HttpApiMiddleware.Service()("@opencode/ExperimentalHttpApiAuthorization", { - error: Unauthorized, - security: { - basic: HttpApiSecurity.basic, - }, - }) {} - - const normalize = HttpRouter.middleware()( - Effect.gen(function* () { - return (effect) => - Effect.gen(function* () { - const query = yield* HttpServerRequest.schemaSearchParams(Query) - if (!query.auth_token) return yield* effect - const req = yield* HttpServerRequest.HttpServerRequest - const next = req.modify({ - headers: { - ...req.headers, - authorization: `Basic ${query.auth_token}`, - }, - }) - return yield* effect.pipe(Effect.provideService(HttpServerRequest.HttpServerRequest, next)) - }) - }), - ).layer - - const auth = Layer.succeed( - Authorization, - Authorization.of({ - basic: (effect, { credential }) => - Effect.gen(function* () { - if (!Flag.OPENCODE_SERVER_PASSWORD) return yield* effect - - const user = Flag.OPENCODE_SERVER_USERNAME ?? 
"opencode" - if (credential.username !== user) { - return yield* new Unauthorized({ message: "Unauthorized" }) - } - if (Redacted.value(credential.password) !== Flag.OPENCODE_SERVER_PASSWORD) { - return yield* new Unauthorized({ message: "Unauthorized" }) - } - return yield* effect - }), - }), - ) - - const instance = HttpRouter.middleware()( - Effect.gen(function* () { - return (effect) => - Effect.gen(function* () { - const query = yield* HttpServerRequest.schemaSearchParams(Query) - const headers = yield* HttpServerRequest.schemaHeaders(Headers) - const raw = query.directory || headers["x-opencode-directory"] || process.cwd() - const workspace = query.workspace || undefined - const ctx = yield* Effect.promise(() => - Instance.provide({ - directory: Filesystem.resolve(decode(raw)), - init: () => AppRuntime.runPromise(InstanceBootstrap), - fn: () => Instance.current, - }), - ) - - const next = workspace ? effect.pipe(Effect.provideService(WorkspaceRef, workspace)) : effect - return yield* next.pipe(Effect.provideService(InstanceRef, ctx)) - }) - }), - ).layer - - const QuestionSecured = QuestionApi.middleware(Authorization) - const PermissionSecured = PermissionApi.middleware(Authorization) - const ProviderSecured = ProviderApi.middleware(Authorization) - - export const routes = Layer.mergeAll( - HttpApiBuilder.layer(QuestionSecured).pipe(Layer.provide(questionHandlers)), - HttpApiBuilder.layer(PermissionSecured).pipe(Layer.provide(permissionHandlers)), - HttpApiBuilder.layer(ProviderSecured).pipe(Layer.provide(providerHandlers)), - ).pipe( - Layer.provide(auth), - Layer.provide(normalize), - Layer.provide(instance), - Layer.provide(HttpServer.layerServices), - Layer.provideMerge(Observability.layer), - ) - - export const webHandler = lazy(() => - HttpRouter.toWebHandler(routes, { - memoMap, - }), - ) } + +class Unauthorized extends Schema.TaggedErrorClass()( + "Unauthorized", + { message: Schema.String }, + { httpApiStatus: 401 }, +) {} + +class Authorization 
extends HttpApiMiddleware.Service()("@opencode/ExperimentalHttpApiAuthorization", { + error: Unauthorized, + security: { + basic: HttpApiSecurity.basic, + }, +}) {} + +const normalize = HttpRouter.middleware()( + Effect.gen(function* () { + return (effect) => + Effect.gen(function* () { + const query = yield* HttpServerRequest.schemaSearchParams(Query) + if (!query.auth_token) return yield* effect + const req = yield* HttpServerRequest.HttpServerRequest + const next = req.modify({ + headers: { + ...req.headers, + authorization: `Basic ${query.auth_token}`, + }, + }) + return yield* effect.pipe(Effect.provideService(HttpServerRequest.HttpServerRequest, next)) + }) + }), +).layer + +const auth = Layer.succeed( + Authorization, + Authorization.of({ + basic: (effect, { credential }) => + Effect.gen(function* () { + if (!Flag.OPENCODE_SERVER_PASSWORD) return yield* effect + + const user = Flag.OPENCODE_SERVER_USERNAME ?? "opencode" + if (credential.username !== user) { + return yield* new Unauthorized({ message: "Unauthorized" }) + } + if (Redacted.value(credential.password) !== Flag.OPENCODE_SERVER_PASSWORD) { + return yield* new Unauthorized({ message: "Unauthorized" }) + } + return yield* effect + }), + }), +) + +const instance = HttpRouter.middleware()( + Effect.gen(function* () { + return (effect) => + Effect.gen(function* () { + const query = yield* HttpServerRequest.schemaSearchParams(Query) + const headers = yield* HttpServerRequest.schemaHeaders(Headers) + const raw = query.directory || headers["x-opencode-directory"] || process.cwd() + const workspace = query.workspace || undefined + const ctx = yield* Effect.promise(() => + Instance.provide({ + directory: Filesystem.resolve(decode(raw)), + init: () => AppRuntime.runPromise(InstanceBootstrap), + fn: () => Instance.current, + }), + ) + + const next = workspace ? 
effect.pipe(Effect.provideService(WorkspaceRef, workspace)) : effect + return yield* next.pipe(Effect.provideService(InstanceRef, ctx)) + }) + }), +).layer + +const QuestionSecured = QuestionApi.middleware(Authorization) +const PermissionSecured = PermissionApi.middleware(Authorization) +const ProviderSecured = ProviderApi.middleware(Authorization) + +export const routes = Layer.mergeAll( + HttpApiBuilder.layer(QuestionSecured).pipe(Layer.provide(questionHandlers)), + HttpApiBuilder.layer(PermissionSecured).pipe(Layer.provide(permissionHandlers)), + HttpApiBuilder.layer(ProviderSecured).pipe(Layer.provide(providerHandlers)), +).pipe( + Layer.provide(auth), + Layer.provide(normalize), + Layer.provide(instance), + Layer.provide(HttpServer.layerServices), + Layer.provideMerge(Observability.layer), +) + +export const webHandler = lazy(() => + HttpRouter.toWebHandler(routes, { + memoMap, + }), +) + +export * as ExperimentalHttpApiServer from "./server" From 94878d76f8a32c36909647a0d9e1f6e383f60908 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:02:07 -0400 Subject: [PATCH 097/201] refactor: unwrap TuiPluginRuntime namespace + self-reexport (#22980) --- .../src/cli/cmd/tui/plugin/runtime.ts | 194 +++++++++--------- 1 file changed, 97 insertions(+), 97 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index e1b2eca1dd..e4a0e59eb1 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -918,113 +918,113 @@ async function installPluginBySpec( } } -export namespace TuiPluginRuntime { - let dir = "" - let loaded: Promise | undefined - let runtime: RuntimeState | undefined - export const Slot = View +let dir = "" +let loaded: Promise | undefined +let runtime: RuntimeState | undefined +export const Slot = View - export async function init(input: { api: HostPluginApi; config: TuiConfig.Info }) { - const cwd = 
process.cwd() - if (loaded) { - if (dir !== cwd) { - throw new Error(`TuiPluginRuntime.init() called with a different working directory. expected=${dir} got=${cwd}`) - } - return loaded +export async function init(input: { api: HostPluginApi; config: TuiConfig.Info }) { + const cwd = process.cwd() + if (loaded) { + if (dir !== cwd) { + throw new Error(`TuiPluginRuntime.init() called with a different working directory. expected=${dir} got=${cwd}`) } - - dir = cwd - loaded = load(input) return loaded } - export function list() { - if (!runtime) return [] - return listPluginStatus(runtime) - } + dir = cwd + loaded = load(input) + return loaded +} - export async function activatePlugin(id: string) { - return activatePluginById(runtime, id, true) - } +export function list() { + if (!runtime) return [] + return listPluginStatus(runtime) +} - export async function deactivatePlugin(id: string) { - return deactivatePluginById(runtime, id, true) - } +export async function activatePlugin(id: string) { + return activatePluginById(runtime, id, true) +} - export async function addPlugin(spec: string) { - return addPluginBySpec(runtime, spec) - } +export async function deactivatePlugin(id: string) { + return deactivatePluginById(runtime, id, true) +} - export async function installPlugin(spec: string, options?: { global?: boolean }) { - return installPluginBySpec(runtime, spec, options?.global) - } +export async function addPlugin(spec: string) { + return addPluginBySpec(runtime, spec) +} - export async function dispose() { - const task = loaded - loaded = undefined - dir = "" - if (task) await task - const state = runtime - runtime = undefined - if (!state) return - const queue = [...state.plugins].reverse() - for (const plugin of queue) { - await deactivatePluginEntry(state, plugin, false) - } - } +export async function installPlugin(spec: string, options?: { global?: boolean }) { + return installPluginBySpec(runtime, spec, options?.global) +} - async function load(input: { 
api: Api; config: TuiConfig.Info }) { - const { api, config } = input - const cwd = process.cwd() - const slots = setupSlots(api) - const next: RuntimeState = { - directory: cwd, - api, - slots, - plugins: [], - plugins_by_id: new Map(), - pending: new Map(), - } - runtime = next - try { - await Instance.provide({ - directory: cwd, - fn: async () => { - const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? []) - if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { - log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) - } - - for (const item of INTERNAL_TUI_PLUGINS) { - log.info("loading internal tui plugin", { id: item.id }) - const entry = loadInternalPlugin(item) - const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) - addPluginEntry(next, { - id: entry.id, - load: entry, - meta, - themes: {}, - plugin: entry.module.tui, - enabled: true, - }) - } - - const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) - await addExternalPluginEntries(next, ready) - - applyInitialPluginEnabledState(next, config) - for (const plugin of next.plugins) { - if (!plugin.enabled) continue - // Keep plugin execution sequential for deterministic side effects: - // command registration order affects keybind/command precedence, - // route registration is last-wins when ids collide, - // and hook chains rely on stable plugin ordering. 
- await activatePluginEntry(next, plugin, false) - } - }, - }) - } catch (error) { - fail("failed to load tui plugins", { directory: cwd, error }) - } +export async function dispose() { + const task = loaded + loaded = undefined + dir = "" + if (task) await task + const state = runtime + runtime = undefined + if (!state) return + const queue = [...state.plugins].reverse() + for (const plugin of queue) { + await deactivatePluginEntry(state, plugin, false) } } + +async function load(input: { api: Api; config: TuiConfig.Info }) { + const { api, config } = input + const cwd = process.cwd() + const slots = setupSlots(api) + const next: RuntimeState = { + directory: cwd, + api, + slots, + plugins: [], + plugins_by_id: new Map(), + pending: new Map(), + } + runtime = next + try { + await Instance.provide({ + directory: cwd, + fn: async () => { + const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? []) + if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { + log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) + } + + for (const item of INTERNAL_TUI_PLUGINS) { + log.info("loading internal tui plugin", { id: item.id }) + const entry = loadInternalPlugin(item) + const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) + addPluginEntry(next, { + id: entry.id, + load: entry, + meta, + themes: {}, + plugin: entry.module.tui, + enabled: true, + }) + } + + const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) + await addExternalPluginEntries(next, ready) + + applyInitialPluginEnabledState(next, config) + for (const plugin of next.plugins) { + if (!plugin.enabled) continue + // Keep plugin execution sequential for deterministic side effects: + // command registration order affects keybind/command precedence, + // route registration is last-wins when ids collide, + // and hook chains rely on stable plugin ordering. 
+ await activatePluginEntry(next, plugin, false) + } + }, + }) + } catch (error) { + fail("failed to load tui plugins", { directory: cwd, error }) + } +} + +export * as TuiPluginRuntime from "./runtime" From c59df636cc3d9b203e2b84dcefecba15eda5b457 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:02:09 -0400 Subject: [PATCH 098/201] chore: delete empty v2/session-common + collapse patch barrel (#22981) --- packages/opencode/src/patch/index.ts | 681 ++++++++++++++++++++- packages/opencode/src/patch/patch.ts | 678 -------------------- packages/opencode/src/v2/session-common.ts | 1 - 3 files changed, 680 insertions(+), 680 deletions(-) delete mode 100644 packages/opencode/src/patch/patch.ts delete mode 100644 packages/opencode/src/v2/session-common.ts diff --git a/packages/opencode/src/patch/index.ts b/packages/opencode/src/patch/index.ts index cec24614d8..19e1d7555b 100644 --- a/packages/opencode/src/patch/index.ts +++ b/packages/opencode/src/patch/index.ts @@ -1 +1,680 @@ -export * as Patch from "./patch" +import z from "zod" +import * as path from "path" +import * as fs from "fs/promises" +import { readFileSync } from "fs" +import { Log } from "../util" + +const log = Log.create({ service: "patch" }) + +// Schema definitions +export const PatchSchema = z.object({ + patchText: z.string().describe("The full patch text that describes all changes to be made"), +}) + +export type PatchParams = z.infer + +// Core types matching the Rust implementation +export interface ApplyPatchArgs { + patch: string + hunks: Hunk[] + workdir?: string +} + +export type Hunk = + | { type: "add"; path: string; contents: string } + | { type: "delete"; path: string } + | { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] } + +export interface UpdateFileChunk { + old_lines: string[] + new_lines: string[] + change_context?: string + is_end_of_file?: boolean +} + +export interface ApplyPatchAction { + changes: Map + patch: string + cwd: string +} 
+ +export type ApplyPatchFileChange = + | { type: "add"; content: string } + | { type: "delete"; content: string } + | { type: "update"; unified_diff: string; move_path?: string; new_content: string } + +export interface AffectedPaths { + added: string[] + modified: string[] + deleted: string[] +} + +export enum ApplyPatchError { + ParseError = "ParseError", + IoError = "IoError", + ComputeReplacements = "ComputeReplacements", + ImplicitInvocation = "ImplicitInvocation", +} + +export enum MaybeApplyPatch { + Body = "Body", + ShellParseError = "ShellParseError", + PatchParseError = "PatchParseError", + NotApplyPatch = "NotApplyPatch", +} + +export enum MaybeApplyPatchVerified { + Body = "Body", + ShellParseError = "ShellParseError", + CorrectnessError = "CorrectnessError", + NotApplyPatch = "NotApplyPatch", +} + +// Parser implementation +function parsePatchHeader( + lines: string[], + startIdx: number, +): { filePath: string; movePath?: string; nextIdx: number } | null { + const line = lines[startIdx] + + if (line.startsWith("*** Add File:")) { + const filePath = line.slice("*** Add File:".length).trim() + return filePath ? { filePath, nextIdx: startIdx + 1 } : null + } + + if (line.startsWith("*** Delete File:")) { + const filePath = line.slice("*** Delete File:".length).trim() + return filePath ? { filePath, nextIdx: startIdx + 1 } : null + } + + if (line.startsWith("*** Update File:")) { + const filePath = line.slice("*** Update File:".length).trim() + let movePath: string | undefined + let nextIdx = startIdx + 1 + + // Check for move directive + if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) { + movePath = lines[nextIdx].slice("*** Move to:".length).trim() + nextIdx++ + } + + return filePath ? 
{ filePath, movePath, nextIdx } : null + } + + return null +} + +function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } { + const chunks: UpdateFileChunk[] = [] + let i = startIdx + + while (i < lines.length && !lines[i].startsWith("***")) { + if (lines[i].startsWith("@@")) { + // Parse context line + const contextLine = lines[i].substring(2).trim() + i++ + + const oldLines: string[] = [] + const newLines: string[] = [] + let isEndOfFile = false + + // Parse change lines + while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) { + const changeLine = lines[i] + + if (changeLine === "*** End of File") { + isEndOfFile = true + i++ + break + } + + if (changeLine.startsWith(" ")) { + // Keep line - appears in both old and new + const content = changeLine.substring(1) + oldLines.push(content) + newLines.push(content) + } else if (changeLine.startsWith("-")) { + // Remove line - only in old + oldLines.push(changeLine.substring(1)) + } else if (changeLine.startsWith("+")) { + // Add line - only in new + newLines.push(changeLine.substring(1)) + } + + i++ + } + + chunks.push({ + old_lines: oldLines, + new_lines: newLines, + change_context: contextLine || undefined, + is_end_of_file: isEndOfFile || undefined, + }) + } else { + i++ + } + } + + return { chunks, nextIdx: i } +} + +function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } { + let content = "" + let i = startIdx + + while (i < lines.length && !lines[i].startsWith("***")) { + if (lines[i].startsWith("+")) { + content += lines[i].substring(1) + "\n" + } + i++ + } + + // Remove trailing newline + if (content.endsWith("\n")) { + content = content.slice(0, -1) + } + + return { content, nextIdx: i } +} + +function stripHeredoc(input: string): string { + // Match heredoc patterns like: cat <<'EOF'\n...\nEOF or < line.trim() === beginMarker) + const endIdx = lines.findIndex((line) => 
line.trim() === endMarker) + + if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) { + throw new Error("Invalid patch format: missing Begin/End markers") + } + + // Parse content between markers + i = beginIdx + 1 + + while (i < endIdx) { + const header = parsePatchHeader(lines, i) + if (!header) { + i++ + continue + } + + if (lines[i].startsWith("*** Add File:")) { + const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx) + hunks.push({ + type: "add", + path: header.filePath, + contents: content, + }) + i = nextIdx + } else if (lines[i].startsWith("*** Delete File:")) { + hunks.push({ + type: "delete", + path: header.filePath, + }) + i = header.nextIdx + } else if (lines[i].startsWith("*** Update File:")) { + const { chunks, nextIdx } = parseUpdateFileChunks(lines, header.nextIdx) + hunks.push({ + type: "update", + path: header.filePath, + move_path: header.movePath, + chunks, + }) + i = nextIdx + } else { + i++ + } + } + + return { hunks } +} + +// Apply patch functionality +export function maybeParseApplyPatch( + argv: string[], +): + | { type: MaybeApplyPatch.Body; args: ApplyPatchArgs } + | { type: MaybeApplyPatch.PatchParseError; error: Error } + | { type: MaybeApplyPatch.NotApplyPatch } { + const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"] + + // Direct invocation: apply_patch + if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) { + try { + const { hunks } = parsePatch(argv[1]) + return { + type: MaybeApplyPatch.Body, + args: { + patch: argv[1], + hunks, + }, + } + } catch (error) { + return { + type: MaybeApplyPatch.PatchParseError, + error: error as Error, + } + } + } + + // Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...' 
+ if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") { + // Simple extraction - in real implementation would need proper bash parsing + const script = argv[2] + const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/) + + if (heredocMatch) { + const patchContent = heredocMatch[2] + try { + const { hunks } = parsePatch(patchContent) + return { + type: MaybeApplyPatch.Body, + args: { + patch: patchContent, + hunks, + }, + } + } catch (error) { + return { + type: MaybeApplyPatch.PatchParseError, + error: error as Error, + } + } + } + } + + return { type: MaybeApplyPatch.NotApplyPatch } +} + +// File content manipulation +interface ApplyPatchFileUpdate { + unified_diff: string + content: string +} + +export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate { + // Read original file content + let originalContent: string + try { + originalContent = readFileSync(filePath, "utf-8") + } catch (error) { + throw new Error(`Failed to read file ${filePath}: ${error}`, { cause: error }) + } + + let originalLines = originalContent.split("\n") + + // Drop trailing empty element for consistent line counting + if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") { + originalLines.pop() + } + + const replacements = computeReplacements(originalLines, filePath, chunks) + let newLines = applyReplacements(originalLines, replacements) + + // Ensure trailing newline + if (newLines.length === 0 || newLines[newLines.length - 1] !== "") { + newLines.push("") + } + + const newContent = newLines.join("\n") + + // Generate unified diff + const unifiedDiff = generateUnifiedDiff(originalContent, newContent) + + return { + unified_diff: unifiedDiff, + content: newContent, + } +} + +function computeReplacements( + originalLines: string[], + filePath: string, + chunks: UpdateFileChunk[], +): Array<[number, number, string[]]> { + const replacements: Array<[number, number, 
string[]]> = [] + let lineIndex = 0 + + for (const chunk of chunks) { + // Handle context-based seeking + if (chunk.change_context) { + const contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex) + if (contextIdx === -1) { + throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`) + } + lineIndex = contextIdx + 1 + } + + // Handle pure addition (no old lines) + if (chunk.old_lines.length === 0) { + const insertionIdx = + originalLines.length > 0 && originalLines[originalLines.length - 1] === "" + ? originalLines.length - 1 + : originalLines.length + replacements.push([insertionIdx, 0, chunk.new_lines]) + continue + } + + // Try to match old lines in the file + let pattern = chunk.old_lines + let newSlice = chunk.new_lines + let found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) + + // Retry without trailing empty line if not found + if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") { + pattern = pattern.slice(0, -1) + if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") { + newSlice = newSlice.slice(0, -1) + } + found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) + } + + if (found !== -1) { + replacements.push([found, pattern.length, newSlice]) + lineIndex = found + pattern.length + } else { + throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`) + } + } + + // Sort replacements by index to apply in order + replacements.sort((a, b) => a[0] - b[0]) + + return replacements +} + +function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] { + // Apply replacements in reverse order to avoid index shifting + const result = [...lines] + + for (let i = replacements.length - 1; i >= 0; i--) { + const [startIdx, oldLen, newSegment] = replacements[i] + + // Remove old lines + result.splice(startIdx, oldLen) + + // Insert new lines + for (let j = 0; j < 
newSegment.length; j++) { + result.splice(startIdx + j, 0, newSegment[j]) + } + } + + return result +} + +// Normalize Unicode punctuation to ASCII equivalents (like Rust's normalize_unicode) +function normalizeUnicode(str: string): string { + return str + .replace(/[\u2018\u2019\u201A\u201B]/g, "'") // single quotes + .replace(/[\u201C\u201D\u201E\u201F]/g, '"') // double quotes + .replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, "-") // dashes + .replace(/\u2026/g, "...") // ellipsis + .replace(/\u00A0/g, " ") // non-breaking space +} + +type Comparator = (a: string, b: string) => boolean + +function tryMatch(lines: string[], pattern: string[], startIndex: number, compare: Comparator, eof: boolean): number { + // If EOF anchor, try matching from end of file first + if (eof) { + const fromEnd = lines.length - pattern.length + if (fromEnd >= startIndex) { + let matches = true + for (let j = 0; j < pattern.length; j++) { + if (!compare(lines[fromEnd + j], pattern[j])) { + matches = false + break + } + } + if (matches) return fromEnd + } + } + + // Forward search from startIndex + for (let i = startIndex; i <= lines.length - pattern.length; i++) { + let matches = true + for (let j = 0; j < pattern.length; j++) { + if (!compare(lines[i + j], pattern[j])) { + matches = false + break + } + } + if (matches) return i + } + + return -1 +} + +function seekSequence(lines: string[], pattern: string[], startIndex: number, eof = false): number { + if (pattern.length === 0) return -1 + + // Pass 1: exact match + const exact = tryMatch(lines, pattern, startIndex, (a, b) => a === b, eof) + if (exact !== -1) return exact + + // Pass 2: rstrip (trim trailing whitespace) + const rstrip = tryMatch(lines, pattern, startIndex, (a, b) => a.trimEnd() === b.trimEnd(), eof) + if (rstrip !== -1) return rstrip + + // Pass 3: trim (both ends) + const trim = tryMatch(lines, pattern, startIndex, (a, b) => a.trim() === b.trim(), eof) + if (trim !== -1) return trim + + // Pass 4: normalized 
(Unicode punctuation to ASCII) + const normalized = tryMatch( + lines, + pattern, + startIndex, + (a, b) => normalizeUnicode(a.trim()) === normalizeUnicode(b.trim()), + eof, + ) + return normalized +} + +function generateUnifiedDiff(oldContent: string, newContent: string): string { + const oldLines = oldContent.split("\n") + const newLines = newContent.split("\n") + + // Simple diff generation - in a real implementation you'd use a proper diff algorithm + let diff = "@@ -1 +1 @@\n" + + // Find changes (simplified approach) + const maxLen = Math.max(oldLines.length, newLines.length) + let hasChanges = false + + for (let i = 0; i < maxLen; i++) { + const oldLine = oldLines[i] || "" + const newLine = newLines[i] || "" + + if (oldLine !== newLine) { + if (oldLine) diff += `-${oldLine}\n` + if (newLine) diff += `+${newLine}\n` + hasChanges = true + } else if (oldLine) { + diff += ` ${oldLine}\n` + } + } + + return hasChanges ? diff : "" +} + +// Apply hunks to filesystem +export async function applyHunksToFiles(hunks: Hunk[]): Promise { + if (hunks.length === 0) { + throw new Error("No files were modified.") + } + + const added: string[] = [] + const modified: string[] = [] + const deleted: string[] = [] + + for (const hunk of hunks) { + switch (hunk.type) { + case "add": + // Create parent directories + const addDir = path.dirname(hunk.path) + if (addDir !== "." && addDir !== "/") { + await fs.mkdir(addDir, { recursive: true }) + } + + await fs.writeFile(hunk.path, hunk.contents, "utf-8") + added.push(hunk.path) + log.info(`Added file: ${hunk.path}`) + break + + case "delete": + await fs.unlink(hunk.path) + deleted.push(hunk.path) + log.info(`Deleted file: ${hunk.path}`) + break + + case "update": + const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks) + + if (hunk.move_path) { + // Handle file move + const moveDir = path.dirname(hunk.move_path) + if (moveDir !== "." 
&& moveDir !== "/") { + await fs.mkdir(moveDir, { recursive: true }) + } + + await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8") + await fs.unlink(hunk.path) + modified.push(hunk.move_path) + log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`) + } else { + // Regular update + await fs.writeFile(hunk.path, fileUpdate.content, "utf-8") + modified.push(hunk.path) + log.info(`Updated file: ${hunk.path}`) + } + break + } + } + + return { added, modified, deleted } +} + +// Main patch application function +export async function applyPatch(patchText: string): Promise { + const { hunks } = parsePatch(patchText) + return applyHunksToFiles(hunks) +} + +// Async version of maybeParseApplyPatchVerified +export async function maybeParseApplyPatchVerified( + argv: string[], + cwd: string, +): Promise< + | { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction } + | { type: MaybeApplyPatchVerified.CorrectnessError; error: Error } + | { type: MaybeApplyPatchVerified.NotApplyPatch } +> { + // Detect implicit patch invocation (raw patch without apply_patch command) + if (argv.length === 1) { + try { + parsePatch(argv[0]) + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: new Error(ApplyPatchError.ImplicitInvocation), + } + } catch { + // Not a patch, continue + } + } + + const result = maybeParseApplyPatch(argv) + + switch (result.type) { + case MaybeApplyPatch.Body: + const { args } = result + const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd + const changes = new Map() + + for (const hunk of args.hunks) { + const resolvedPath = path.resolve( + effectiveCwd, + hunk.type === "update" && hunk.move_path ? 
hunk.move_path : hunk.path, + ) + + switch (hunk.type) { + case "add": + changes.set(resolvedPath, { + type: "add", + content: hunk.contents, + }) + break + + case "delete": + // For delete, we need to read the current content + const deletePath = path.resolve(effectiveCwd, hunk.path) + try { + const content = await fs.readFile(deletePath, "utf-8") + changes.set(resolvedPath, { + type: "delete", + content, + }) + } catch { + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: new Error(`Failed to read file for deletion: ${deletePath}`), + } + } + break + + case "update": + const updatePath = path.resolve(effectiveCwd, hunk.path) + try { + const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks) + changes.set(resolvedPath, { + type: "update", + unified_diff: fileUpdate.unified_diff, + move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined, + new_content: fileUpdate.content, + }) + } catch (error) { + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: error as Error, + } + } + break + } + } + + return { + type: MaybeApplyPatchVerified.Body, + action: { + changes, + patch: args.patch, + cwd: effectiveCwd, + }, + } + + case MaybeApplyPatch.PatchParseError: + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: result.error, + } + + case MaybeApplyPatch.NotApplyPatch: + return { type: MaybeApplyPatchVerified.NotApplyPatch } + } +} + +export * as Patch from "." 
diff --git a/packages/opencode/src/patch/patch.ts b/packages/opencode/src/patch/patch.ts deleted file mode 100644 index 1dc99b4da9..0000000000 --- a/packages/opencode/src/patch/patch.ts +++ /dev/null @@ -1,678 +0,0 @@ -import z from "zod" -import * as path from "path" -import * as fs from "fs/promises" -import { readFileSync } from "fs" -import { Log } from "../util" - -const log = Log.create({ service: "patch" }) - -// Schema definitions -export const PatchSchema = z.object({ - patchText: z.string().describe("The full patch text that describes all changes to be made"), -}) - -export type PatchParams = z.infer - -// Core types matching the Rust implementation -export interface ApplyPatchArgs { - patch: string - hunks: Hunk[] - workdir?: string -} - -export type Hunk = - | { type: "add"; path: string; contents: string } - | { type: "delete"; path: string } - | { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] } - -export interface UpdateFileChunk { - old_lines: string[] - new_lines: string[] - change_context?: string - is_end_of_file?: boolean -} - -export interface ApplyPatchAction { - changes: Map - patch: string - cwd: string -} - -export type ApplyPatchFileChange = - | { type: "add"; content: string } - | { type: "delete"; content: string } - | { type: "update"; unified_diff: string; move_path?: string; new_content: string } - -export interface AffectedPaths { - added: string[] - modified: string[] - deleted: string[] -} - -export enum ApplyPatchError { - ParseError = "ParseError", - IoError = "IoError", - ComputeReplacements = "ComputeReplacements", - ImplicitInvocation = "ImplicitInvocation", -} - -export enum MaybeApplyPatch { - Body = "Body", - ShellParseError = "ShellParseError", - PatchParseError = "PatchParseError", - NotApplyPatch = "NotApplyPatch", -} - -export enum MaybeApplyPatchVerified { - Body = "Body", - ShellParseError = "ShellParseError", - CorrectnessError = "CorrectnessError", - NotApplyPatch = "NotApplyPatch", -} - 
-// Parser implementation -function parsePatchHeader( - lines: string[], - startIdx: number, -): { filePath: string; movePath?: string; nextIdx: number } | null { - const line = lines[startIdx] - - if (line.startsWith("*** Add File:")) { - const filePath = line.slice("*** Add File:".length).trim() - return filePath ? { filePath, nextIdx: startIdx + 1 } : null - } - - if (line.startsWith("*** Delete File:")) { - const filePath = line.slice("*** Delete File:".length).trim() - return filePath ? { filePath, nextIdx: startIdx + 1 } : null - } - - if (line.startsWith("*** Update File:")) { - const filePath = line.slice("*** Update File:".length).trim() - let movePath: string | undefined - let nextIdx = startIdx + 1 - - // Check for move directive - if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) { - movePath = lines[nextIdx].slice("*** Move to:".length).trim() - nextIdx++ - } - - return filePath ? { filePath, movePath, nextIdx } : null - } - - return null -} - -function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } { - const chunks: UpdateFileChunk[] = [] - let i = startIdx - - while (i < lines.length && !lines[i].startsWith("***")) { - if (lines[i].startsWith("@@")) { - // Parse context line - const contextLine = lines[i].substring(2).trim() - i++ - - const oldLines: string[] = [] - const newLines: string[] = [] - let isEndOfFile = false - - // Parse change lines - while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) { - const changeLine = lines[i] - - if (changeLine === "*** End of File") { - isEndOfFile = true - i++ - break - } - - if (changeLine.startsWith(" ")) { - // Keep line - appears in both old and new - const content = changeLine.substring(1) - oldLines.push(content) - newLines.push(content) - } else if (changeLine.startsWith("-")) { - // Remove line - only in old - oldLines.push(changeLine.substring(1)) - } else if (changeLine.startsWith("+")) 
{ - // Add line - only in new - newLines.push(changeLine.substring(1)) - } - - i++ - } - - chunks.push({ - old_lines: oldLines, - new_lines: newLines, - change_context: contextLine || undefined, - is_end_of_file: isEndOfFile || undefined, - }) - } else { - i++ - } - } - - return { chunks, nextIdx: i } -} - -function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } { - let content = "" - let i = startIdx - - while (i < lines.length && !lines[i].startsWith("***")) { - if (lines[i].startsWith("+")) { - content += lines[i].substring(1) + "\n" - } - i++ - } - - // Remove trailing newline - if (content.endsWith("\n")) { - content = content.slice(0, -1) - } - - return { content, nextIdx: i } -} - -function stripHeredoc(input: string): string { - // Match heredoc patterns like: cat <<'EOF'\n...\nEOF or < line.trim() === beginMarker) - const endIdx = lines.findIndex((line) => line.trim() === endMarker) - - if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) { - throw new Error("Invalid patch format: missing Begin/End markers") - } - - // Parse content between markers - i = beginIdx + 1 - - while (i < endIdx) { - const header = parsePatchHeader(lines, i) - if (!header) { - i++ - continue - } - - if (lines[i].startsWith("*** Add File:")) { - const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx) - hunks.push({ - type: "add", - path: header.filePath, - contents: content, - }) - i = nextIdx - } else if (lines[i].startsWith("*** Delete File:")) { - hunks.push({ - type: "delete", - path: header.filePath, - }) - i = header.nextIdx - } else if (lines[i].startsWith("*** Update File:")) { - const { chunks, nextIdx } = parseUpdateFileChunks(lines, header.nextIdx) - hunks.push({ - type: "update", - path: header.filePath, - move_path: header.movePath, - chunks, - }) - i = nextIdx - } else { - i++ - } - } - - return { hunks } -} - -// Apply patch functionality -export function maybeParseApplyPatch( - argv: string[], -): 
- | { type: MaybeApplyPatch.Body; args: ApplyPatchArgs } - | { type: MaybeApplyPatch.PatchParseError; error: Error } - | { type: MaybeApplyPatch.NotApplyPatch } { - const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"] - - // Direct invocation: apply_patch - if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) { - try { - const { hunks } = parsePatch(argv[1]) - return { - type: MaybeApplyPatch.Body, - args: { - patch: argv[1], - hunks, - }, - } - } catch (error) { - return { - type: MaybeApplyPatch.PatchParseError, - error: error as Error, - } - } - } - - // Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...' - if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") { - // Simple extraction - in real implementation would need proper bash parsing - const script = argv[2] - const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/) - - if (heredocMatch) { - const patchContent = heredocMatch[2] - try { - const { hunks } = parsePatch(patchContent) - return { - type: MaybeApplyPatch.Body, - args: { - patch: patchContent, - hunks, - }, - } - } catch (error) { - return { - type: MaybeApplyPatch.PatchParseError, - error: error as Error, - } - } - } - } - - return { type: MaybeApplyPatch.NotApplyPatch } -} - -// File content manipulation -interface ApplyPatchFileUpdate { - unified_diff: string - content: string -} - -export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate { - // Read original file content - let originalContent: string - try { - originalContent = readFileSync(filePath, "utf-8") - } catch (error) { - throw new Error(`Failed to read file ${filePath}: ${error}`, { cause: error }) - } - - let originalLines = originalContent.split("\n") - - // Drop trailing empty element for consistent line counting - if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") { - originalLines.pop() - } - - const replacements = 
computeReplacements(originalLines, filePath, chunks) - let newLines = applyReplacements(originalLines, replacements) - - // Ensure trailing newline - if (newLines.length === 0 || newLines[newLines.length - 1] !== "") { - newLines.push("") - } - - const newContent = newLines.join("\n") - - // Generate unified diff - const unifiedDiff = generateUnifiedDiff(originalContent, newContent) - - return { - unified_diff: unifiedDiff, - content: newContent, - } -} - -function computeReplacements( - originalLines: string[], - filePath: string, - chunks: UpdateFileChunk[], -): Array<[number, number, string[]]> { - const replacements: Array<[number, number, string[]]> = [] - let lineIndex = 0 - - for (const chunk of chunks) { - // Handle context-based seeking - if (chunk.change_context) { - const contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex) - if (contextIdx === -1) { - throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`) - } - lineIndex = contextIdx + 1 - } - - // Handle pure addition (no old lines) - if (chunk.old_lines.length === 0) { - const insertionIdx = - originalLines.length > 0 && originalLines[originalLines.length - 1] === "" - ? 
originalLines.length - 1 - : originalLines.length - replacements.push([insertionIdx, 0, chunk.new_lines]) - continue - } - - // Try to match old lines in the file - let pattern = chunk.old_lines - let newSlice = chunk.new_lines - let found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) - - // Retry without trailing empty line if not found - if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") { - pattern = pattern.slice(0, -1) - if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") { - newSlice = newSlice.slice(0, -1) - } - found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) - } - - if (found !== -1) { - replacements.push([found, pattern.length, newSlice]) - lineIndex = found + pattern.length - } else { - throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`) - } - } - - // Sort replacements by index to apply in order - replacements.sort((a, b) => a[0] - b[0]) - - return replacements -} - -function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] { - // Apply replacements in reverse order to avoid index shifting - const result = [...lines] - - for (let i = replacements.length - 1; i >= 0; i--) { - const [startIdx, oldLen, newSegment] = replacements[i] - - // Remove old lines - result.splice(startIdx, oldLen) - - // Insert new lines - for (let j = 0; j < newSegment.length; j++) { - result.splice(startIdx + j, 0, newSegment[j]) - } - } - - return result -} - -// Normalize Unicode punctuation to ASCII equivalents (like Rust's normalize_unicode) -function normalizeUnicode(str: string): string { - return str - .replace(/[\u2018\u2019\u201A\u201B]/g, "'") // single quotes - .replace(/[\u201C\u201D\u201E\u201F]/g, '"') // double quotes - .replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, "-") // dashes - .replace(/\u2026/g, "...") // ellipsis - .replace(/\u00A0/g, " ") // non-breaking space -} - -type 
Comparator = (a: string, b: string) => boolean - -function tryMatch(lines: string[], pattern: string[], startIndex: number, compare: Comparator, eof: boolean): number { - // If EOF anchor, try matching from end of file first - if (eof) { - const fromEnd = lines.length - pattern.length - if (fromEnd >= startIndex) { - let matches = true - for (let j = 0; j < pattern.length; j++) { - if (!compare(lines[fromEnd + j], pattern[j])) { - matches = false - break - } - } - if (matches) return fromEnd - } - } - - // Forward search from startIndex - for (let i = startIndex; i <= lines.length - pattern.length; i++) { - let matches = true - for (let j = 0; j < pattern.length; j++) { - if (!compare(lines[i + j], pattern[j])) { - matches = false - break - } - } - if (matches) return i - } - - return -1 -} - -function seekSequence(lines: string[], pattern: string[], startIndex: number, eof = false): number { - if (pattern.length === 0) return -1 - - // Pass 1: exact match - const exact = tryMatch(lines, pattern, startIndex, (a, b) => a === b, eof) - if (exact !== -1) return exact - - // Pass 2: rstrip (trim trailing whitespace) - const rstrip = tryMatch(lines, pattern, startIndex, (a, b) => a.trimEnd() === b.trimEnd(), eof) - if (rstrip !== -1) return rstrip - - // Pass 3: trim (both ends) - const trim = tryMatch(lines, pattern, startIndex, (a, b) => a.trim() === b.trim(), eof) - if (trim !== -1) return trim - - // Pass 4: normalized (Unicode punctuation to ASCII) - const normalized = tryMatch( - lines, - pattern, - startIndex, - (a, b) => normalizeUnicode(a.trim()) === normalizeUnicode(b.trim()), - eof, - ) - return normalized -} - -function generateUnifiedDiff(oldContent: string, newContent: string): string { - const oldLines = oldContent.split("\n") - const newLines = newContent.split("\n") - - // Simple diff generation - in a real implementation you'd use a proper diff algorithm - let diff = "@@ -1 +1 @@\n" - - // Find changes (simplified approach) - const maxLen = 
Math.max(oldLines.length, newLines.length) - let hasChanges = false - - for (let i = 0; i < maxLen; i++) { - const oldLine = oldLines[i] || "" - const newLine = newLines[i] || "" - - if (oldLine !== newLine) { - if (oldLine) diff += `-${oldLine}\n` - if (newLine) diff += `+${newLine}\n` - hasChanges = true - } else if (oldLine) { - diff += ` ${oldLine}\n` - } - } - - return hasChanges ? diff : "" -} - -// Apply hunks to filesystem -export async function applyHunksToFiles(hunks: Hunk[]): Promise { - if (hunks.length === 0) { - throw new Error("No files were modified.") - } - - const added: string[] = [] - const modified: string[] = [] - const deleted: string[] = [] - - for (const hunk of hunks) { - switch (hunk.type) { - case "add": - // Create parent directories - const addDir = path.dirname(hunk.path) - if (addDir !== "." && addDir !== "/") { - await fs.mkdir(addDir, { recursive: true }) - } - - await fs.writeFile(hunk.path, hunk.contents, "utf-8") - added.push(hunk.path) - log.info(`Added file: ${hunk.path}`) - break - - case "delete": - await fs.unlink(hunk.path) - deleted.push(hunk.path) - log.info(`Deleted file: ${hunk.path}`) - break - - case "update": - const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks) - - if (hunk.move_path) { - // Handle file move - const moveDir = path.dirname(hunk.move_path) - if (moveDir !== "." 
&& moveDir !== "/") { - await fs.mkdir(moveDir, { recursive: true }) - } - - await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8") - await fs.unlink(hunk.path) - modified.push(hunk.move_path) - log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`) - } else { - // Regular update - await fs.writeFile(hunk.path, fileUpdate.content, "utf-8") - modified.push(hunk.path) - log.info(`Updated file: ${hunk.path}`) - } - break - } - } - - return { added, modified, deleted } -} - -// Main patch application function -export async function applyPatch(patchText: string): Promise { - const { hunks } = parsePatch(patchText) - return applyHunksToFiles(hunks) -} - -// Async version of maybeParseApplyPatchVerified -export async function maybeParseApplyPatchVerified( - argv: string[], - cwd: string, -): Promise< - | { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction } - | { type: MaybeApplyPatchVerified.CorrectnessError; error: Error } - | { type: MaybeApplyPatchVerified.NotApplyPatch } -> { - // Detect implicit patch invocation (raw patch without apply_patch command) - if (argv.length === 1) { - try { - parsePatch(argv[0]) - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: new Error(ApplyPatchError.ImplicitInvocation), - } - } catch { - // Not a patch, continue - } - } - - const result = maybeParseApplyPatch(argv) - - switch (result.type) { - case MaybeApplyPatch.Body: - const { args } = result - const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd - const changes = new Map() - - for (const hunk of args.hunks) { - const resolvedPath = path.resolve( - effectiveCwd, - hunk.type === "update" && hunk.move_path ? 
hunk.move_path : hunk.path, - ) - - switch (hunk.type) { - case "add": - changes.set(resolvedPath, { - type: "add", - content: hunk.contents, - }) - break - - case "delete": - // For delete, we need to read the current content - const deletePath = path.resolve(effectiveCwd, hunk.path) - try { - const content = await fs.readFile(deletePath, "utf-8") - changes.set(resolvedPath, { - type: "delete", - content, - }) - } catch { - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: new Error(`Failed to read file for deletion: ${deletePath}`), - } - } - break - - case "update": - const updatePath = path.resolve(effectiveCwd, hunk.path) - try { - const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks) - changes.set(resolvedPath, { - type: "update", - unified_diff: fileUpdate.unified_diff, - move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined, - new_content: fileUpdate.content, - }) - } catch (error) { - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: error as Error, - } - } - break - } - } - - return { - type: MaybeApplyPatchVerified.Body, - action: { - changes, - patch: args.patch, - cwd: effectiveCwd, - }, - } - - case MaybeApplyPatch.PatchParseError: - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: result.error, - } - - case MaybeApplyPatch.NotApplyPatch: - return { type: MaybeApplyPatchVerified.NotApplyPatch } - } -} diff --git a/packages/opencode/src/v2/session-common.ts b/packages/opencode/src/v2/session-common.ts deleted file mode 100644 index 556bd79b61..0000000000 --- a/packages/opencode/src/v2/session-common.ts +++ /dev/null @@ -1 +0,0 @@ -export namespace SessionCommon {} From 8afb625bab10c44e5b0437af4550f020f332cdf5 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:19:01 -0400 Subject: [PATCH 099/201] refactor: extract Diagnostic namespace into lsp/diagnostic.ts + self-reexport (#22983) --- packages/opencode/src/lsp/diagnostic.ts | 29 
+++++++++++++++++++++++++ packages/opencode/src/lsp/lsp.ts | 28 +----------------------- 2 files changed, 30 insertions(+), 27 deletions(-) create mode 100644 packages/opencode/src/lsp/diagnostic.ts diff --git a/packages/opencode/src/lsp/diagnostic.ts b/packages/opencode/src/lsp/diagnostic.ts new file mode 100644 index 0000000000..4bc085e788 --- /dev/null +++ b/packages/opencode/src/lsp/diagnostic.ts @@ -0,0 +1,29 @@ +import * as LSPClient from "./client" + +const MAX_PER_FILE = 20 + +export function pretty(diagnostic: LSPClient.Diagnostic) { + const severityMap = { + 1: "ERROR", + 2: "WARN", + 3: "INFO", + 4: "HINT", + } + + const severity = severityMap[diagnostic.severity || 1] + const line = diagnostic.range.start.line + 1 + const col = diagnostic.range.start.character + 1 + + return `${severity} [${line}:${col}] ${diagnostic.message}` +} + +export function report(file: string, issues: LSPClient.Diagnostic[]) { + const errors = issues.filter((item) => item.severity === 1) + if (errors.length === 0) return "" + const limited = errors.slice(0, MAX_PER_FILE) + const more = errors.length - MAX_PER_FILE + const suffix = more > 0 ? `\n... 
and ${more} more` : "" + return `\n${limited.map(pretty).join("\n")}${suffix}\n` +} + +export * as Diagnostic from "./diagnostic" diff --git a/packages/opencode/src/lsp/lsp.ts b/packages/opencode/src/lsp/lsp.ts index d895e73256..97af8209bb 100644 --- a/packages/opencode/src/lsp/lsp.ts +++ b/packages/opencode/src/lsp/lsp.ts @@ -505,30 +505,4 @@ export const layer = Layer.effect( export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer)) -export namespace Diagnostic { - const MAX_PER_FILE = 20 - - export function pretty(diagnostic: LSPClient.Diagnostic) { - const severityMap = { - 1: "ERROR", - 2: "WARN", - 3: "INFO", - 4: "HINT", - } - - const severity = severityMap[diagnostic.severity || 1] - const line = diagnostic.range.start.line + 1 - const col = diagnostic.range.start.character + 1 - - return `${severity} [${line}:${col}] ${diagnostic.message}` - } - - export function report(file: string, issues: LSPClient.Diagnostic[]) { - const errors = issues.filter((item) => item.severity === 1) - if (errors.length === 0) return "" - const limited = errors.slice(0, MAX_PER_FILE) - const more = errors.length - MAX_PER_FILE - const suffix = more > 0 ? `\n... 
and ${more} more` : "" - return `\n${limited.map(pretty).join("\n")}${suffix}\n` - } -} +export * as Diagnostic from "./diagnostic" From 6405e3a7b12d49ade2f251360d221999836ccc02 Mon Sep 17 00:00:00 2001 From: Dax Date: Thu, 16 Apr 2026 21:32:36 -0400 Subject: [PATCH 100/201] tui: stabilize session dialog ordering (#22987) --- .../src/cli/cmd/tui/component/dialog-session-list.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx index 75c79dcdd8..60ef6087ba 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx @@ -113,7 +113,11 @@ export function DialogSessionList() { const today = new Date().toDateString() return sessions() .filter((x) => x.parentID === undefined) - .toSorted((a, b) => b.time.updated - a.time.updated) + .toSorted((a, b) => { + const updatedDay = new Date(b.time.updated).setHours(0, 0, 0, 0) - new Date(a.time.updated).setHours(0, 0, 0, 0) + if (updatedDay !== 0) return updatedDay + return b.time.created - a.time.created + }) .map((x) => { const workspace = x.workspaceID ? 
project.workspace.get(x.workspaceID) : undefined From 326471a25c50cb83d33e6b327bc88faf38a4db11 Mon Sep 17 00:00:00 2001 From: Dax Date: Thu, 16 Apr 2026 21:35:26 -0400 Subject: [PATCH 101/201] refactor: split config lsp and formatter schemas (#22986) --- AGENTS.md | 1 + packages/opencode/AGENTS.md | 4 ++ packages/opencode/src/config/config.ts | 55 ++--------------------- packages/opencode/src/config/formatter.ts | 13 ++++++ packages/opencode/src/config/index.ts | 2 + packages/opencode/src/config/lsp.ts | 39 ++++++++++++++++ 6 files changed, 63 insertions(+), 51 deletions(-) create mode 100644 packages/opencode/src/config/formatter.ts create mode 100644 packages/opencode/src/config/lsp.ts diff --git a/AGENTS.md b/AGENTS.md index a7895c831f..44d08ae955 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -14,6 +14,7 @@ - Use Bun APIs when possible, like `Bun.file()` - Rely on type inference when possible; avoid explicit type annotations or interfaces unless necessary for exports or clarity - Prefer functional array methods (flatMap, filter, map) over for loops; use type guards on filter to maintain type inference downstream +- In `src/config`, follow the existing self-export pattern at the top of the file (for example `export * as ConfigAgent from "./agent"`) when adding a new config module. Reduce total variable count by inlining when a value is only used once. diff --git a/packages/opencode/AGENTS.md b/packages/opencode/AGENTS.md index f0f32fdd16..761b9b5c5e 100644 --- a/packages/opencode/AGENTS.md +++ b/packages/opencode/AGENTS.md @@ -23,6 +23,10 @@ See `specs/effect/migration.md` for the compact pattern reference and examples. - Use `Effect.callback` for callback-based APIs. - Prefer `DateTime.nowAsDate` over `new Date(yield* Clock.currentTimeMillis)` when you need a `Date`. +## Module conventions + +- In `src/config`, follow the existing self-export pattern at the top of the file (for example `export * as ConfigAgent from "./agent"`) when adding a new config module. 
+ ## Schemas and errors - Use `Schema.Class` for multi-field data. diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index adccb6353b..2edc455df3 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -12,7 +12,6 @@ import { Auth } from "../auth" import { Env } from "../env" import { applyEdits, modify } from "jsonc-parser" import { Instance, type InstanceContext } from "../project/instance" -import * as LSPServer from "../lsp/server" import { InstallationLocal, InstallationVersion } from "@/installation/version" import { existsSync } from "fs" import { GlobalBus } from "@/bus/global" @@ -37,6 +36,8 @@ import { ConfigPermission } from "./permission" import { ConfigProvider } from "./provider" import { ConfigSkills } from "./skills" import { ConfigPaths } from "./paths" +import { ConfigFormatter } from "./formatter" +import { ConfigLSP } from "./lsp" const log = Log.create({ service: "config" }) @@ -186,56 +187,8 @@ export const Info = z ) .optional() .describe("MCP (Model Context Protocol) server configurations"), - formatter: z - .union([ - z.literal(false), - z.record( - z.string(), - z.object({ - disabled: z.boolean().optional(), - command: z.array(z.string()).optional(), - environment: z.record(z.string(), z.string()).optional(), - extensions: z.array(z.string()).optional(), - }), - ), - ]) - .optional(), - lsp: z - .union([ - z.literal(false), - z.record( - z.string(), - z.union([ - z.object({ - disabled: z.literal(true), - }), - z.object({ - command: z.array(z.string()), - extensions: z.array(z.string()).optional(), - disabled: z.boolean().optional(), - env: z.record(z.string(), z.string()).optional(), - initialization: z.record(z.string(), z.any()).optional(), - }), - ]), - ), - ]) - .optional() - .refine( - (data) => { - if (!data) return true - if (typeof data === "boolean") return true - const serverIds = new Set(Object.values(LSPServer).map((s) => s.id)) - - return 
Object.entries(data).every(([id, config]) => { - if (config.disabled) return true - if (serverIds.has(id)) return true - return Boolean(config.extensions) - }) - }, - { - error: "For custom LSP servers, 'extensions' array is required.", - }, - ), + formatter: ConfigFormatter.Info.optional(), + lsp: ConfigLSP.Info.optional(), instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"), layout: Layout.optional().describe("@deprecated Always uses stretch layout."), permission: ConfigPermission.Info.optional(), diff --git a/packages/opencode/src/config/formatter.ts b/packages/opencode/src/config/formatter.ts new file mode 100644 index 0000000000..7ac56214c9 --- /dev/null +++ b/packages/opencode/src/config/formatter.ts @@ -0,0 +1,13 @@ +export * as ConfigFormatter from "./formatter" + +import z from "zod" + +export const Entry = z.object({ + disabled: z.boolean().optional(), + command: z.array(z.string()).optional(), + environment: z.record(z.string(), z.string()).optional(), + extensions: z.array(z.string()).optional(), +}) + +export const Info = z.union([z.literal(false), z.record(z.string(), Entry)]) +export type Info = z.infer diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index c4a1c608b1..a05c29d25c 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -2,6 +2,8 @@ export * as Config from "./config" export * as ConfigAgent from "./agent" export * as ConfigCommand from "./command" export * as ConfigError from "./error" +export * as ConfigFormatter from "./formatter" +export * as ConfigLSP from "./lsp" export * as ConfigVariable from "./variable" export { ConfigManaged } from "./managed" export * as ConfigMarkdown from "./markdown" diff --git a/packages/opencode/src/config/lsp.ts b/packages/opencode/src/config/lsp.ts new file mode 100644 index 0000000000..afb83908b9 --- /dev/null +++ b/packages/opencode/src/config/lsp.ts @@ -0,0 
+1,39 @@ +export * as ConfigLSP from "./lsp" + +import z from "zod" +import * as LSPServer from "../lsp/server" + +export const Disabled = z.object({ + disabled: z.literal(true), +}) + +export const Entry = z.union([ + Disabled, + z.object({ + command: z.array(z.string()), + extensions: z.array(z.string()).optional(), + disabled: z.boolean().optional(), + env: z.record(z.string(), z.string()).optional(), + initialization: z.record(z.string(), z.any()).optional(), + }), +]) + +export const Info = z + .union([z.literal(false), z.record(z.string(), Entry)]) + .refine( + (data) => { + if (typeof data === "boolean") return true + const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) + + return Object.entries(data).every(([id, config]) => { + if (config.disabled) return true + if (serverIds.has(id)) return true + return Boolean(config.extensions) + }) + }, + { + error: "For custom LSP servers, 'extensions' array is required.", + }, + ) + +export type Info = z.infer From f13778215ae8927e8bb500f421f835566eb9a017 Mon Sep 17 00:00:00 2001 From: Dax Date: Thu, 16 Apr 2026 21:35:47 -0400 Subject: [PATCH 102/201] perf: speed up skill directory discovery (#22990) --- packages/opencode/src/skill/index.ts | 60 +++++++++++++++++++--------- 1 file changed, 42 insertions(+), 18 deletions(-) diff --git a/packages/opencode/src/skill/index.ts b/packages/opencode/src/skill/index.ts index b139b39e6e..dd5cc4e5d5 100644 --- a/packages/opencode/src/skill/index.ts +++ b/packages/opencode/src/skill/index.ts @@ -54,6 +54,16 @@ type State = { dirs: Set } +type DiscoveryState = { + matches: string[] + dirs: string[] +} + +type ScanState = { + matches: Set + dirs: Set +} + export interface Interface { readonly get: (name: string) => Effect.Effect readonly all: () => Effect.Effect @@ -102,8 +112,7 @@ const add = Effect.fnUntraced(function* (state: State, match: string, bus: Bus.I }) const scan = Effect.fnUntraced(function* ( - state: State, - bus: Bus.Interface, + state: 
ScanState, root: string, pattern: string, opts?: { dot?: boolean; scope?: string }, @@ -126,26 +135,26 @@ const scan = Effect.fnUntraced(function* ( }), ) - yield* Effect.forEach(matches, (match) => add(state, match, bus), { - concurrency: "unbounded", - discard: true, - }) + for (const match of matches) { + state.matches.add(match) + state.dirs.add(path.dirname(match)) + } }) -const loadSkills = Effect.fnUntraced(function* ( - state: State, +const discoverSkills = Effect.fnUntraced(function* ( config: Config.Interface, discovery: Discovery.Interface, - bus: Bus.Interface, fsys: AppFileSystem.Interface, directory: string, worktree: string, ) { + const state: ScanState = { matches: new Set(), dirs: new Set() } + if (!Flag.OPENCODE_DISABLE_EXTERNAL_SKILLS) { for (const dir of EXTERNAL_DIRS) { const root = path.join(Global.Path.home, dir) if (!(yield* fsys.isDir(root))) continue - yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "global" }) + yield* scan(state, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "global" }) } const upDirs = yield* fsys @@ -153,13 +162,13 @@ const loadSkills = Effect.fnUntraced(function* ( .pipe(Effect.catch(() => Effect.succeed([] as string[]))) for (const root of upDirs) { - yield* scan(state, bus, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "project" }) + yield* scan(state, root, EXTERNAL_SKILL_PATTERN, { dot: true, scope: "project" }) } } const configDirs = yield* config.directories() for (const dir of configDirs) { - yield* scan(state, bus, dir, OPENCODE_SKILL_PATTERN) + yield* scan(state, dir, OPENCODE_SKILL_PATTERN) } const cfg = yield* config.get() @@ -171,17 +180,28 @@ const loadSkills = Effect.fnUntraced(function* ( continue } - yield* scan(state, bus, dir, SKILL_PATTERN) + yield* scan(state, dir, SKILL_PATTERN) } for (const url of cfg.skills?.urls ?? 
[]) { const pulledDirs = yield* discovery.pull(url) for (const dir of pulledDirs) { - state.dirs.add(dir) - yield* scan(state, bus, dir, SKILL_PATTERN) + yield* scan(state, dir, SKILL_PATTERN) } } + return { + matches: Array.from(state.matches), + dirs: Array.from(state.dirs), + } +}) + +const loadSkills = Effect.fnUntraced(function* (state: State, discovered: DiscoveryState, bus: Bus.Interface) { + yield* Effect.forEach(discovered.matches, (match) => add(state, match, bus), { + concurrency: "unbounded", + discard: true, + }) + log.info("init", { count: Object.keys(state.skills).length }) }) @@ -194,10 +214,15 @@ export const layer = Layer.effect( const config = yield* Config.Service const bus = yield* Bus.Service const fsys = yield* AppFileSystem.Service + const discovered = yield* InstanceState.make( + Effect.fn("Skill.discovery")(function* (ctx) { + return yield* discoverSkills(config, discovery, fsys, ctx.directory, ctx.worktree) + }), + ) const state = yield* InstanceState.make( Effect.fn("Skill.state")(function* (ctx) { const s: State = { skills: {}, dirs: new Set() } - yield* loadSkills(s, config, discovery, bus, fsys, ctx.directory, ctx.worktree) + yield* loadSkills(s, yield* InstanceState.get(discovered), bus) return s }), ) @@ -213,8 +238,7 @@ export const layer = Layer.effect( }) const dirs = Effect.fn("Skill.dirs")(function* () { - const s = yield* InstanceState.get(state) - return Array.from(s.dirs) + return (yield* InstanceState.get(discovered)).dirs }) const available = Effect.fn("Skill.available")(function* (agent?: Agent.Info) { From 5b9fa322551b77ddf2ab004c5491a82d37081b22 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 01:36:45 +0000 Subject: [PATCH 103/201] chore: generate --- packages/opencode/src/config/lsp.ts | 30 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/packages/opencode/src/config/lsp.ts b/packages/opencode/src/config/lsp.ts index afb83908b9..233f7e523c 100644 
--- a/packages/opencode/src/config/lsp.ts +++ b/packages/opencode/src/config/lsp.ts @@ -18,22 +18,20 @@ export const Entry = z.union([ }), ]) -export const Info = z - .union([z.literal(false), z.record(z.string(), Entry)]) - .refine( - (data) => { - if (typeof data === "boolean") return true - const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) +export const Info = z.union([z.literal(false), z.record(z.string(), Entry)]).refine( + (data) => { + if (typeof data === "boolean") return true + const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) - return Object.entries(data).every(([id, config]) => { - if (config.disabled) return true - if (serverIds.has(id)) return true - return Boolean(config.extensions) - }) - }, - { - error: "For custom LSP servers, 'extensions' array is required.", - }, - ) + return Object.entries(data).every(([id, config]) => { + if (config.disabled) return true + if (serverIds.has(id)) return true + return Boolean(config.extensions) + }) + }, + { + error: "For custom LSP servers, 'extensions' array is required.", + }, +) export type Info = z.infer From 9c87a144e879dd9b76c90cb1415e63005aac2843 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:43:57 -0400 Subject: [PATCH 104/201] refactor: normalize AccountRepo to canonical Effect service pattern (#22991) --- packages/opencode/src/account/account.ts | 4 +- packages/opencode/src/account/repo.ts | 264 +++++++++--------- packages/opencode/test/account/repo.test.ts | 78 +++--- .../opencode/test/account/service.test.ts | 20 +- .../opencode/test/share/share-next.test.ts | 2 +- 5 files changed, 184 insertions(+), 184 deletions(-) diff --git a/packages/opencode/src/account/account.ts b/packages/opencode/src/account/account.ts index 657c61b1e5..23981fd852 100644 --- a/packages/opencode/src/account/account.ts +++ b/packages/opencode/src/account/account.ts @@ -181,10 +181,10 @@ export interface Interface { export class Service extends 
Context.Service()("@opencode/Account") {} -export const layer: Layer.Layer = Layer.effect( +export const layer: Layer.Layer = Layer.effect( Service, Effect.gen(function* () { - const repo = yield* AccountRepo + const repo = yield* AccountRepo.Service const http = yield* HttpClient.HttpClient const httpRead = withTransientReadRetry(http) const httpOk = HttpClient.filterStatusOk(http) diff --git a/packages/opencode/src/account/repo.ts b/packages/opencode/src/account/repo.ts index 5d8a8e33f6..450db1bd74 100644 --- a/packages/opencode/src/account/repo.ts +++ b/packages/opencode/src/account/repo.ts @@ -13,154 +13,154 @@ type DbTransactionCallback = Parameters>[0] const ACCOUNT_STATE_ID = 1 -export namespace AccountRepo { - export interface Service { - readonly active: () => Effect.Effect, AccountRepoError> - readonly list: () => Effect.Effect - readonly remove: (accountID: AccountID) => Effect.Effect - readonly use: (accountID: AccountID, orgID: Option.Option) => Effect.Effect - readonly getRow: (accountID: AccountID) => Effect.Effect, AccountRepoError> - readonly persistToken: (input: { - accountID: AccountID - accessToken: AccessToken - refreshToken: RefreshToken - expiry: Option.Option - }) => Effect.Effect - readonly persistAccount: (input: { - id: AccountID - email: string - url: string - accessToken: AccessToken - refreshToken: RefreshToken - expiry: number - orgID: Option.Option - }) => Effect.Effect - } +export interface Interface { + readonly active: () => Effect.Effect, AccountRepoError> + readonly list: () => Effect.Effect + readonly remove: (accountID: AccountID) => Effect.Effect + readonly use: (accountID: AccountID, orgID: Option.Option) => Effect.Effect + readonly getRow: (accountID: AccountID) => Effect.Effect, AccountRepoError> + readonly persistToken: (input: { + accountID: AccountID + accessToken: AccessToken + refreshToken: RefreshToken + expiry: Option.Option + }) => Effect.Effect + readonly persistAccount: (input: { + id: AccountID + email: string 
+ url: string + accessToken: AccessToken + refreshToken: RefreshToken + expiry: number + orgID: Option.Option + }) => Effect.Effect } -export class AccountRepo extends Context.Service()("@opencode/AccountRepo") { - static readonly layer: Layer.Layer = Layer.effect( - AccountRepo, - Effect.gen(function* () { - const decode = Schema.decodeUnknownSync(Info) +export class Service extends Context.Service()("@opencode/AccountRepo") {} - const query = (f: DbTransactionCallback) => - Effect.try({ - try: () => Database.use(f), - catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), +export const layer: Layer.Layer = Layer.effect( + Service, + Effect.gen(function* () { + const decode = Schema.decodeUnknownSync(Info) + + const query = (f: DbTransactionCallback) => + Effect.try({ + try: () => Database.use(f), + catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), + }) + + const tx = (f: DbTransactionCallback) => + Effect.try({ + try: () => Database.transaction(f), + catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), + }) + + const current = (db: DbClient) => { + const state = db.select().from(AccountStateTable).where(eq(AccountStateTable.id, ACCOUNT_STATE_ID)).get() + if (!state?.active_account_id) return + const account = db.select().from(AccountTable).where(eq(AccountTable.id, state.active_account_id)).get() + if (!account) return + return { ...account, active_org_id: state.active_org_id ?? 
null } + } + + const state = (db: DbClient, accountID: AccountID, orgID: Option.Option) => { + const id = Option.getOrNull(orgID) + return db + .insert(AccountStateTable) + .values({ id: ACCOUNT_STATE_ID, active_account_id: accountID, active_org_id: id }) + .onConflictDoUpdate({ + target: AccountStateTable.id, + set: { active_account_id: accountID, active_org_id: id }, }) + .run() + } - const tx = (f: DbTransactionCallback) => - Effect.try({ - try: () => Database.transaction(f), - catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), - }) + const active = Effect.fn("AccountRepo.active")(() => + query((db) => current(db)).pipe(Effect.map((row) => (row ? Option.some(decode(row)) : Option.none()))), + ) - const current = (db: DbClient) => { - const state = db.select().from(AccountStateTable).where(eq(AccountStateTable.id, ACCOUNT_STATE_ID)).get() - if (!state?.active_account_id) return - const account = db.select().from(AccountTable).where(eq(AccountTable.id, state.active_account_id)).get() - if (!account) return - return { ...account, active_org_id: state.active_org_id ?? 
null } - } + const list = Effect.fn("AccountRepo.list")(() => + query((db) => + db + .select() + .from(AccountTable) + .all() + .map((row: AccountRow) => decode({ ...row, active_org_id: null })), + ), + ) - const state = (db: DbClient, accountID: AccountID, orgID: Option.Option) => { - const id = Option.getOrNull(orgID) - return db - .insert(AccountStateTable) - .values({ id: ACCOUNT_STATE_ID, active_account_id: accountID, active_org_id: id }) - .onConflictDoUpdate({ - target: AccountStateTable.id, - set: { active_account_id: accountID, active_org_id: id }, - }) + const remove = Effect.fn("AccountRepo.remove")((accountID: AccountID) => + tx((db) => { + db.update(AccountStateTable) + .set({ active_account_id: null, active_org_id: null }) + .where(eq(AccountStateTable.active_account_id, accountID)) .run() - } + db.delete(AccountTable).where(eq(AccountTable.id, accountID)).run() + }).pipe(Effect.asVoid), + ) - const active = Effect.fn("AccountRepo.active")(() => - query((db) => current(db)).pipe(Effect.map((row) => (row ? 
Option.some(decode(row)) : Option.none()))), - ) + const use = Effect.fn("AccountRepo.use")((accountID: AccountID, orgID: Option.Option) => + query((db) => state(db, accountID, orgID)).pipe(Effect.asVoid), + ) - const list = Effect.fn("AccountRepo.list")(() => - query((db) => - db - .select() - .from(AccountTable) - .all() - .map((row: AccountRow) => decode({ ...row, active_org_id: null })), - ), - ) + const getRow = Effect.fn("AccountRepo.getRow")((accountID: AccountID) => + query((db) => db.select().from(AccountTable).where(eq(AccountTable.id, accountID)).get()).pipe( + Effect.map(Option.fromNullishOr), + ), + ) - const remove = Effect.fn("AccountRepo.remove")((accountID: AccountID) => - tx((db) => { - db.update(AccountStateTable) - .set({ active_account_id: null, active_org_id: null }) - .where(eq(AccountStateTable.active_account_id, accountID)) - .run() - db.delete(AccountTable).where(eq(AccountTable.id, accountID)).run() - }).pipe(Effect.asVoid), - ) + const persistToken = Effect.fn("AccountRepo.persistToken")((input) => + query((db) => + db + .update(AccountTable) + .set({ + access_token: input.accessToken, + refresh_token: input.refreshToken, + token_expiry: Option.getOrNull(input.expiry), + }) + .where(eq(AccountTable.id, input.accountID)) + .run(), + ).pipe(Effect.asVoid), + ) - const use = Effect.fn("AccountRepo.use")((accountID: AccountID, orgID: Option.Option) => - query((db) => state(db, accountID, orgID)).pipe(Effect.asVoid), - ) + const persistAccount = Effect.fn("AccountRepo.persistAccount")((input) => + tx((db) => { + const url = normalizeServerUrl(input.url) - const getRow = Effect.fn("AccountRepo.getRow")((accountID: AccountID) => - query((db) => db.select().from(AccountTable).where(eq(AccountTable.id, accountID)).get()).pipe( - Effect.map(Option.fromNullishOr), - ), - ) - - const persistToken = Effect.fn("AccountRepo.persistToken")((input) => - query((db) => - db - .update(AccountTable) - .set({ - access_token: input.accessToken, - 
refresh_token: input.refreshToken, - token_expiry: Option.getOrNull(input.expiry), - }) - .where(eq(AccountTable.id, input.accountID)) - .run(), - ).pipe(Effect.asVoid), - ) - - const persistAccount = Effect.fn("AccountRepo.persistAccount")((input) => - tx((db) => { - const url = normalizeServerUrl(input.url) - - db.insert(AccountTable) - .values({ - id: input.id, + db.insert(AccountTable) + .values({ + id: input.id, + email: input.email, + url, + access_token: input.accessToken, + refresh_token: input.refreshToken, + token_expiry: input.expiry, + }) + .onConflictDoUpdate({ + target: AccountTable.id, + set: { email: input.email, url, access_token: input.accessToken, refresh_token: input.refreshToken, token_expiry: input.expiry, - }) - .onConflictDoUpdate({ - target: AccountTable.id, - set: { - email: input.email, - url, - access_token: input.accessToken, - refresh_token: input.refreshToken, - token_expiry: input.expiry, - }, - }) - .run() - void state(db, input.id, input.orgID) - }).pipe(Effect.asVoid), - ) + }, + }) + .run() + void state(db, input.id, input.orgID) + }).pipe(Effect.asVoid), + ) - return AccountRepo.of({ - active, - list, - remove, - use, - getRow, - persistToken, - persistAccount, - }) - }), - ) -} + return Service.of({ + active, + list, + remove, + use, + getRow, + persistToken, + persistAccount, + }) + }), +) + +export * as AccountRepo from "./repo" diff --git a/packages/opencode/test/account/repo.test.ts b/packages/opencode/test/account/repo.test.ts index 93d0481521..8e59b85b31 100644 --- a/packages/opencode/test/account/repo.test.ts +++ b/packages/opencode/test/account/repo.test.ts @@ -18,14 +18,14 @@ const it = testEffect(Layer.merge(AccountRepo.layer, truncate)) it.live("list returns empty when no accounts exist", () => Effect.gen(function* () { - const accounts = yield* AccountRepo.use((r) => r.list()) + const accounts = yield* AccountRepo.Service.use((r) => r.list()) expect(accounts).toEqual([]) }), ) it.live("active returns none when no 
accounts exist", () => Effect.gen(function* () { - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.isNone(active)).toBe(true) }), ) @@ -33,7 +33,7 @@ it.live("active returns none when no accounts exist", () => it.live("persistAccount inserts and getRow retrieves", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -45,13 +45,13 @@ it.live("persistAccount inserts and getRow retrieves", () => }), ) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) expect(Option.isSome(row)).toBe(true) const value = Option.getOrThrow(row) expect(value.id).toBe(AccountID.make("user-1")) expect(value.email).toBe("test@example.com") - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active).active_org_id).toBe(OrgID.make("org-1")) }), ) @@ -60,7 +60,7 @@ it.live("persistAccount normalizes trailing slashes in stored server URLs", () = Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -72,9 +72,9 @@ it.live("persistAccount normalizes trailing slashes in stored server URLs", () = }), ) - const row = yield* AccountRepo.use((r) => r.getRow(id)) - const active = yield* AccountRepo.use((r) => r.active()) - const list = yield* AccountRepo.use((r) => r.list()) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) + const active = yield* AccountRepo.Service.use((r) => r.active()) + const list = yield* AccountRepo.Service.use((r) => r.list()) expect(Option.getOrThrow(row).url).toBe("https://control.example.com") 
expect(Option.getOrThrow(active).url).toBe("https://control.example.com") @@ -87,7 +87,7 @@ it.live("persistAccount sets the active account and org", () => const id1 = AccountID.make("user-1") const id2 = AccountID.make("user-2") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id1, email: "first@example.com", @@ -99,7 +99,7 @@ it.live("persistAccount sets the active account and org", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id2, email: "second@example.com", @@ -112,7 +112,7 @@ it.live("persistAccount sets the active account and org", () => ) // Last persisted account is active with its org - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.isSome(active)).toBe(true) expect(Option.getOrThrow(active).id).toBe(AccountID.make("user-2")) expect(Option.getOrThrow(active).active_org_id).toBe(OrgID.make("org-2")) @@ -124,7 +124,7 @@ it.live("list returns all accounts", () => const id1 = AccountID.make("user-1") const id2 = AccountID.make("user-2") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id1, email: "a@example.com", @@ -136,7 +136,7 @@ it.live("list returns all accounts", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id2, email: "b@example.com", @@ -148,7 +148,7 @@ it.live("list returns all accounts", () => }), ) - const accounts = yield* AccountRepo.use((r) => r.list()) + const accounts = yield* AccountRepo.Service.use((r) => r.list()) expect(accounts.length).toBe(2) expect(accounts.map((a) => a.email).sort()).toEqual(["a@example.com", "b@example.com"]) }), @@ -158,7 +158,7 @@ it.live("remove deletes an account", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => 
r.persistAccount({ id, email: "test@example.com", @@ -170,9 +170,9 @@ it.live("remove deletes an account", () => }), ) - yield* AccountRepo.use((r) => r.remove(id)) + yield* AccountRepo.Service.use((r) => r.remove(id)) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) expect(Option.isNone(row)).toBe(true) }), ) @@ -182,7 +182,7 @@ it.live("use stores the selected org and marks the account active", () => const id1 = AccountID.make("user-1") const id2 = AccountID.make("user-2") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id1, email: "first@example.com", @@ -194,7 +194,7 @@ it.live("use stores the selected org and marks the account active", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: id2, email: "second@example.com", @@ -206,13 +206,13 @@ it.live("use stores the selected org and marks the account active", () => }), ) - yield* AccountRepo.use((r) => r.use(id1, Option.some(OrgID.make("org-99")))) - const active1 = yield* AccountRepo.use((r) => r.active()) + yield* AccountRepo.Service.use((r) => r.use(id1, Option.some(OrgID.make("org-99")))) + const active1 = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active1).id).toBe(id1) expect(Option.getOrThrow(active1).active_org_id).toBe(OrgID.make("org-99")) - yield* AccountRepo.use((r) => r.use(id1, Option.none())) - const active2 = yield* AccountRepo.use((r) => r.active()) + yield* AccountRepo.Service.use((r) => r.use(id1, Option.none())) + const active2 = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active2).active_org_id).toBeNull() }), ) @@ -221,7 +221,7 @@ it.live("persistToken updates token fields", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: 
"test@example.com", @@ -234,7 +234,7 @@ it.live("persistToken updates token fields", () => ) const expiry = Date.now() + 7200_000 - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistToken({ accountID: id, accessToken: AccessToken.make("new_token"), @@ -243,7 +243,7 @@ it.live("persistToken updates token fields", () => }), ) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("new_token")) expect(value.refresh_token).toBe(RefreshToken.make("new_refresh")) @@ -255,7 +255,7 @@ it.live("persistToken with no expiry sets token_expiry to null", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -267,7 +267,7 @@ it.live("persistToken with no expiry sets token_expiry to null", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistToken({ accountID: id, accessToken: AccessToken.make("new_token"), @@ -276,7 +276,7 @@ it.live("persistToken with no expiry sets token_expiry to null", () => }), ) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) expect(Option.getOrThrow(row).token_expiry).toBeNull() }), ) @@ -285,7 +285,7 @@ it.live("persistAccount upserts on conflict", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -297,7 +297,7 @@ it.live("persistAccount upserts on conflict", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -309,14 +309,14 @@ it.live("persistAccount upserts on conflict", () => }), ) - const 
accounts = yield* AccountRepo.use((r) => r.list()) + const accounts = yield* AccountRepo.Service.use((r) => r.list()) expect(accounts.length).toBe(1) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("at_v2")) - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active).active_org_id).toBe(OrgID.make("org-2")) }), ) @@ -325,7 +325,7 @@ it.live("remove clears active state when deleting the active account", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "test@example.com", @@ -337,16 +337,16 @@ it.live("remove clears active state when deleting the active account", () => }), ) - yield* AccountRepo.use((r) => r.remove(id)) + yield* AccountRepo.Service.use((r) => r.remove(id)) - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.isNone(active)).toBe(true) }), ) it.live("getRow returns none for nonexistent account", () => Effect.gen(function* () { - const row = yield* AccountRepo.use((r) => r.getRow(AccountID.make("nope"))) + const row = yield* AccountRepo.Service.use((r) => r.getRow(AccountID.make("nope"))) expect(Option.isNone(row)).toBe(true) }), ) diff --git a/packages/opencode/test/account/service.test.ts b/packages/opencode/test/account/service.test.ts index 053fd2a0ed..f0daab3a15 100644 --- a/packages/opencode/test/account/service.test.ts +++ b/packages/opencode/test/account/service.test.ts @@ -122,7 +122,7 @@ it.live("login maps transport failures to account transport errors", () => it.live("orgsByAccount groups orgs per account", () => Effect.gen(function* () { - yield* AccountRepo.use((r) => + yield* 
AccountRepo.Service.use((r) => r.persistAccount({ id: AccountID.make("user-1"), email: "one@example.com", @@ -134,7 +134,7 @@ it.live("orgsByAccount groups orgs per account", () => }), ) - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id: AccountID.make("user-2"), email: "two@example.com", @@ -177,7 +177,7 @@ it.live("token refresh persists the new token", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "user@example.com", @@ -206,7 +206,7 @@ it.live("token refresh persists the new token", () => expect(Option.getOrThrow(token)).toBeDefined() expect(String(Option.getOrThrow(token))).toBe("at_new") - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("at_new")) expect(value.refresh_token).toBe(RefreshToken.make("rt_new")) @@ -218,7 +218,7 @@ it.live("token refreshes before expiry when inside the eager refresh window", () Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "user@example.com", @@ -251,7 +251,7 @@ it.live("token refreshes before expiry when inside the eager refresh window", () expect(String(Option.getOrThrow(token))).toBe("at_new") expect(refreshCalls).toBe(1) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("at_new")) expect(value.refresh_token).toBe(RefreshToken.make("rt_new")) @@ -262,7 +262,7 @@ it.live("concurrent config and token requests coalesce token refresh", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + 
yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "user@example.com", @@ -315,7 +315,7 @@ it.live("concurrent config and token requests coalesce token refresh", () => expect(String(Option.getOrThrow(token))).toBe("at_new") expect(refreshCalls).toBe(1) - const row = yield* AccountRepo.use((r) => r.getRow(id)) + const row = yield* AccountRepo.Service.use((r) => r.getRow(id)) const value = Option.getOrThrow(row) expect(value.access_token).toBe(AccessToken.make("at_new")) expect(value.refresh_token).toBe(RefreshToken.make("rt_new")) @@ -326,7 +326,7 @@ it.live("config sends the selected org header", () => Effect.gen(function* () { const id = AccountID.make("user-1") - yield* AccountRepo.use((r) => + yield* AccountRepo.Service.use((r) => r.persistAccount({ id, email: "user@example.com", @@ -388,7 +388,7 @@ it.live("poll stores the account and first org on success", () => expect(res.email).toBe("user@example.com") } - const active = yield* AccountRepo.use((r) => r.active()) + const active = yield* AccountRepo.Service.use((r) => r.active()) expect(Option.getOrThrow(active)).toEqual( expect.objectContaining({ id: "user-1", diff --git a/packages/opencode/test/share/share-next.test.ts b/packages/opencode/test/share/share-next.test.ts index 2359f06a31..930c4062f6 100644 --- a/packages/opencode/test/share/share-next.test.ts +++ b/packages/opencode/test/share/share-next.test.ts @@ -72,7 +72,7 @@ const share = (id: SessionID) => Database.use((db) => db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, id)).get()) const seed = (url: string, org?: string) => - AccountRepo.use((repo) => + AccountRepo.Service.use((repo) => repo.persistAccount({ id: AccountID.make("account-1"), email: "user@example.com", From 4f8986aa48cbab66ca6e72272c3c7d27ffc8e0eb Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 21:51:02 -0400 Subject: [PATCH 105/201] refactor: unwrap Question namespace + fix script to emit "." 
for index.ts (#22992) --- .../script/unwrap-and-self-reexport.ts | 11 +- packages/opencode/src/question/index.ts | 384 +++++++++--------- 2 files changed, 200 insertions(+), 195 deletions(-) diff --git a/packages/opencode/script/unwrap-and-self-reexport.ts b/packages/opencode/script/unwrap-and-self-reexport.ts index 5ae703182e..09256f3a51 100644 --- a/packages/opencode/script/unwrap-and-self-reexport.ts +++ b/packages/opencode/script/unwrap-and-self-reexport.ts @@ -207,10 +207,15 @@ const rewrittenBody = dedented.map(rewriteLine) // Assemble the new file. Collapse multiple trailing blank lines so the // self-reexport sits cleanly at the end. +// +// When the file is itself `index.ts`, prefer `"."` over `"./index"` — both are +// valid but `"."` matches the existing convention in the codebase (e.g. +// pty/index.ts, file/index.ts, etc.) and avoids referencing "index" literally. const basename = path.basename(absPath, ".ts") +const reexportSource = basename === "index" ? "." : `./${basename}` const assembled = [...before, ...rewrittenBody, ...after].join("\n") const trimmed = assembled.replace(/\s+$/g, "") -const output = `${trimmed}\n\nexport * as ${nsName} from "./${basename}"\n` +const output = `${trimmed}\n\nexport * as ${nsName} from "${reexportSource}"\n` if (dryRun) { console.log(`--- dry run: ${path.relative(process.cwd(), absPath)} ---`) @@ -218,7 +223,7 @@ if (dryRun) { console.log(`body lines: ${body.length}`) console.log(`declared names: ${Array.from(declaredNames).join(", ") || "(none)"}`) console.log(`self-refs rewr: ${rewriteCount}`) - console.log(`self-reexport: export * as ${nsName} from "./${basename}"`) + console.log(`self-reexport: export * as ${nsName} from "${reexportSource}"`) console.log(`output preview (last 10 lines):`) const outputLines = output.split("\n") for (const l of outputLines.slice(Math.max(0, outputLines.length - 10))) { @@ -231,7 +236,7 @@ fs.writeFileSync(absPath, output) console.log(`unwrapped ${path.relative(process.cwd(), 
absPath)} → ${nsName}`) console.log(` body lines: ${body.length}`) console.log(` self-refs rewr: ${rewriteCount}`) -console.log(` self-reexport: export * as ${nsName} from "./${basename}"`) +console.log(` self-reexport: export * as ${nsName} from "${reexportSource}"`) console.log("") console.log("Next: verify with") console.log(" bunx --bun tsgo --noEmit") diff --git a/packages/opencode/src/question/index.ts b/packages/opencode/src/question/index.ts index 627d04564d..3b377c9827 100644 --- a/packages/opencode/src/question/index.ts +++ b/packages/opencode/src/question/index.ts @@ -8,222 +8,222 @@ import { Log } from "@/util" import { withStatics } from "@/util/schema" import { QuestionID } from "./schema" -export namespace Question { - const log = Log.create({ service: "question" }) +const log = Log.create({ service: "question" }) - // Schemas +// Schemas - export class Option extends Schema.Class` if that directory exists. - * 5. Commit, push with `--no-verify`, and open a PR titled after the - * namespace. - * - * Usage: - * - * bun script/batch-unwrap-pr.ts src/file/ignore.ts - * bun script/batch-unwrap-pr.ts src/file/ignore.ts src/file/watcher.ts # multiple - * bun script/batch-unwrap-pr.ts --dry-run src/file/ignore.ts # plan only - * - * Repo assumptions: - * - * - Main checkout at /Users/kit/code/open-source/opencode (configurable via - * --repo-root=...). - * - Worktree root at /Users/kit/code/open-source/opencode-worktrees - * (configurable via --worktree-root=...). - * - * The script does NOT enable auto-merge; that's a separate manual step if we - * want it. - */ - -import fs from "node:fs" -import path from "node:path" -import { spawnSync, type SpawnSyncReturns } from "node:child_process" - -type Cmd = string[] - -function run( - cwd: string, - cmd: Cmd, - opts: { capture?: boolean; allowFail?: boolean; stdin?: string } = {}, -): SpawnSyncReturns { - const result = spawnSync(cmd[0], cmd.slice(1), { - cwd, - stdio: opts.capture ? 
["pipe", "pipe", "pipe"] : ["inherit", "inherit", "inherit"], - encoding: "utf-8", - input: opts.stdin, - }) - if (!opts.allowFail && result.status !== 0) { - const label = `${path.basename(cmd[0])} ${cmd.slice(1).join(" ")}` - console.error(`[fail] ${label} (cwd=${cwd})`) - if (opts.capture) { - if (result.stdout) console.error(result.stdout) - if (result.stderr) console.error(result.stderr) - } - process.exit(result.status ?? 1) - } - return result -} - -function fileSlug(fileArg: string): string { - // src/file/ignore.ts → file-ignore - return fileArg - .replace(/^src\//, "") - .replace(/\.tsx?$/, "") - .replace(/[\/_]/g, "-") -} - -function readNamespace(absFile: string): string { - const content = fs.readFileSync(absFile, "utf-8") - const match = content.match(/^export\s+namespace\s+(\w+)\s*\{/m) - if (!match) { - console.error(`no \`export namespace\` found in ${absFile}`) - process.exit(1) - } - return match[1] -} - -// --------------------------------------------------------------------------- - -const args = process.argv.slice(2) -const dryRun = args.includes("--dry-run") -const repoRoot = ( - args.find((a) => a.startsWith("--repo-root=")) ?? "--repo-root=/Users/kit/code/open-source/opencode" -).split("=")[1] -const worktreeRoot = ( - args.find((a) => a.startsWith("--worktree-root=")) ?? "--worktree-root=/Users/kit/code/open-source/opencode-worktrees" -).split("=")[1] -const targets = args.filter((a) => !a.startsWith("--")) - -if (targets.length === 0) { - console.error("Usage: bun script/batch-unwrap-pr.ts [more files...] 
[--dry-run]") - process.exit(1) -} - -if (!fs.existsSync(worktreeRoot)) fs.mkdirSync(worktreeRoot, { recursive: true }) - -for (const rel of targets) { - const absSrc = path.join(repoRoot, "packages", "opencode", rel) - if (!fs.existsSync(absSrc)) { - console.error(`skip ${rel}: file does not exist under ${repoRoot}/packages/opencode`) - continue - } - const slug = fileSlug(rel) - const branch = `kit/ns-${slug}` - const wt = path.join(worktreeRoot, `ns-${slug}`) - const ns = readNamespace(absSrc) - - console.log(`\n=== ${rel} → ${ns} (branch=${branch} wt=${path.basename(wt)}) ===`) - - if (dryRun) { - console.log(` would create worktree ${wt}`) - console.log(` would run unwrap on packages/opencode/${rel}`) - console.log(` would commit, push, and open PR`) - continue - } - - // Sync dev (fetch only; we branch off origin/dev directly). - run(repoRoot, ["git", "fetch", "origin", "dev", "--quiet"]) - - // Create worktree + branch. - if (fs.existsSync(wt)) { - console.log(` worktree already exists at ${wt}; skipping`) - continue - } - run(repoRoot, ["git", "worktree", "add", "-b", branch, wt, "origin/dev"]) - - // Symlink node_modules so bun/tsgo work without a full install. - // We link both the repo root and packages/opencode, since the opencode - // package has its own local node_modules (including bunfig.toml preload deps - // like @opentui/solid) that aren't hoisted to the root. - const wtRootNodeModules = path.join(wt, "node_modules") - if (!fs.existsSync(wtRootNodeModules)) { - fs.symlinkSync(path.join(repoRoot, "node_modules"), wtRootNodeModules) - } - const wtOpencode = path.join(wt, "packages", "opencode") - const wtOpencodeNodeModules = path.join(wtOpencode, "node_modules") - if (!fs.existsSync(wtOpencodeNodeModules)) { - fs.symlinkSync(path.join(repoRoot, "packages", "opencode", "node_modules"), wtOpencodeNodeModules) - } - const wtTarget = path.join(wt, "packages", "opencode", rel) - - // Baseline tsgo output (pre-change). 
- const baselinePath = path.join(wt, ".ns-baseline.txt") - const baseline = run(wtOpencode, ["bunx", "--bun", "tsgo", "--noEmit"], { capture: true, allowFail: true }) - fs.writeFileSync(baselinePath, (baseline.stdout ?? "") + (baseline.stderr ?? "")) - - // Run the unwrap script from the MAIN repo checkout (where the tooling - // lives) targeting the worktree's file by absolute path. We run from the - // worktree root (not `packages/opencode`) to avoid triggering the - // bunfig.toml preload, which needs `@opentui/solid` that only the TUI - // workspace has installed. - const unwrapScript = path.join(repoRoot, "packages", "opencode", "script", "unwrap-and-self-reexport.ts") - run(wt, ["bun", unwrapScript, wtTarget]) - - // Post-change tsgo. - const after = run(wtOpencode, ["bunx", "--bun", "tsgo", "--noEmit"], { capture: true, allowFail: true }) - const afterText = (after.stdout ?? "") + (after.stderr ?? "") - - // Compare line-sets to detect NEW tsgo errors. - const sanitize = (s: string) => - s - .split("\n") - .map((l) => l.replace(/\s+$/, "")) - .filter(Boolean) - .sort() - .join("\n") - const baselineSorted = sanitize(fs.readFileSync(baselinePath, "utf-8")) - const afterSorted = sanitize(afterText) - if (baselineSorted !== afterSorted) { - console.log(` tsgo output differs from baseline. Showing diff:`) - const diffResult = spawnSync("diff", ["-u", baselinePath, "-"], { input: afterText, encoding: "utf-8" }) - if (diffResult.stdout) console.log(diffResult.stdout) - if (diffResult.stderr) console.log(diffResult.stderr) - console.error(` aborting ${rel}; investigate manually in ${wt}`) - process.exit(1) - } - - // SDK build. - run(wtOpencode, ["bun", "run", "--conditions=browser", "./src/index.ts", "generate"], { capture: true }) - - // Run tests for the directory, if a matching test dir exists. 
- const dirName = path.basename(path.dirname(rel)) - const testDir = path.join(wt, "packages", "opencode", "test", dirName) - if (fs.existsSync(testDir)) { - const testResult = run(wtOpencode, ["bun", "run", "test", `test/${dirName}`], { capture: true, allowFail: true }) - const combined = (testResult.stdout ?? "") + (testResult.stderr ?? "") - if (testResult.status !== 0) { - console.error(combined) - console.error(` tests failed for ${rel}; aborting`) - process.exit(1) - } - // Surface the summary line if present. - const summary = combined - .split("\n") - .filter((l) => /\bpass\b|\bfail\b/.test(l)) - .slice(-3) - .join("\n") - if (summary) console.log(` tests: ${summary.replace(/\n/g, " | ")}`) - } else { - console.log(` tests: no test/${dirName} directory, skipping`) - } - - // Clean up baseline file before committing. - fs.unlinkSync(baselinePath) - - // Commit, push, open PR. - const commitMsg = `refactor: unwrap ${ns} namespace + self-reexport` - run(wt, ["git", "add", "-A"]) - run(wt, ["git", "commit", "-m", commitMsg]) - run(wt, ["git", "push", "-u", "origin", branch, "--no-verify"]) - - const prBody = [ - "## Summary", - `- Unwrap the \`${ns}\` namespace in \`packages/opencode/${rel}\` to flat top-level exports.`, - `- Append \`export * as ${ns} from "./${path.basename(rel, ".ts")}"\` so consumers keep the same \`${ns}.x\` import ergonomics.`, - "", - "## Verification (local)", - "- `bunx --bun tsgo --noEmit` — no new errors vs baseline.", - "- `bun run --conditions=browser ./src/index.ts generate` — clean.", - `- \`bun run test test/${dirName}\` — all pass (if applicable).`, - ].join("\n") - run(wt, ["gh", "pr", "create", "--title", commitMsg, "--base", "dev", "--body", prBody]) - - console.log(` PR opened for ${rel}`) -} diff --git a/packages/opencode/script/collapse-barrel.ts b/packages/opencode/script/collapse-barrel.ts deleted file mode 100644 index 05bb11589c..0000000000 --- a/packages/opencode/script/collapse-barrel.ts +++ /dev/null @@ -1,161 +0,0 
@@ -#!/usr/bin/env bun -/** - * Collapse a single-namespace barrel directory into a dir/index.ts module. - * - * Given a directory `src/foo/` that contains: - * - * - `index.ts` (exactly `export * as Foo from "./foo"`) - * - `foo.ts` (the real implementation) - * - zero or more sibling files - * - * this script: - * - * 1. Deletes the old `index.ts` barrel. - * 2. `git mv`s `foo.ts` → `index.ts` so the implementation IS the directory entry. - * 3. Appends `export * as Foo from "."` to the new `index.ts`. - * 4. Rewrites any same-directory sibling `*.ts` files that imported - * `./foo` (with or without the namespace name) to import `"."` instead. - * - * Consumer files outside the directory keep importing from the directory - * (`"@/foo"` / `"../foo"` / etc.) and continue to work, because - * `dir/index.ts` now provides the `Foo` named export directly. - * - * Usage: - * - * bun script/collapse-barrel.ts src/bus - * bun script/collapse-barrel.ts src/bus --dry-run - * - * Notes: - * - * - Only works on directories whose barrel is a single - * `export * as Name from "./file"` line. Refuses otherwise. - * - Refuses if the implementation file name already conflicts with - * `index.ts`. - * - Safe to run repeatedly: a second run on an already-collapsed dir - * will exit with a clear message. 
- */ - -import fs from "node:fs" -import path from "node:path" -import { spawnSync } from "node:child_process" - -const args = process.argv.slice(2) -const dryRun = args.includes("--dry-run") -const targetArg = args.find((a) => !a.startsWith("--")) - -if (!targetArg) { - console.error("Usage: bun script/collapse-barrel.ts [--dry-run]") - process.exit(1) -} - -const dir = path.resolve(targetArg) -const indexPath = path.join(dir, "index.ts") - -if (!fs.existsSync(dir) || !fs.statSync(dir).isDirectory()) { - console.error(`Not a directory: ${dir}`) - process.exit(1) -} -if (!fs.existsSync(indexPath)) { - console.error(`No index.ts in ${dir}`) - process.exit(1) -} - -// Validate barrel shape. -const indexContent = fs.readFileSync(indexPath, "utf-8").trim() -const match = indexContent.match(/^export\s+\*\s+as\s+(\w+)\s+from\s+["']\.\/([^"']+)["']\s*;?\s*$/) -if (!match) { - console.error(`Not a simple single-namespace barrel:\n${indexContent}`) - process.exit(1) -} -const namespaceName = match[1] -const implRel = match[2].replace(/\.ts$/, "") -const implPath = path.join(dir, `${implRel}.ts`) - -if (!fs.existsSync(implPath)) { - console.error(`Implementation file not found: ${implPath}`) - process.exit(1) -} - -if (implRel === "index") { - console.error(`Nothing to do — impl file is already index.ts`) - process.exit(0) -} - -console.log(`Collapsing ${path.relative(process.cwd(), dir)}`) -console.log(` namespace: ${namespaceName}`) -console.log(` impl file: ${implRel}.ts → index.ts`) - -// Figure out which sibling files need rewriting. 
-const siblings = fs - .readdirSync(dir) - .filter((f) => f.endsWith(".ts") || f.endsWith(".tsx")) - .filter((f) => f !== "index.ts" && f !== `${implRel}.ts`) - .map((f) => path.join(dir, f)) - -type SiblingEdit = { file: string; content: string } -const siblingEdits: SiblingEdit[] = [] - -for (const sibling of siblings) { - const content = fs.readFileSync(sibling, "utf-8") - // Match any import or re-export referring to "./" inside this directory. - const siblingRegex = new RegExp(`(from\\s*["'])\\.\\/${implRel.replace(/[-\\^$*+?.()|[\]{}]/g, "\\$&")}(["'])`, "g") - if (!siblingRegex.test(content)) continue - const updated = content.replace(siblingRegex, `$1.$2`) - siblingEdits.push({ file: sibling, content: updated }) -} - -if (siblingEdits.length > 0) { - console.log(` sibling rewrites: ${siblingEdits.length}`) - for (const edit of siblingEdits) { - console.log(` ${path.relative(process.cwd(), edit.file)}`) - } -} else { - console.log(` sibling rewrites: none`) -} - -if (dryRun) { - console.log(`\n(dry run) would:`) - console.log(` - delete ${path.relative(process.cwd(), indexPath)}`) - console.log(` - git mv ${path.relative(process.cwd(), implPath)} ${path.relative(process.cwd(), indexPath)}`) - console.log(` - append \`export * as ${namespaceName} from "."\` to the new index.ts`) - for (const edit of siblingEdits) { - console.log(` - rewrite sibling: ${path.relative(process.cwd(), edit.file)}`) - } - process.exit(0) -} - -// Apply: remove the old barrel, git-mv the impl onto it, then rewrite content. -// We can't git-mv on top of an existing tracked file, so we remove the barrel first. -function runGit(...cmd: string[]) { - const res = spawnSync("git", cmd, { stdio: "inherit" }) - if (res.status !== 0) { - console.error(`git ${cmd.join(" ")} failed`) - process.exit(res.status ?? 
1) - } -} - -// Step 1: remove the barrel -runGit("rm", "-f", indexPath) - -// Step 2: rename the impl file into index.ts -runGit("mv", implPath, indexPath) - -// Step 3: append the self-reexport to the new index.ts -const newContent = fs.readFileSync(indexPath, "utf-8") -const trimmed = newContent.endsWith("\n") ? newContent : newContent + "\n" -fs.writeFileSync(indexPath, `${trimmed}\nexport * as ${namespaceName} from "."\n`) -console.log(` appended: export * as ${namespaceName} from "."`) - -// Step 4: rewrite siblings -for (const edit of siblingEdits) { - fs.writeFileSync(edit.file, edit.content) -} -if (siblingEdits.length > 0) { - console.log(` rewrote ${siblingEdits.length} sibling file(s)`) -} - -console.log(`\nDone. Verify with:`) -console.log(` cd packages/opencode`) -console.log(` bunx --bun tsgo --noEmit`) -console.log(` bun run --conditions=browser ./src/index.ts generate`) -console.log(` bun run test`) diff --git a/packages/opencode/script/unwrap-and-self-reexport.ts b/packages/opencode/script/unwrap-and-self-reexport.ts deleted file mode 100644 index 09256f3a51..0000000000 --- a/packages/opencode/script/unwrap-and-self-reexport.ts +++ /dev/null @@ -1,246 +0,0 @@ -#!/usr/bin/env bun -/** - * Unwrap a single `export namespace` in a file into flat top-level exports - * plus a self-reexport at the bottom of the same file. - * - * Usage: - * - * bun script/unwrap-and-self-reexport.ts src/file/ignore.ts - * bun script/unwrap-and-self-reexport.ts src/file/ignore.ts --dry-run - * - * Input file shape: - * - * // imports ... - * - * export namespace FileIgnore { - * export function ...(...) { ... } - * const helper = ... - * } - * - * Output shape: - * - * // imports ... - * - * export function ...(...) { ... } - * const helper = ... - * - * export * as FileIgnore from "./ignore" - * - * What the script does: - * - * 1. Uses ast-grep to locate the single `export namespace Foo { ... }` block. - * 2. 
Removes the `export namespace Foo {` line and the matching closing `}`. - * 3. Dedents the body by one indent level (2 spaces). - * 4. Rewrites `Foo.Bar` self-references inside the file to just `Bar` - * (but only for names that are actually exported from the namespace — - * non-exported members get the same treatment so references remain valid). - * 5. Appends `export * as Foo from "./"` at the end of the file. - * - * What it does NOT do: - * - * - Does not create or modify barrel `index.ts` files. - * - Does not rewrite any consumer imports. Consumers already import from - * the file path itself (e.g. `import { FileIgnore } from "../file/ignore"`); - * the self-reexport keeps that import working unchanged. - * - Does not handle files with more than one `export namespace` declaration. - * The script refuses that case. - * - * Requires: ast-grep (`brew install ast-grep`). - */ - -import fs from "node:fs" -import path from "node:path" - -const args = process.argv.slice(2) -const dryRun = args.includes("--dry-run") -const targetArg = args.find((a) => !a.startsWith("--")) - -if (!targetArg) { - console.error("Usage: bun script/unwrap-and-self-reexport.ts [--dry-run]") - process.exit(1) -} - -const absPath = path.resolve(targetArg) -if (!fs.existsSync(absPath) || !fs.statSync(absPath).isFile()) { - console.error(`Not a file: ${absPath}`) - process.exit(1) -} - -// Locate the namespace block with ast-grep (accurate AST boundaries). 
-const ast = Bun.spawnSync( - ["ast-grep", "run", "--pattern", "export namespace $NAME { $$$BODY }", "--lang", "typescript", "--json", absPath], - { stdout: "pipe", stderr: "pipe" }, -) -if (ast.exitCode !== 0) { - console.error("ast-grep failed:", ast.stderr.toString()) - process.exit(1) -} - -type AstMatch = { - range: { start: { line: number; column: number }; end: { line: number; column: number } } - metaVariables: { single: Record } -} -const matches = JSON.parse(ast.stdout.toString()) as AstMatch[] -if (matches.length === 0) { - console.error(`No \`export namespace\` found in ${path.relative(process.cwd(), absPath)}`) - process.exit(1) -} -if (matches.length > 1) { - console.error(`File has ${matches.length} \`export namespace\` declarations — this script handles one per file.`) - for (const m of matches) console.error(` ${m.metaVariables.single.NAME.text} (line ${m.range.start.line + 1})`) - process.exit(1) -} - -const match = matches[0] -const nsName = match.metaVariables.single.NAME.text -const startLine = match.range.start.line -const endLine = match.range.end.line - -const original = fs.readFileSync(absPath, "utf-8") -const lines = original.split("\n") - -// Split the file into before/body/after. -const before = lines.slice(0, startLine) -const body = lines.slice(startLine + 1, endLine) -const after = lines.slice(endLine + 1) - -// Dedent body by one indent level (2 spaces). -const dedented = body.map((line) => { - if (line === "") return "" - if (line.startsWith(" ")) return line.slice(2) - return line -}) - -// Collect all top-level declared identifiers inside the namespace body so we can -// rewrite `Foo.X` → `X` when X is one of them. We gather BOTH exported and -// non-exported names because the namespace body might reference its own -// non-exported helpers via `Foo.helper` too. 
-const declaredNames = new Set() -const declRe = - /^\s*(?:export\s+)?(?:abstract\s+)?(?:async\s+)?(?:const|let|var|function|class|interface|type|enum)\s+(\w+)/ -for (const line of dedented) { - const m = line.match(declRe) - if (m) declaredNames.add(m[1]) -} -// Also capture `export { X, Y }` re-exports inside the namespace. -const reExportRe = /export\s*\{\s*([^}]+)\}/g -for (const line of dedented) { - for (const reExport of line.matchAll(reExportRe)) { - for (const part of reExport[1].split(",")) { - const name = part - .trim() - .split(/\s+as\s+/) - .pop()! - .trim() - if (name) declaredNames.add(name) - } - } -} - -// Rewrite `Foo.X` → `X` inside the body, avoiding matches in strings, comments, -// templates. We walk the line char-by-char rather than using a regex so we can -// skip over those segments cleanly. -let rewriteCount = 0 -function rewriteLine(line: string): string { - const out: string[] = [] - let i = 0 - let stringQuote: string | null = null - while (i < line.length) { - const ch = line[i] - // String / template literal pass-through. - if (stringQuote) { - out.push(ch) - if (ch === "\\" && i + 1 < line.length) { - out.push(line[i + 1]) - i += 2 - continue - } - if (ch === stringQuote) stringQuote = null - i++ - continue - } - if (ch === '"' || ch === "'" || ch === "`") { - stringQuote = ch - out.push(ch) - i++ - continue - } - // Line comment: emit the rest of the line untouched. - if (ch === "/" && line[i + 1] === "/") { - out.push(line.slice(i)) - i = line.length - continue - } - // Block comment: emit until "*/" if present on same line; else rest of line. - if (ch === "/" && line[i + 1] === "*") { - const end = line.indexOf("*/", i + 2) - if (end === -1) { - out.push(line.slice(i)) - i = line.length - } else { - out.push(line.slice(i, end + 2)) - i = end + 2 - } - continue - } - // Try to match `Foo.` at this position. 
- if (line.startsWith(nsName + ".", i)) { - // Make sure the char before is NOT a word character (otherwise we'd be in the middle of another identifier). - const prev = i === 0 ? "" : line[i - 1] - if (!/\w/.test(prev)) { - const after = line.slice(i + nsName.length + 1) - const nameMatch = after.match(/^([A-Za-z_$][\w$]*)/) - if (nameMatch && declaredNames.has(nameMatch[1])) { - out.push(nameMatch[1]) - i += nsName.length + 1 + nameMatch[1].length - rewriteCount++ - continue - } - } - } - out.push(ch) - i++ - } - return out.join("") -} -const rewrittenBody = dedented.map(rewriteLine) - -// Assemble the new file. Collapse multiple trailing blank lines so the -// self-reexport sits cleanly at the end. -// -// When the file is itself `index.ts`, prefer `"."` over `"./index"` — both are -// valid but `"."` matches the existing convention in the codebase (e.g. -// pty/index.ts, file/index.ts, etc.) and avoids referencing "index" literally. -const basename = path.basename(absPath, ".ts") -const reexportSource = basename === "index" ? "." 
: `./${basename}` -const assembled = [...before, ...rewrittenBody, ...after].join("\n") -const trimmed = assembled.replace(/\s+$/g, "") -const output = `${trimmed}\n\nexport * as ${nsName} from "${reexportSource}"\n` - -if (dryRun) { - console.log(`--- dry run: ${path.relative(process.cwd(), absPath)} ---`) - console.log(`namespace: ${nsName}`) - console.log(`body lines: ${body.length}`) - console.log(`declared names: ${Array.from(declaredNames).join(", ") || "(none)"}`) - console.log(`self-refs rewr: ${rewriteCount}`) - console.log(`self-reexport: export * as ${nsName} from "${reexportSource}"`) - console.log(`output preview (last 10 lines):`) - const outputLines = output.split("\n") - for (const l of outputLines.slice(Math.max(0, outputLines.length - 10))) { - console.log(` ${l}`) - } - process.exit(0) -} - -fs.writeFileSync(absPath, output) -console.log(`unwrapped ${path.relative(process.cwd(), absPath)} → ${nsName}`) -console.log(` body lines: ${body.length}`) -console.log(` self-refs rewr: ${rewriteCount}`) -console.log(` self-reexport: export * as ${nsName} from "${reexportSource}"`) -console.log("") -console.log("Next: verify with") -console.log(" bunx --bun tsgo --noEmit") -console.log(" bun run --conditions=browser ./src/index.ts generate") -console.log( - ` bun run test test/${path.relative(path.join(path.dirname(absPath), "..", ".."), absPath).replace(/\.ts$/, "")}*`, -) diff --git a/packages/opencode/script/unwrap-namespace.ts b/packages/opencode/script/unwrap-namespace.ts deleted file mode 100644 index 45c16f6c73..0000000000 --- a/packages/opencode/script/unwrap-namespace.ts +++ /dev/null @@ -1,305 +0,0 @@ -#!/usr/bin/env bun -/** - * Unwrap a TypeScript `export namespace` into flat exports + barrel. - * - * Usage: - * bun script/unwrap-namespace.ts src/bus/index.ts - * bun script/unwrap-namespace.ts src/bus/index.ts --dry-run - * bun script/unwrap-namespace.ts src/pty/index.ts --name service # avoid collision with pty.ts - * - * What it does: - * 1. 
Reads the file and finds the `export namespace Foo { ... }` block - * (uses ast-grep for accurate AST-based boundary detection) - * 2. Removes the namespace wrapper and dedents the body - * 3. Fixes self-references (e.g. Config.PermissionAction → PermissionAction) - * 4. If the file is index.ts, renames it to .ts - * 5. Creates/updates index.ts with `export * as Foo from "./"` - * 6. Rewrites import paths across src/, test/, and script/ - * 7. Fixes sibling imports within the same directory - * - * Requires: ast-grep (`brew install ast-grep` or `cargo install ast-grep`) - */ - -import path from "path" -import fs from "fs" - -const args = process.argv.slice(2) -const dryRun = args.includes("--dry-run") -const nameFlag = args.find((a, i) => args[i - 1] === "--name") -const filePath = args.find((a) => !a.startsWith("--") && args[args.indexOf(a) - 1] !== "--name") - -if (!filePath) { - console.error("Usage: bun script/unwrap-namespace.ts [--dry-run] [--name ]") - process.exit(1) -} - -const absPath = path.resolve(filePath) -if (!fs.existsSync(absPath)) { - console.error(`File not found: ${absPath}`) - process.exit(1) -} - -const src = fs.readFileSync(absPath, "utf-8") -const lines = src.split("\n") - -// Use ast-grep to find the namespace boundaries accurately. -// This avoids false matches from braces in strings, templates, comments, etc. -const astResult = Bun.spawnSync( - ["ast-grep", "run", "--pattern", "export namespace $NAME { $$$BODY }", "--lang", "typescript", "--json", absPath], - { stdout: "pipe", stderr: "pipe" }, -) - -if (astResult.exitCode !== 0) { - console.error("ast-grep failed:", astResult.stderr.toString()) - process.exit(1) -} - -const matches = JSON.parse(astResult.stdout.toString()) as Array<{ - text: string - range: { start: { line: number; column: number }; end: { line: number; column: number } } - metaVariables: { single: Record; multi: Record> } -}> - -if (matches.length === 0) { - console.error("No `export namespace Foo { ... 
}` found in file") - process.exit(1) -} - -if (matches.length > 1) { - console.error(`Found ${matches.length} namespaces — this script handles one at a time`) - console.error("Namespaces found:") - for (const m of matches) console.error(` ${m.metaVariables.single.NAME.text} (line ${m.range.start.line + 1})`) - process.exit(1) -} - -const match = matches[0] -const nsName = match.metaVariables.single.NAME.text -const nsLine = match.range.start.line // 0-indexed -const closeLine = match.range.end.line // 0-indexed, the line with closing `}` - -console.log(`Found: export namespace ${nsName} { ... }`) -console.log(` Lines ${nsLine + 1}–${closeLine + 1} (${closeLine - nsLine + 1} lines)`) - -// Build the new file content: -// 1. Everything before the namespace declaration (imports, etc.) -// 2. The namespace body, dedented by one level (2 spaces) -// 3. Everything after the closing brace (rare, but possible) -const before = lines.slice(0, nsLine) -const body = lines.slice(nsLine + 1, closeLine) -const after = lines.slice(closeLine + 1) - -// Dedent: remove exactly 2 leading spaces from each line -const dedented = body.map((line) => { - if (line === "") return "" - if (line.startsWith(" ")) return line.slice(2) - return line -}) - -let newContent = [...before, ...dedented, ...after].join("\n") - -// --- Fix self-references --- -// After unwrapping, references like `Config.PermissionAction` inside the same file -// need to become just `PermissionAction`. Only fix code positions, not strings. -const exportedNames = new Set() -const exportRegex = /export\s+(?:const|function|class|interface|type|enum|abstract\s+class)\s+(\w+)/g -for (const line of dedented) { - for (const m of line.matchAll(exportRegex)) exportedNames.add(m[1]) -} -const reExportRegex = /export\s*\{\s*([^}]+)\}/g -for (const line of dedented) { - for (const m of line.matchAll(reExportRegex)) { - for (const name of m[1].split(",")) { - const trimmed = name - .trim() - .split(/\s+as\s+/) - .pop()! 
- .trim() - if (trimmed) exportedNames.add(trimmed) - } - } -} - -let selfRefCount = 0 -if (exportedNames.size > 0) { - const fixedLines = newContent.split("\n").map((line) => { - // Split line into string-literal and code segments to avoid replacing inside strings - const segments: Array<{ text: string; isString: boolean }> = [] - let i = 0 - let current = "" - let inString: string | null = null - - while (i < line.length) { - const ch = line[i] - if (inString) { - current += ch - if (ch === "\\" && i + 1 < line.length) { - current += line[i + 1] - i += 2 - continue - } - if (ch === inString) { - segments.push({ text: current, isString: true }) - current = "" - inString = null - } - i++ - continue - } - if (ch === '"' || ch === "'" || ch === "`") { - if (current) segments.push({ text: current, isString: false }) - current = ch - inString = ch - i++ - continue - } - if (ch === "/" && i + 1 < line.length && line[i + 1] === "/") { - current += line.slice(i) - segments.push({ text: current, isString: true }) - current = "" - i = line.length - continue - } - current += ch - i++ - } - if (current) segments.push({ text: current, isString: !!inString }) - - return segments - .map((seg) => { - if (seg.isString) return seg.text - let result = seg.text - for (const name of exportedNames) { - const pattern = `${nsName}.${name}` - while (result.includes(pattern)) { - const idx = result.indexOf(pattern) - const charBefore = idx > 0 ? result[idx - 1] : " " - const charAfter = idx + pattern.length < result.length ? result[idx + pattern.length] : " " - if (/\w/.test(charBefore) || /\w/.test(charAfter)) break - result = result.slice(0, idx) + name + result.slice(idx + pattern.length) - selfRefCount++ - } - } - return result - }) - .join("") - }) - newContent = fixedLines.join("\n") -} - -// Figure out file naming -const dir = path.dirname(absPath) -const basename = path.basename(absPath, ".ts") -const isIndex = basename === "index" -const implName = nameFlag ?? (isIndex ? 
nsName.replace(/([a-z])([A-Z])/g, "$1-$2").toLowerCase() : basename) -const implFile = path.join(dir, `${implName}.ts`) -const indexFile = path.join(dir, "index.ts") -const barrelLine = `export * as ${nsName} from "./${implName}"\n` - -console.log("") -if (isIndex) { - console.log(`Plan: rename ${basename}.ts → ${implName}.ts, create new index.ts barrel`) -} else { - console.log(`Plan: rewrite ${basename}.ts in place, create index.ts barrel`) -} -if (selfRefCount > 0) console.log(`Fixed ${selfRefCount} self-reference(s) (${nsName}.X → X)`) -console.log("") - -if (dryRun) { - console.log("--- DRY RUN ---") - console.log("") - console.log(`=== ${implName}.ts (first 30 lines) ===`) - newContent - .split("\n") - .slice(0, 30) - .forEach((l, i) => console.log(` ${i + 1}: ${l}`)) - console.log(" ...") - console.log("") - console.log(`=== index.ts ===`) - console.log(` ${barrelLine.trim()}`) - console.log("") - if (!isIndex) { - const relDir = path.relative(path.resolve("src"), dir) - console.log(`=== Import rewrites (would apply) ===`) - console.log(` ${relDir}/${basename}" → ${relDir}" across src/, test/, script/`) - } else { - console.log("No import rewrites needed (was index.ts)") - } -} else { - if (isIndex) { - fs.writeFileSync(implFile, newContent) - fs.writeFileSync(indexFile, barrelLine) - console.log(`Wrote ${implName}.ts (${newContent.split("\n").length} lines)`) - console.log(`Wrote index.ts (barrel)`) - } else { - fs.writeFileSync(absPath, newContent) - if (fs.existsSync(indexFile)) { - const existing = fs.readFileSync(indexFile, "utf-8") - if (!existing.includes(`export * as ${nsName}`)) { - fs.appendFileSync(indexFile, barrelLine) - console.log(`Appended to existing index.ts`) - } else { - console.log(`index.ts already has ${nsName} export`) - } - } else { - fs.writeFileSync(indexFile, barrelLine) - console.log(`Wrote index.ts (barrel)`) - } - console.log(`Rewrote ${basename}.ts (${newContent.split("\n").length} lines)`) - } - - // --- Rewrite import paths 
across src/, test/, script/ --- - const relDir = path.relative(path.resolve("src"), dir) - if (!isIndex) { - const oldTail = `${relDir}/${basename}` - const searchDirs = ["src", "test", "script"].filter((d) => fs.existsSync(d)) - const rgResult = Bun.spawnSync(["rg", "-l", `from.*${oldTail}"`, ...searchDirs], { - stdout: "pipe", - stderr: "pipe", - }) - const filesToRewrite = rgResult.stdout - .toString() - .trim() - .split("\n") - .filter((f) => f.length > 0) - - if (filesToRewrite.length > 0) { - console.log(`\nRewriting imports in ${filesToRewrite.length} file(s)...`) - for (const file of filesToRewrite) { - const content = fs.readFileSync(file, "utf-8") - fs.writeFileSync(file, content.replaceAll(`${oldTail}"`, `${relDir}"`)) - } - console.log(` Done: ${oldTail}" → ${relDir}"`) - } else { - console.log("\nNo import rewrites needed") - } - } else { - console.log("\nNo import rewrites needed (was index.ts)") - } - - // --- Fix sibling imports within the same directory --- - const siblingFiles = fs.readdirSync(dir).filter((f) => { - if (!f.endsWith(".ts")) return false - if (f === "index.ts" || f === `${implName}.ts`) return false - return true - }) - - let siblingFixCount = 0 - for (const sibFile of siblingFiles) { - const sibPath = path.join(dir, sibFile) - const content = fs.readFileSync(sibPath, "utf-8") - const pattern = new RegExp(`from\\s+["']\\./${basename}["']`, "g") - if (pattern.test(content)) { - fs.writeFileSync(sibPath, content.replace(pattern, `from "."`)) - siblingFixCount++ - } - } - if (siblingFixCount > 0) { - console.log(`Fixed ${siblingFixCount} sibling import(s) in ${path.basename(dir)}/ (./${basename} → .)`) - } -} - -console.log("") -console.log("=== Verify ===") -console.log("") -console.log("bunx --bun tsgo --noEmit # typecheck") -console.log("bun run test # run tests") diff --git a/packages/opencode/specs/effect/namespace-treeshake.md b/packages/opencode/specs/effect/namespace-treeshake.md deleted file mode 100644 index 
ef78c762bb..0000000000 --- a/packages/opencode/specs/effect/namespace-treeshake.md +++ /dev/null @@ -1,256 +0,0 @@ -# Namespace → self-reexport migration - -Migrate every `export namespace Foo { ... }` to flat top-level exports plus a -single self-reexport line at the bottom of the same file: - -```ts -export * as Foo from "./foo" -``` - -No barrel `index.ts` files. No cross-directory indirection. Consumers keep the -exact same `import { Foo } from "../foo/foo"` ergonomics. - -## Why this pattern - -We tested three options against Bun, esbuild, Rollup (what Vite uses under the -hood), Bun's runtime, and Node's native TypeScript runner. - -``` - heavy.ts loaded? - A. namespace B. barrel C. self-reexport -Bun bundler YES YES no -esbuild YES YES no -Rollup (Vite) YES YES no -Bun runtime YES YES no -Node --experimental-strip-types SYNTAX ERROR YES no -``` - -- **`export namespace`** compiles to an IIFE. Bundlers see one opaque function - call and can't analyze what's used. Node's native TS runner rejects the - syntax outright: `SyntaxError: TypeScript namespace declaration is not -supported in strip-only mode`. -- **Barrel `index.ts`** files (`export * as Foo from "./foo"` in a separate - file) force every re-exported sibling to evaluate when you import one name. - Siblings with side effects (top-level imports of SDKs, etc.) always load. -- **Self-reexport** keeps the file as plain ESM. Bundlers see static named - exports. The module is only pulled in when something actually imports from - it. There is no barrel hop, so no sibling contamination and no circular - import hazard. - -Bundle overhead for the self-reexport wrapper is roughly 240 bytes per module -(`Object.defineProperty` namespace proxy). At ~100 modules that's ~24KB — -negligible for a CLI binary. - -## The pattern - -### Before - -```ts -// src/permission/arity.ts -export namespace BashArity { - export function prefix(tokens: string[]) { ... 
} -} -``` - -### After - -```ts -// src/permission/arity.ts -export function prefix(tokens: string[]) { ... } - -export * as BashArity from "./arity" -``` - -Consumers don't change at all: - -```ts -import { BashArity } from "@/permission/arity" -BashArity.prefix(...) // still works -``` - -Editors still auto-import `BashArity` like any named export, because the file -does have a named `BashArity` export at the module top level. - -### Odd but harmless - -`BashArity.BashArity.BashArity.prefix(...)` compiles and runs because the -namespace contains a re-export of itself. Nobody would write that. Not a -problem. - -## Why this is different from what we tried first - -An earlier pass used sibling barrel files (`index.ts` with `export * as ...`). -That turned out to be wrong for our constraints: - -1. The barrel file always loads all its sibling modules when you import - through it, even if you only need one. For our CLI this is exactly the - cost we're trying to avoid. -2. Barrel + sibling imports made it very easy to accidentally create circular - imports that only surface as `ReferenceError` at runtime, not at - typecheck. - -The self-reexport has none of those issues. There is no indirection. The -file and the namespace are the same unit. - -## Why this matters for startup - -The worst import chain in the codebase looks like: - -``` -src/index.ts - └── FormatError from src/cli/error.ts - ├── { Provider } from provider/provider.ts (~1700 lines) - │ ├── 20+ @ai-sdk/* packages - │ ├── @aws-sdk/credential-providers - │ ├── google-auth-library - │ └── more - ├── { Config } from config/config.ts (~1600 lines) - └── { MCP } from mcp/mcp.ts (~900 lines) -``` - -All of that currently gets pulled in just to do `.isInstance()` on a handful -of error classes. The namespace IIFE shape is the main reason bundlers cannot -strip the unused parts. Self-reexport + flat ESM fixes it. 
- -## Automation - -From `packages/opencode`: - -```bash -bun script/unwrap-namespace.ts [--dry-run] -``` - -The script: - -1. Uses ast-grep to locate the `export namespace Foo { ... }` block accurately. -2. Removes the `export namespace Foo {` line and the matching closing `}`. -3. Dedents the body by one indent level (2 spaces). -4. Rewrites `Foo.Bar` self-references inside the file to just `Bar`. -5. Appends `export * as Foo from "./"` at the bottom of the file. -6. Never creates a barrel `index.ts`. - -### Typical flow for one file - -```bash -# 1. Preview -bun script/unwrap-namespace.ts src/permission/arity.ts --dry-run - -# 2. Apply -bun script/unwrap-namespace.ts src/permission/arity.ts - -# 3. Verify -cd packages/opencode -bunx --bun tsgo --noEmit -bun run --conditions=browser ./src/index.ts generate -bun run test -``` - -### Consumer imports usually don't need to change - -Most consumers already import straight from the file, e.g.: - -```ts -import { BashArity } from "@/permission/arity" -import { Config } from "@/config/config" -``` - -Because the file itself now does `export * as Foo from "./foo"`, those imports -keep working with zero edits. - -The only edits needed are when a consumer was importing through a previous -barrel (`"@/config"` or `"../config"` resolving to `config/index.ts`). In -that case, repoint it at the file: - -```ts -// before -import { Config } from "@/config" - -// after -import { Config } from "@/config/config" -``` - -### Dynamic imports in tests - -If a test did `const { Foo } = await import("../../src/x/y")`, the destructure -still works because of the self-reexport. No change required. 
- -## Verification checklist (per PR) - -Run all of these locally before pushing: - -```bash -cd packages/opencode -bunx --bun tsgo --noEmit -bun run --conditions=browser ./src/index.ts generate -bun run test -``` - -Also do a quick grep in `src/`, `test/`, and `script/` to make sure no -consumer is still importing the namespace from an old barrel path that no -longer exports it. - -The SDK build step (`bun run --conditions=browser ./src/index.ts generate`) -evaluates every module eagerly and is the most reliable way to catch circular -import regressions at runtime — the typechecker does not catch these. - -## Rules for new code - -- No new `export namespace`. -- Every module directory has a single canonical file — typically - `dir/index.ts` — with flat top-level exports and a self-reexport at the - bottom: - `export * as Foo from "."` -- Consumers import from the directory: - `import { Foo } from "@/dir"` or `import { Foo } from "../dir"`. -- No sibling barrel files. If a directory has multiple independent - namespaces, they each get their own file (e.g. `config/config.ts`, - `config/plugin.ts`) and their own self-reexport; the `index.ts` in that - directory stays minimal or does not exist. -- If a file needs a sibling, import the sibling file directly: - `import * as Sibling from "./sibling"`, not `from "."`. - -### Why `dir/index.ts` + `"."` is fine for us - -A single-file module (e.g. `pty/`) can live entirely in `dir/index.ts` -with `export * as Foo from "."` at the bottom. Consumers write the -short form: - -```ts -import { Pty } from "@/pty" -``` - -This works in Bun runtime, Bun build, esbuild, and Rollup. It does NOT -work under Node's `--experimental-strip-types` runner: - -``` -node --experimental-strip-types entry.ts - ERR_UNSUPPORTED_DIR_IMPORT: Directory import '/.../pty' is not supported -``` - -Node requires an explicit file or a `package.json#exports` map for ESM. 
-We don't care about that target right now because the opencode CLI is -built with Bun and the web apps are built with Vite/Rollup. If we ever -want to run raw `.ts` through Node, we'll need to either use explicit -`.ts` extensions everywhere or add per-directory `package.json` exports -maps. - -### When NOT to collapse to `index.ts` - -Some directories contain multiple independent namespaces where -`dir/index.ts` would be misleading. Examples: - -- `config/` has `Config`, `ConfigPaths`, `ConfigMarkdown`, `ConfigPlugin`, - `ConfigKeybinds`. Each lives in its own file with its own self-reexport - (`config/config.ts`, `config/plugin.ts`, etc.). Consumers import the - specific one: `import { ConfigPlugin } from "@/config/plugin"`. -- Same shape for `session/`, `server/`, etc. - -Collapsing one of those into `index.ts` would mean picking a single -"canonical" namespace for the directory, which breaks the symmetry and -hides the other files. - -## Scope - -There are still dozens of `export namespace` files left across the codebase. -Each one is its own small PR. Do them one at a time, verified locally, rather -than batching by directory. 
From ee7339f2c6a4575a81bcaff2240076571db03e11 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 23:10:45 -0400 Subject: [PATCH 118/201] refactor: move provider and config provider routes onto HttpApi (#23004) --- packages/opencode/src/provider/auth.ts | 38 +-- packages/opencode/src/provider/provider.ts | 228 ++++++++++-------- .../opencode/src/server/instance/config.ts | 12 +- .../src/server/instance/httpapi/config.ts | 51 ++++ .../src/server/instance/httpapi/provider.ts | 104 +++++++- .../src/server/instance/httpapi/server.ts | 3 + .../opencode/src/server/instance/index.ts | 19 +- .../opencode/src/server/instance/provider.ts | 26 +- 8 files changed, 323 insertions(+), 158 deletions(-) create mode 100644 packages/opencode/src/server/instance/httpapi/config.ts diff --git a/packages/opencode/src/provider/auth.ts b/packages/opencode/src/provider/auth.ts index c0c73b2cc1..5d8b2765de 100644 --- a/packages/opencode/src/provider/auth.ts +++ b/packages/opencode/src/provider/auth.ts @@ -58,6 +58,18 @@ export class Authorization extends Schema.Class("ProviderAuthAuth static readonly zod = zod(this) } +export const AuthorizeInput = Schema.Struct({ + method: Schema.Number.annotate({ description: "Auth method index" }), + inputs: Schema.optional(Schema.Record(Schema.String, Schema.String)).annotate({ description: "Prompt inputs" }), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type AuthorizeInput = Schema.Schema.Type + +export const CallbackInput = Schema.Struct({ + method: Schema.Number.annotate({ description: "Auth method index" }), + code: Schema.optional(Schema.String).annotate({ description: "OAuth authorization code" }), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type CallbackInput = Schema.Schema.Type + export const OauthMissing = NamedError.create("ProviderAuthOauthMissing", z.object({ providerID: ProviderID.zod })) export const OauthCodeMissing = NamedError.create( @@ -86,12 +98,12 @@ type Hook = NonNullable export interface 
Interface { readonly methods: () => Effect.Effect - readonly authorize: (input: { - providerID: ProviderID - method: number - inputs?: Record - }) => Effect.Effect - readonly callback: (input: { providerID: ProviderID; method: number; code?: string }) => Effect.Effect + readonly authorize: ( + input: { + providerID: ProviderID + } & AuthorizeInput, + ) => Effect.Effect + readonly callback: (input: { providerID: ProviderID } & CallbackInput) => Effect.Effect } interface State { @@ -153,11 +165,9 @@ export const layer: Layer.Layer = ) }) - const authorize = Effect.fn("ProviderAuth.authorize")(function* (input: { - providerID: ProviderID - method: number - inputs?: Record - }) { + const authorize = Effect.fn("ProviderAuth.authorize")(function* ( + input: { providerID: ProviderID } & AuthorizeInput, + ) { const { hooks, pending } = yield* InstanceState.get(state) const method = hooks[input.providerID].methods[input.method] if (method.type !== "oauth") return @@ -180,11 +190,7 @@ export const layer: Layer.Layer = } }) - const callback = Effect.fn("ProviderAuth.callback")(function* (input: { - providerID: ProviderID - method: number - code?: string - }) { + const callback = Effect.fn("ProviderAuth.callback")(function* (input: { providerID: ProviderID } & CallbackInput) { const pending = (yield* InstanceState.get(state)).pending const match = pending.get(input.providerID) if (!match) return yield* Effect.fail(new OauthMissing({ providerID: input.providerID })) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index a7297634e7..711481d80a 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -16,14 +16,16 @@ import { Env } from "../env" import { Instance } from "../project/instance" import { InstallationVersion } from "../installation/version" import { Flag } from "../flag/flag" +import { zod } from "@/util/effect-zod" import { iife } from "@/util/iife" import { Global 
} from "../global" import path from "path" -import { Effect, Layer, Context } from "effect" +import { Effect, Layer, Context, Schema, Types } from "effect" import { EffectBridge } from "@/effect" import { InstanceState } from "@/effect" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { isRecord } from "@/util/record" +import { withStatics } from "@/util/schema" import * as ProviderTransform from "./transform" import { ModelID, ProviderID } from "./schema" @@ -796,91 +798,111 @@ function custom(dep: CustomDep): Record { } } -export const Model = z - .object({ - id: ModelID.zod, - providerID: ProviderID.zod, - api: z.object({ - id: z.string(), - url: z.string(), - npm: z.string(), - }), - name: z.string(), - family: z.string().optional(), - capabilities: z.object({ - temperature: z.boolean(), - reasoning: z.boolean(), - attachment: z.boolean(), - toolcall: z.boolean(), - input: z.object({ - text: z.boolean(), - audio: z.boolean(), - image: z.boolean(), - video: z.boolean(), - pdf: z.boolean(), - }), - output: z.object({ - text: z.boolean(), - audio: z.boolean(), - image: z.boolean(), - video: z.boolean(), - pdf: z.boolean(), - }), - interleaved: z.union([ - z.boolean(), - z.object({ - field: z.enum(["reasoning_content", "reasoning_details"]), - }), - ]), - }), - cost: z.object({ - input: z.number(), - output: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), - experimentalOver200K: z - .object({ - input: z.number(), - output: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), - }) - .optional(), - }), - limit: z.object({ - context: z.number(), - input: z.number().optional(), - output: z.number(), - }), - status: z.enum(["alpha", "beta", "deprecated", "active"]), - options: z.record(z.string(), z.any()), - headers: z.record(z.string(), z.string()), - release_date: z.string(), - variants: z.record(z.string(), z.record(z.string(), z.any())).optional(), - }) - .meta({ - ref: "Model", - }) 
-export type Model = z.infer +const ProviderApiInfo = Schema.Struct({ + id: Schema.String, + url: Schema.String, + npm: Schema.String, +}) -export const Info = z - .object({ - id: ProviderID.zod, - name: z.string(), - source: z.enum(["env", "config", "custom", "api"]), - env: z.string().array(), - key: z.string().optional(), - options: z.record(z.string(), z.any()), - models: z.record(z.string(), Model), - }) - .meta({ - ref: "Provider", - }) -export type Info = z.infer +const ProviderModalities = Schema.Struct({ + text: Schema.Boolean, + audio: Schema.Boolean, + image: Schema.Boolean, + video: Schema.Boolean, + pdf: Schema.Boolean, +}) + +const ProviderInterleaved = Schema.Union([ + Schema.Boolean, + Schema.Struct({ + field: Schema.Literals(["reasoning_content", "reasoning_details"]), + }), +]) + +const ProviderCapabilities = Schema.Struct({ + temperature: Schema.Boolean, + reasoning: Schema.Boolean, + attachment: Schema.Boolean, + toolcall: Schema.Boolean, + input: ProviderModalities, + output: ProviderModalities, + interleaved: ProviderInterleaved, +}) + +const ProviderCacheCost = Schema.Struct({ + read: Schema.Number, + write: Schema.Number, +}) + +const ProviderCost = Schema.Struct({ + input: Schema.Number, + output: Schema.Number, + cache: ProviderCacheCost, + experimentalOver200K: Schema.optional( + Schema.Struct({ + input: Schema.Number, + output: Schema.Number, + cache: ProviderCacheCost, + }), + ), +}) + +const ProviderLimit = Schema.Struct({ + context: Schema.Number, + input: Schema.optional(Schema.Number), + output: Schema.Number, +}) + +export const Model = Schema.Struct({ + id: ModelID, + providerID: ProviderID, + api: ProviderApiInfo, + name: Schema.String, + family: Schema.optional(Schema.String), + capabilities: ProviderCapabilities, + cost: ProviderCost, + limit: ProviderLimit, + status: Schema.Literals(["alpha", "beta", "deprecated", "active"]), + options: Schema.Record(Schema.String, Schema.Any), + headers: Schema.Record(Schema.String, 
Schema.String), + release_date: Schema.String, + variants: Schema.optional(Schema.Record(Schema.String, Schema.Record(Schema.String, Schema.Any))), +}) + .annotate({ identifier: "Model" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Model = Types.DeepMutable> + +export const Info = Schema.Struct({ + id: ProviderID, + name: Schema.String, + source: Schema.Literals(["env", "config", "custom", "api"]), + env: Schema.Array(Schema.String), + key: Schema.optional(Schema.String), + options: Schema.Record(Schema.String, Schema.Any), + models: Schema.Record(Schema.String, Model), +}) + .annotate({ identifier: "Provider" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Info = Types.DeepMutable> + +const DefaultModelIDs = Schema.Record(Schema.String, Schema.String) + +export const ListResult = Schema.Struct({ + all: Schema.Array(Info), + default: DefaultModelIDs, + connected: Schema.Array(Schema.String), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type ListResult = Types.DeepMutable> + +export const ConfigProvidersResult = Schema.Struct({ + providers: Schema.Array(Info), + default: DefaultModelIDs, +}).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type ConfigProvidersResult = Types.DeepMutable> + +export function defaultModelIDs }>(providers: Record) { + return mapValues(providers, (item) => sort(Object.values(item.models))[0].id) +} export interface Interface { readonly list: () => Effect.Effect> @@ -928,7 +950,7 @@ function cost(c: ModelsDev.Model["cost"]): Model["cost"] { } function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model): Model { - const m: Model = { + const base: Model = { id: ModelID.make(model.id), providerID: ProviderID.make(provider.id), name: model.name, @@ -972,9 +994,10 @@ function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model variants: {}, } - m.variants = mapValues(ProviderTransform.variants(m), (v) => v) - - return m + return { + ...base, + variants: 
mapValues(ProviderTransform.variants(base), (v) => v), + } } export function fromModelsDevProvider(provider: ModelsDev.Provider): Info { @@ -983,17 +1006,22 @@ export function fromModelsDevProvider(provider: ModelsDev.Provider): Info { models[key] = fromModelsDevModel(provider, model) for (const [mode, opts] of Object.entries(model.experimental?.modes ?? {})) { const id = `${model.id}-${mode}` - const m = fromModelsDevModel(provider, model) - m.id = ModelID.make(id) - m.name = `${model.name} ${mode[0].toUpperCase()}${mode.slice(1)}` - if (opts.cost) m.cost = mergeDeep(m.cost, cost(opts.cost)) - // convert body params to camelCase for ai sdk compatibility - if (opts.provider?.body) - m.options = Object.fromEntries( - Object.entries(opts.provider.body).map(([k, v]) => [k.replace(/_([a-z])/g, (_, c) => c.toUpperCase()), v]), - ) - if (opts.provider?.headers) m.headers = opts.provider.headers - models[id] = m + const base = fromModelsDevModel(provider, model) + models[id] = { + ...base, + id: ModelID.make(id), + name: `${model.name} ${mode[0].toUpperCase()}${mode.slice(1)}`, + cost: opts.cost ? mergeDeep(base.cost, cost(opts.cost)) : base.cost, + options: opts.provider?.body + ? Object.fromEntries( + Object.entries(opts.provider.body).map(([k, v]) => [ + k.replace(/_([a-z])/g, (_, c) => c.toUpperCase()), + v, + ]), + ) + : base.options, + headers: opts.provider?.headers ?? 
base.headers, + } } } return { diff --git a/packages/opencode/src/server/instance/config.ts b/packages/opencode/src/server/instance/config.ts index e3291a8c36..15c393fe5a 100644 --- a/packages/opencode/src/server/instance/config.ts +++ b/packages/opencode/src/server/instance/config.ts @@ -3,7 +3,6 @@ import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" import { Config } from "../../config" import { Provider } from "../../provider" -import { mapValues } from "remeda" import { errors } from "../error" import { lazy } from "../../util/lazy" import { AppRuntime } from "../../effect/app-runtime" @@ -70,12 +69,7 @@ export const ConfigRoutes = lazy(() => description: "List of providers", content: { "application/json": { - schema: resolver( - z.object({ - providers: Provider.Info.array(), - default: z.record(z.string(), z.string()), - }), - ), + schema: resolver(Provider.ConfigProvidersResult.zod), }, }, }, @@ -84,10 +78,10 @@ export const ConfigRoutes = lazy(() => async (c) => jsonRequest("ConfigRoutes.providers", c, function* () { const svc = yield* Provider.Service - const providers = mapValues(yield* svc.list(), (item) => item) + const providers = yield* svc.list() return { providers: Object.values(providers), - default: mapValues(providers, (item) => Provider.sort(Object.values(item.models))[0].id), + default: Provider.defaultModelIDs(providers), } }), ), diff --git a/packages/opencode/src/server/instance/httpapi/config.ts b/packages/opencode/src/server/instance/httpapi/config.ts new file mode 100644 index 0000000000..14aa94f9fc --- /dev/null +++ b/packages/opencode/src/server/instance/httpapi/config.ts @@ -0,0 +1,51 @@ +import { Config } from "@/config" +import { Provider } from "@/provider" +import { Effect, Layer } from "effect" +import { HttpApi, HttpApiBuilder, HttpApiEndpoint, HttpApiGroup, OpenApi } from "effect/unstable/httpapi" + +const root = "/config" + +export const ConfigApi = HttpApi.make("config") + .add( + 
HttpApiGroup.make("config") + .add( + HttpApiEndpoint.get("providers", `${root}/providers`, { + success: Provider.ConfigProvidersResult, + }).annotateMerge( + OpenApi.annotations({ + identifier: "config.providers", + summary: "List config providers", + description: "Get a list of all configured AI providers and their default models.", + }), + ), + ) + .annotateMerge( + OpenApi.annotations({ + title: "config", + description: "Experimental HttpApi config routes.", + }), + ), + ) + .annotateMerge( + OpenApi.annotations({ + title: "opencode experimental HttpApi", + version: "0.0.1", + description: "Experimental HttpApi surface for selected instance routes.", + }), + ) + +export const configHandlers = Layer.unwrap( + Effect.gen(function* () { + const svc = yield* Provider.Service + + const providers = Effect.fn("ConfigHttpApi.providers")(function* () { + const providers = yield* svc.list() + return { + providers: Object.values(providers), + default: Provider.defaultModelIDs(providers), + } + }) + + return HttpApiBuilder.group(ConfigApi, "config", (handlers) => handlers.handle("providers", providers)) + }), +).pipe(Layer.provide(Provider.defaultLayer), Layer.provide(Config.defaultLayer)) diff --git a/packages/opencode/src/server/instance/httpapi/provider.ts b/packages/opencode/src/server/instance/httpapi/provider.ts index 31dd1446a0..67831a1faf 100644 --- a/packages/opencode/src/server/instance/httpapi/provider.ts +++ b/packages/opencode/src/server/instance/httpapi/provider.ts @@ -1,6 +1,11 @@ import { ProviderAuth } from "@/provider" -import { Effect, Layer } from "effect" -import { HttpApi, HttpApiBuilder, HttpApiEndpoint, HttpApiGroup, OpenApi } from "effect/unstable/httpapi" +import { Config } from "@/config" +import { ModelsDev } from "@/provider" +import { Provider } from "@/provider" +import { ProviderID } from "@/provider/schema" +import { mapValues } from "remeda" +import { Effect, Layer, Schema } from "effect" +import { HttpApi, HttpApiBuilder, HttpApiEndpoint, 
HttpApiError, HttpApiGroup, OpenApi } from "effect/unstable/httpapi" const root = "/provider" @@ -8,6 +13,15 @@ export const ProviderApi = HttpApi.make("provider") .add( HttpApiGroup.make("provider") .add( + HttpApiEndpoint.get("list", root, { + success: Provider.ListResult, + }).annotateMerge( + OpenApi.annotations({ + identifier: "provider.list", + summary: "List providers", + description: "Get a list of all available AI providers, including both available and connected ones.", + }), + ), HttpApiEndpoint.get("auth", `${root}/auth`, { success: ProviderAuth.Methods, }).annotateMerge( @@ -17,6 +31,28 @@ export const ProviderApi = HttpApi.make("provider") description: "Retrieve available authentication methods for all AI providers.", }), ), + HttpApiEndpoint.post("authorize", `${root}/:providerID/oauth/authorize`, { + params: { providerID: ProviderID }, + payload: ProviderAuth.AuthorizeInput, + success: ProviderAuth.Authorization, + }).annotateMerge( + OpenApi.annotations({ + identifier: "provider.oauth.authorize", + summary: "Start OAuth authorization", + description: "Start the OAuth authorization flow for a provider.", + }), + ), + HttpApiEndpoint.post("callback", `${root}/:providerID/oauth/callback`, { + params: { providerID: ProviderID }, + payload: ProviderAuth.CallbackInput, + success: Schema.Boolean, + }).annotateMerge( + OpenApi.annotations({ + identifier: "provider.oauth.callback", + summary: "Handle OAuth callback", + description: "Handle the OAuth callback from a provider after user authorization.", + }), + ), ) .annotateMerge( OpenApi.annotations({ @@ -35,12 +71,72 @@ export const ProviderApi = HttpApi.make("provider") export const providerHandlers = Layer.unwrap( Effect.gen(function* () { + const cfg = yield* Config.Service + const provider = yield* Provider.Service const svc = yield* ProviderAuth.Service + const list = Effect.fn("ProviderHttpApi.list")(function* () { + const config = yield* cfg.get() + const all = yield* Effect.promise(() => 
ModelsDev.get()) + const disabled = new Set(config.disabled_providers ?? []) + const enabled = config.enabled_providers ? new Set(config.enabled_providers) : undefined + const filtered: Record = {} + for (const [key, value] of Object.entries(all)) { + if ((enabled ? enabled.has(key) : true) && !disabled.has(key)) { + filtered[key] = value + } + } + const connected = yield* provider.list() + const providers = Object.assign( + mapValues(filtered, (item) => Provider.fromModelsDevProvider(item)), + connected, + ) + return { + all: Object.values(providers), + default: Provider.defaultModelIDs(providers), + connected: Object.keys(connected), + } + }) + const auth = Effect.fn("ProviderHttpApi.auth")(function* () { return yield* svc.methods() }) - return HttpApiBuilder.group(ProviderApi, "provider", (handlers) => handlers.handle("auth", auth)) + const authorize = Effect.fn("ProviderHttpApi.authorize")(function* (ctx: { + params: { providerID: ProviderID } + payload: ProviderAuth.AuthorizeInput + }) { + const result = yield* svc + .authorize({ + providerID: ctx.params.providerID, + method: ctx.payload.method, + inputs: ctx.payload.inputs, + }) + .pipe(Effect.catch(() => Effect.fail(new HttpApiError.BadRequest({})))) + if (!result) return yield* new HttpApiError.BadRequest({}) + return result + }) + + const callback = Effect.fn("ProviderHttpApi.callback")(function* (ctx: { + params: { providerID: ProviderID } + payload: ProviderAuth.CallbackInput + }) { + yield* svc + .callback({ + providerID: ctx.params.providerID, + method: ctx.payload.method, + code: ctx.payload.code, + }) + .pipe(Effect.catch(() => Effect.fail(new HttpApiError.BadRequest({})))) + return true + }) + + return HttpApiBuilder.group(ProviderApi, "provider", (handlers) => + handlers.handle("list", list).handle("auth", auth).handle("authorize", authorize).handle("callback", callback), + ) }), -).pipe(Layer.provide(ProviderAuth.defaultLayer)) +).pipe( + Layer.provide(ProviderAuth.defaultLayer), + 
Layer.provide(Provider.defaultLayer), + Layer.provide(Config.defaultLayer), +) diff --git a/packages/opencode/src/server/instance/httpapi/server.ts b/packages/opencode/src/server/instance/httpapi/server.ts index 362d0970b9..64332fd2a0 100644 --- a/packages/opencode/src/server/instance/httpapi/server.ts +++ b/packages/opencode/src/server/instance/httpapi/server.ts @@ -10,6 +10,7 @@ import { InstanceBootstrap } from "@/project/bootstrap" import { Instance } from "@/project/instance" import { lazy } from "@/util/lazy" import { Filesystem } from "@/util" +import { ConfigApi, configHandlers } from "./config" import { PermissionApi, permissionHandlers } from "./permission" import { ProviderApi, providerHandlers } from "./provider" import { QuestionApi, questionHandlers } from "./question" @@ -108,8 +109,10 @@ const instance = HttpRouter.middleware()( const QuestionSecured = QuestionApi.middleware(Authorization) const PermissionSecured = PermissionApi.middleware(Authorization) const ProviderSecured = ProviderApi.middleware(Authorization) +const ConfigSecured = ConfigApi.middleware(Authorization) export const routes = Layer.mergeAll( + HttpApiBuilder.layer(ConfigSecured).pipe(Layer.provide(configHandlers)), HttpApiBuilder.layer(QuestionSecured).pipe(Layer.provide(questionHandlers)), HttpApiBuilder.layer(PermissionSecured).pipe(Layer.provide(permissionHandlers)), HttpApiBuilder.layer(ProviderSecured).pipe(Layer.provide(providerHandlers)), diff --git a/packages/opencode/src/server/instance/index.ts b/packages/opencode/src/server/instance/index.ts index 9ef6da63ac..6a290093c5 100644 --- a/packages/opencode/src/server/instance/index.ts +++ b/packages/opencode/src/server/instance/index.ts @@ -1,7 +1,7 @@ import { describeRoute, resolver, validator } from "hono-openapi" import { Hono } from "hono" import type { UpgradeWebSocket } from "hono/ws" -import { Effect } from "effect" +import { Context, Effect } from "effect" import z from "zod" import { Format } from "../../format" 
import { TuiRoutes } from "./tui" @@ -41,12 +41,17 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { if (Flag.OPENCODE_EXPERIMENTAL_HTTPAPI) { const handler = ExperimentalHttpApiServer.webHandler().handler - app - .all("/question", (c) => handler(c.req.raw)) - .all("/question/*", (c) => handler(c.req.raw)) - .all("/permission", (c) => handler(c.req.raw)) - .all("/permission/*", (c) => handler(c.req.raw)) - .all("/provider/auth", (c) => handler(c.req.raw)) + const context = Context.empty() as Context.Context + app.get("/question", (c) => handler(c.req.raw, context)) + app.post("/question/:requestID/reply", (c) => handler(c.req.raw, context)) + app.post("/question/:requestID/reject", (c) => handler(c.req.raw, context)) + app.get("/permission", (c) => handler(c.req.raw, context)) + app.post("/permission/:requestID/reply", (c) => handler(c.req.raw, context)) + app.get("/config/providers", (c) => handler(c.req.raw, context)) + app.get("/provider", (c) => handler(c.req.raw, context)) + app.get("/provider/auth", (c) => handler(c.req.raw, context)) + app.post("/provider/:providerID/oauth/authorize", (c) => handler(c.req.raw, context)) + app.post("/provider/:providerID/oauth/callback", (c) => handler(c.req.raw, context)) } return app diff --git a/packages/opencode/src/server/instance/provider.ts b/packages/opencode/src/server/instance/provider.ts index c1580437da..a81ae00d59 100644 --- a/packages/opencode/src/server/instance/provider.ts +++ b/packages/opencode/src/server/instance/provider.ts @@ -25,13 +25,7 @@ export const ProviderRoutes = lazy(() => description: "List of providers", content: { "application/json": { - schema: resolver( - z.object({ - all: Provider.Info.array(), - default: z.record(z.string(), z.string()), - connected: z.array(z.string()), - }), - ), + schema: resolver(Provider.ListResult.zod), }, }, }, @@ -59,7 +53,7 @@ export const ProviderRoutes = lazy(() => ) return { all: Object.values(providers), - default: mapValues(providers, 
(item) => Provider.sort(Object.values(item.models))[0].id), + default: Provider.defaultModelIDs(providers), connected: Object.keys(connected), } }), @@ -116,13 +110,7 @@ export const ProviderRoutes = lazy(() => providerID: ProviderID.zod.meta({ description: "Provider ID" }), }), ), - validator( - "json", - z.object({ - method: z.number().meta({ description: "Auth method index" }), - inputs: z.record(z.string(), z.string()).optional().meta({ description: "Prompt inputs" }), - }), - ), + validator("json", ProviderAuth.AuthorizeInput.zod), async (c) => { const providerID = c.req.valid("param").providerID const { method, inputs } = c.req.valid("json") @@ -162,13 +150,7 @@ export const ProviderRoutes = lazy(() => providerID: ProviderID.zod.meta({ description: "Provider ID" }), }), ), - validator( - "json", - z.object({ - method: z.number().meta({ description: "Auth method index" }), - code: z.string().optional().meta({ description: "OAuth authorization code" }), - }), - ), + validator("json", ProviderAuth.CallbackInput.zod), async (c) => { const providerID = c.req.valid("param").providerID const { method, code } = c.req.valid("json") From cccb907a9b3df7eb6fae71ee9e2392dccc73e9d3 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 23:19:18 -0400 Subject: [PATCH 119/201] feat(tui): animated GO logo + radial pulse in free-limit upsell dialog (#22976) --- .../src/cli/cmd/tui/component/bg-pulse.tsx | 130 +++++++ .../cmd/tui/component/dialog-go-upsell.tsx | 150 +++++--- .../src/cli/cmd/tui/component/logo.tsx | 335 ++++++++++++++---- .../cli/cmd/tui/component/shimmer-config.ts | 49 +++ packages/opencode/src/cli/logo.ts | 7 +- 5 files changed, 563 insertions(+), 108 deletions(-) create mode 100644 packages/opencode/src/cli/cmd/tui/component/bg-pulse.tsx create mode 100644 packages/opencode/src/cli/cmd/tui/component/shimmer-config.ts diff --git a/packages/opencode/src/cli/cmd/tui/component/bg-pulse.tsx b/packages/opencode/src/cli/cmd/tui/component/bg-pulse.tsx new 
file mode 100644 index 0000000000..541ecea4e1 --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/component/bg-pulse.tsx @@ -0,0 +1,130 @@ +import { BoxRenderable, RGBA } from "@opentui/core" +import { createMemo, createSignal, For, onCleanup, onMount } from "solid-js" +import { tint, useTheme } from "@tui/context/theme" + +const PERIOD = 4600 +const RINGS = 3 +const WIDTH = 3.8 +const TAIL = 9.5 +const AMP = 0.55 +const TAIL_AMP = 0.16 +const BREATH_AMP = 0.05 +const BREATH_SPEED = 0.0008 +// Offset so bg ring emits from GO center at the moment the logo pulse peaks. +const PHASE_OFFSET = 0.29 + +export type BgPulseMask = { + x: number + y: number + width: number + height: number + pad?: number + strength?: number +} + +export function BgPulse(props: { centerX?: number; centerY?: number; masks?: BgPulseMask[] }) { + const { theme } = useTheme() + const [now, setNow] = createSignal(performance.now()) + const [size, setSize] = createSignal<{ width: number; height: number }>({ width: 0, height: 0 }) + let box: BoxRenderable | undefined + + const timer = setInterval(() => setNow(performance.now()), 50) + onCleanup(() => clearInterval(timer)) + + const sync = () => { + if (!box) return + setSize({ width: box.width, height: box.height }) + } + + onMount(() => { + sync() + box?.on("resize", sync) + }) + + onCleanup(() => { + box?.off("resize", sync) + }) + + const grid = createMemo(() => { + const t = now() + const w = size().width + const h = size().height + if (w === 0 || h === 0) return [] as RGBA[][] + const cxv = props.centerX ?? w / 2 + const cyv = props.centerY ?? 
h / 2 + const reach = Math.hypot(Math.max(cxv, w - cxv), Math.max(cyv, h - cyv) * 2) + TAIL + const ringStates = Array.from({ length: RINGS }, (_, i) => { + const offset = i / RINGS + const phase = (t / PERIOD + offset - PHASE_OFFSET + 1) % 1 + const envelope = Math.sin(phase * Math.PI) + const eased = envelope * envelope * (3 - 2 * envelope) + return { + head: phase * reach, + eased, + } + }) + const normalizedMasks = props.masks?.map((m) => { + const pad = m.pad ?? 2 + return { + left: m.x - pad, + right: m.x + m.width + pad, + top: m.y - pad, + bottom: m.y + m.height + pad, + pad, + strength: m.strength ?? 0.85, + } + }) + const rows = [] as RGBA[][] + for (let y = 0; y < h; y++) { + const row = [] as RGBA[] + for (let x = 0; x < w; x++) { + const dx = x + 0.5 - cxv + const dy = (y + 0.5 - cyv) * 2 + const dist = Math.hypot(dx, dy) + let level = 0 + for (const ring of ringStates) { + const delta = dist - ring.head + const crest = Math.abs(delta) < WIDTH ? 0.5 + 0.5 * Math.cos((delta / WIDTH) * Math.PI) : 0 + const tail = delta < 0 && delta > -TAIL ? 
(1 + delta / TAIL) ** 2.3 : 0 + level += (crest * AMP + tail * TAIL_AMP) * ring.eased + } + const edgeFalloff = Math.max(0, 1 - (dist / (reach * 0.85)) ** 2) + const breath = (0.5 + 0.5 * Math.sin(t * BREATH_SPEED)) * BREATH_AMP + let maskAtten = 1 + if (normalizedMasks) { + for (const m of normalizedMasks) { + if (x < m.left || x > m.right || y < m.top || y > m.bottom) continue + const inX = Math.min(x - m.left, m.right - x) + const inY = Math.min(y - m.top, m.bottom - y) + const edge = Math.min(inX / m.pad, inY / m.pad, 1) + const eased = edge * edge * (3 - 2 * edge) + const reduce = 1 - m.strength * eased + if (reduce < maskAtten) maskAtten = reduce + } + } + const strength = Math.min(1, ((level / RINGS) * edgeFalloff + breath * edgeFalloff) * maskAtten) + row.push(tint(theme.backgroundPanel, theme.primary, strength * 0.7)) + } + rows.push(row) + } + return rows + }) + + return ( + (box = item)} width="100%" height="100%"> + + {(row) => ( + + + {(color) => ( + + {" "} + + )} + + + )} + + + ) +} diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-go-upsell.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-go-upsell.tsx index 2d200ca3b8..ace4b090bc 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-go-upsell.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-go-upsell.tsx @@ -1,12 +1,16 @@ -import { RGBA, TextAttributes } from "@opentui/core" +import { BoxRenderable, RGBA, TextAttributes } from "@opentui/core" import { useKeyboard } from "@opentui/solid" import open from "open" -import { createSignal } from "solid-js" +import { createSignal, onCleanup, onMount } from "solid-js" import { selectedForeground, useTheme } from "@tui/context/theme" import { useDialog, type DialogContext } from "@tui/ui/dialog" import { Link } from "@tui/ui/link" +import { GoLogo } from "./logo" +import { BgPulse, type BgPulseMask } from "./bg-pulse" const GO_URL = "https://opencode.ai/go" +const PAD_X = 3 +const PAD_TOP_OUTER = 1 export type 
DialogGoUpsellProps = { onClose?: (dontShowAgain?: boolean) => void @@ -27,62 +31,116 @@ export function DialogGoUpsell(props: DialogGoUpsellProps) { const dialog = useDialog() const { theme } = useTheme() const fg = selectedForeground(theme) - const [selected, setSelected] = createSignal(0) + const [selected, setSelected] = createSignal<"dismiss" | "subscribe">("subscribe") + const [center, setCenter] = createSignal<{ x: number; y: number } | undefined>() + const [masks, setMasks] = createSignal([]) + let content: BoxRenderable | undefined + let logoBox: BoxRenderable | undefined + let headingBox: BoxRenderable | undefined + let descBox: BoxRenderable | undefined + let buttonsBox: BoxRenderable | undefined + + const sync = () => { + if (!content || !logoBox) return + setCenter({ + x: logoBox.x - content.x + logoBox.width / 2, + y: logoBox.y - content.y + logoBox.height / 2 + PAD_TOP_OUTER, + }) + const next: BgPulseMask[] = [] + const baseY = PAD_TOP_OUTER + for (const b of [headingBox, descBox, buttonsBox]) { + if (!b) continue + next.push({ + x: b.x - content.x, + y: b.y - content.y + baseY, + width: b.width, + height: b.height, + pad: 2, + strength: 0.78, + }) + } + setMasks(next) + } + + onMount(() => { + sync() + for (const b of [content, logoBox, headingBox, descBox, buttonsBox]) b?.on("resize", sync) + }) + + onCleanup(() => { + for (const b of [content, logoBox, headingBox, descBox, buttonsBox]) b?.off("resize", sync) + }) useKeyboard((evt) => { if (evt.name === "left" || evt.name === "right" || evt.name === "tab") { - setSelected((s) => (s === 0 ? 1 : 0)) + setSelected((s) => (s === "subscribe" ? 
"dismiss" : "subscribe")) return } - if (evt.name !== "return") return - if (selected() === 0) subscribe(props, dialog) - else dismiss(props, dialog) + if (evt.name === "return") { + if (selected() === "subscribe") subscribe(props, dialog) + else dismiss(props, dialog) + } }) return ( - - - - Free limit reached - - dialog.clear()}> - esc - + (content = item)}> + + - - - Subscribe to OpenCode Go to keep going with reliable access to the best open-source models, starting at - $5/month. - - + + (headingBox = item)} flexDirection="row" justifyContent="space-between"> + + Free limit reached + + dialog.clear()}> + esc + + + (descBox = item)} gap={0}> + + Subscribe to + + OpenCode Go + + for reliable access to the + + best open-source models, starting at $5/month. + + + (logoBox = item)}> + + - - - setSelected(0)} - onMouseUp={() => subscribe(props, dialog)} - > - - subscribe - - - setSelected(1)} - onMouseUp={() => dismiss(props, dialog)} - > - (buttonsBox = item)} flexDirection="row" justifyContent="space-between"> + setSelected("dismiss")} + onMouseUp={() => dismiss(props, dialog)} > - don't show again - + + don't show again + + + setSelected("subscribe")} + onMouseUp={() => subscribe(props, dialog)} + > + + subscribe + + diff --git a/packages/opencode/src/cli/cmd/tui/component/logo.tsx b/packages/opencode/src/cli/cmd/tui/component/logo.tsx index e53974871a..17368ddad8 100644 --- a/packages/opencode/src/cli/cmd/tui/component/logo.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/logo.tsx @@ -1,8 +1,14 @@ import { BoxRenderable, MouseButton, MouseEvent, RGBA, TextAttributes } from "@opentui/core" -import { For, createMemo, createSignal, onCleanup, type JSX } from "solid-js" +import { For, createMemo, createSignal, onCleanup, onMount, type JSX } from "solid-js" import { useTheme, tint } from "@tui/context/theme" import * as Sound from "@tui/util/sound" -import { logo } from "@/cli/logo" +import { go, logo } from "@/cli/logo" +import { shimmerConfig, type 
ShimmerConfig } from "./shimmer-config" + +export type LogoShape = { + left: string[] + right: string[] +} // Shadow markers (rendered chars in parens): // _ = full shadow cell (space with bg=shadow) @@ -74,9 +80,6 @@ type Frame = { spark: number } -const LEFT = logo.left[0]?.length ?? 0 -const FULL = logo.left.map((line, i) => line + " ".repeat(GAP) + logo.right[i]) -const SPAN = Math.hypot(FULL[0]?.length ?? 0, FULL.length * 2) * 0.94 const NEAR = [ [1, 0], [1, 1], @@ -140,7 +143,7 @@ function noise(x: number, y: number, t: number) { } function lit(char: string) { - return char !== " " && char !== "_" && char !== "~" + return char !== " " && char !== "_" && char !== "~" && char !== "," } function key(x: number, y: number) { @@ -188,12 +191,12 @@ function route(list: Array<{ x: number; y: number }>) { return path } -function mapGlyphs() { +function mapGlyphs(full: string[]) { const cells = [] as Array<{ x: number; y: number }> - for (let y = 0; y < FULL.length; y++) { - for (let x = 0; x < (FULL[y]?.length ?? 0); x++) { - if (lit(FULL[y]?.[x] ?? " ")) cells.push({ x, y }) + for (let y = 0; y < full.length; y++) { + for (let x = 0; x < (full[y]?.length ?? 0); x++) { + if (lit(full[y]?.[x] ?? " ")) cells.push({ x, y }) } } @@ -237,9 +240,25 @@ function mapGlyphs() { return { glyph, trace, center } } -const MAP = mapGlyphs() +type LogoContext = { + LEFT: number + FULL: string[] + SPAN: number + MAP: ReturnType + shape: LogoShape +} -function shimmer(x: number, y: number, frame: Frame) { +function build(shape: LogoShape): LogoContext { + const LEFT = shape.left[0]?.length ?? 0 + const FULL = shape.left.map((line, i) => line + " ".repeat(GAP) + shape.right[i]) + const SPAN = Math.hypot(FULL[0]?.length ?? 
0, FULL.length * 2) * 0.94 + return { LEFT, FULL, SPAN, MAP: mapGlyphs(FULL), shape } +} + +const DEFAULT = build(logo) +const GO = build(go) + +function shimmer(x: number, y: number, frame: Frame, ctx: LogoContext) { return frame.list.reduce((best, item) => { const age = frame.t - item.at if (age < SHIMMER_IN || age > LIFE) return best @@ -247,7 +266,7 @@ function shimmer(x: number, y: number, frame: Frame) { const dy = y * 2 + 1 - item.y const dist = Math.hypot(dx, dy) const p = age / LIFE - const r = SPAN * (1 - (1 - p) ** EXPAND) + const r = ctx.SPAN * (1 - (1 - p) ** EXPAND) const lag = r - dist if (lag < 0.18 || lag > SHIMMER_OUT) return best const band = Math.exp(-(((lag - 1.05) / 0.68) ** 2)) @@ -258,19 +277,19 @@ function shimmer(x: number, y: number, frame: Frame) { }, 0) } -function remain(x: number, y: number, item: Release, t: number) { +function remain(x: number, y: number, item: Release, t: number, ctx: LogoContext) { const age = t - item.at if (age < 0 || age > LIFE) return 0 const p = age / LIFE const dx = x + 0.5 - item.x - 0.5 const dy = y * 2 + 1 - item.y * 2 - 1 const dist = Math.hypot(dx, dy) - const r = SPAN * (1 - (1 - p) ** EXPAND) + const r = ctx.SPAN * (1 - (1 - p) ** EXPAND) if (dist > r) return 1 return clamp((r - dist) / 1.35 < 1 ? 
1 - (r - dist) / 1.35 : 0) } -function wave(x: number, y: number, frame: Frame, live: boolean) { +function wave(x: number, y: number, frame: Frame, live: boolean, ctx: LogoContext) { return frame.list.reduce((sum, item) => { const age = frame.t - item.at if (age < 0 || age > LIFE) return sum @@ -278,7 +297,7 @@ function wave(x: number, y: number, frame: Frame, live: boolean) { const dx = x + 0.5 - item.x const dy = y * 2 + 1 - item.y const dist = Math.hypot(dx, dy) - const r = SPAN * (1 - (1 - p) ** EXPAND) + const r = ctx.SPAN * (1 - (1 - p) ** EXPAND) const fade = (1 - p) ** 1.32 const j = 1.02 + noise(x + item.x * 0.7, y + item.y * 0.7, item.at * 0.002 + age * 0.06) * 0.52 const edge = Math.exp(-(((dist - r) / WIDTH) ** 2)) * GAIN * fade * item.force * j @@ -292,7 +311,7 @@ function wave(x: number, y: number, frame: Frame, live: boolean) { }, 0) } -function field(x: number, y: number, frame: Frame) { +function field(x: number, y: number, frame: Frame, ctx: LogoContext) { const held = frame.hold const rest = frame.release const item = held ?? rest @@ -326,11 +345,11 @@ function field(x: number, y: number, frame: Frame) { Math.max(0, noise(item.x * 3.1, item.y * 2.7, frame.t * 1.7) - 0.72) * Math.exp(-(dist * dist) / 0.15) * lerp(0.08, 0.42, body) - const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1 + const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t, ctx) : 1 return (core + shell + ember + ring + fork + glitch + lash + flicker - dim) * fade } -function pick(x: number, y: number, frame: Frame) { +function pick(x: number, y: number, frame: Frame, ctx: LogoContext) { const held = frame.hold const rest = frame.release const item = held ?? rest @@ -339,26 +358,26 @@ function pick(x: number, y: number, frame: Frame) { const dx = x + 0.5 - item.x - 0.5 const dy = y * 2 + 1 - item.y * 2 - 1 const dist = Math.hypot(dx, dy) - const fade = frame.release && !frame.hold ? 
remain(x, y, frame.release, frame.t) : 1 + const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t, ctx) : 1 return Math.exp(-(dist * dist) / 1.7) * lerp(0.2, 0.96, rise) * fade } -function select(x: number, y: number) { - const direct = MAP.glyph.get(key(x, y)) +function select(x: number, y: number, ctx: LogoContext) { + const direct = ctx.MAP.glyph.get(key(x, y)) if (direct !== undefined) return direct - const near = NEAR.map(([dx, dy]) => MAP.glyph.get(key(x + dx, y + dy))).find( + const near = NEAR.map(([dx, dy]) => ctx.MAP.glyph.get(key(x + dx, y + dy))).find( (item): item is number => item !== undefined, ) return near } -function trace(x: number, y: number, frame: Frame) { +function trace(x: number, y: number, frame: Frame, ctx: LogoContext) { const held = frame.hold const rest = frame.release const item = held ?? rest if (!item || item.glyph === undefined) return 0 - const step = MAP.trace.get(key(x, y)) + const step = ctx.MAP.trace.get(key(x, y)) if (!step || step.glyph !== item.glyph || step.l < 2) return 0 const age = frame.t - item.at const rise = held ? ramp(age, HOLD, CHARGE) : rest!.rise @@ -368,29 +387,125 @@ function trace(x: number, y: number, frame: Frame) { const dist = Math.min(Math.abs(step.i - head), step.l - Math.abs(step.i - head)) const tail = (head - TAIL + step.l) % step.l const lag = Math.min(Math.abs(step.i - tail), step.l - Math.abs(step.i - tail)) - const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1 + const fade = frame.release && !frame.hold ? 
remain(x, y, frame.release, frame.t, ctx) : 1 const core = Math.exp(-((dist / 1.05) ** 2)) * lerp(0.8, 2.35, rise) const glow = Math.exp(-((dist / 1.85) ** 2)) * lerp(0.08, 0.34, rise) const trail = Math.exp(-((lag / 1.45) ** 2)) * lerp(0.04, 0.42, rise) return (core + glow + trail) * appear * fade } -function bloom(x: number, y: number, frame: Frame) { +function idle( + x: number, + pixelY: number, + frame: Frame, + ctx: LogoContext, + state: IdleState, +): { glow: number; peak: number; primary: number } { + const cfg = state.cfg + const dx = x + 0.5 - cfg.originX + const dy = pixelY - cfg.originY + const dist = Math.hypot(dx, dy) + const angle = Math.atan2(dy, dx) + const wob1 = noise(x * 0.32, pixelY * 0.25, frame.t * 0.0005) - 0.5 + const wob2 = noise(x * 0.12, pixelY * 0.08, frame.t * 0.00022) - 0.5 + const ripple = Math.sin(angle * 3 + frame.t * 0.0012) * 0.3 + const jitter = (wob1 * 0.55 + wob2 * 0.32 + ripple * 0.18) * cfg.noise + const traveled = dist + jitter + let glow = 0 + let peak = 0 + let halo = 0 + let primary = 0 + let ambient = 0 + for (const active of state.active) { + const head = active.head + const eased = active.eased + const delta = traveled - head + // Use shallower exponent (1.6 vs 2) for softer edges on the Gaussians + // so adjacent pixels have smaller brightness deltas + const core = Math.exp(-(Math.abs(delta / cfg.coreWidth) ** 1.8)) + const soft = Math.exp(-(Math.abs(delta / cfg.softWidth) ** 1.6)) + const tailRange = cfg.tail * 2.6 + const tail = delta < 0 && delta > -tailRange ? 
(1 + delta / tailRange) ** 2.6 : 0 + const haloDelta = delta + cfg.haloOffset + const haloBand = Math.exp(-(Math.abs(haloDelta / cfg.haloWidth) ** 1.6)) + glow += (soft * cfg.softAmp + tail * cfg.tailAmp) * eased + peak += core * cfg.coreAmp * eased + halo += haloBand * cfg.haloAmp * eased + // Primary-tinted fringe follows the halo (which trails behind the core) and the tail + primary += (haloBand + tail * 0.6) * eased + ambient += active.ambient + } + ambient /= state.rings + return { + glow: glow / state.rings, + peak: cfg.breathBase + ambient + (peak + halo) / state.rings, + primary: (primary / state.rings) * cfg.primaryMix, + } +} + +function bloom(x: number, y: number, frame: Frame, ctx: LogoContext) { const item = frame.glow if (!item) return 0 - const glyph = MAP.glyph.get(key(x, y)) + const glyph = ctx.MAP.glyph.get(key(x, y)) if (glyph !== item.glyph) return 0 const age = frame.t - item.at if (age < 0 || age > GLOW_OUT) return 0 const p = age / GLOW_OUT const flash = (1 - p) ** 2 - const dx = x + 0.5 - MAP.center.get(item.glyph)!.x - const dy = y * 2 + 1 - MAP.center.get(item.glyph)!.y + const dx = x + 0.5 - ctx.MAP.center.get(item.glyph)!.x + const dy = y * 2 + 1 - ctx.MAP.center.get(item.glyph)!.y const bias = Math.exp(-((Math.hypot(dx, dy) / 2.8) ** 2)) return lerp(item.force, item.force * 0.18, p) * lerp(0.72, 1.1, bias) * flash } -export function Logo() { +type IdleState = { + cfg: ShimmerConfig + reach: number + rings: number + active: Array<{ + head: number + eased: number + ambient: number + }> +} + +function buildIdleState(t: number, ctx: LogoContext): IdleState { + const cfg = shimmerConfig + const w = ctx.FULL[0]?.length ?? 
1 + const h = ctx.FULL.length * 2 + const corners: [number, number][] = [ + [0, 0], + [w, 0], + [0, h], + [w, h], + ] + let maxCorner = 0 + for (const [cx, cy] of corners) { + const d = Math.hypot(cx - cfg.originX, cy - cfg.originY) + if (d > maxCorner) maxCorner = d + } + const reach = maxCorner + cfg.tail * 2 + const rings = Math.max(1, Math.floor(cfg.rings)) + const active = [] as IdleState["active"] + for (let i = 0; i < rings; i++) { + const offset = i / rings + const cyclePhase = (t / cfg.period + offset) % 1 + if (cyclePhase >= cfg.sweepFraction) continue + const phase = cyclePhase / cfg.sweepFraction + const envelope = Math.sin(phase * Math.PI) + const eased = envelope * envelope * (3 - 2 * envelope) + const d = (phase - cfg.ambientCenter) / cfg.ambientWidth + active.push({ + head: phase * reach, + eased, + ambient: Math.abs(d) < 1 ? (1 - d * d) ** 2 * cfg.ambientAmp : 0, + }) + } + return { cfg, reach, rings, active } +} + +export function Logo(props: { shape?: LogoShape; ink?: RGBA; idle?: boolean } = {}) { + const ctx = props.shape ? 
build(props.shape) : DEFAULT const { theme } = useTheme() const [rings, setRings] = createSignal([]) const [hold, setHold] = createSignal() @@ -430,6 +545,7 @@ export function Logo() { } if (!live) setRelease(undefined) if (live || hold() || release() || glow()) return + if (props.idle) return stop() } @@ -438,8 +554,20 @@ export function Logo() { timer = setInterval(tick, 16) } + onCleanup(() => { + stop() + hum = false + Sound.dispose() + }) + + onMount(() => { + if (!props.idle) return + setNow(performance.now()) + start() + }) + const hit = (x: number, y: number) => { - const char = FULL[y]?.[x] + const char = ctx.FULL[y]?.[x] return char !== undefined && char !== " " } @@ -448,7 +576,7 @@ export function Logo() { if (last) burst(last.x, last.y) setNow(t) if (!last) setRelease(undefined) - setHold({ x, y, at: t, glyph: select(x, y) }) + setHold({ x, y, at: t, glyph: select(x, y, ctx) }) hum = false start() } @@ -508,6 +636,8 @@ export function Logo() { } }) + const idleState = createMemo(() => (props.idle ? buildIdleState(frame().t, ctx) : undefined)) + const renderLine = ( line: string, y: number, @@ -516,24 +646,64 @@ export function Logo() { off: number, frame: Frame, dusk: Frame, + state: IdleState | undefined, ): JSX.Element[] => { const shadow = tint(theme.background, ink, 0.25) const attrs = bold ? TextAttributes.BOLD : undefined return Array.from(line).map((char, i) => { - const h = field(off + i, y, frame) - const n = wave(off + i, y, frame, lit(char)) + h - const s = wave(off + i, y, dusk, false) + h - const p = lit(char) ? pick(off + i, y, frame) : 0 - const e = lit(char) ? trace(off + i, y, frame) : 0 - const b = lit(char) ? bloom(off + i, y, frame) : 0 - const q = shimmer(off + i, y, frame) + if (char === " ") { + return ( + + {char} + + ) + } + + const h = field(off + i, y, frame, ctx) + const charLit = lit(char) + // Sub-pixel sampling: cells are 2 pixels tall. Sample at top (y*2) and bottom (y*2+1) pixel rows. + const pulseTop = state ? 
idle(off + i, y * 2, frame, ctx, state) : { glow: 0, peak: 0, primary: 0 } + const pulseBot = state ? idle(off + i, y * 2 + 1, frame, ctx, state) : { glow: 0, peak: 0, primary: 0 } + const peakMixTop = charLit ? Math.min(1, pulseTop.peak) : 0 + const peakMixBot = charLit ? Math.min(1, pulseBot.peak) : 0 + const primaryMixTop = charLit ? Math.min(1, pulseTop.primary) : 0 + const primaryMixBot = charLit ? Math.min(1, pulseBot.primary) : 0 + // Layer primary tint first, then white peak on top — so the halo/tail pulls toward primary, + // while the bright core stays pure white + const inkTopTint = primaryMixTop > 0 ? tint(ink, theme.primary, primaryMixTop) : ink + const inkBotTint = primaryMixBot > 0 ? tint(ink, theme.primary, primaryMixBot) : ink + const inkTop = peakMixTop > 0 ? tint(inkTopTint, PEAK, peakMixTop) : inkTopTint + const inkBot = peakMixBot > 0 ? tint(inkBotTint, PEAK, peakMixBot) : inkBotTint + // For the non-peak-aware brightness channels, use the average of top/bot + const pulse = { + glow: (pulseTop.glow + pulseBot.glow) / 2, + peak: (pulseTop.peak + pulseBot.peak) / 2, + primary: (pulseTop.primary + pulseBot.primary) / 2, + } + const peakMix = charLit ? Math.min(1, pulse.peak) : 0 + const primaryMix = charLit ? Math.min(1, pulse.primary) : 0 + const inkPrimary = primaryMix > 0 ? tint(ink, theme.primary, primaryMix) : ink + const inkTinted = peakMix > 0 ? tint(inkPrimary, PEAK, peakMix) : inkPrimary + const shadowMixCfg = state?.cfg.shadowMix ?? shimmerConfig.shadowMix + const shadowMixTop = Math.min(1, pulseTop.peak * shadowMixCfg) + const shadowMixBot = Math.min(1, pulseBot.peak * shadowMixCfg) + const shadowTop = shadowMixTop > 0 ? tint(shadow, PEAK, shadowMixTop) : shadow + const shadowBot = shadowMixBot > 0 ? tint(shadow, PEAK, shadowMixBot) : shadow + const shadowMix = Math.min(1, pulse.peak * shadowMixCfg) + const shadowTinted = shadowMix > 0 ? 
tint(shadow, PEAK, shadowMix) : shadow + const n = wave(off + i, y, frame, charLit, ctx) + h + const s = wave(off + i, y, dusk, false, ctx) + h + const p = charLit ? pick(off + i, y, frame, ctx) : 0 + const e = charLit ? trace(off + i, y, frame, ctx) : 0 + const b = charLit ? bloom(off + i, y, frame, ctx) : 0 + const q = shimmer(off + i, y, frame, ctx) if (char === "_") { return ( @@ -545,8 +715,8 @@ export function Logo() { if (char === "^") { return ( @@ -557,34 +727,60 @@ export function Logo() { if (char === "~") { return ( - + ) } - if (char === " ") { + if (char === ",") { return ( - - {char} + + ▄ + + ) + } + + // Solid █: render as ▀ so the top pixel (fg) and bottom pixel (bg) can carry independent shimmer values + if (char === "█") { + return ( + + ▀ + + ) + } + + // ▀ top-half-lit: fg uses top-pixel sample, bg stays transparent/panel + if (char === "▀") { + return ( + + ▀ + + ) + } + + // ▄ bottom-half-lit: fg uses bottom-pixel sample + if (char === "▄") { + return ( + + ▄ ) } return ( - + {char} ) }) } - onCleanup(() => { - stop() - hum = false - Sound.dispose() - }) - const mouse = (evt: MouseEvent) => { if (!box) return if ((evt.type === "down" || evt.type === "drag") && evt.button === MouseButton.LEFT) { @@ -613,17 +809,28 @@ export function Logo() { position="absolute" top={0} left={0} - width={FULL[0]?.length ?? 0} - height={FULL.length} + width={ctx.FULL[0]?.length ?? 0} + height={ctx.FULL.length} zIndex={1} onMouse={mouse} /> - + {(line, index) => ( - {renderLine(line, index(), theme.textMuted, false, 0, frame(), dusk())} - {renderLine(logo.right[index()], index(), theme.text, true, LEFT + GAP, frame(), dusk())} + {renderLine(line, index(), props.ink ?? theme.textMuted, !!props.ink, 0, frame(), dusk(), idleState())} + + + {renderLine( + ctx.shape.right[index()], + index(), + props.ink ?? 
theme.text, + true, + ctx.LEFT + GAP, + frame(), + dusk(), + idleState(), + )} )} @@ -631,3 +838,9 @@ export function Logo() { ) } + +export function GoLogo() { + const { theme } = useTheme() + const base = tint(theme.background, theme.text, 0.62) + return +} diff --git a/packages/opencode/src/cli/cmd/tui/component/shimmer-config.ts b/packages/opencode/src/cli/cmd/tui/component/shimmer-config.ts new file mode 100644 index 0000000000..01bc136f5d --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/component/shimmer-config.ts @@ -0,0 +1,49 @@ +export type ShimmerConfig = { + period: number + rings: number + sweepFraction: number + coreWidth: number + coreAmp: number + softWidth: number + softAmp: number + tail: number + tailAmp: number + haloWidth: number + haloOffset: number + haloAmp: number + breathBase: number + noise: number + ambientAmp: number + ambientCenter: number + ambientWidth: number + shadowMix: number + primaryMix: number + originX: number + originY: number +} + +export const shimmerDefaults: ShimmerConfig = { + period: 4600, + rings: 2, + sweepFraction: 1, + coreWidth: 1.2, + coreAmp: 1.9, + softWidth: 10, + softAmp: 1.6, + tail: 5, + tailAmp: 0.64, + haloWidth: 4.3, + haloOffset: 0.6, + haloAmp: 0.16, + breathBase: 0.04, + noise: 0.1, + ambientAmp: 0.36, + ambientCenter: 0.5, + ambientWidth: 0.34, + shadowMix: 0.1, + primaryMix: 0.3, + originX: 4.5, + originY: 13.5, +} + +export const shimmerConfig: ShimmerConfig = { ...shimmerDefaults } diff --git a/packages/opencode/src/cli/logo.ts b/packages/opencode/src/cli/logo.ts index 44fb93c15b..a58a8cf995 100644 --- a/packages/opencode/src/cli/logo.ts +++ b/packages/opencode/src/cli/logo.ts @@ -3,4 +3,9 @@ export const logo = { right: [" ▄ ", "█▀▀▀ █▀▀█ █▀▀█ █▀▀█", "█___ █__█ █__█ █^^^", "▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀"], } -export const marks = "_^~" +export const go = { + left: [" ", "█▀▀▀", "█_^█", "▀▀▀▀"], + right: [" ", "█▀▀█", "█__█", "▀▀▀▀"], +} + +export const marks = "_^~," From 
fbbab9d6c8a03c4cd5bed0d13a85f52e3aca47ce Mon Sep 17 00:00:00 2001 From: Jay Date: Thu, 16 Apr 2026 23:31:00 -0400 Subject: [PATCH 120/201] feat(app): hide desktop titlebar tools behind settings (#19029) Co-authored-by: Brendan Allan Co-authored-by: Brendan Allan --- .../src/components/session/session-header.tsx | 105 ++++++----- .../app/src/components/settings-general.tsx | 74 ++++++++ packages/app/src/components/titlebar.tsx | 8 +- packages/app/src/context/settings.tsx | 30 +++ packages/app/src/env.d.ts | 6 +- packages/app/src/i18n/en.ts | 11 ++ .../src/pages/session/session-side-panel.tsx | 176 +++++++++--------- .../pages/session/use-session-commands.tsx | 21 ++- 8 files changed, 290 insertions(+), 141 deletions(-) diff --git a/packages/app/src/components/session/session-header.tsx b/packages/app/src/components/session/session-header.tsx index 7acfdfc374..021e5be67e 100644 --- a/packages/app/src/components/session/session-header.tsx +++ b/packages/app/src/components/session/session-header.tsx @@ -8,7 +8,7 @@ import { Spinner } from "@opencode-ai/ui/spinner" import { showToast } from "@opencode-ai/ui/toast" import { Tooltip, TooltipKeybind } from "@opencode-ai/ui/tooltip" import { getFilename } from "@opencode-ai/shared/util/path" -import { createEffect, createMemo, For, Show } from "solid-js" +import { createEffect, createMemo, createSignal, For, onMount, Show } from "solid-js" import { createStore } from "solid-js/store" import { Portal } from "solid-js/web" import { useCommand } from "@/context/command" @@ -16,6 +16,7 @@ import { useLanguage } from "@/context/language" import { useLayout } from "@/context/layout" import { usePlatform } from "@/context/platform" import { useServer } from "@/context/server" +import { useSettings } from "@/context/settings" import { useSync } from "@/context/sync" import { useTerminal } from "@/context/terminal" import { focusTerminalById } from "@/pages/session/helpers" @@ -134,6 +135,7 @@ export function SessionHeader() { const 
server = useServer() const platform = usePlatform() const language = useLanguage() + const settings = useSettings() const sync = useSync() const terminal = useTerminal() const { params, view } = useSessionLayout() @@ -151,6 +153,11 @@ export function SessionHeader() { }) const hotkey = createMemo(() => command.keybind("file.open")) const os = createMemo(() => detectOS(platform)) + const isDesktopBeta = platform.platform === "desktop" && import.meta.env.VITE_OPENCODE_CHANNEL === "beta" + const search = createMemo(() => !isDesktopBeta || settings.general.showSearch()) + const tree = createMemo(() => !isDesktopBeta || settings.general.showFileTree()) + const term = createMemo(() => !isDesktopBeta || settings.general.showTerminal()) + const status = createMemo(() => !isDesktopBeta || settings.general.showStatus()) const [exists, setExists] = createStore>>({ finder: true, @@ -262,12 +269,16 @@ export function SessionHeader() { .catch((err: unknown) => showRequestError(language, err)) } - const centerMount = createMemo(() => document.getElementById("opencode-titlebar-center")) - const rightMount = createMemo(() => document.getElementById("opencode-titlebar-right")) + const [centerMount, setCenterMount] = createSignal(null) + const [rightMount, setRightMount] = createSignal(null) + onMount(() => { + setCenterMount(document.getElementById("opencode-titlebar-center")) + setRightMount(document.getElementById("opencode-titlebar-right")) + }) return ( <> - + {(mount) => ( - + + + diff --git a/packages/app/src/components/settings-general.tsx b/packages/app/src/components/settings-general.tsx index b4ac061df4..c380fb69b3 100644 --- a/packages/app/src/components/settings-general.tsx +++ b/packages/app/src/components/settings-general.tsx @@ -106,6 +106,7 @@ export const SettingsGeneral: Component = () => { permission.disableAutoAccept(params.id, value) } + const desktop = createMemo(() => platform.platform === "desktop") const check = () => { if (!platform.checkUpdate) return @@ 
-279,6 +280,74 @@ export const SettingsGeneral: Component = () => { ) + const AdvancedSection = () => ( +
+

{language.t("settings.general.section.advanced")}

+ + + +
+ settings.general.setShowFileTree(checked)} + /> +
+
+ + +
+ settings.general.setShowNavigation(checked)} + /> +
+
+ + +
+ settings.general.setShowSearch(checked)} + /> +
+
+ + +
+ settings.general.setShowTerminal(checked)} + /> +
+
+ + +
+ settings.general.setShowStatus(checked)} + /> +
+
+
+
+ ) + const AppearanceSection = () => (

{language.t("settings.general.section.appearance")}

@@ -527,6 +596,7 @@ export const SettingsGeneral: Component = () => {
) + console.log(import.meta.env) return (
@@ -609,6 +679,10 @@ export const SettingsGeneral: Component = () => { ) }} + + + +
) diff --git a/packages/app/src/components/titlebar.tsx b/packages/app/src/components/titlebar.tsx index b7edb85ede..409fcbeff6 100644 --- a/packages/app/src/components/titlebar.tsx +++ b/packages/app/src/components/titlebar.tsx @@ -11,6 +11,7 @@ import { useLayout } from "@/context/layout" import { usePlatform } from "@/context/platform" import { useCommand } from "@/context/command" import { useLanguage } from "@/context/language" +import { useSettings } from "@/context/settings" import { applyPath, backPath, forwardPath } from "./titlebar-history" type TauriDesktopWindow = { @@ -40,6 +41,7 @@ export function Titlebar() { const platform = usePlatform() const command = useCommand() const language = useLanguage() + const settings = useSettings() const theme = useTheme() const navigate = useNavigate() const location = useLocation() @@ -78,6 +80,7 @@ export function Titlebar() { const canBack = createMemo(() => history.index > 0) const canForward = createMemo(() => history.index < history.stack.length - 1) const hasProjects = createMemo(() => layout.projects.list().length > 0) + const nav = createMemo(() => import.meta.env.VITE_OPENCODE_CHANNEL !== "beta" || settings.general.showNavigation()) const back = () => { const next = backPath(history) @@ -255,13 +258,12 @@ export function Titlebar() {
- +
-
+
- - - - {props.reviewCount()}{" "} - {language.t( - props.reviewCount() === 1 ? "session.review.change.one" : "session.review.change.other", - )} - - - {language.t("session.files.all")} - - - - - - - {language.t("common.loading")} - {language.t("common.loading.ellipsis")} -
- } - > + + + + {props.reviewCount()}{" "} + {language.t( + props.reviewCount() === 1 ? "session.review.change.one" : "session.review.change.other", + )} + + + {language.t("session.files.all")} + + + + + + + {language.t("common.loading")} + {language.t("common.loading.ellipsis")} +
+ } + > + props.focusReviewDiff(node.path)} + /> +
+ + + + + + {empty(language.t("session.files.empty"))} + props.focusReviewDiff(node.path)} + onFileClick={(node) => openTab(file.tab(node.path))} /> - - - {empty(props.empty())} - - - - - {empty(language.t("session.files.empty"))} - - openTab(file.tab(node.path))} - /> - - - - - - -
props.size.start()}> - { - props.size.touch() - layout.fileTree.resize(width) - }} - /> + + + +
-
- + +
props.size.start()}> + { + props.size.touch() + layout.fileTree.resize(width) + }} + /> +
+
+ + diff --git a/packages/app/src/pages/session/use-session-commands.tsx b/packages/app/src/pages/session/use-session-commands.tsx index b5d2544636..9bbeb10bde 100644 --- a/packages/app/src/pages/session/use-session-commands.tsx +++ b/packages/app/src/pages/session/use-session-commands.tsx @@ -7,8 +7,10 @@ import { useLanguage } from "@/context/language" import { useLayout } from "@/context/layout" import { useLocal } from "@/context/local" import { usePermission } from "@/context/permission" +import { usePlatform } from "@/context/platform" import { usePrompt } from "@/context/prompt" import { useSDK } from "@/context/sdk" +import { useSettings } from "@/context/settings" import { useSync } from "@/context/sync" import { useTerminal } from "@/context/terminal" import { showToast } from "@opencode-ai/ui/toast" @@ -39,8 +41,10 @@ export const useSessionCommands = (actions: SessionCommandContext) => { const language = useLanguage() const local = useLocal() const permission = usePermission() + const platform = usePlatform() const prompt = usePrompt() const sdk = useSDK() + const settings = useSettings() const sync = useSync() const terminal = useTerminal() const layout = useLayout() @@ -66,6 +70,7 @@ export const useSessionCommands = (actions: SessionCommandContext) => { }) const activeFileTab = tabState.activeFileTab const closableTab = tabState.closableTab + const shown = () => platform.platform !== "desktop" || settings.general.showFileTree() const idle = { type: "idle" as const } const status = () => sync.data.session_status[params.id ?? ""] ?? idle @@ -457,12 +462,16 @@ export const useSessionCommands = (actions: SessionCommandContext) => { keybind: "mod+shift+r", onSelect: () => view().reviewPanel.toggle(), }), - viewCommand({ - id: "fileTree.toggle", - title: language.t("command.fileTree.toggle"), - keybind: "mod+\\", - onSelect: () => layout.fileTree.toggle(), - }), + ...(shown() + ? 
[ + viewCommand({ + id: "fileTree.toggle", + title: language.t("command.fileTree.toggle"), + keybind: "mod+\\", + onSelect: () => layout.fileTree.toggle(), + }), + ] + : []), viewCommand({ id: "input.focus", title: language.t("command.input.focus"), From 0bedea52b19515c69057866ec958769004147f66 Mon Sep 17 00:00:00 2001 From: James Long Date: Thu, 16 Apr 2026 23:35:36 -0400 Subject: [PATCH 121/201] fix(tui): tui resiliency when workspace is dead, disable directory filter in session list (#23013) --- .opencode/opencode.jsonc | 1 + bun.lock | 347 +++++++++++++++++- daytona.ts | 199 ++++++++++ package.json | 1 + .../src/cli/cmd/tui/context/project.tsx | 19 +- .../opencode/src/cli/cmd/tui/context/sync.tsx | 32 +- .../src/cli/cmd/tui/routes/session/index.tsx | 39 +- packages/opencode/src/session/session.ts | 7 +- .../test/cli/tui/sync-provider.test.tsx | 280 -------------- 9 files changed, 595 insertions(+), 330 deletions(-) create mode 100644 daytona.ts delete mode 100644 packages/opencode/test/cli/tui/sync-provider.test.tsx diff --git a/.opencode/opencode.jsonc b/.opencode/opencode.jsonc index 8380f7f719..91b7abdbc6 100644 --- a/.opencode/opencode.jsonc +++ b/.opencode/opencode.jsonc @@ -10,6 +10,7 @@ "packages/opencode/migration/*": "deny", }, }, + "plugin": ["../daytona.ts"], "mcp": {}, "tools": { "github-triage": false, diff --git a/bun.lock b/bun.lock index 63232cb29e..7c562c4d4b 100644 --- a/bun.lock +++ b/bun.lock @@ -6,6 +6,7 @@ "name": "opencode", "dependencies": { "@aws-sdk/client-s3": "3.933.0", + "@daytona/sdk": "0.167.0", "@opencode-ai/plugin": "workspace:*", "@opencode-ai/script": "workspace:*", "@opencode-ai/sdk": "workspace:*", @@ -864,6 +865,8 @@ "@aws-sdk/credential-providers": ["@aws-sdk/credential-providers@3.993.0", "", { "dependencies": { "@aws-sdk/client-cognito-identity": "3.993.0", "@aws-sdk/core": "^3.973.11", "@aws-sdk/credential-provider-cognito-identity": "^3.972.3", "@aws-sdk/credential-provider-env": "^3.972.9", 
"@aws-sdk/credential-provider-http": "^3.972.11", "@aws-sdk/credential-provider-ini": "^3.972.9", "@aws-sdk/credential-provider-login": "^3.972.9", "@aws-sdk/credential-provider-node": "^3.972.10", "@aws-sdk/credential-provider-process": "^3.972.9", "@aws-sdk/credential-provider-sso": "^3.972.9", "@aws-sdk/credential-provider-web-identity": "^3.972.9", "@aws-sdk/nested-clients": "3.993.0", "@aws-sdk/types": "^3.973.1", "@smithy/config-resolver": "^4.4.6", "@smithy/core": "^3.23.2", "@smithy/credential-provider-imds": "^4.2.8", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-1M/nukgPSLqe9krzOKHnE8OylUaKAiokAV3xRLdeExVHcRE7WG5uzCTKWTj1imKvPjDqXq/FWhlbbdWIn7xIwA=="], + "@aws-sdk/lib-storage": ["@aws-sdk/lib-storage@3.1031.0", "", { "dependencies": { "@smithy/middleware-endpoint": "^4.4.30", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "buffer": "5.6.0", "events": "3.3.0", "stream-browserify": "3.0.0", "tslib": "^2.6.2" }, "peerDependencies": { "@aws-sdk/client-s3": "^3.1031.0" } }, "sha512-QC6MwdeXWZt9wnOV6/rgxSnVUcDeoHr5TCnCm0UPuSQOwuLX1/2QJa6oo7PdfdHg+i3G+VOnmG8QZ3tYxvBGRA=="], + "@aws-sdk/middleware-bucket-endpoint": ["@aws-sdk/middleware-bucket-endpoint@3.930.0", "", { "dependencies": { "@aws-sdk/types": "3.930.0", "@aws-sdk/util-arn-parser": "3.893.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-cnCLWeKPYgvV4yRYPFH6pWMdUByvu2cy2BAlfsPpvnm4RaVioztyvxmQj5PmVN5fvWs5w/2d6U7le8X9iye2sA=="], "@aws-sdk/middleware-expect-continue": ["@aws-sdk/middleware-expect-continue@3.930.0", "", { "dependencies": { "@aws-sdk/types": "3.930.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, 
"sha512-5HEQ+JU4DrLNWeY27wKg/jeVa8Suy62ivJHOSUf6e6hZdVIMx0h/kXS1fHEQNNiLu2IzSEP/bFXsKBaW7x7s0g=="], @@ -1048,6 +1051,12 @@ "@ctrl/tinycolor": ["@ctrl/tinycolor@4.2.0", "", {}, "sha512-kzyuwOAQnXJNLS9PSyrk0CWk35nWJW/zl/6KvnTBMFK65gm7U1/Z5BqjxeapjZCIhQcM/DsrEmcbRwDyXyXK4A=="], + "@daytona/api-client": ["@daytona/api-client@0.167.0", "", { "dependencies": { "axios": "^1.6.1" } }, "sha512-2H1NhSu5GVVfg2oU9Mgt3VZRz7OUqACjBSb9GNNyC7uGPCnQh2C93Imqj2wfyUx/XF346hYpk+Nee+/NGqn3KA=="], + + "@daytona/sdk": ["@daytona/sdk@0.167.0", "", { "dependencies": { "@aws-sdk/client-s3": "^3.787.0", "@aws-sdk/lib-storage": "^3.798.0", "@daytona/api-client": "0.167.0", "@daytona/toolbox-api-client": "0.167.0", "@iarna/toml": "^2.2.5", "@opentelemetry/api": "^1.9.0", "@opentelemetry/exporter-trace-otlp-http": "^0.207.0", "@opentelemetry/instrumentation-http": "^0.207.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-node": "^0.207.0", "@opentelemetry/sdk-trace-base": "^2.2.0", "@opentelemetry/semantic-conventions": "^1.37.0", "axios": "^1.13.5", "busboy": "^1.0.0", "dotenv": "^17.0.1", "expand-tilde": "^2.0.2", "fast-glob": "^3.3.0", "form-data": "^4.0.4", "isomorphic-ws": "^5.0.0", "pathe": "^2.0.3", "shell-quote": "^1.8.2", "tar": "^7.5.11" } }, "sha512-Xsb1bJ52tahpCiq2pEJdFVqqxLQoXO8laa2BFJFrSyLnGBMJXMaEormcgZfoOC9J6mVo9oOqTABLuoT+4DP11A=="], + + "@daytona/toolbox-api-client": ["@daytona/toolbox-api-client@0.167.0", "", { "dependencies": { "axios": "^1.6.1" } }, "sha512-FzSh6UVA8ptktNrRhIJCVfZMOcbXlVazTXej+YQ1rEj7L/PnGCQlBbT296xYj94C3wXQUTIxTZVkWzB4vcDTxw=="], + "@develar/schema-utils": ["@develar/schema-utils@2.6.5", "", { "dependencies": { "ajv": "^6.12.0", "ajv-keywords": "^3.4.1" } }, "sha512-0cp4PsWQ/9avqTVMCtZ+GirikIA36ikvjtHweU4/j8yLtgObI0+JUPhYFScgwlteveGB1rt3Cm8UhN04XayDig=="], "@dimforge/rapier2d-simd-compat": ["@dimforge/rapier2d-simd-compat@0.17.3", "", {}, 
"sha512-bijvwWz6NHsNj5e5i1vtd3dU2pDhthSaTUZSh14DUGGKJfw8eMnlWZsxwHBxB/a3AXVNDjL9abuHw1k9FGR+jg=="], @@ -1198,6 +1207,10 @@ "@graphql-typed-document-node/core": ["@graphql-typed-document-node/core@3.2.0", "", { "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ=="], + "@grpc/grpc-js": ["@grpc/grpc-js@1.14.3", "", { "dependencies": { "@grpc/proto-loader": "^0.8.0", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA=="], + + "@grpc/proto-loader": ["@grpc/proto-loader@0.8.0", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.5.3", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ=="], + "@happy-dom/global-registrator": ["@happy-dom/global-registrator@20.0.11", "", { "dependencies": { "@types/node": "^20.0.0", "happy-dom": "^20.0.11" } }, "sha512-GqNqiShBT/lzkHTMC/slKBrvN0DsD4Di8ssBk4aDaVgEn+2WMzE6DXxq701ndSXj7/0cJ8mNT71pM7Bnrr6JRw=="], "@hey-api/codegen-core": ["@hey-api/codegen-core@0.5.5", "", { "dependencies": { "@hey-api/types": "0.1.2", "ansi-colors": "4.1.3", "c12": "3.3.3", "color-support": "1.1.3" }, "peerDependencies": { "typescript": ">=5.5.3" } }, "sha512-f2ZHucnA2wBGAY8ipB4wn/mrEYW+WUxU2huJmUvfDO6AE2vfILSHeF3wCO39Pz4wUYPoAWZByaauftLrOfC12Q=="], @@ -1216,6 +1229,8 @@ "@hono/zod-validator": ["@hono/zod-validator@0.4.2", "", { "peerDependencies": { "hono": ">=3.9.0", "zod": "^3.19.1" } }, "sha512-1rrlBg+EpDPhzOV4hT9pxr5+xDVmKuz6YJl+la7VCwK6ass5ldyKm5fD+umJdV2zhHD6jROoCCv8NbTwyfhT0g=="], + "@iarna/toml": ["@iarna/toml@2.2.5", "", {}, 
"sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg=="], + "@ibm/plex": ["@ibm/plex@6.4.1", "", { "dependencies": { "@ibm/telemetry-js": "^1.5.1" } }, "sha512-fnsipQywHt3zWvsnlyYKMikcVI7E2fEwpiPnIHFqlbByXVfQfANAAeJk1IV4mNnxhppUIDlhU0TzwYwL++Rn2g=="], "@ibm/telemetry-js": ["@ibm/telemetry-js@1.11.0", "", { "bin": { "ibmtelemetry": "dist/collect.js" } }, "sha512-RO/9j+URJnSfseWg9ZkEX9p+a3Ousd33DBU7rOafoZB08RqdzxFVYJ2/iM50dkBuD0o7WX7GYt1sLbNgCoE+pA=="], @@ -1346,6 +1361,8 @@ "@js-joda/core": ["@js-joda/core@5.7.0", "", {}, "sha512-WBu4ULVVxySLLzK1Ppq+OdfP+adRS4ntmDQT915rzDJ++i95gc2jZkM5B6LWEAwN3lGXpfie3yPABozdD3K3Vg=="], + "@js-sdsl/ordered-map": ["@js-sdsl/ordered-map@4.4.2", "", {}, "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="], + "@js-temporal/polyfill": ["@js-temporal/polyfill@0.5.1", "", { "dependencies": { "jsbi": "^4.3.0" } }, "sha512-hloP58zRVCRSpgDxmqCWJNlizAlUgJFqG2ypq79DCvyv9tHjRYMDOcPFjzfl/A1/YxDvRCZz8wvZvmapQnKwFQ=="], "@jsdevtools/ono": ["@jsdevtools/ono@7.1.3", "", {}, "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg=="], @@ -1574,24 +1591,56 @@ "@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="], - "@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.214.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-40lSJeqYO8Uz2Yj7u94/SJWE/wONa7rmMKjI1ZcIjgf3MHNHv1OZUCrCETGuaRF62d5pQD1wKIW+L4lmSMTzZA=="], + "@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-lAb0jQRVyleQQGiuuvCOTDVspc14nx6XJjP4FspJ1sNARo3Regq4ZZbrc3rN4b1TYSuUCvgH+UXUPug4SLOqEQ=="], "@opentelemetry/context-async-hooks": ["@opentelemetry/context-async-hooks@2.6.1", "", { "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, 
"sha512-XHzhwRNkBpeP8Fs/qjGrAf9r9PRv67wkJQ/7ZPaBQQ68DYlTBBx5MF9LvPx7mhuXcDessKK2b+DcxqwpgkcivQ=="], "@opentelemetry/core": ["@opentelemetry/core@2.6.1", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-8xHSGWpJP9wBxgBpnqGL0R3PbdWQndL1Qp50qrg71+B28zK5OQmUgcDKLJgzyAAV38t4tOyLMGDD60LneR5W8g=="], + "@opentelemetry/exporter-logs-otlp-grpc": ["@opentelemetry/exporter-logs-otlp-grpc@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-grpc-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/sdk-logs": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-K92RN+kQGTMzFDsCzsYNGqOsXRUnko/Ckk+t/yPJao72MewOLgBUTWVHhebgkNfRCYqDz1v3K0aPT9OJkemvgg=="], + + "@opentelemetry/exporter-logs-otlp-http": ["@opentelemetry/exporter-logs-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/sdk-logs": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-JpOh7MguEUls8eRfkVVW3yRhClo5b9LqwWTOg8+i4gjr/+8eiCtquJnC7whvpTIGyff06cLZ2NsEj+CVP3Mjeg=="], + + "@opentelemetry/exporter-logs-otlp-proto": ["@opentelemetry/exporter-logs-otlp-proto@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-RQJEV/K6KPbQrIUbsrRkEe0ufks1o5OGLHy6jbDD8tRjeCsbFHWfg99lYBRqBV33PYZJXsigqMaAbjWGTFYzLw=="], + + 
"@opentelemetry/exporter-metrics-otlp-grpc": ["@opentelemetry/exporter-metrics-otlp-grpc@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/exporter-metrics-otlp-http": "0.207.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-grpc-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-6flX89W54gkwmqYShdcTBR1AEF5C1Ob0O8pDgmLPikTKyEv27lByr9yBmO5WrP0+5qJuNPHrLfgFQFYi6npDGA=="], + + "@opentelemetry/exporter-metrics-otlp-http": ["@opentelemetry/exporter-metrics-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-fG8FAJmvXOrKXGIRN8+y41U41IfVXxPRVwyB05LoMqYSjugx/FSBkMZUZXUT/wclTdmBKtS5MKoi0bEKkmRhSw=="], + + "@opentelemetry/exporter-metrics-otlp-proto": ["@opentelemetry/exporter-metrics-otlp-proto@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/exporter-metrics-otlp-http": "0.207.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-kDBxiTeQjaRlUQzS1COT9ic+et174toZH6jxaVuVAvGqmxOkgjpLOjrI5ff8SMMQE69r03L3Ll3nPKekLopLwg=="], + + "@opentelemetry/exporter-prometheus": ["@opentelemetry/exporter-prometheus@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, 
"sha512-Y5p1s39FvIRmU+F1++j7ly8/KSqhMmn6cMfpQqiDCqDjdDHwUtSq0XI0WwL3HYGnZeaR/VV4BNmsYQJ7GAPrhw=="], + + "@opentelemetry/exporter-trace-otlp-grpc": ["@opentelemetry/exporter-trace-otlp-grpc@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-grpc-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-7u2ZmcIx6D4KG/+5np4X2qA0o+O0K8cnUDhR4WI/vr5ZZ0la9J9RG+tkSjC7Yz+2XgL6760gSIM7/nyd3yaBLA=="], + "@opentelemetry/exporter-trace-otlp-http": ["@opentelemetry/exporter-trace-otlp-http@0.214.0", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/otlp-exporter-base": "0.214.0", "@opentelemetry/otlp-transformer": "0.214.0", "@opentelemetry/resources": "2.6.1", "@opentelemetry/sdk-trace-base": "2.6.1" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-kIN8nTBMgV2hXzV/a20BCFilPZdAIMYYJGSgfMMRm/Xa+07y5hRDS2Vm12A/z8Cdu3Sq++ZvJfElokX2rkgGgw=="], - "@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.214.0", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/otlp-transformer": "0.214.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-u1Gdv0/E9wP+apqWf7Wv2npXmgJtxsW2XL0TEv9FZloTZRuMBKmu8cYVXwS4Hm3q/f/3FuCnPTgiwYvIqRSpRg=="], + "@opentelemetry/exporter-trace-otlp-proto": ["@opentelemetry/exporter-trace-otlp-proto@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-ruUQB4FkWtxHjNmSXjrhmJZFvyMm+tBzHyMm7YPQshApy4wvZUTcrpPyP/A/rCl/8M4BwoVIZdiwijMdbZaq4w=="], + + 
"@opentelemetry/exporter-zipkin": ["@opentelemetry/exporter-zipkin@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": "^1.0.0" } }, "sha512-VV4QzhGCT7cWrGasBWxelBjqbNBbyHicWWS/66KoZoe9BzYwFB72SH2/kkc4uAviQlO8iwv2okIJy+/jqqEHTg=="], + + "@opentelemetry/instrumentation": ["@opentelemetry/instrumentation@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "import-in-the-middle": "^2.0.0", "require-in-the-middle": "^8.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-y6eeli9+TLKnznrR8AZlQMSJT7wILpXH+6EYq5Vf/4Ao+huI7EedxQHwRgVUOMLFbe7VFDvHJrX9/f4lcwnJsA=="], + + "@opentelemetry/instrumentation-http": ["@opentelemetry/instrumentation-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/instrumentation": "0.207.0", "@opentelemetry/semantic-conventions": "^1.29.0", "forwarded-parse": "2.1.2" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-FC4i5hVixTzuhg4SV2ycTEAYx+0E2hm+GwbdoVPSA6kna0pPVI4etzaA9UkpJ9ussumQheFXP6rkGIaFJjMxsw=="], + + "@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-transformer": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-4RQluMVVGMrHok/3SVeSJ6EnRNkA2MINcX88sh+d/7DjGUrewW/WT88IsMEci0wUM+5ykTpPPNbEOoW+jwHnbw=="], + + "@opentelemetry/otlp-grpc-exporter-base": ["@opentelemetry/otlp-grpc-exporter-base@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, 
"sha512-eKFjKNdsPed4q9yYqeI5gBTLjXxDM/8jwhiC0icw3zKxHVGBySoDsed5J5q/PGY/3quzenTr3FiTxA3NiNT+nw=="], "@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.214.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.214.0", "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1", "@opentelemetry/sdk-logs": "0.214.0", "@opentelemetry/sdk-metrics": "2.6.1", "@opentelemetry/sdk-trace-base": "2.6.1", "protobufjs": "^7.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-DSaYcuBRh6uozfsWN3R8HsN0yDhCuWP7tOFdkUOVaWD1KVJg8m4qiLUsg/tNhTLS9HUYUcwNpwL2eroLtsZZ/w=="], - "@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + "@opentelemetry/propagator-b3": ["@opentelemetry/propagator-b3@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-9CrbTLFi5Ee4uepxg2qlpQIozoJuoAZU5sKMx0Mn7Oh+p7UrgCiEV6C02FOxxdYVRRFQVCinYR8Kf6eMSQsIsw=="], + + "@opentelemetry/propagator-jaeger": ["@opentelemetry/propagator-jaeger@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FfeOHOrdhiNzecoB1jZKp2fybqmqMPJUXe2ZOydP7QzmTPYcfPeuaclTLYVhK3HyJf71kt8sTl92nV4YIaLaKA=="], + + "@opentelemetry/resources": ["@opentelemetry/resources@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A=="], "@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.214.0", "", { "dependencies": { 
"@opentelemetry/api-logs": "0.214.0", "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-zf6acnScjhsaBUU22zXZ/sLWim1dfhUAbGXdMmHmNG3LfBnQ3DKsOCITb2IZwoUsNNMTogqFKBnlIPPftUgGwA=="], "@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-9t9hJHX15meBy2NmTJxL+NJfXmnausR2xUDvE19XQce0Qi/GBtDGamU8nS1RMbdgDmhgpm3VaOu2+fiS/SfTpQ=="], + "@opentelemetry/sdk-node": ["@opentelemetry/sdk-node@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/exporter-logs-otlp-grpc": "0.207.0", "@opentelemetry/exporter-logs-otlp-http": "0.207.0", "@opentelemetry/exporter-logs-otlp-proto": "0.207.0", "@opentelemetry/exporter-metrics-otlp-grpc": "0.207.0", "@opentelemetry/exporter-metrics-otlp-http": "0.207.0", "@opentelemetry/exporter-metrics-otlp-proto": "0.207.0", "@opentelemetry/exporter-prometheus": "0.207.0", "@opentelemetry/exporter-trace-otlp-grpc": "0.207.0", "@opentelemetry/exporter-trace-otlp-http": "0.207.0", "@opentelemetry/exporter-trace-otlp-proto": "0.207.0", "@opentelemetry/exporter-zipkin": "2.2.0", "@opentelemetry/instrumentation": "0.207.0", "@opentelemetry/propagator-b3": "2.2.0", "@opentelemetry/propagator-jaeger": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "@opentelemetry/sdk-trace-node": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-hnRsX/M8uj0WaXOBvFenQ8XsE8FLVh2uSnn1rkWu4mx+qu7EKGUZvZng6y/95cyzsqOfiaDDr08Ek4jppkIDNg=="], + "@opentelemetry/sdk-trace-base": 
["@opentelemetry/sdk-trace-base@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-r86ut4T1e8vNwB35CqCcKd45yzqH6/6Wzvpk2/cZB8PsPLlZFTvrh8yfOS3CYZYcUmAx4hHTZJ8AO8Dj8nrdhw=="], "@opentelemetry/sdk-trace-node": ["@opentelemetry/sdk-trace-node@2.6.1", "", { "dependencies": { "@opentelemetry/context-async-hooks": "2.6.1", "@opentelemetry/core": "2.6.1", "@opentelemetry/sdk-trace-base": "2.6.1" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-Hh2i4FwHWRFhnO2Q/p6svMxy8MPsNCG0uuzUY3glqm0rwM0nQvbTO1dXSp9OqQoTKXcQzaz9q1f65fsurmOhNw=="], @@ -2506,6 +2555,8 @@ "acorn": ["acorn@8.16.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw=="], + "acorn-import-attributes": ["acorn-import-attributes@1.9.5", "", { "peerDependencies": { "acorn": "^8" } }, "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ=="], + "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], "acorn-walk": ["acorn-walk@8.3.2", "", {}, "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A=="], @@ -2728,6 +2779,8 @@ "bundle-name": ["bundle-name@4.1.0", "", { "dependencies": { "run-applescript": "^7.0.0" } }, "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q=="], + "busboy": ["busboy@1.6.0", "", { "dependencies": { "streamsearch": "^1.1.0" } }, "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA=="], + "bytes": ["bytes@3.1.2", "", {}, 
"sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], "c12": ["c12@3.3.3", "", { "dependencies": { "chokidar": "^5.0.0", "confbox": "^0.2.2", "defu": "^6.1.4", "dotenv": "^17.2.3", "exsolve": "^1.0.8", "giget": "^2.0.0", "jiti": "^2.6.1", "ohash": "^2.0.11", "pathe": "^2.0.3", "perfect-debounce": "^2.0.0", "pkg-types": "^2.3.0", "rc9": "^2.1.2" }, "peerDependencies": { "magicast": "*" }, "optionalPeers": ["magicast"] }, "sha512-750hTRvgBy5kcMNPdh95Qo+XUBeGo8C7nsKSmedDmaQI+E0r82DwHeM6vBewDe4rGFbnxoa4V9pw+sPh5+Iz8Q=="], @@ -2788,6 +2841,8 @@ "citty": ["citty@0.1.6", "", { "dependencies": { "consola": "^3.2.3" } }, "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ=="], + "cjs-module-lexer": ["cjs-module-lexer@2.2.0", "", {}, "sha512-4bHTS2YuzUvtoLjdy+98ykbNB5jS0+07EvFNXerqZQJ89F7DI6ET7OQo/HJuW6K0aVsKA9hj9/RVb2kQVOrPDQ=="], + "classnames": ["classnames@2.3.2", "", {}, "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw=="], "clean-css": ["clean-css@5.3.3", "", { "dependencies": { "source-map": "~0.6.0" } }, "sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg=="], @@ -3000,7 +3055,7 @@ "dot-prop": ["dot-prop@8.0.2", "", { "dependencies": { "type-fest": "^3.8.0" } }, "sha512-xaBe6ZT4DHPkg0k4Ytbvn5xoxgpG0jOS1dYxSOwAHPuNLjP3/OzN0gH55SrLqpx8cBfSaVt91lXYkApjb+nYdQ=="], - "dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="], + "dotenv": ["dotenv@17.4.2", "", {}, "sha512-nI4U3TottKAcAD9LLud4Cb7b2QztQMUEfHbvhTH09bqXTxnSie8WnjPALV/WMCrJZ6UV/qHJ6L03OqO3LcdYZw=="], "dotenv-expand": ["dotenv-expand@11.0.7", "", { "dependencies": { "dotenv": "^16.4.5" } }, "sha512-zIHwmZPRshsCdpMDyVsqGmgyP0yT8GAgXUnkdAoJisxvf33k7yO6OuoKmcTGuXPWSsm8Oh88nZicRLA9Y0rUeA=="], @@ -3152,6 +3207,8 @@ "exit-hook": ["exit-hook@2.2.1", "", {}, 
"sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw=="], + "expand-tilde": ["expand-tilde@2.0.2", "", { "dependencies": { "homedir-polyfill": "^1.0.1" } }, "sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw=="], + "expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="], "exponential-backoff": ["exponential-backoff@3.1.3", "", {}, "sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA=="], @@ -3254,6 +3311,8 @@ "forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="], + "forwarded-parse": ["forwarded-parse@2.1.2", "", {}, "sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw=="], + "fraction.js": ["fraction.js@5.3.4", "", {}, "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ=="], "framer-motion": ["framer-motion@8.5.5", "", { "dependencies": { "@motionone/dom": "^10.15.3", "hey-listen": "^1.0.8", "tslib": "^2.4.0" }, "optionalDependencies": { "@emotion/is-prop-valid": "^0.8.2" }, "peerDependencies": { "react": "^18.0.0", "react-dom": "^18.0.0" } }, "sha512-5IDx5bxkjWHWUF3CVJoSyUVOtrbAxtzYBBowRE2uYI/6VYhkEBD+rbTHEGuUmbGHRj6YqqSfoG7Aa1cLyWCrBA=="], @@ -3410,6 +3469,8 @@ "hey-listen": ["hey-listen@1.0.8", "", {}, "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q=="], + "homedir-polyfill": ["homedir-polyfill@1.0.3", "", { "dependencies": { "parse-passwd": "^1.0.0" } }, "sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA=="], + "hono": ["hono@4.10.7", "", {}, "sha512-icXIITfw/07Q88nLSkB9aiUrd8rYzSweK681Kjo/TSggaGbOX4RRyxxm71v+3PC8C/j+4rlxGeoTRxQDkaJkUw=="], "hono-openapi": ["hono-openapi@1.1.2", "", { 
"peerDependencies": { "@hono/standard-validator": "^0.2.0", "@standard-community/standard-json": "^0.3.5", "@standard-community/standard-openapi": "^0.2.9", "@types/json-schema": "^7.0.15", "hono": "^4.8.3", "openapi-types": "^12.1.3" }, "optionalPeers": ["@hono/standard-validator", "hono"] }, "sha512-toUcO60MftRBxqcVyxsHNYs2m4vf4xkQaiARAucQx3TiBPDtMNNkoh+C4I1vAretQZiGyaLOZNWn1YxfSyUA5g=="], @@ -3464,6 +3525,8 @@ "immer": ["immer@11.1.4", "", {}, "sha512-XREFCPo6ksxVzP4E0ekD5aMdf8WMwmdNaz6vuvxgI40UaEiu6q3p8X52aU6GdyvLY3XXX/8R7JOTXStz/nBbRw=="], + "import-in-the-middle": ["import-in-the-middle@2.0.6", "", { "dependencies": { "acorn": "^8.15.0", "acorn-import-attributes": "^1.9.5", "cjs-module-lexer": "^2.2.0", "module-details-from-path": "^1.0.4" } }, "sha512-3vZV3jX0XRFW3EJDTwzWoZa+RH1b8eTTx6YOCjglrLyPuepwoBti1k3L2dKwdCUrnVEfc5CuRuGstaC/uQJJaw=="], + "import-local": ["import-local@3.2.0", "", { "dependencies": { "pkg-dir": "^4.2.0", "resolve-cwd": "^3.0.0" }, "bin": { "import-local-fixture": "fixtures/cli.js" } }, "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA=="], "import-meta-resolve": ["import-meta-resolve@4.2.0", "", {}, "sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg=="], @@ -3726,6 +3789,8 @@ "lodash": ["lodash@4.18.1", "", {}, "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q=="], + "lodash.camelcase": ["lodash.camelcase@4.3.0", "", {}, "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="], + "lodash.defaults": ["lodash.defaults@4.2.0", "", {}, "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="], "lodash.escaperegexp": ["lodash.escaperegexp@4.1.2", "", {}, "sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw=="], @@ -3956,6 +4021,8 @@ "mkdirp": ["mkdirp@0.5.6", "", { "dependencies": { 
"minimist": "^1.2.6" }, "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw=="], + "module-details-from-path": ["module-details-from-path@1.0.4", "", {}, "sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w=="], + "morphdom": ["morphdom@2.7.8", "", {}, "sha512-D/fR4xgGUyVRbdMGU6Nejea1RFzYxYtyurG4Fbv2Fi/daKlWKuXGLOdXtl+3eIwL110cI2hz1ZojGICjjFLgTg=="], "motion": ["motion@12.34.5", "", { "dependencies": { "framer-motion": "^12.34.5", "tslib": "^2.4.0" }, "peerDependencies": { "@emotion/is-prop-valid": "*", "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0" }, "optionalPeers": ["@emotion/is-prop-valid", "react", "react-dom"] }, "sha512-N06NLJ9IeBHeielRqIvYvjPfXuRdyTxa+9++BgpGa+hY2D7TcMkI6QzV3jaRuv0aZRXgMa7cPy9YcBUBisPzAQ=="], @@ -4162,6 +4229,8 @@ "parse-latin": ["parse-latin@7.0.0", "", { "dependencies": { "@types/nlcst": "^2.0.0", "@types/unist": "^3.0.0", "nlcst-to-string": "^4.0.0", "unist-util-modify-children": "^4.0.0", "unist-util-visit-children": "^3.0.0", "vfile": "^6.0.0" } }, "sha512-mhHgobPPua5kZ98EF4HWiH167JWBfl4pvAIXXdbaVohtK7a6YBOy56kvhCqduqyo/f3yrHFWmqmiMg/BkBkYYQ=="], + "parse-passwd": ["parse-passwd@1.0.0", "", {}, "sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q=="], + "parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="], "parse5-htmlparser2-tree-adapter": ["parse5-htmlparser2-tree-adapter@7.1.0", "", { "dependencies": { "domhandler": "^5.0.3", "parse5": "^7.0.0" } }, "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g=="], @@ -4442,6 +4511,8 @@ "require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="], + 
"require-in-the-middle": ["require-in-the-middle@8.0.1", "", { "dependencies": { "debug": "^4.3.5", "module-details-from-path": "^1.0.3" } }, "sha512-QT7FVMXfWOYFbeRBF6nu+I6tr2Tf3u0q8RIEjNob/heKY/nh7drD/k7eeMFmSQgnTtCzLDcCu/XEnpW2wk4xCQ=="], + "resedit": ["resedit@1.7.2", "", { "dependencies": { "pe-library": "^0.4.1" } }, "sha512-vHjcY2MlAITJhC0eRD/Vv8Vlgmu9Sd3LX9zZvtGzU5ZImdTN3+d6e/4mnTyV8vEbyf1sgNIrWxhWlrys52OkEA=="], "reselect": ["reselect@4.1.8", "", {}, "sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ=="], @@ -4684,8 +4755,12 @@ "storybook-solidjs-vite": ["storybook-solidjs-vite@10.0.12", "", { "dependencies": { "@joshwooding/vite-plugin-react-docgen-typescript": "^0.7.0", "@storybook/builder-vite": "^10.3.1", "@storybook/global": "^5.0.0", "vite-plugin-solid": "^2.11.11" }, "peerDependencies": { "solid-js": "^1.9.0", "storybook": "^0.0.0-0 || ^10.0.0", "typescript": "^4.0.0 || ^5.0.0 || ^6.0.0", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0" }, "optionalPeers": ["typescript"] }, "sha512-KfKhJRdxbhFLHkBzLKSEk5sO2M/+KV9cdpki5Xdl5pwNP8kcoQnZ3b/okZk8dMRV6x19j86bKc7zDfc5bPSMwA=="], + "stream-browserify": ["stream-browserify@3.0.0", "", { "dependencies": { "inherits": "~2.0.4", "readable-stream": "^3.5.0" } }, "sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA=="], + "stream-replace-string": ["stream-replace-string@2.0.0", "", {}, "sha512-TlnjJ1C0QrmxRNrON00JvaFFlNh5TTG00APw23j74ET7gkQpTASi6/L2fuiav8pzK715HXtUeClpBTw2NPSn6w=="], + "streamsearch": ["streamsearch@1.1.0", "", {}, "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg=="], + "streamx": ["streamx@2.25.0", "", { "dependencies": { "events-universal": "^1.0.0", "fast-fifo": "^1.3.2", "text-decoder": "^1.1.0" } }, "sha512-0nQuG6jf1w+wddNEEXCF4nTg3LtufWINB5eFEN+5TNZW7KWJp6x87+JFL43vaAUPyCfH1wID+mNVyW6OHtFamg=="], "string-width": ["string-width@7.2.0", "", { "dependencies": { 
"emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], @@ -5100,7 +5175,7 @@ "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="], - "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], "yaml": ["yaml@2.8.3", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg=="], @@ -5336,6 +5411,16 @@ "@aws-sdk/credential-providers/@aws-sdk/types": ["@aws-sdk/types@3.973.7", "", { "dependencies": { "@smithy/types": "^4.14.0", "tslib": "^2.6.2" } }, "sha512-reXRwoJ6CfChoqAsBszUYajAF8Z2LRE+CRcKocvFSMpIiLOtYU3aJ9trmn6VVPAzbbY5LXF+FfmUslbXk1SYFg=="], + "@aws-sdk/lib-storage/@smithy/middleware-endpoint": ["@smithy/middleware-endpoint@4.4.30", "", { "dependencies": { "@smithy/core": "^3.23.15", "@smithy/middleware-serde": "^4.2.18", "@smithy/node-config-provider": "^4.3.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-middleware": "^4.2.14", "tslib": "^2.6.2" } }, "sha512-qS2XqhKeXmdZ4nEQ4cOxIczSP/Y91wPAHYuRwmWDCh975B7/57uxsm5d6sisnUThn2u2FwzMdJNM7AbO1YPsPg=="], + + "@aws-sdk/lib-storage/@smithy/protocol-http": ["@smithy/protocol-http@5.3.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-dN5F8kHx8RNU0r+pCwNmFZyz6ChjMkzShy/zup6MtkRmmix4vZzJdW+di7x//b1LiynIev88FM18ie+wwPcQtQ=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client": ["@smithy/smithy-client@4.12.11", "", { "dependencies": { "@smithy/core": "^3.23.15", "@smithy/middleware-endpoint": "^4.4.30", 
"@smithy/middleware-stack": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-stream": "^4.5.23", "tslib": "^2.6.2" } }, "sha512-wzz/Wa1CH/Tlhxh0s4DQPEcXSxSVfJ59AZcUh9Gu0c6JTlKuwGf4o/3P2TExv0VbtPFt8odIBG+eQGK2+vTECg=="], + + "@aws-sdk/lib-storage/@smithy/types": ["@smithy/types@4.14.1", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-59b5HtSVrVR/eYNei3BUj3DCPKD/G7EtDDe7OEJE7i7FtQFugYo6MxbotS8mVJkLNVf8gYaAlEBwwtJ9HzhWSg=="], + + "@aws-sdk/lib-storage/buffer": ["buffer@5.6.0", "", { "dependencies": { "base64-js": "^1.0.2", "ieee754": "^1.1.4" } }, "sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw=="], + "@aws-sdk/middleware-flexible-checksums/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], "@aws-sdk/middleware-sdk-s3/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], @@ -5398,6 +5483,8 @@ "@cspotcode/source-map-support/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.9", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ=="], + "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http": ["@opentelemetry/exporter-trace-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, 
"sha512-HSRBzXHIC7C8UfPQdu15zEEoBGv0yWkhEwxqgPCHVUKUQ9NLHVGXkVrf65Uaj7UwmAkC1gQfkuVYvLlD//AnUQ=="], + "@develar/schema-utils/ajv": ["ajv@6.14.0", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw=="], "@dot/log/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], @@ -5442,6 +5529,8 @@ "@gitlab/opencode-gitlab-auth/open": ["open@10.2.0", "", { "dependencies": { "default-browser": "^5.2.1", "define-lazy-prop": "^3.0.0", "is-inside-container": "^1.0.0", "wsl-utils": "^0.1.0" } }, "sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA=="], + "@grpc/proto-loader/yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="], + "@hey-api/openapi-ts/open": ["open@11.0.0", "", { "dependencies": { "default-browser": "^5.4.0", "define-lazy-prop": "^3.0.0", "is-in-ssh": "^1.0.0", "is-inside-container": "^1.0.0", "powershell-utils": "^0.1.0", "wsl-utils": "^0.3.0" } }, "sha512-smsWv2LzFjP03xmvFoJ331ss6h+jixfA4UUV/Bsiyuu4YJPfN+FIQGOIiv4w9/+MoHkfkJ22UIaQWRVFRfH6Vw=="], "@hey-api/openapi-ts/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], @@ -5582,6 +5671,108 @@ "@opencode-ai/web/@shikijs/transformers": ["@shikijs/transformers@3.20.0", "", { "dependencies": { "@shikijs/core": "3.20.0", "@shikijs/types": "3.20.0" } }, 
"sha512-PrHHMRr3Q5W1qB/42kJW6laqFyWdhrPF2hNR9qjOm1xcSiAO3hAHo7HaVyHE6pMyevmy3i51O8kuGGXC78uK3g=="], + "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", 
"@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, 
"sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": 
{ "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + 
"@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/exporter-prometheus/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-prometheus/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + 
"@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.214.0", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/otlp-transformer": "0.214.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-u1Gdv0/E9wP+apqWf7Wv2npXmgJtxsW2XL0TEv9FZloTZRuMBKmu8cYVXwS4Hm3q/f/3FuCnPTgiwYvIqRSpRg=="], + + "@opentelemetry/exporter-trace-otlp-http/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + + "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, 
"sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/exporter-zipkin/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-zipkin/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/instrumentation-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/otlp-exporter-base/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + 
"@opentelemetry/otlp-exporter-base/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + "@opentelemetry/otlp-transformer/@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.214.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-40lSJeqYO8Uz2Yj7u94/SJWE/wONa7rmMKjI1ZcIjgf3MHNHv1OZUCrCETGuaRF62d5pQD1wKIW+L4lmSMTzZA=="], + + "@opentelemetry/otlp-transformer/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, 
"sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + + "@opentelemetry/propagator-b3/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/propagator-jaeger/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/resources/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/sdk-logs/@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.214.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-40lSJeqYO8Uz2Yj7u94/SJWE/wONa7rmMKjI1ZcIjgf3MHNHv1OZUCrCETGuaRF62d5pQD1wKIW+L4lmSMTzZA=="], + + "@opentelemetry/sdk-logs/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + + "@opentelemetry/sdk-metrics/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, 
"sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + + "@opentelemetry/sdk-node/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/sdk-node/@opentelemetry/exporter-trace-otlp-http": ["@opentelemetry/exporter-trace-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-HSRBzXHIC7C8UfPQdu15zEEoBGv0yWkhEwxqgPCHVUKUQ9NLHVGXkVrf65Uaj7UwmAkC1gQfkuVYvLlD//AnUQ=="], + + "@opentelemetry/sdk-node/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/sdk-node/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/sdk-node/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, 
"sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/sdk-node/@opentelemetry/sdk-trace-node": ["@opentelemetry/sdk-trace-node@2.2.0", "", { "dependencies": { "@opentelemetry/context-async-hooks": "2.2.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-+OaRja3f0IqGG2kptVeYsrZQK9nKRSpfFrKtRBq4uh6nIB8bTBgaGvYQrQoRrQWQMA5dK5yLhDMDc0dvYvCOIQ=="], + + "@opentelemetry/sdk-trace-base/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + "@opentui/solid/@babel/core": ["@babel/core@7.28.0", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.0", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.6", "@babel/parser": "^7.28.0", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.0", "@babel/types": "^7.28.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ=="], "@opentui/solid/babel-preset-solid": ["babel-preset-solid@1.9.10", "", { "dependencies": { "babel-plugin-jsx-dom-expressions": "^0.40.3" }, "peerDependencies": { "@babel/core": "^7.0.0", "solid-js": "^1.9.10" }, "optionalPeers": ["solid-js"] }, "sha512-HCelrgua/Y+kqO8RyL04JBWS/cVdrtUv/h45GntgQY+cJl4eBcKkCDV3TdMjtKx1nXwRaR9QXslM/Npm1dxdZQ=="], @@ -5710,6 +5901,8 @@ "app-builder-lib/ci-info": ["ci-info@4.3.1", "", {}, 
"sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA=="], + "app-builder-lib/dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="], + "app-builder-lib/hosted-git-info": ["hosted-git-info@4.1.0", "", { "dependencies": { "lru-cache": "^6.0.0" } }, "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA=="], "app-builder-lib/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], @@ -5756,8 +5949,6 @@ "c12/chokidar": ["chokidar@5.0.0", "", { "dependencies": { "readdirp": "^5.0.0" } }, "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw=="], - "c12/dotenv": ["dotenv@17.4.2", "", {}, "sha512-nI4U3TottKAcAD9LLud4Cb7b2QztQMUEfHbvhTH09bqXTxnSie8WnjPALV/WMCrJZ6UV/qHJ6L03OqO3LcdYZw=="], - "clone-response/mimic-response": ["mimic-response@1.0.1", "", {}, "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ=="], "compress-commons/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="], @@ -5788,6 +5979,8 @@ "dot-prop/type-fest": ["type-fest@3.13.1", "", {}, "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g=="], + "dotenv-expand/dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="], + "editorconfig/commander": ["commander@10.0.1", "", {}, "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug=="], "editorconfig/minimatch": ["minimatch@9.0.9", "", { "dependencies": { "brace-expansion": "^2.0.2" } }, 
"sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg=="], @@ -6022,6 +6215,8 @@ "storybook-solidjs-vite/vite-plugin-solid": ["vite-plugin-solid@2.11.12", "", { "dependencies": { "@babel/core": "^7.23.3", "@types/babel__core": "^7.20.4", "babel-preset-solid": "^1.8.4", "merge-anything": "^5.1.7", "solid-refresh": "^0.6.3", "vitefu": "^1.0.4" }, "peerDependencies": { "@testing-library/jest-dom": "^5.16.6 || ^5.17.0 || ^6.*", "solid-js": "^1.7.2", "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0" }, "optionalPeers": ["@testing-library/jest-dom"] }, "sha512-FgjPcx2OwX9h6f28jli7A4bG7PP3te8uyakE5iqsmpq3Jqi1TWLgSroC9N6cMfGRU2zXsl4Q6ISvTr2VL0QHpA=="], + "stream-browserify/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], + "string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], "string-width-cjs/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], @@ -6030,8 +6225,6 @@ "sucrase/commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="], - "tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], - "terser/commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="], "tiny-async-pool/semver": ["semver@5.7.2", "", { "bin": { "semver": "bin/semver" } }, 
"sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g=="], @@ -6288,6 +6481,26 @@ "@aws-sdk/credential-providers/@aws-sdk/core/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core": ["@smithy/core@3.23.15", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-stream": "^4.5.23", "@smithy/util-utf8": "^4.2.2", "@smithy/uuid": "^1.1.2", "tslib": "^2.6.2" } }, "sha512-E7GVCgsQttzfujEZb6Qep005wWf4xiL4x06apFEtzQMWYBPggZh/0cnOxPficw5cuK/YjjkehKoIN4YUaSh0UQ=="], + + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/middleware-serde": ["@smithy/middleware-serde@4.2.18", "", { "dependencies": { "@smithy/core": "^3.23.15", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-M6CSgnp3v4tYz9ynj2JHbA60woBZcGqEwNjTKjBsNHPV26R1ZX52+0wW8WsZU18q45jD0tw2wL22S17Ze9LpEw=="], + + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/node-config-provider": ["@smithy/node-config-provider@4.3.14", "", { "dependencies": { "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-S+gFjyo/weSVL0P1b9Ts8C/CwIfNCgUPikk3sl6QVsfE/uUuO+QsF+NsE/JkpvWqqyz1wg7HFdiaZuj5CoBMRg=="], + + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/shared-ini-file-loader": ["@smithy/shared-ini-file-loader@4.4.9", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-495/V2I15SHgedSJoDPD23JuSfKAp726ZI1V0wtjB07Wh7q/0tri/0e0DLefZCHgxZonrGKt/OCTpAtP1wE1kQ=="], + + 
"@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/url-parser": ["@smithy/url-parser@4.2.14", "", { "dependencies": { "@smithy/querystring-parser": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-p06BiBigJ8bTA3MgnOfCtDUWnAMY0YfedO/GRpmc7p+wg3KW8vbXy1xwSu5ASy0wV7rRYtlfZOIKH4XqfhjSQQ=="], + + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/util-middleware": ["@smithy/util-middleware@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-1Su2vj9RYNDEv/V+2E+jXkkwGsgR7dc4sfHn9Z7ruzQHJIEni9zzw5CauvRXlFJfmgcqYP8fWa0dkh2Q2YaQyw=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core": ["@smithy/core@3.23.15", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-stream": "^4.5.23", "@smithy/util-utf8": "^4.2.2", "@smithy/uuid": "^1.1.2", "tslib": "^2.6.2" } }, "sha512-E7GVCgsQttzfujEZb6Qep005wWf4xiL4x06apFEtzQMWYBPggZh/0cnOxPficw5cuK/YjjkehKoIN4YUaSh0UQ=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/middleware-stack": ["@smithy/middleware-stack@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-2dvkUKLuFdKsCRmOE4Mn63co0Djtsm+JMh0bYZQupN1pJwMeE8FmQmRLLzzEMN0dnNi7CDCYYH8F0EVwWiPBeA=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream": ["@smithy/util-stream@4.5.23", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.17", "@smithy/node-http-handler": "^4.5.3", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-N6on1+ngJ3RznZOnDWNveIwnTSlqxNnXuNAh7ez889ZZaRdXoNRTXKgmYOLe6dB0gCmAVtuRScE1hymQFl4hpg=="], + + "@aws-sdk/lib-storage/buffer/ieee754": ["ieee754@1.2.1", "", {}, 
"sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], + "@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.17", "", { "dependencies": { "@smithy/types": "^4.14.0", "fast-xml-parser": "5.5.8", "tslib": "^2.6.2" } }, "sha512-Ra7hjqAZf1OXRRMueB13qex7mFJRDK/pgCvdSFemXBT8KCGnQDPoKzHY1SjN+TjJVmnpSF14W5tJ1vDamFu+Gg=="], "@aws-sdk/nested-clients/@aws-sdk/middleware-user-agent/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.996.6", "", { "dependencies": { "@aws-sdk/types": "^3.973.7", "@smithy/types": "^4.14.0", "@smithy/url-parser": "^4.2.13", "@smithy/util-endpoints": "^3.3.4", "tslib": "^2.6.2" } }, "sha512-2nUQ+2ih7CShuKHpGSIYvvAIOHy52dOZguYG36zptBukhw6iFwcvGfG0tes0oZFWQqEWvgZe9HLWaNlvXGdOrg=="], @@ -6324,6 +6537,12 @@ "@babel/helper-compilation-targets/lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="], + "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + 
"@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + "@develar/schema-utils/ajv/json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], "@electron/asar/minimatch/brace-expansion": ["brace-expansion@1.1.14", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g=="], @@ -6366,6 +6585,10 @@ "@gitlab/opencode-gitlab-auth/open/wsl-utils": ["wsl-utils@0.1.0", "", { "dependencies": { "is-wsl": "^3.1.0" } }, "sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw=="], + "@grpc/proto-loader/yargs/cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="], + + "@grpc/proto-loader/yargs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], + "@jsx-email/cli/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.19.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA=="], "@jsx-email/cli/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.19.12", "", { "os": "android", "cpu": "arm" }, 
"sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w=="], @@ -6544,6 +6767,52 @@ "@opencode-ai/web/@shikijs/transformers/@shikijs/types": ["@shikijs/types@3.20.0", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-lhYAATn10nkZcBQ0BlzSbJA3wcmL5MXUUF8d2Zzon6saZDlToKaiRX60n2+ZaHJCmXEcZRWNzn+k9vplr8Jhsw=="], + "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { 
"@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + 
"@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": 
"2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/otlp-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/otlp-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/otlp-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": 
["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/sdk-node/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": 
"2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], + + "@opentelemetry/sdk-node/@opentelemetry/sdk-trace-node/@opentelemetry/context-async-hooks": ["@opentelemetry/context-async-hooks@2.2.0", "", { "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-qRkLWiUEZNAmYapZ7KGS5C4OmBLcP/H2foXeOEaowYCR0wi89fHejrfYfbuLVCMLp/dWZXKvQusdbUEZjERfwQ=="], + "@opentui/solid/@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], "@pierre/diffs/@shikijs/transformers/@shikijs/core": ["@shikijs/core@3.20.0", "", { "dependencies": { "@shikijs/types": "3.20.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4", "hast-util-to-html": "^9.0.5" } }, "sha512-f2ED7HYV4JEk827mtMDwe/yQ25pRiXZmtHjWF8uzZKuKiEsJR7Ce1nuQ+HhV9FzDcbIo4ObBCD9GPTzNuy9S1g=="], @@ -6668,6 +6937,10 @@ "lazystream/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="], + "minipass-flush/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + + "minipass-pipeline/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "motion/framer-motion/motion-dom": ["motion-dom@12.38.0", "", { "dependencies": { "motion-utils": "^12.36.0" } }, "sha512-pdkHLD8QYRp8VfiNLb8xIBJis1byQ9gPT3Jnh2jqfFtAsWUA3dEepDlsWe/xMpO8McV+VdpKVcp+E+TGJEtOoA=="], "motion/framer-motion/motion-utils": ["motion-utils@12.36.0", "", {}, 
"sha512-eHWisygbiwVvf6PZ1vhaHCLamvkSbPIeAYxWUuL3a2PD/TROgE7FvfHWTIH4vMl798QLfMw15nRqIaRDXTlYRg=="], @@ -6690,6 +6963,8 @@ "opencontrol/@modelcontextprotocol/sdk/zod-to-json-schema": ["zod-to-json-schema@3.25.2", "", { "peerDependencies": { "zod": "^3.25.28 || ^4" } }, "sha512-O/PgfnpT1xKSDeQYSCfRI5Gy3hPf91mKVDuYLUHZJMiDFptvP41MSnWofm8dnCm0256ZNfZIM7DSzuSMAFnjHA=="], + "openid-client/lru-cache/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "ora/bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], "ora/bl/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], @@ -6850,10 +7125,34 @@ "@aws-sdk/credential-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.5.8", "", { "dependencies": { "fast-xml-builder": "^1.1.4", "path-expression-matcher": "^1.2.0", "strnum": "^2.2.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ=="], + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream": ["@smithy/util-stream@4.5.23", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.17", "@smithy/node-http-handler": "^4.5.3", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-N6on1+ngJ3RznZOnDWNveIwnTSlqxNnXuNAh7ez889ZZaRdXoNRTXKgmYOLe6dB0gCmAVtuRScE1hymQFl4hpg=="], + + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-utf8": 
["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], + + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/node-config-provider/@smithy/property-provider": ["@smithy/property-provider@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-WuM31CgfsnQ/10i7NYr0PyxqknD72Y5uMfUMVSniPjbEPceiTErb4eIqJQ+pdxNEAUEWrewrGjIRjVbVHsxZiQ=="], + + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/url-parser/@smithy/querystring-parser": ["@smithy/querystring-parser@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-hr+YyqBD23GVvRxGGrcc/oOeNlK3PzT5Fu4dzrDXxzS1LpFiuL2PQQqKPs87M79aW7ziMs+nvB3qdw77SqE7Lw=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core/@smithy/url-parser": ["@smithy/url-parser@4.2.14", "", { "dependencies": { "@smithy/querystring-parser": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-p06BiBigJ8bTA3MgnOfCtDUWnAMY0YfedO/GRpmc7p+wg3KW8vbXy1xwSu5ASy0wV7rRYtlfZOIKH4XqfhjSQQ=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core/@smithy/util-middleware": ["@smithy/util-middleware@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-1Su2vj9RYNDEv/V+2E+jXkkwGsgR7dc4sfHn9Z7ruzQHJIEni9zzw5CauvRXlFJfmgcqYP8fWa0dkh2Q2YaQyw=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/fetch-http-handler": ["@smithy/fetch-http-handler@5.3.17", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/querystring-builder": 
"^4.2.14", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "tslib": "^2.6.2" } }, "sha512-bXOvQzaSm6MnmLaWA1elgfQcAtN4UP3vXqV97bHuoOrHQOJiLT3ds6o9eo5bqd0TJfRFpzdGnDQdW3FACiAVdw=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/node-http-handler": ["@smithy/node-http-handler@4.5.3", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/querystring-builder": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-lc5jFL++x17sPhIwMWJ3YOnqmSjw/2Po6VLDlUIXvxVWRuJwRXnJ4jOBBLB0cfI5BB5ehIl02Fxr1PDvk/kxDw=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], + "@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.5.8", "", { "dependencies": { "fast-xml-builder": "^1.1.4", "path-expression-matcher": "^1.2.0", "strnum": "^2.2.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ=="], "@aws-sdk/token-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.5.8", "", { "dependencies": { "fast-xml-builder": "^1.1.4", "path-expression-matcher": "^1.2.0", "strnum": "^2.2.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ=="], + "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, 
"sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], + + "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + "@electron/asar/minimatch/brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], "@electron/rebuild/node-gyp/make-fetch-happen/@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="], @@ -6878,6 +7177,14 @@ "@electron/universal/minimatch/brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], + "@grpc/proto-loader/yargs/cliui/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], + + "@grpc/proto-loader/yargs/cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], + + "@grpc/proto-loader/yargs/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], + + 
"@grpc/proto-loader/yargs/string-width/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], + "@jsx-email/cli/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], "@jsx-email/cli/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="], @@ -6964,6 +7271,8 @@ "app-builder-lib/@electron/get/fs-extra/universalify": ["universalify@0.1.2", "", {}, "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="], + "app-builder-lib/hosted-git-info/lru-cache/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "archiver-utils/glob/jackspeak/@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], "archiver-utils/glob/minimatch/brace-expansion": ["brace-expansion@2.1.0", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w=="], @@ -7080,6 +7389,16 @@ "@aws-sdk/credential-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.2.3", "", {}, "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg=="], + 
"@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream/@smithy/fetch-http-handler": ["@smithy/fetch-http-handler@5.3.17", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/querystring-builder": "^4.2.14", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "tslib": "^2.6.2" } }, "sha512-bXOvQzaSm6MnmLaWA1elgfQcAtN4UP3vXqV97bHuoOrHQOJiLT3ds6o9eo5bqd0TJfRFpzdGnDQdW3FACiAVdw=="], + + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream/@smithy/node-http-handler": ["@smithy/node-http-handler@4.5.3", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/querystring-builder": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-lc5jFL++x17sPhIwMWJ3YOnqmSjw/2Po6VLDlUIXvxVWRuJwRXnJ4jOBBLB0cfI5BB5ehIl02Fxr1PDvk/kxDw=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core/@smithy/url-parser/@smithy/querystring-parser": ["@smithy/querystring-parser@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-hr+YyqBD23GVvRxGGrcc/oOeNlK3PzT5Fu4dzrDXxzS1LpFiuL2PQQqKPs87M79aW7ziMs+nvB3qdw77SqE7Lw=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/fetch-http-handler/@smithy/querystring-builder": ["@smithy/querystring-builder@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A=="], + + "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/node-http-handler/@smithy/querystring-builder": ["@smithy/querystring-builder@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A=="], + "@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": 
["strnum@2.2.3", "", {}, "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg=="], "@aws-sdk/token-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.2.3", "", {}, "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg=="], @@ -7098,6 +7417,10 @@ "@electron/rebuild/yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], + "@grpc/proto-loader/yargs/cliui/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], + + "@grpc/proto-loader/yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], + "@jsx-email/cli/tailwindcss/chokidar/readdirp/picomatch": ["picomatch@2.3.2", "", {}, "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA=="], "@solidjs/start/shiki/@shikijs/engine-javascript/oniguruma-to-es/regex": ["regex@5.1.1", "", { "dependencies": { "regex-utilities": "^2.3.0" } }, "sha512-dN5I359AVGPnwzJm2jN1k0W9LPZ+ePvoOeVMMfqIMFz53sSwXkxaJoxr50ptnsC771lK95BnTrVSZxq0b9yCGw=="], @@ -7136,6 +7459,10 @@ "@aws-sdk/credential-provider-cognito-identity/@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.2.3", "", {}, "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg=="], + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream/@smithy/fetch-http-handler/@smithy/querystring-builder": ["@smithy/querystring-builder@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" } }, 
"sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A=="], + + "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream/@smithy/node-http-handler/@smithy/querystring-builder": ["@smithy/querystring-builder@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A=="], + "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/minimatch": ["minimatch@9.0.9", "", { "dependencies": { "brace-expansion": "^2.0.2" } }, "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg=="], @@ -7156,6 +7483,8 @@ "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/minimatch/brace-expansion": ["brace-expansion@2.1.0", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w=="], + "@electron/rebuild/node-gyp/make-fetch-happen/minipass-fetch/minipass-sized/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/jackspeak/@isaacs/cliui/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/jackspeak/@isaacs/cliui/wrap-ansi": 
["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], diff --git a/daytona.ts b/daytona.ts new file mode 100644 index 0000000000..0315560eae --- /dev/null +++ b/daytona.ts @@ -0,0 +1,199 @@ +import type { Daytona, Sandbox } from "@daytonaio/sdk" +import type { Plugin } from "@opencode-ai/plugin" +import { join } from "node:path" +import { fileURLToPath } from "node:url" +import { tmpdir } from "node:os" +import { access, copyFile, mkdir } from "node:fs/promises" + +let client: Promise | undefined + +let daytona = function daytona(): Promise { + if (client == null) { + client = import("@daytonaio/sdk").then( + ({ Daytona }) => + new Daytona({ + apiKey: "dtn_d63c206564ef49d4104ec2cd755e561bb3665beed8fd7d7ab2c5f7a2186965f0", + }), + ) + } + return client +} + + + +const preview = new Map() +const repo = "/home/daytona/workspace/repo" +const root = "/home/daytona/workspace" +const localbin = "/home/daytona/opencode" +const installbin = "/home/daytona/.opencode/bin/opencode" +const health = "http://127.0.0.1:3096/global/health" + +const local = fileURLToPath( + new URL("./packages/opencode/dist/opencode-linux-x64-baseline/bin/opencode", import.meta.url), +) + +async function exists(file: string) { + return access(file) + .then(() => true) + .catch(() => false) +} + +function sh(value: string) { + return `'${value.replace(/'/g, `"'"'"`)}'` +} + +// Internally Daytona uses axios, which tries to overwrite stack +// traces when a failure happens. That path fails in Bun, however, so +// when something goes wrong you only see a very obscure error. 
+async function withSandbox(name: string, fn: (sandbox: Sandbox) => Promise) { + const stack = Error.captureStackTrace + // @ts-expect-error temporary compatibility hack for Daytona's axios stack handling in Bun + Error.captureStackTrace = undefined + try { + return await fn(await (await daytona()).get(name)) + } finally { + Error.captureStackTrace = stack + } +} + +export const DaytonaWorkspacePlugin: Plugin = async ({ experimental_workspace, worktree, project }) => { + experimental_workspace.register("daytona", { + name: "Daytona", + description: "Create a remote Daytona workspace", + configure(config) { + return config + }, + async create(config, env) { + const temp = join(tmpdir(), `opencode-daytona-${Date.now()}`) + + console.log("creating sandbox...") + + const sandbox = await ( + await daytona() + ).create({ + name: config.name, + snapshot: "daytona-large", + envVars: env, + }) + + console.log("creating ssh...") + + const ssh = await withSandbox(config.name, (sandbox) => sandbox.createSshAccess()) + console.log("daytona:", ssh.sshCommand) + + const run = async (command: string) => { + console.log("sandbox:", command) + const result = await sandbox.process.executeCommand(command) + if (result.result) process.stdout.write(result.result) + if (result.exitCode === 0) return result + throw new Error(result.result || `sandbox command failed: ${command}`) + } + + const wait = async () => { + for (let i = 0; i < 60; i++) { + const result = await sandbox.process.executeCommand(`curl -fsS ${sh(health)}`) + if (result.exitCode === 0) { + if (result.result) process.stdout.write(result.result) + return + } + console.log(`waiting for server (${i + 1}/60)`) + await Bun.sleep(1000) + } + + const log = await sandbox.process.executeCommand(`test -f /tmp/opencode.log && cat /tmp/opencode.log || true`) + throw new Error(log.result || "daytona workspace server did not become ready in time") + } + + const dir = join(temp, "repo") + const tar = join(temp, "repo.tgz") + const 
source = `file://${worktree}` + await mkdir(temp, { recursive: true }) + const args = ["clone", "--depth", "1", "--no-local"] + if (config.branch) args.push("--branch", config.branch) + args.push(source, dir) + + console.log("git cloning...") + + const clone = Bun.spawn(["git", ...args], { + cwd: tmpdir(), + stdout: "pipe", + stderr: "pipe", + }) + const code = await clone.exited + if (code !== 0) throw new Error(await new Response(clone.stderr).text()) + + const configPackage = join(worktree, ".opencode", "package.json") + // if (await exists(configPackage)) { + // console.log("copying config package...") + // await mkdir(join(dir, ".opencode"), { recursive: true }) + // await copyFile(configPackage, join(dir, ".opencode", "package.json")) + // } + + console.log("tarring...") + + const packed = Bun.spawn(["tar", "-czf", tar, "-C", temp, "repo"], { + stdout: "ignore", + stderr: "pipe", + }) + if ((await packed.exited) !== 0) throw new Error(await new Response(packed.stderr).text()) + + console.log("uploading files...") + + await sandbox.fs.uploadFile(tar, "repo.tgz") + + const have = await exists(local) + console.log("local", local) + if (have) { + console.log("uploading local binary...") + await sandbox.fs.uploadFile(local, "opencode") + } + + console.log("bootstrapping workspace...") + await run(`rm -rf ${sh(repo)} && mkdir -p ${sh(root)} && tar -xzf \"$HOME/repo.tgz\" -C \"$HOME/workspace\"`) + + if (have) { + await run(`chmod +x ${sh(localbin)}`) + } else { + await run( + `mkdir -p \"$HOME/.opencode/bin\" && OPENCODE_INSTALL_DIR=\"$HOME/.opencode/bin\" curl -fsSL https://opencode.ai/install | bash`, + ) + } + + await run(`printf \"%s\\n\" ${sh(project.id)} > ${sh(`${repo}/.git/opencode`)}`) + + console.log("starting server...") + await run( + `cd ${sh(repo)} && exe=${sh(localbin)} && if [ ! 
-x \"$exe\" ]; then exe=${sh(installbin)}; fi && nohup env \"$exe\" serve --hostname 0.0.0.0 --port 3096 >/tmp/opencode.log 2>&1 undefined) + if (!sandbox) return + await (await daytona()).delete(sandbox) + preview.delete(config.name) + }, + async target(config) { + let link = preview.get(config.name) + if (!link) { + link = await withSandbox(config.name, (sandbox) => sandbox.getPreviewLink(3096)) + preview.set(config.name, link) + } + return { + type: "remote", + url: link.url, + headers: { + "x-daytona-preview-token": link.token, + "x-daytona-skip-preview-warning": "true", + "x-opencode-directory": repo, + }, + } + }, + }) + + return {} +} + +export default DaytonaWorkspacePlugin diff --git a/package.json b/package.json index 5fecc09922..09548bf4e0 100644 --- a/package.json +++ b/package.json @@ -95,6 +95,7 @@ }, "dependencies": { "@aws-sdk/client-s3": "3.933.0", + "@daytona/sdk": "0.167.0", "@opencode-ai/plugin": "workspace:*", "@opencode-ai/script": "workspace:*", "@opencode-ai/sdk": "workspace:*", diff --git a/packages/opencode/src/cli/cmd/tui/context/project.tsx b/packages/opencode/src/cli/cmd/tui/context/project.tsx index 26e5c075d7..22dd94bc82 100644 --- a/packages/opencode/src/cli/cmd/tui/context/project.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/project.tsx @@ -10,18 +10,21 @@ export const { use: useProject, provider: ProjectProvider } = createSimpleContex name: "Project", init: () => { const sdk = useSDK() + + const defaultPath = { + home: "", + state: "", + config: "", + worktree: "", + directory: sdk.directory ?? "", + } satisfies Path + const [store, setStore] = createStore({ project: { id: undefined as string | undefined, }, instance: { - path: { - home: "", - state: "", - config: "", - worktree: "", - directory: sdk.directory ?? 
"", - } satisfies Path, + path: defaultPath, }, workspace: { current: undefined as string | undefined, @@ -38,7 +41,7 @@ export const { use: useProject, provider: ProjectProvider } = createSimpleContex ]) batch(() => { - setStore("instance", "path", reconcile(path.data!)) + setStore("instance", "path", reconcile(path.data || defaultPath)) setStore("project", "id", project.data?.id) }) } diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index b5734e67d0..57326e3a1a 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -27,7 +27,7 @@ import { createSimpleContext } from "./helper" import type { Snapshot } from "@/snapshot" import { useExit } from "./exit" import { useArgs } from "./args" -import { batch, createEffect, on } from "solid-js" +import { batch, onMount } from "solid-js" import { Log } from "@/util" import { emptyConsoleState, type ConsoleState } from "@/config/console-state" @@ -108,6 +108,9 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ const project = useProject() const sdk = useSDK() + const fullSyncedSessions = new Set() + let syncedWorkspace = project.workspace.current() + event.subscribe((event) => { switch (event.type) { case "server.instance.disposed": @@ -350,9 +353,13 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ const exit = useExit() const args = useArgs() - async function bootstrap() { - console.log("bootstrapping") + async function bootstrap(input: { fatal?: boolean } = {}) { + const fatal = input.fatal ?? 
true const workspace = project.workspace.current() + if (workspace !== syncedWorkspace) { + fullSyncedSessions.clear() + syncedWorkspace = workspace + } const start = Date.now() - 30 * 24 * 60 * 60 * 1000 const sessionListPromise = sdk.client.session .list({ start: start }) @@ -441,20 +448,17 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ name: e instanceof Error ? e.name : undefined, stack: e instanceof Error ? e.stack : undefined, }) - await exit(e) + if (fatal) { + await exit(e) + } else { + throw e + } }) } - const fullSyncedSessions = new Set() - createEffect( - on( - () => project.workspace.current(), - () => { - fullSyncedSessions.clear() - void bootstrap() - }, - ), - ) + onMount(() => { + void bootstrap() + }) const result = { data: store, diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx index 7c40e6c3c0..70a4b73b95 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx @@ -181,23 +181,30 @@ export function Session() { const sdk = useSDK() createEffect(async () => { - await sdk.client.session - .get({ sessionID: route.sessionID }, { throwOnError: true }) - .then((x) => { - project.workspace.set(x.data?.workspaceID) - }) - .then(() => sync.session.sync(route.sessionID)) - .then(() => { - if (scroll) scroll.scrollBy(100_000) - }) - .catch((e) => { - console.error(e) - toast.show({ - message: `Session not found: ${route.sessionID}`, - variant: "error", - }) - return navigate({ type: "home" }) + const previousWorkspace = project.workspace.current() + const result = await sdk.client.session.get({ sessionID: route.sessionID }, { throwOnError: true }) + if (!result.data) { + toast.show({ + message: `Session not found: ${route.sessionID}`, + variant: "error", }) + navigate({ type: "home" }) + return + } + + if (result.data.workspaceID !== previousWorkspace) { + 
project.workspace.set(result.data.workspaceID) + + // Sync all the data for this workspace. Note that this + // workspace may not exist anymore which is why this is not + // fatal. If it doesn't we still want to show the session + // (which will be non-interactive) + try { + await sync.bootstrap({ fatal: false }) + } catch (e) {} + } + await sync.session.sync(route.sessionID) + if (scroll) scroll.scrollBy(100_000) }) // Handle initial prompt from fork diff --git a/packages/opencode/src/session/session.ts b/packages/opencode/src/session/session.ts index a453b19815..077cc43097 100644 --- a/packages/opencode/src/session/session.ts +++ b/packages/opencode/src/session/session.ts @@ -6,7 +6,6 @@ import { Decimal } from "decimal.js" import z from "zod" import { type ProviderMetadata, type LanguageModelUsage } from "ai" import { Flag } from "../flag/flag" -import { Installation } from "../installation" import { InstallationVersion } from "../installation/version" import { Database, NotFoundError, eq, and, gte, isNull, desc, like, inArray, lt } from "../storage" @@ -713,8 +712,10 @@ export function* list(input?: { if (input?.workspaceID) { conditions.push(eq(SessionTable.workspace_id, input.workspaceID)) } - if (input?.directory) { - conditions.push(eq(SessionTable.directory, input.directory)) + if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { + if (input?.directory) { + conditions.push(eq(SessionTable.directory, input.directory)) + } } if (input?.roots) { conditions.push(isNull(SessionTable.parent_id)) diff --git a/packages/opencode/test/cli/tui/sync-provider.test.tsx b/packages/opencode/test/cli/tui/sync-provider.test.tsx deleted file mode 100644 index e75e186199..0000000000 --- a/packages/opencode/test/cli/tui/sync-provider.test.tsx +++ /dev/null @@ -1,280 +0,0 @@ -/** @jsxImportSource @opentui/solid */ -import { afterEach, describe, expect, test } from "bun:test" -import { testRender } from "@opentui/solid" -import { onMount } from "solid-js" -import { ArgsProvider } from 
"../../../src/cli/cmd/tui/context/args" -import { ExitProvider } from "../../../src/cli/cmd/tui/context/exit" -import { ProjectProvider, useProject } from "../../../src/cli/cmd/tui/context/project" -import { SDKProvider } from "../../../src/cli/cmd/tui/context/sdk" -import { SyncProvider, useSync } from "../../../src/cli/cmd/tui/context/sync" - -const sighup = new Set(process.listeners("SIGHUP")) - -afterEach(() => { - for (const fn of process.listeners("SIGHUP")) { - if (!sighup.has(fn)) process.off("SIGHUP", fn) - } -}) - -function json(data: unknown) { - return new Response(JSON.stringify(data), { - headers: { - "content-type": "application/json", - }, - }) -} - -async function wait(fn: () => boolean, timeout = 2000) { - const start = Date.now() - while (!fn()) { - if (Date.now() - start > timeout) throw new Error("timed out waiting for condition") - await Bun.sleep(10) - } -} - -function data(workspace?: string | null) { - const tag = workspace ?? "root" - return { - session: { - id: "ses_1", - title: `session-${tag}`, - workspaceID: workspace ?? undefined, - time: { - updated: 1, - }, - }, - message: { - info: { - id: "msg_1", - sessionID: "ses_1", - role: "assistant", - time: { - created: 1, - completed: 1, - }, - }, - parts: [ - { - id: "part_1", - messageID: "msg_1", - sessionID: "ses_1", - type: "text", - text: `part-${tag}`, - }, - ], - }, - todo: [ - { - id: `todo-${tag}`, - content: `todo-${tag}`, - status: "pending", - priority: "medium", - }, - ], - diff: [ - { - file: `${tag}.ts`, - patch: "", - additions: 0, - deletions: 0, - }, - ], - } -} - -type Hit = { - path: string - workspace?: string -} - -function createFetch(log: Hit[]) { - return Object.assign( - async (input: RequestInfo | URL, init?: RequestInit) => { - const req = new Request(input, init) - const url = new URL(req.url) - const workspace = url.searchParams.get("workspace") ?? req.headers.get("x-opencode-workspace") ?? 
undefined - log.push({ - path: url.pathname, - workspace, - }) - - if (url.pathname === "/config/providers") { - return json({ providers: [], default: {} }) - } - if (url.pathname === "/provider") { - return json({ all: [], default: {}, connected: [] }) - } - if (url.pathname === "/experimental/console") { - return json({}) - } - if (url.pathname === "/agent") { - return json([]) - } - if (url.pathname === "/config") { - return json({}) - } - if (url.pathname === "/project/current") { - return json({ id: `proj-${workspace ?? "root"}` }) - } - if (url.pathname === "/path") { - return json({ - state: `/tmp/${workspace ?? "root"}/state`, - config: `/tmp/${workspace ?? "root"}/config`, - worktree: "/tmp/worktree", - directory: `/tmp/${workspace ?? "root"}`, - }) - } - if (url.pathname === "/session") { - return json([]) - } - if (url.pathname === "/command") { - return json([]) - } - if (url.pathname === "/lsp") { - return json([]) - } - if (url.pathname === "/mcp") { - return json({}) - } - if (url.pathname === "/experimental/resource") { - return json({}) - } - if (url.pathname === "/formatter") { - return json([]) - } - if (url.pathname === "/session/status") { - return json({}) - } - if (url.pathname === "/provider/auth") { - return json({}) - } - if (url.pathname === "/vcs") { - return json({ branch: "main" }) - } - if (url.pathname === "/experimental/workspace") { - return json([{ id: "ws_a" }, { id: "ws_b" }]) - } - if (url.pathname === "/session/ses_1") { - return json(data(workspace).session) - } - if (url.pathname === "/session/ses_1/message") { - return json([data(workspace).message]) - } - if (url.pathname === "/session/ses_1/todo") { - return json(data(workspace).todo) - } - if (url.pathname === "/session/ses_1/diff") { - return json(data(workspace).diff) - } - - throw new Error(`unexpected request: ${req.method} ${url.pathname}`) - }, - { preconnect: fetch.preconnect.bind(fetch) }, - ) satisfies typeof fetch -} - -async function mount(log: Hit[]) { - let 
project!: ReturnType - let sync!: ReturnType - let done!: () => void - const ready = new Promise((resolve) => { - done = resolve - }) - - const app = await testRender(() => ( - () => {} }} - > - - - - - { - project = ctx.project - sync = ctx.sync - done() - }} - /> - - - - - - )) - - await ready - return { app, project, sync } -} - -async function waitBoot(log: Hit[], workspace?: string) { - await wait(() => log.some((item) => item.path === "/experimental/workspace")) - if (!workspace) return - await wait(() => log.some((item) => item.path === "/project/current" && item.workspace === workspace)) -} - -function Probe(props: { - onReady: (ctx: { project: ReturnType; sync: ReturnType }) => void -}) { - const project = useProject() - const sync = useSync() - - onMount(() => { - props.onReady({ project, sync }) - }) - - return -} - -describe("SyncProvider", () => { - test("re-runs bootstrap requests when the active workspace changes", async () => { - const log: Hit[] = [] - const { app, project } = await mount(log) - - try { - await waitBoot(log) - log.length = 0 - - project.workspace.set("ws_a") - - await waitBoot(log, "ws_a") - - expect(log.some((item) => item.path === "/path" && item.workspace === "ws_a")).toBe(true) - expect(log.some((item) => item.path === "/config" && item.workspace === "ws_a")).toBe(true) - expect(log.some((item) => item.path === "/command" && item.workspace === "ws_a")).toBe(true) - } finally { - app.renderer.destroy() - } - }) - - test("clears full-sync cache when the active workspace changes", async () => { - const log: Hit[] = [] - const { app, project, sync } = await mount(log) - - try { - await waitBoot(log) - - log.length = 0 - project.workspace.set("ws_a") - await waitBoot(log, "ws_a") - expect(project.workspace.current()).toBe("ws_a") - - log.length = 0 - await sync.session.sync("ses_1") - expect(log.filter((item) => item.path === "/session/ses_1")).toHaveLength(1) - - project.workspace.set("ws_b") - await waitBoot(log, "ws_b") - 
expect(project.workspace.current()).toBe("ws_b") - - log.length = 0 - await sync.session.sync("ses_1") - expect(log.filter((item) => item.path === "/session/ses_1")).toHaveLength(1) - } finally { - app.renderer.destroy() - } - }) -}) From 4260c40efa332deeebaf730382d5388adc95d024 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 23:36:21 -0400 Subject: [PATCH 122/201] refactor(tui): inline final Go shimmer settings (#23017) --- .../src/cli/cmd/tui/component/logo.tsx | 49 ++++++++++++++++++- .../cli/cmd/tui/component/shimmer-config.ts | 49 ------------------- 2 files changed, 48 insertions(+), 50 deletions(-) delete mode 100644 packages/opencode/src/cli/cmd/tui/component/shimmer-config.ts diff --git a/packages/opencode/src/cli/cmd/tui/component/logo.tsx b/packages/opencode/src/cli/cmd/tui/component/logo.tsx index 17368ddad8..bee104a35d 100644 --- a/packages/opencode/src/cli/cmd/tui/component/logo.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/logo.tsx @@ -3,13 +3,60 @@ import { For, createMemo, createSignal, onCleanup, onMount, type JSX } from "sol import { useTheme, tint } from "@tui/context/theme" import * as Sound from "@tui/util/sound" import { go, logo } from "@/cli/logo" -import { shimmerConfig, type ShimmerConfig } from "./shimmer-config" export type LogoShape = { left: string[] right: string[] } +type ShimmerConfig = { + period: number + rings: number + sweepFraction: number + coreWidth: number + coreAmp: number + softWidth: number + softAmp: number + tail: number + tailAmp: number + haloWidth: number + haloOffset: number + haloAmp: number + breathBase: number + noise: number + ambientAmp: number + ambientCenter: number + ambientWidth: number + shadowMix: number + primaryMix: number + originX: number + originY: number +} + +const shimmerConfig: ShimmerConfig = { + period: 4600, + rings: 2, + sweepFraction: 1, + coreWidth: 1.2, + coreAmp: 1.9, + softWidth: 10, + softAmp: 1.6, + tail: 5, + tailAmp: 0.64, + haloWidth: 4.3, + haloOffset: 
0.6, + haloAmp: 0.16, + breathBase: 0.04, + noise: 0.1, + ambientAmp: 0.36, + ambientCenter: 0.5, + ambientWidth: 0.34, + shadowMix: 0.1, + primaryMix: 0.3, + originX: 4.5, + originY: 13.5, +} + // Shadow markers (rendered chars in parens): // _ = full shadow cell (space with bg=shadow) // ^ = letter top, shadow bottom (▀ with fg=letter, bg=shadow) diff --git a/packages/opencode/src/cli/cmd/tui/component/shimmer-config.ts b/packages/opencode/src/cli/cmd/tui/component/shimmer-config.ts deleted file mode 100644 index 01bc136f5d..0000000000 --- a/packages/opencode/src/cli/cmd/tui/component/shimmer-config.ts +++ /dev/null @@ -1,49 +0,0 @@ -export type ShimmerConfig = { - period: number - rings: number - sweepFraction: number - coreWidth: number - coreAmp: number - softWidth: number - softAmp: number - tail: number - tailAmp: number - haloWidth: number - haloOffset: number - haloAmp: number - breathBase: number - noise: number - ambientAmp: number - ambientCenter: number - ambientWidth: number - shadowMix: number - primaryMix: number - originX: number - originY: number -} - -export const shimmerDefaults: ShimmerConfig = { - period: 4600, - rings: 2, - sweepFraction: 1, - coreWidth: 1.2, - coreAmp: 1.9, - softWidth: 10, - softAmp: 1.6, - tail: 5, - tailAmp: 0.64, - haloWidth: 4.3, - haloOffset: 0.6, - haloAmp: 0.16, - breathBase: 0.04, - noise: 0.1, - ambientAmp: 0.36, - ambientCenter: 0.5, - ambientWidth: 0.34, - shadowMix: 0.1, - primaryMix: 0.3, - originX: 4.5, - originY: 13.5, -} - -export const shimmerConfig: ShimmerConfig = { ...shimmerDefaults } From 67dbb3cf18279c1a8f3f60e242d9b09e1270d01a Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 03:37:21 +0000 Subject: [PATCH 123/201] chore: generate --- daytona.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/daytona.ts b/daytona.ts index 0315560eae..4007b77aa6 100644 --- a/daytona.ts +++ b/daytona.ts @@ -19,8 +19,6 @@ let daytona = function daytona(): Promise { return client } - - const 
preview = new Map() const repo = "/home/daytona/workspace/repo" const root = "/home/daytona/workspace" From 9ee89f7868bbdb5b412b9b475c41af22ac4f2a20 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Thu, 16 Apr 2026 23:48:12 -0400 Subject: [PATCH 124/201] refactor: move project read routes onto HttpApi (#23003) --- packages/opencode/src/project/project.ts | 76 ++++++++++--------- .../src/server/instance/httpapi/project.ts | 62 +++++++++++++++ .../src/server/instance/httpapi/server.ts | 3 + .../opencode/src/server/instance/index.ts | 17 +++-- .../opencode/src/server/instance/project.ts | 8 +- 5 files changed, 119 insertions(+), 47 deletions(-) create mode 100644 packages/opencode/src/server/instance/httpapi/project.ts diff --git a/packages/opencode/src/project/project.ts b/packages/opencode/src/project/project.ts index f838d9ab43..6a2132274a 100644 --- a/packages/opencode/src/project/project.ts +++ b/packages/opencode/src/project/project.ts @@ -8,46 +8,52 @@ import { BusEvent } from "@/bus/bus-event" import { GlobalBus } from "@/bus/global" import { which } from "../util/which" import { ProjectID } from "./schema" -import { Effect, Layer, Path, Scope, Context, Stream } from "effect" +import { Effect, Layer, Path, Scope, Context, Stream, Types, Schema } from "effect" import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" import { NodePath } from "@effect/platform-node" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" const log = Log.create({ service: "project" }) -export const Info = z - .object({ - id: ProjectID.zod, - worktree: z.string(), - vcs: z.literal("git").optional(), - name: z.string().optional(), - icon: z - .object({ - url: z.string().optional(), - override: z.string().optional(), - color: z.string().optional(), - }) - .optional(), - commands: z - .object({ - start: 
z.string().optional().describe("Startup script to run when creating a new workspace (worktree)"), - }) - .optional(), - time: z.object({ - created: z.number(), - updated: z.number(), - initialized: z.number().optional(), - }), - sandboxes: z.array(z.string()), - }) - .meta({ - ref: "Project", - }) -export type Info = z.infer +const ProjectVcs = Schema.Literal("git") + +const ProjectIcon = Schema.Struct({ + url: Schema.optional(Schema.String), + override: Schema.optional(Schema.String), + color: Schema.optional(Schema.String), +}) + +const ProjectCommands = Schema.Struct({ + start: Schema.optional( + Schema.String.annotate({ description: "Startup script to run when creating a new workspace (worktree)" }), + ), +}) + +const ProjectTime = Schema.Struct({ + created: Schema.Number, + updated: Schema.Number, + initialized: Schema.optional(Schema.Number), +}) + +export const Info = Schema.Struct({ + id: ProjectID, + worktree: Schema.String, + vcs: Schema.optional(ProjectVcs), + name: Schema.optional(Schema.String), + icon: Schema.optional(ProjectIcon), + commands: Schema.optional(ProjectCommands), + time: ProjectTime, + sandboxes: Schema.Array(Schema.String), +}) + .annotate({ identifier: "Project" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Info = Types.DeepMutable> export const Event = { - Updated: BusEvent.define("project.updated", Info), + Updated: BusEvent.define("project.updated", Info.zod), } type Row = typeof ProjectTable.$inferSelect @@ -58,7 +64,7 @@ export function fromRow(row: Row): Info { return { id: row.id, worktree: row.worktree, - vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined, + vcs: row.vcs ? Schema.decodeUnknownSync(ProjectVcs)(row.vcs) : undefined, name: row.name ?? 
undefined, icon, time: { @@ -74,8 +80,8 @@ export function fromRow(row: Row): Info { export const UpdateInput = z.object({ projectID: ProjectID.zod, name: z.string().optional(), - icon: Info.shape.icon.optional(), - commands: Info.shape.commands.optional(), + icon: zod(ProjectIcon).optional(), + commands: zod(ProjectCommands).optional(), }) export type UpdateInput = z.infer @@ -139,7 +145,7 @@ export const layer: Layer.Layer< }), ) - const fakeVcs = Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS) + const fakeVcs = Schema.decodeUnknownSync(Schema.optional(ProjectVcs))(Flag.OPENCODE_FAKE_VCS) const resolveGitPath = (cwd: string, name: string) => { if (!name) return cwd diff --git a/packages/opencode/src/server/instance/httpapi/project.ts b/packages/opencode/src/server/instance/httpapi/project.ts new file mode 100644 index 0000000000..7d2d8462f0 --- /dev/null +++ b/packages/opencode/src/server/instance/httpapi/project.ts @@ -0,0 +1,62 @@ +import { Instance } from "@/project/instance" +import { Project } from "@/project" +import { Effect, Layer, Schema } from "effect" +import { HttpApi, HttpApiBuilder, HttpApiEndpoint, HttpApiGroup, OpenApi } from "effect/unstable/httpapi" + +const root = "/project" + +export const ProjectApi = HttpApi.make("project") + .add( + HttpApiGroup.make("project") + .add( + HttpApiEndpoint.get("list", root, { + success: Schema.Array(Project.Info), + }).annotateMerge( + OpenApi.annotations({ + identifier: "project.list", + summary: "List all projects", + description: "Get a list of projects that have been opened with OpenCode.", + }), + ), + HttpApiEndpoint.get("current", `${root}/current`, { + success: Project.Info, + }).annotateMerge( + OpenApi.annotations({ + identifier: "project.current", + summary: "Get current project", + description: "Retrieve the currently active project that OpenCode is working with.", + }), + ), + ) + .annotateMerge( + OpenApi.annotations({ + title: "project", + description: "Experimental HttpApi project routes.", + }), 
+ ), + ) + .annotateMerge( + OpenApi.annotations({ + title: "opencode experimental HttpApi", + version: "0.0.1", + description: "Experimental HttpApi surface for selected instance routes.", + }), + ) + +export const projectHandlers = Layer.unwrap( + Effect.gen(function* () { + const svc = yield* Project.Service + + const list = Effect.fn("ProjectHttpApi.list")(function* () { + return yield* svc.list() + }) + + const current = Effect.fn("ProjectHttpApi.current")(function* () { + return Instance.project + }) + + return HttpApiBuilder.group(ProjectApi, "project", (handlers) => + handlers.handle("list", list).handle("current", current), + ) + }), +).pipe(Layer.provide(Project.defaultLayer)) diff --git a/packages/opencode/src/server/instance/httpapi/server.ts b/packages/opencode/src/server/instance/httpapi/server.ts index 64332fd2a0..b4442d6400 100644 --- a/packages/opencode/src/server/instance/httpapi/server.ts +++ b/packages/opencode/src/server/instance/httpapi/server.ts @@ -12,6 +12,7 @@ import { lazy } from "@/util/lazy" import { Filesystem } from "@/util" import { ConfigApi, configHandlers } from "./config" import { PermissionApi, permissionHandlers } from "./permission" +import { ProjectApi, projectHandlers } from "./project" import { ProviderApi, providerHandlers } from "./provider" import { QuestionApi, questionHandlers } from "./question" @@ -108,11 +109,13 @@ const instance = HttpRouter.middleware()( const QuestionSecured = QuestionApi.middleware(Authorization) const PermissionSecured = PermissionApi.middleware(Authorization) +const ProjectSecured = ProjectApi.middleware(Authorization) const ProviderSecured = ProviderApi.middleware(Authorization) const ConfigSecured = ConfigApi.middleware(Authorization) export const routes = Layer.mergeAll( HttpApiBuilder.layer(ConfigSecured).pipe(Layer.provide(configHandlers)), + HttpApiBuilder.layer(ProjectSecured).pipe(Layer.provide(projectHandlers)), 
HttpApiBuilder.layer(QuestionSecured).pipe(Layer.provide(questionHandlers)), HttpApiBuilder.layer(PermissionSecured).pipe(Layer.provide(permissionHandlers)), HttpApiBuilder.layer(ProviderSecured).pipe(Layer.provide(providerHandlers)), diff --git a/packages/opencode/src/server/instance/index.ts b/packages/opencode/src/server/instance/index.ts index 6a290093c5..cfcaffc596 100644 --- a/packages/opencode/src/server/instance/index.ts +++ b/packages/opencode/src/server/instance/index.ts @@ -30,14 +30,7 @@ import { WorkspaceRouterMiddleware } from "./middleware" import { AppRuntime } from "@/effect/app-runtime" export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { - const app = new Hono() - .use(WorkspaceRouterMiddleware(upgrade)) - .route("/project", ProjectRoutes()) - .route("/pty", PtyRoutes(upgrade)) - .route("/config", ConfigRoutes()) - .route("/experimental", ExperimentalRoutes()) - .route("/session", SessionRoutes()) - .route("/permission", PermissionRoutes()) + const app = new Hono().use(WorkspaceRouterMiddleware(upgrade)) if (Flag.OPENCODE_EXPERIMENTAL_HTTPAPI) { const handler = ExperimentalHttpApiServer.webHandler().handler @@ -52,9 +45,17 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { app.get("/provider/auth", (c) => handler(c.req.raw, context)) app.post("/provider/:providerID/oauth/authorize", (c) => handler(c.req.raw, context)) app.post("/provider/:providerID/oauth/callback", (c) => handler(c.req.raw, context)) + app.get("/project", (c) => handler(c.req.raw, context)) + app.get("/project/current", (c) => handler(c.req.raw, context)) } return app + .route("/project", ProjectRoutes()) + .route("/pty", PtyRoutes(upgrade)) + .route("/config", ConfigRoutes()) + .route("/experimental", ExperimentalRoutes()) + .route("/session", SessionRoutes()) + .route("/permission", PermissionRoutes()) .route("/question", QuestionRoutes()) .route("/provider", ProviderRoutes()) .route("/sync", SyncRoutes()) diff --git 
a/packages/opencode/src/server/instance/project.ts b/packages/opencode/src/server/instance/project.ts index eea741596d..95b5862fd5 100644 --- a/packages/opencode/src/server/instance/project.ts +++ b/packages/opencode/src/server/instance/project.ts @@ -23,7 +23,7 @@ export const ProjectRoutes = lazy(() => description: "List of projects", content: { "application/json": { - schema: resolver(Project.Info.array()), + schema: resolver(Project.Info.zod.array()), }, }, }, @@ -45,7 +45,7 @@ export const ProjectRoutes = lazy(() => description: "Current project information", content: { "application/json": { - schema: resolver(Project.Info), + schema: resolver(Project.Info.zod), }, }, }, @@ -66,7 +66,7 @@ export const ProjectRoutes = lazy(() => description: "Project information after git initialization", content: { "application/json": { - schema: resolver(Project.Info), + schema: resolver(Project.Info.zod), }, }, }, @@ -99,7 +99,7 @@ export const ProjectRoutes = lazy(() => description: "Updated project information", content: { "application/json": { - schema: resolver(Project.Info), + schema: resolver(Project.Info.zod), }, }, }, From 79e9baf55acd6b5d2c8b49486cbd6a507897bf84 Mon Sep 17 00:00:00 2001 From: Brendan Allan <14191578+Brendonovich@users.noreply.github.com> Date: Fri, 17 Apr 2026 11:54:19 +0800 Subject: [PATCH 125/201] fix(app): use fetchQuery instead of ensureQueryData in global sync (#23025) --- packages/app/src/context/global-sync.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/app/src/context/global-sync.tsx b/packages/app/src/context/global-sync.tsx index 6ff60f161f..1359b07b4e 100644 --- a/packages/app/src/context/global-sync.tsx +++ b/packages/app/src/context/global-sync.tsx @@ -204,7 +204,7 @@ function createGlobalSync() { const limit = Math.max(store.limit + SESSION_RECENT_LIMIT, SESSION_RECENT_LIMIT) const promise = queryClient - .ensureQueryData({ + .fetchQuery({ ...loadSessionsQuery(directory), queryFn: () => 
loadRootSessionsWithFallback({ From dfaae1454454f02db7879ad7bdfd8a18feba83b5 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 04:14:26 +0000 Subject: [PATCH 126/201] chore: update nix node_modules hashes --- nix/hashes.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nix/hashes.json b/nix/hashes.json index 239b72fd70..54fd991eca 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,8 +1,8 @@ { "nodeModules": { - "x86_64-linux": "sha256-tYAb5Mo39UW1VEejYuo0jW0jzH2OyY/HrqgiZL3rmjY=", - "aarch64-linux": "sha256-3zGKV5UwokXpmY0nT1mry3IhNf2EQYLKT7ac+/trmQA=", - "aarch64-darwin": "sha256-oKXAut7eu/eW5a43OT8+aFuH1F1tuIldTs+7PUXSCv4=", - "x86_64-darwin": "sha256-Az+9X1scOEhw3aOO8laKJoZjiuz3qlLTIk1bx25P/z4=" + "x86_64-linux": "sha256-OPbZUo/fQv2Xsf+NEZV08GLBMN/DXovhRvn2JkesFtY=", + "aarch64-linux": "sha256-WK7xlVLuirKDN5LaqjBn7qpv5bYVtYHZw0qRNKX4xXg=", + "aarch64-darwin": "sha256-BAoAdeLQ+lXDD7Klxoxij683OVVug8KXEMRUqIQAjc8=", + "x86_64-darwin": "sha256-ZOBwNR2gZgc5f+y3VIBBT4qZpeZfg7Of6AaGDOfqsG8=" } } From 4bd5a158a5cbc09ac52df8dc7001fb3dc4110506 Mon Sep 17 00:00:00 2001 From: Dax Date: Fri, 17 Apr 2026 00:23:30 -0400 Subject: [PATCH 127/201] fix: preserve prompt input across unmount/remount cycles (#22508) --- packages/opencode/src/cli/cmd/tui/app.tsx | 4 --- .../cli/cmd/tui/component/prompt/index.tsx | 19 +++++++++++- .../src/cli/cmd/tui/context/route.tsx | 8 ++--- .../opencode/src/cli/cmd/tui/plugin/api.tsx | 2 +- .../opencode/src/cli/cmd/tui/routes/home.tsx | 5 ++-- .../session/dialog-fork-from-timeline.tsx | 4 +-- .../cmd/tui/routes/session/dialog-message.tsx | 30 +++++++++---------- .../src/cli/cmd/tui/routes/session/index.tsx | 7 ++--- packages/plugin/src/tui.ts | 2 +- 9 files changed, 45 insertions(+), 36 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/app.tsx b/packages/opencode/src/cli/cmd/tui/app.tsx index 8255c007d0..7e883ec0e3 100644 --- a/packages/opencode/src/cli/cmd/tui/app.tsx +++ 
b/packages/opencode/src/cli/cmd/tui/app.tsx @@ -420,12 +420,8 @@ function App(props: { onSnapshot?: () => Promise }) { aliases: ["clear"], }, onSelect: () => { - const current = promptRef.current - // Don't require focus - if there's any text, preserve it - const currentPrompt = current?.current?.input ? current.current : undefined route.navigate({ type: "home", - initialPrompt: currentPrompt, }) dialog.clear() }, diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index 82c4a7222f..82cdefebcb 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -12,7 +12,7 @@ import { useRoute } from "@tui/context/route" import { useSync } from "@tui/context/sync" import { useEvent } from "@tui/context/event" import { MessageID, PartID } from "@/session/schema" -import { createStore, produce } from "solid-js/store" +import { createStore, produce, unwrap } from "solid-js/store" import { useKeybind } from "@tui/context/keybind" import { usePromptHistory, type PromptInfo } from "./history" import { assign } from "./part" @@ -75,6 +75,8 @@ function randomIndex(count: number) { return Math.floor(Math.random() * count) } +let stashed: { prompt: PromptInfo; cursor: number } | undefined + export function Prompt(props: PromptProps) { let input: TextareaRenderable let anchor: BoxRenderable @@ -433,7 +435,22 @@ export function Prompt(props: PromptProps) { }, } + onMount(() => { + const saved = stashed + stashed = undefined + if (store.prompt.input) return + if (saved && saved.prompt.input) { + input.setText(saved.prompt.input) + setStore("prompt", saved.prompt) + restoreExtmarksFromParts(saved.prompt.parts) + input.cursorOffset = saved.cursor + } + }) + onCleanup(() => { + if (store.prompt.input) { + stashed = { prompt: unwrap(store.prompt), cursor: input.cursorOffset } + } props.ref?.(undefined) }) diff --git 
a/packages/opencode/src/cli/cmd/tui/context/route.tsx b/packages/opencode/src/cli/cmd/tui/context/route.tsx index e9f463a13f..6db8247592 100644 --- a/packages/opencode/src/cli/cmd/tui/context/route.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/route.tsx @@ -1,16 +1,16 @@ -import { createStore } from "solid-js/store" +import { createStore, reconcile } from "solid-js/store" import { createSimpleContext } from "./helper" import type { PromptInfo } from "../component/prompt/history" export type HomeRoute = { type: "home" - initialPrompt?: PromptInfo + prompt?: PromptInfo } export type SessionRoute = { type: "session" sessionID: string - initialPrompt?: PromptInfo + prompt?: PromptInfo } export type PluginRoute = { @@ -37,7 +37,7 @@ export const { use: useRoute, provider: RouteProvider } = createSimpleContext({ return store }, navigate(route: Route) { - setStore(route) + setStore(reconcile(route)) }, } }, diff --git a/packages/opencode/src/cli/cmd/tui/plugin/api.tsx b/packages/opencode/src/cli/cmd/tui/plugin/api.tsx index d2b495ca31..5bea483807 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/api.tsx +++ b/packages/opencode/src/cli/cmd/tui/plugin/api.tsx @@ -91,7 +91,7 @@ function routeCurrent(route: ReturnType): TuiPluginApi["route"] name: "session", params: { sessionID: route.data.sessionID, - initialPrompt: route.data.initialPrompt, + prompt: route.data.prompt, }, } } diff --git a/packages/opencode/src/cli/cmd/tui/routes/home.tsx b/packages/opencode/src/cli/cmd/tui/routes/home.tsx index 1cce7fb396..2f0ff07e9a 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/home.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/home.tsx @@ -10,7 +10,6 @@ import { usePromptRef } from "../context/prompt" import { useLocal } from "../context/local" import { TuiPluginRuntime } from "../plugin" -// TODO: what is the best way to do this? 
let once = false const placeholder = { normal: ["Fix a TODO in the codebase", "What is the tech stack of this project?", "Fix broken tests"], @@ -31,8 +30,8 @@ export function Home() { setRef(r) promptRef.set(r) if (once || !r) return - if (route.initialPrompt) { - r.set(route.initialPrompt) + if (route.prompt) { + r.set(route.prompt) once = true return } diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx index 0ce33a59a9..8d1e4438c8 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx @@ -38,7 +38,7 @@ export function DialogForkFromTimeline(props: { sessionID: string; onMove: (mess messageID: message.id, }) const parts = sync.data.part[message.id] ?? [] - const initialPrompt = parts.reduce( + const prompt = parts.reduce( (agg, part) => { if (part.type === "text") { if (!part.synthetic) agg.input += part.text @@ -51,7 +51,7 @@ export function DialogForkFromTimeline(props: { sessionID: string; onMove: (mess route.navigate({ sessionID: forked.data!.id, type: "session", - initialPrompt, + prompt, }) dialog.clear() }, diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx index 412b4d87eb..aeea2f52ad 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx @@ -81,25 +81,23 @@ export function DialogMessage(props: { sessionID: props.sessionID, messageID: props.messageID, }) - const initialPrompt = (() => { - const msg = message() - if (!msg) return undefined - const parts = sync.data.part[msg.id] - return parts.reduce( - (agg, part) => { - if (part.type === "text") { - if (!part.synthetic) agg.input += part.text - } - if (part.type === 
"file") agg.parts.push(part) - return agg - }, - { input: "", parts: [] as PromptInfo["parts"] }, - ) - })() + const msg = message() + const prompt = msg + ? sync.data.part[msg.id].reduce( + (agg, part) => { + if (part.type === "text") { + if (!part.synthetic) agg.input += part.text + } + if (part.type === "file") agg.parts.push(part) + return agg + }, + { input: "", parts: [] as PromptInfo["parts"] }, + ) + : undefined route.navigate({ sessionID: result.data!.id, type: "session", - initialPrompt, + prompt, }) dialog.clear() }, diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx index 70a4b73b95..ccca4d1eba 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx @@ -207,8 +207,6 @@ export function Session() { if (scroll) scroll.scrollBy(100_000) }) - // Handle initial prompt from fork - let seeded = false let lastSwitch: string | undefined = undefined event.on("message.part.updated", (evt) => { const part = evt.properties.part @@ -226,14 +224,15 @@ export function Session() { } }) + let seeded = false let scroll: ScrollBoxRenderable let prompt: PromptRef | undefined const bind = (r: PromptRef | undefined) => { prompt = r promptRef.set(r) - if (seeded || !route.initialPrompt || !r) return + if (seeded || !route.prompt || !r) return seeded = true - r.set(route.initialPrompt) + r.set(route.prompt) } const keybind = useKeybind() const dialog = useDialog() diff --git a/packages/plugin/src/tui.ts b/packages/plugin/src/tui.ts index 099cf27580..1c57a71ab3 100644 --- a/packages/plugin/src/tui.ts +++ b/packages/plugin/src/tui.ts @@ -29,7 +29,7 @@ export type TuiRouteCurrent = name: "session" params: { sessionID: string - initialPrompt?: unknown + prompt?: unknown } } | { From 76a141090ea33da574d06e17cd1c5dbddbde2952 Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: 
Thu, 16 Apr 2026 23:31:21 -0500 Subject: [PATCH 128/201] chore: delete filetime module (#22999) --- .opencode/agent/translator.md | 1 - packages/opencode/specs/effect/migration.md | 4 +- packages/opencode/src/effect/app-runtime.ts | 2 - packages/opencode/src/file/time.ts | 113 ----- packages/opencode/src/flag/flag.ts | 1 - packages/opencode/src/session/prompt.ts | 4 - packages/opencode/src/tool/edit.ts | 119 +++-- packages/opencode/src/tool/read.ts | 7 +- packages/opencode/src/tool/registry.ts | 3 - packages/opencode/src/tool/write.ts | 4 - packages/opencode/test/file/time.test.ts | 422 ------------------ .../test/session/prompt-effect.test.ts | 12 - .../test/session/snapshot-tool-race.test.ts | 12 - packages/opencode/test/tool/edit.test.ts | 88 ---- packages/opencode/test/tool/read.test.ts | 2 - packages/opencode/test/tool/write.test.ts | 13 - packages/web/src/content/docs/ar/cli.mdx | 1 - packages/web/src/content/docs/bs/cli.mdx | 1 - packages/web/src/content/docs/cli.mdx | 1 - packages/web/src/content/docs/da/cli.mdx | 1 - packages/web/src/content/docs/de/cli.mdx | 1 - packages/web/src/content/docs/es/cli.mdx | 1 - packages/web/src/content/docs/fr/cli.mdx | 1 - packages/web/src/content/docs/it/cli.mdx | 1 - packages/web/src/content/docs/ja/cli.mdx | 1 - packages/web/src/content/docs/ko/cli.mdx | 1 - packages/web/src/content/docs/nb/cli.mdx | 1 - packages/web/src/content/docs/pl/cli.mdx | 1 - packages/web/src/content/docs/pt-br/cli.mdx | 1 - packages/web/src/content/docs/ru/cli.mdx | 1 - packages/web/src/content/docs/th/cli.mdx | 1 - packages/web/src/content/docs/tr/cli.mdx | 1 - packages/web/src/content/docs/zh-cn/cli.mdx | 1 - packages/web/src/content/docs/zh-tw/cli.mdx | 1 - 34 files changed, 59 insertions(+), 766 deletions(-) delete mode 100644 packages/opencode/src/file/time.ts delete mode 100644 packages/opencode/test/file/time.test.ts diff --git a/.opencode/agent/translator.md b/.opencode/agent/translator.md index a987d01927..8ac7025f17 100644 --- 
a/.opencode/agent/translator.md +++ b/.opencode/agent/translator.md @@ -594,7 +594,6 @@ OPENCODE_DISABLE_CLAUDE_CODE OPENCODE_DISABLE_CLAUDE_CODE_PROMPT OPENCODE_DISABLE_CLAUDE_CODE_SKILLS OPENCODE_DISABLE_DEFAULT_PLUGINS -OPENCODE_DISABLE_FILETIME_CHECK OPENCODE_DISABLE_LSP_DOWNLOAD OPENCODE_DISABLE_MODELS_FETCH OPENCODE_DISABLE_PRUNE diff --git a/packages/opencode/specs/effect/migration.md b/packages/opencode/specs/effect/migration.md index 105a82290b..b8bf4e0494 100644 --- a/packages/opencode/specs/effect/migration.md +++ b/packages/opencode/specs/effect/migration.md @@ -9,7 +9,7 @@ Use `InstanceState` (from `src/effect/instance-state.ts`) for services that need Use `makeRuntime` (from `src/effect/run-service.ts`) to create a per-service `ManagedRuntime` that lazily initializes and shares layers via a global `memoMap`. Returns `{ runPromise, runFork, runCallback }`. - Global services (no per-directory state): Account, Auth, AppFileSystem, Installation, Truncate, Worktree -- Instance-scoped (per-directory state via InstanceState): Agent, Bus, Command, Config, File, FileTime, FileWatcher, Format, LSP, MCP, Permission, Plugin, ProviderAuth, Pty, Question, SessionStatus, Skill, Snapshot, ToolRegistry, Vcs +- Instance-scoped (per-directory state via InstanceState): Agent, Bus, Command, Config, File, FileWatcher, Format, LSP, MCP, Permission, Plugin, ProviderAuth, Pty, Question, SessionStatus, Skill, Snapshot, ToolRegistry, Vcs Rule of thumb: if two open directories should not share one copy of the service, it needs `InstanceState`. @@ -195,7 +195,6 @@ This checklist is only about the service shape migration. 
Many of these services - [x] `Config` — `config/config.ts` - [x] `Discovery` — `skill/discovery.ts` (dependency-only layer, no standalone runtime) - [x] `File` — `file/index.ts` -- [x] `FileTime` — `file/time.ts` - [x] `FileWatcher` — `file/watcher.ts` - [x] `Format` — `format/index.ts` - [x] `Installation` — `installation/index.ts` @@ -301,7 +300,6 @@ For each service, the migration is roughly: - `SessionRunState` — migrated 2026-04-11. Single caller in `server/instance/session.ts` converted; facade removed. - `Account` — migrated 2026-04-11. Callers in `server/instance/experimental.ts` and `cli/cmd/account.ts` converted; facade removed. - `Instruction` — migrated 2026-04-11. Test-only callers converted; facade removed. -- `FileTime` — migrated 2026-04-11. Test-only callers converted; facade removed. - `FileWatcher` — migrated 2026-04-11. Callers in `project/bootstrap.ts` and test converted; facade removed. - `Question` — migrated 2026-04-11. Callers in `server/instance/question.ts` and test converted; facade removed. - `Truncate` — migrated 2026-04-11. Caller in `tool/tool.ts` and test converted; facade removed. 
diff --git a/packages/opencode/src/effect/app-runtime.ts b/packages/opencode/src/effect/app-runtime.ts index a9ed957745..eae52d6366 100644 --- a/packages/opencode/src/effect/app-runtime.ts +++ b/packages/opencode/src/effect/app-runtime.ts @@ -9,7 +9,6 @@ import { Account } from "@/account/account" import { Config } from "@/config" import { Git } from "@/git" import { Ripgrep } from "@/file/ripgrep" -import { FileTime } from "@/file/time" import { File } from "@/file" import { FileWatcher } from "@/file/watcher" import { Storage } from "@/storage" @@ -58,7 +57,6 @@ export const AppLayer = Layer.mergeAll( Config.defaultLayer, Git.defaultLayer, Ripgrep.defaultLayer, - FileTime.defaultLayer, File.defaultLayer, FileWatcher.defaultLayer, Storage.defaultLayer, diff --git a/packages/opencode/src/file/time.ts b/packages/opencode/src/file/time.ts deleted file mode 100644 index cc26682d57..0000000000 --- a/packages/opencode/src/file/time.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { DateTime, Effect, Layer, Option, Semaphore, Context } from "effect" -import { InstanceState } from "@/effect" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Flag } from "@/flag/flag" -import type { SessionID } from "@/session/schema" -import { Log } from "../util" - -const log = Log.create({ service: "file.time" }) - -export type Stamp = { - readonly read: Date - readonly mtime: number | undefined - readonly size: number | undefined -} - -const session = (reads: Map>, sessionID: SessionID) => { - const value = reads.get(sessionID) - if (value) return value - - const next = new Map() - reads.set(sessionID, next) - return next -} - -interface State { - reads: Map> - locks: Map -} - -export interface Interface { - readonly read: (sessionID: SessionID, file: string) => Effect.Effect - readonly get: (sessionID: SessionID, file: string) => Effect.Effect - readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect - readonly withLock: (filepath: string, fn: () => 
Effect.Effect) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/FileTime") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const fsys = yield* AppFileSystem.Service - const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK - - const stamp = Effect.fnUntraced(function* (file: string) { - const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void)) - return { - read: yield* DateTime.nowAsDate, - mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined, - size: info ? Number(info.size) : undefined, - } - }) - const state = yield* InstanceState.make( - Effect.fn("FileTime.state")(() => - Effect.succeed({ - reads: new Map>(), - locks: new Map(), - }), - ), - ) - - const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) { - filepath = AppFileSystem.normalizePath(filepath) - const locks = (yield* InstanceState.get(state)).locks - const lock = locks.get(filepath) - if (lock) return lock - - const next = Semaphore.makeUnsafe(1) - locks.set(filepath, next) - return next - }) - - const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) { - file = AppFileSystem.normalizePath(file) - const reads = (yield* InstanceState.get(state)).reads - log.info("read", { sessionID, file }) - session(reads, sessionID).set(file, yield* stamp(file)) - }) - - const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) { - file = AppFileSystem.normalizePath(file) - const reads = (yield* InstanceState.get(state)).reads - return reads.get(sessionID)?.get(file)?.read - }) - - const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) { - if (disableCheck) return - filepath = AppFileSystem.normalizePath(filepath) - - const reads = (yield* InstanceState.get(state)).reads - const time = reads.get(sessionID)?.get(filepath) - if (!time) throw new Error(`You must read file ${filepath} before 
overwriting it. Use the Read tool first`) - - const next = yield* stamp(filepath) - const changed = next.mtime !== time.mtime || next.size !== time.size - if (!changed) return - - throw new Error( - `File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`, - ) - }) - - const withLock = Effect.fn("FileTime.withLock")(function* (filepath: string, fn: () => Effect.Effect) { - return yield* fn().pipe((yield* getLock(filepath)).withPermits(1)) - }) - - return Service.of({ read, get, assert, withLock }) - }), -).pipe(Layer.orDie) - -export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) - -export * as FileTime from "./time" diff --git a/packages/opencode/src/flag/flag.ts b/packages/opencode/src/flag/flag.ts index 416f641c44..a1c46288a2 100644 --- a/packages/opencode/src/flag/flag.ts +++ b/packages/opencode/src/flag/flag.ts @@ -70,7 +70,6 @@ export const Flag = { OPENCODE_EXPERIMENTAL_OXFMT: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_OXFMT"), OPENCODE_EXPERIMENTAL_LSP_TY: truthy("OPENCODE_EXPERIMENTAL_LSP_TY"), OPENCODE_EXPERIMENTAL_LSP_TOOL: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_LSP_TOOL"), - OPENCODE_DISABLE_FILETIME_CHECK: Config.boolean("OPENCODE_DISABLE_FILETIME_CHECK").pipe(Config.withDefault(false)), OPENCODE_EXPERIMENTAL_PLAN_MODE: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE"), OPENCODE_EXPERIMENTAL_MARKDOWN: !falsy("OPENCODE_EXPERIMENTAL_MARKDOWN"), OPENCODE_MODELS_URL: process.env["OPENCODE_MODELS_URL"], diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 9faa618788..431189d19c 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -23,7 +23,6 @@ import MAX_STEPS from "../session/prompt/max-steps.txt" import { 
ToolRegistry } from "../tool" import { MCP } from "../mcp" import { LSP } from "../lsp" -import { FileTime } from "../file/time" import { Flag } from "../flag/flag" import { ulid } from "ulid" import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" @@ -94,7 +93,6 @@ export const layer = Layer.effect( const fsys = yield* AppFileSystem.Service const mcp = yield* MCP.Service const lsp = yield* LSP.Service - const filetime = yield* FileTime.Service const registry = yield* ToolRegistry.Service const truncate = yield* Truncate.Service const spawner = yield* ChildProcessSpawner.ChildProcessSpawner @@ -1183,7 +1181,6 @@ NOTE: At any point in time through this workflow you should feel free to ask the ] } - yield* filetime.read(input.sessionID, filepath) return [ { messageID: info.id, @@ -1684,7 +1681,6 @@ export const defaultLayer = Layer.suspend(() => Layer.provide(Permission.defaultLayer), Layer.provide(MCP.defaultLayer), Layer.provide(LSP.defaultLayer), - Layer.provide(FileTime.defaultLayer), Layer.provide(ToolRegistry.defaultLayer), Layer.provide(Truncate.defaultLayer), Layer.provide(Provider.defaultLayer), diff --git a/packages/opencode/src/tool/edit.ts b/packages/opencode/src/tool/edit.ts index 62b96cba82..f535183d4c 100644 --- a/packages/opencode/src/tool/edit.ts +++ b/packages/opencode/src/tool/edit.ts @@ -14,7 +14,6 @@ import { File } from "../file" import { FileWatcher } from "../file/watcher" import { Bus } from "../bus" import { Format } from "../format" -import { FileTime } from "../file/time" import { Instance } from "../project/instance" import { Snapshot } from "@/snapshot" import { assertExternalDirectoryEffect } from "./external-directory" @@ -44,7 +43,6 @@ export const EditTool = Tool.define( "edit", Effect.gen(function* () { const lsp = yield* LSP.Service - const filetime = yield* FileTime.Service const afs = yield* AppFileSystem.Service const format = yield* Format.Service const bus = yield* Bus.Service @@ -70,52 +68,11 @@ export 
const EditTool = Tool.define( let diff = "" let contentOld = "" let contentNew = "" - yield* filetime.withLock(filePath, () => - Effect.gen(function* () { - if (params.oldString === "") { - const existed = yield* afs.existsSafe(filePath) - contentNew = params.newString - diff = trimDiff(createTwoFilesPatch(filePath, filePath, contentOld, contentNew)) - yield* ctx.ask({ - permission: "edit", - patterns: [path.relative(Instance.worktree, filePath)], - always: ["*"], - metadata: { - filepath: filePath, - diff, - }, - }) - yield* afs.writeWithDirs(filePath, params.newString) - yield* format.file(filePath) - yield* bus.publish(File.Event.Edited, { file: filePath }) - yield* bus.publish(FileWatcher.Event.Updated, { - file: filePath, - event: existed ? "change" : "add", - }) - yield* filetime.read(ctx.sessionID, filePath) - return - } - - const info = yield* afs.stat(filePath).pipe(Effect.catch(() => Effect.succeed(undefined))) - if (!info) throw new Error(`File ${filePath} not found`) - if (info.type === "Directory") throw new Error(`Path is a directory, not a file: ${filePath}`) - yield* filetime.assert(ctx.sessionID, filePath) - contentOld = yield* afs.readFileString(filePath) - - const ending = detectLineEnding(contentOld) - const old = convertToLineEnding(normalizeLineEndings(params.oldString), ending) - const next = convertToLineEnding(normalizeLineEndings(params.newString), ending) - - contentNew = replace(contentOld, old, next, params.replaceAll) - - diff = trimDiff( - createTwoFilesPatch( - filePath, - filePath, - normalizeLineEndings(contentOld), - normalizeLineEndings(contentNew), - ), - ) + yield* Effect.gen(function* () { + if (params.oldString === "") { + const existed = yield* afs.existsSafe(filePath) + contentNew = params.newString + diff = trimDiff(createTwoFilesPatch(filePath, filePath, contentOld, contentNew)) yield* ctx.ask({ permission: "edit", patterns: [path.relative(Instance.worktree, filePath)], @@ -125,26 +82,62 @@ export const EditTool = 
Tool.define( diff, }, }) - - yield* afs.writeWithDirs(filePath, contentNew) + yield* afs.writeWithDirs(filePath, params.newString) yield* format.file(filePath) yield* bus.publish(File.Event.Edited, { file: filePath }) yield* bus.publish(FileWatcher.Event.Updated, { file: filePath, - event: "change", + event: existed ? "change" : "add", }) - contentNew = yield* afs.readFileString(filePath) - diff = trimDiff( - createTwoFilesPatch( - filePath, - filePath, - normalizeLineEndings(contentOld), - normalizeLineEndings(contentNew), - ), - ) - yield* filetime.read(ctx.sessionID, filePath) - }).pipe(Effect.orDie), - ) + return + } + + const info = yield* afs.stat(filePath).pipe(Effect.catch(() => Effect.succeed(undefined))) + if (!info) throw new Error(`File ${filePath} not found`) + if (info.type === "Directory") throw new Error(`Path is a directory, not a file: ${filePath}`) + contentOld = yield* afs.readFileString(filePath) + + const ending = detectLineEnding(contentOld) + const old = convertToLineEnding(normalizeLineEndings(params.oldString), ending) + const next = convertToLineEnding(normalizeLineEndings(params.newString), ending) + + contentNew = replace(contentOld, old, next, params.replaceAll) + + diff = trimDiff( + createTwoFilesPatch( + filePath, + filePath, + normalizeLineEndings(contentOld), + normalizeLineEndings(contentNew), + ), + ) + yield* ctx.ask({ + permission: "edit", + patterns: [path.relative(Instance.worktree, filePath)], + always: ["*"], + metadata: { + filepath: filePath, + diff, + }, + }) + + yield* afs.writeWithDirs(filePath, contentNew) + yield* format.file(filePath) + yield* bus.publish(File.Event.Edited, { file: filePath }) + yield* bus.publish(FileWatcher.Event.Updated, { + file: filePath, + event: "change", + }) + contentNew = yield* afs.readFileString(filePath) + diff = trimDiff( + createTwoFilesPatch( + filePath, + filePath, + normalizeLineEndings(contentOld), + normalizeLineEndings(contentNew), + ), + ) + }).pipe(Effect.orDie) const 
filediff: Snapshot.FileDiff = { file: filePath, diff --git a/packages/opencode/src/tool/read.ts b/packages/opencode/src/tool/read.ts index c6d1461cdf..18c668ca07 100644 --- a/packages/opencode/src/tool/read.ts +++ b/packages/opencode/src/tool/read.ts @@ -7,7 +7,6 @@ import { createInterface } from "readline" import * as Tool from "./tool" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { LSP } from "../lsp" -import { FileTime } from "../file/time" import DESCRIPTION from "./read.txt" import { Instance } from "../project/instance" import { assertExternalDirectoryEffect } from "./external-directory" @@ -31,7 +30,6 @@ export const ReadTool = Tool.define( const fs = yield* AppFileSystem.Service const instruction = yield* Instruction.Service const lsp = yield* LSP.Service - const time = yield* FileTime.Service const scope = yield* Scope.Scope const miss = Effect.fn("ReadTool.miss")(function* (filepath: string) { @@ -75,9 +73,8 @@ export const ReadTool = Tool.define( ).pipe(Effect.map((items: string[]) => items.sort((a, b) => a.localeCompare(b)))) }) - const warm = Effect.fn("ReadTool.warm")(function* (filepath: string, sessionID: Tool.Context["sessionID"]) { + const warm = Effect.fn("ReadTool.warm")(function* (filepath: string) { yield* lsp.touchFile(filepath, false).pipe(Effect.ignore, Effect.forkIn(scope)) - yield* time.read(sessionID, filepath) }) const run = Effect.fn("ReadTool.execute")(function* (params: z.infer, ctx: Tool.Context) { @@ -196,7 +193,7 @@ export const ReadTool = Tool.define( } output += "\n" - yield* warm(filepath, ctx.sessionID) + yield* warm(filepath) if (loaded.length > 0) { output += `\n\n\n${loaded.map((item) => item.content).join("\n\n")}\n` diff --git a/packages/opencode/src/tool/registry.ts b/packages/opencode/src/tool/registry.ts index 34e6b4e364..e27593e597 100644 --- a/packages/opencode/src/tool/registry.ts +++ b/packages/opencode/src/tool/registry.ts @@ -39,7 +39,6 @@ import { InstanceState } from "@/effect" import 
{ Question } from "../question" import { Todo } from "../session/todo" import { LSP } from "../lsp" -import { FileTime } from "../file/time" import { Instruction } from "../session/instruction" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { Bus } from "../bus" @@ -80,7 +79,6 @@ export const layer: Layer.Layer< | Session.Service | Provider.Service | LSP.Service - | FileTime.Service | Instruction.Service | AppFileSystem.Service | Bus.Service @@ -329,7 +327,6 @@ export const defaultLayer = Layer.suspend(() => Layer.provide(Session.defaultLayer), Layer.provide(Provider.defaultLayer), Layer.provide(LSP.defaultLayer), - Layer.provide(FileTime.defaultLayer), Layer.provide(Instruction.defaultLayer), Layer.provide(AppFileSystem.defaultLayer), Layer.provide(Bus.layer), diff --git a/packages/opencode/src/tool/write.ts b/packages/opencode/src/tool/write.ts index c5871eb0ef..741091b21d 100644 --- a/packages/opencode/src/tool/write.ts +++ b/packages/opencode/src/tool/write.ts @@ -9,7 +9,6 @@ import { Bus } from "../bus" import { File } from "../file" import { FileWatcher } from "../file/watcher" import { Format } from "../format" -import { FileTime } from "../file/time" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { Instance } from "../project/instance" import { trimDiff } from "./edit" @@ -22,7 +21,6 @@ export const WriteTool = Tool.define( Effect.gen(function* () { const lsp = yield* LSP.Service const fs = yield* AppFileSystem.Service - const filetime = yield* FileTime.Service const bus = yield* Bus.Service const format = yield* Format.Service @@ -41,7 +39,6 @@ export const WriteTool = Tool.define( const exists = yield* fs.existsSafe(filepath) const contentOld = exists ? 
yield* fs.readFileString(filepath) : "" - if (exists) yield* filetime.assert(ctx.sessionID, filepath) const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, params.content)) yield* ctx.ask({ @@ -61,7 +58,6 @@ export const WriteTool = Tool.define( file: filepath, event: exists ? "change" : "add", }) - yield* filetime.read(ctx.sessionID, filepath) let output = "Wrote file successfully." yield* lsp.touchFile(filepath, true) diff --git a/packages/opencode/test/file/time.test.ts b/packages/opencode/test/file/time.test.ts deleted file mode 100644 index cb6390df87..0000000000 --- a/packages/opencode/test/file/time.test.ts +++ /dev/null @@ -1,422 +0,0 @@ -import { afterEach, describe, expect } from "bun:test" -import fs from "fs/promises" -import path from "path" -import { Cause, Deferred, Effect, Exit, Fiber, Layer } from "effect" -import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner" -import { FileTime } from "../../src/file/time" -import { Instance } from "../../src/project/instance" -import { SessionID } from "../../src/session/schema" -import { Filesystem } from "../../src/util" -import { provideInstance, provideTmpdirInstance, tmpdirScoped } from "../fixture/fixture" -import { testEffect } from "../lib/effect" - -afterEach(async () => { - await Instance.disposeAll() -}) - -const it = testEffect(Layer.mergeAll(FileTime.defaultLayer, CrossSpawnSpawner.defaultLayer)) - -const id = SessionID.make("ses_00000000000000000000000001") - -const put = (file: string, text: string) => Effect.promise(() => fs.writeFile(file, text, "utf-8")) - -const touch = (file: string, time: number) => - Effect.promise(() => { - const date = new Date(time) - return fs.utimes(file, date, date) - }) - -const read = (id: SessionID, file: string) => FileTime.Service.use((svc) => svc.read(id, file)) - -const get = (id: SessionID, file: string) => FileTime.Service.use((svc) => svc.get(id, file)) - -const check = (id: SessionID, file: string) => 
FileTime.Service.use((svc) => svc.assert(id, file)) - -const lock =
(file: string, fn: () => Effect.Effect) => FileTime.Service.use((svc) => svc.withLock(file, fn)) - -const fail = Effect.fn("FileTimeTest.fail")(function* (self: Effect.Effect) { - const exit = yield* self.pipe(Effect.exit) - if (Exit.isFailure(exit)) { - const err = Cause.squash(exit.cause) - return err instanceof Error ? err : new Error(String(err)) - } - throw new Error("expected file time effect to fail") -}) - -describe("file/time", () => { - describe("read() and get()", () => { - it.live("stores read timestamp", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - const before = yield* get(id, file) - expect(before).toBeUndefined() - - yield* read(id, file) - - const after = yield* get(id, file) - expect(after).toBeInstanceOf(Date) - expect(after!.getTime()).toBeGreaterThan(0) - }), - ), - ) - - it.live("tracks separate timestamps per session", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - const one = SessionID.make("ses_00000000000000000000000002") - const two = SessionID.make("ses_00000000000000000000000003") - yield* read(one, file) - yield* read(two, file) - - const first = yield* get(one, file) - const second = yield* get(two, file) - - expect(first).toBeDefined() - expect(second).toBeDefined() - }), - ), - ) - - it.live("updates timestamp on subsequent reads", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - yield* read(id, file) - const first = yield* get(id, file) - - yield* read(id, file) - const second = yield* get(id, file) - - expect(second!.getTime()).toBeGreaterThanOrEqual(first!.getTime()) - }), - ), - ) - - it.live("isolates reads by directory", () => - Effect.gen(function* () { - const one = yield* tmpdirScoped() - const two = yield* tmpdirScoped() - const shared = 
yield* tmpdirScoped() - const file = path.join(shared, "file.txt") - yield* put(file, "content") - - yield* provideInstance(one)(read(id, file)) - const result = yield* provideInstance(two)(get(id, file)) - expect(result).toBeUndefined() - }), - ) - }) - - describe("assert()", () => { - it.live("passes when file has not been modified", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - yield* read(id, file) - yield* check(id, file) - }), - ), - ) - - it.live("throws when file was not read first", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - const err = yield* fail(check(id, file)) - expect(err.message).toContain("You must read file") - }), - ), - ) - - it.live("throws when file was modified after read", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - yield* read(id, file) - yield* put(file, "modified content") - yield* touch(file, 2_000) - - const err = yield* fail(check(id, file)) - expect(err.message).toContain("modified since it was last read") - }), - ), - ) - - it.live("includes timestamps in error message", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - yield* read(id, file) - yield* put(file, "modified") - yield* touch(file, 2_000) - - const err = yield* fail(check(id, file)) - expect(err.message).toContain("Last modification:") - expect(err.message).toContain("Last read:") - }), - ), - ) - }) - - describe("withLock()", () => { - it.live("executes function within lock", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - let hit = false - - 
yield* lock(file, () => - Effect.sync(() => { - hit = true - return "result" - }), - ) - - expect(hit).toBe(true) - }), - ), - ) - - it.live("returns function result", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - const result = yield* lock(file, () => Effect.succeed("success")) - expect(result).toBe("success") - }), - ), - ) - - it.live("serializes concurrent operations on same file", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - const order: number[] = [] - const hold = yield* Deferred.make() - const ready = yield* Deferred.make() - - const one = yield* lock(file, () => - Effect.gen(function* () { - order.push(1) - yield* Deferred.succeed(ready, void 0) - yield* Deferred.await(hold) - order.push(2) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.await(ready) - - const two = yield* lock(file, () => - Effect.sync(() => { - order.push(3) - order.push(4) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.succeed(hold, void 0) - yield* Fiber.join(one) - yield* Fiber.join(two) - - expect(order).toEqual([1, 2, 3, 4]) - }), - ), - ) - - it.live("allows concurrent operations on different files", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const onefile = path.join(dir, "file1.txt") - const twofile = path.join(dir, "file2.txt") - let one = false - let two = false - const hold = yield* Deferred.make() - const ready = yield* Deferred.make() - - const a = yield* lock(onefile, () => - Effect.gen(function* () { - one = true - yield* Deferred.succeed(ready, void 0) - yield* Deferred.await(hold) - expect(two).toBe(true) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.await(ready) - - const b = yield* lock(twofile, () => - Effect.sync(() => { - two = true - }), - ).pipe(Effect.forkScoped) - - yield* Fiber.join(b) - yield* Deferred.succeed(hold, void 0) - yield* Fiber.join(a) - - expect(one).toBe(true) - 
expect(two).toBe(true) - }), - ), - ) - - it.live("releases lock even if function throws", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - const err = yield* fail(lock(file, () => Effect.die(new Error("Test error")))) - expect(err.message).toContain("Test error") - - let hit = false - yield* lock(file, () => - Effect.sync(() => { - hit = true - }), - ) - expect(hit).toBe(true) - }), - ), - ) - }) - - describe("path normalization", () => { - it.live("read with forward slashes, assert with backslashes", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - const forward = file.replaceAll("\\", "/") - yield* read(id, forward) - yield* check(id, file) - }), - ), - ) - - it.live("read with backslashes, assert with forward slashes", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - const forward = file.replaceAll("\\", "/") - yield* read(id, file) - yield* check(id, forward) - }), - ), - ) - - it.live("get returns timestamp regardless of slash direction", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - const forward = file.replaceAll("\\", "/") - yield* read(id, forward) - - const result = yield* get(id, file) - expect(result).toBeInstanceOf(Date) - }), - ), - ) - - it.live("withLock serializes regardless of slash direction", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - const forward = file.replaceAll("\\", "/") - const order: number[] = [] - const hold = yield* Deferred.make() - const ready = yield* Deferred.make() - - const one = yield* lock(file, () => - Effect.gen(function* () { - order.push(1) - yield* 
Deferred.succeed(ready, void 0) - yield* Deferred.await(hold) - order.push(2) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.await(ready) - - const two = yield* lock(forward, () => - Effect.sync(() => { - order.push(3) - order.push(4) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.succeed(hold, void 0) - yield* Fiber.join(one) - yield* Fiber.join(two) - - expect(order).toEqual([1, 2, 3, 4]) - }), - ), - ) - }) - - describe("stat() Filesystem.stat pattern", () => { - it.live("reads file modification time via Filesystem.stat()", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - yield* read(id, file) - - const stat = Filesystem.stat(file) - expect(stat?.mtime).toBeInstanceOf(Date) - expect(stat!.mtime.getTime()).toBeGreaterThan(0) - - yield* check(id, file) - }), - ), - ) - - it.live("detects modification via stat mtime", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "original") - yield* touch(file, 1_000) - - yield* read(id, file) - - const first = Filesystem.stat(file) - - yield* put(file, "modified") - yield* touch(file, 2_000) - - const second = Filesystem.stat(file) - expect(second!.mtime.getTime()).toBeGreaterThan(first!.mtime.getTime()) - - yield* fail(check(id, file)) - }), - ), - ) - }) -}) diff --git a/packages/opencode/test/session/prompt-effect.test.ts b/packages/opencode/test/session/prompt-effect.test.ts index 121d662e5f..2f59046840 100644 --- a/packages/opencode/test/session/prompt-effect.test.ts +++ b/packages/opencode/test/session/prompt-effect.test.ts @@ -7,7 +7,6 @@ import { Agent as AgentSvc } from "../../src/agent/agent" import { Bus } from "../../src/bus" import { Command } from "../../src/command" import { Config } from "../../src/config" -import { FileTime } from "../../src/file/time" import { LSP } from "../../src/lsp" import { 
MCP } from "../../src/mcp" import { Permission } from "../../src/permission" @@ -148,16 +147,6 @@ const lsp = Layer.succeed( }), ) -const filetime = Layer.succeed( - FileTime.Service, - FileTime.Service.of({ - read: () => Effect.void, - get: () => Effect.succeed(undefined), - assert: () => Effect.void, - withLock: (_filepath, fn) => fn(), - }), -) - const status = SessionStatus.layer.pipe(Layer.provideMerge(Bus.layer)) const run = SessionRunState.layer.pipe(Layer.provide(status)) const infra = Layer.mergeAll(NodeFileSystem.layer, CrossSpawnSpawner.defaultLayer) @@ -173,7 +162,6 @@ function makeHttp() { Plugin.defaultLayer, Config.defaultLayer, ProviderSvc.defaultLayer, - filetime, lsp, mcp, AppFileSystem.defaultLayer, diff --git a/packages/opencode/test/session/snapshot-tool-race.test.ts b/packages/opencode/test/session/snapshot-tool-race.test.ts index 1f66ccb995..6517547339 100644 --- a/packages/opencode/test/session/snapshot-tool-race.test.ts +++ b/packages/opencode/test/session/snapshot-tool-race.test.ts @@ -33,7 +33,6 @@ import { Agent as AgentSvc } from "../../src/agent/agent" import { Bus } from "../../src/bus" import { Command } from "../../src/command" import { Config } from "../../src/config" -import { FileTime } from "../../src/file/time" import { LSP } from "../../src/lsp" import { MCP } from "../../src/mcp" import { Permission } from "../../src/permission" @@ -102,16 +101,6 @@ const lsp = Layer.succeed( }), ) -const filetime = Layer.succeed( - FileTime.Service, - FileTime.Service.of({ - read: () => Effect.void, - get: () => Effect.succeed(undefined), - assert: () => Effect.void, - withLock: (_filepath, fn) => fn(), - }), -) - const status = SessionStatus.layer.pipe(Layer.provideMerge(Bus.layer)) const run = SessionRunState.layer.pipe(Layer.provide(status)) const infra = Layer.mergeAll(NodeFileSystem.layer, CrossSpawnSpawner.defaultLayer) @@ -128,7 +117,6 @@ function makeHttp() { Plugin.defaultLayer, Config.defaultLayer, ProviderSvc.defaultLayer, - 
filetime, lsp, mcp, AppFileSystem.defaultLayer, diff --git a/packages/opencode/test/tool/edit.test.ts b/packages/opencode/test/tool/edit.test.ts index 2e3dfa8a69..4759b8be36 100644 --- a/packages/opencode/test/tool/edit.test.ts +++ b/packages/opencode/test/tool/edit.test.ts @@ -5,7 +5,6 @@ import { Effect, Layer, ManagedRuntime } from "effect" import { EditTool } from "../../src/tool/edit" import { Instance } from "../../src/project/instance" import { tmpdir } from "../fixture/fixture" -import { FileTime } from "../../src/file/time" import { LSP } from "../../src/lsp" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { Format } from "../../src/format" @@ -38,7 +37,6 @@ async function touch(file: string, time: number) { const runtime = ManagedRuntime.make( Layer.mergeAll( LSP.defaultLayer, - FileTime.defaultLayer, AppFileSystem.defaultLayer, Format.defaultLayer, Bus.layer, @@ -59,9 +57,6 @@ const resolve = () => }), ) -const readFileTime = (sessionID: SessionID, filepath: string) => - runtime.runPromise(FileTime.Service.use((ft) => ft.read(sessionID, filepath))) - const subscribeBus = (def: D, callback: () => unknown) => runtime.runPromise(Bus.Service.use((bus) => bus.subscribeCallback(def, callback))) @@ -173,8 +168,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() const result = await Effect.runPromise( edit.execute( @@ -202,8 +195,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await expect( Effect.runPromise( @@ -254,8 +245,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await expect( Effect.runPromise( @@ -273,65 +262,6 @@ describe("tool.edit", () => { }) }) - 
test("throws error when file was not read first (FileTime)", async () => { - await using tmp = await tmpdir() - const filepath = path.join(tmp.path, "file.txt") - await fs.writeFile(filepath, "content", "utf-8") - - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const edit = await resolve() - await expect( - Effect.runPromise( - edit.execute( - { - filePath: filepath, - oldString: "content", - newString: "modified", - }, - ctx, - ), - ), - ).rejects.toThrow("You must read file") - }, - }) - }) - - test("throws error when file has been modified since read", async () => { - await using tmp = await tmpdir() - const filepath = path.join(tmp.path, "file.txt") - await fs.writeFile(filepath, "original content", "utf-8") - await touch(filepath, 1_000) - - await Instance.provide({ - directory: tmp.path, - fn: async () => { - // Read first - await readFileTime(ctx.sessionID, filepath) - - // Simulate external modification - await fs.writeFile(filepath, "modified externally", "utf-8") - await touch(filepath, 2_000) - - // Try to edit with the new content - const edit = await resolve() - await expect( - Effect.runPromise( - edit.execute( - { - filePath: filepath, - oldString: "modified externally", - newString: "edited", - }, - ctx, - ), - ), - ).rejects.toThrow("modified since it was last read") - }, - }) - }) - test("replaces all occurrences with replaceAll option", async () => { await using tmp = await tmpdir() const filepath = path.join(tmp.path, "file.txt") @@ -340,8 +270,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await Effect.runPromise( edit.execute( @@ -369,8 +297,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const { FileWatcher } = await import("../../src/file/watcher") const updated = await 
onceBus(FileWatcher.Event.Updated) @@ -406,8 +332,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await Effect.runPromise( edit.execute( @@ -434,8 +358,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await Effect.runPromise( edit.execute( @@ -487,8 +409,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, dirpath) - const edit = await resolve() await expect( Effect.runPromise( @@ -514,8 +434,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() const result = await Effect.runPromise( edit.execute( @@ -587,7 +505,6 @@ describe("tool.edit", () => { fn: async () => { const edit = await resolve() const filePath = path.join(tmp.path, "test.txt") - await readFileTime(ctx.sessionID, filePath) await Effect.runPromise( edit.execute( { @@ -730,8 +647,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() // Two concurrent edits @@ -746,9 +661,6 @@ describe("tool.edit", () => { ), ) - // Need to read again since FileTime tracks per-session - await readFileTime(ctx.sessionID, filepath) - const promise2 = Effect.runPromise( edit.execute( { diff --git a/packages/opencode/test/tool/read.test.ts b/packages/opencode/test/tool/read.test.ts index 3b32c72e05..c3d7074bfb 100644 --- a/packages/opencode/test/tool/read.test.ts +++ b/packages/opencode/test/tool/read.test.ts @@ -4,7 +4,6 @@ import path from "path" import { Agent } from "../../src/agent/agent" import * as CrossSpawnSpawner from 
"../../src/effect/cross-spawn-spawner" import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { FileTime } from "../../src/file/time" import { LSP } from "../../src/lsp" import { Permission } from "../../src/permission" import { Instance } from "../../src/project/instance" @@ -40,7 +39,6 @@ const it = testEffect( Agent.defaultLayer, AppFileSystem.defaultLayer, CrossSpawnSpawner.defaultLayer, - FileTime.defaultLayer, Instruction.defaultLayer, LSP.defaultLayer, Truncate.defaultLayer, diff --git a/packages/opencode/test/tool/write.test.ts b/packages/opencode/test/tool/write.test.ts index 46bbe2e401..50d3b57527 100644 --- a/packages/opencode/test/tool/write.test.ts +++ b/packages/opencode/test/tool/write.test.ts @@ -6,7 +6,6 @@ import { WriteTool } from "../../src/tool/write" import { Instance } from "../../src/project/instance" import { LSP } from "../../src/lsp" import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { FileTime } from "../../src/file/time" import { Bus } from "../../src/bus" import { Format } from "../../src/format" import { Truncate } from "../../src/tool" @@ -36,7 +35,6 @@ const it = testEffect( Layer.mergeAll( LSP.defaultLayer, AppFileSystem.defaultLayer, - FileTime.defaultLayer, Bus.layer, Format.defaultLayer, CrossSpawnSpawner.defaultLayer, @@ -58,11 +56,6 @@ const run = Effect.fn("WriteToolTest.run")(function* ( return yield* tool.execute(args, next) }) -const markRead = Effect.fn("WriteToolTest.markRead")(function* (sessionID: string, filepath: string) { - const ft = yield* FileTime.Service - yield* ft.read(sessionID as any, filepath) -}) - describe("tool.write", () => { describe("new file creation", () => { it.live("writes content to new file", () => @@ -110,8 +103,6 @@ describe("tool.write", () => { Effect.gen(function* () { const filepath = path.join(dir, "existing.txt") yield* Effect.promise(() => fs.writeFile(filepath, "old content", "utf-8")) - yield* markRead(ctx.sessionID, filepath) - const result = 
yield* run({ filePath: filepath, content: "new content" }) expect(result.output).toContain("Wrote file successfully") @@ -128,8 +119,6 @@ describe("tool.write", () => { Effect.gen(function* () { const filepath = path.join(dir, "file.txt") yield* Effect.promise(() => fs.writeFile(filepath, "old", "utf-8")) - yield* markRead(ctx.sessionID, filepath) - const result = yield* run({ filePath: filepath, content: "new" }) expect(result.metadata).toHaveProperty("filepath", filepath) @@ -231,8 +220,6 @@ describe("tool.write", () => { const readonlyPath = path.join(dir, "readonly.txt") yield* Effect.promise(() => fs.writeFile(readonlyPath, "test", "utf-8")) yield* Effect.promise(() => fs.chmod(readonlyPath, 0o444)) - yield* markRead(ctx.sessionID, readonlyPath) - const exit = yield* run({ filePath: readonlyPath, content: "new content" }).pipe(Effect.exit) expect(exit._tag).toBe("Failure") }), diff --git a/packages/web/src/content/docs/ar/cli.mdx b/packages/web/src/content/docs/ar/cli.mdx index 826ea43040..ab2c12fb20 100644 --- a/packages/web/src/content/docs/ar/cli.mdx +++ b/packages/web/src/content/docs/ar/cli.mdx @@ -573,7 +573,6 @@ opencode upgrade v0.1.48 | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | تعطيل تحميل `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | تعطيل جلب النماذج من مصادر بعيدة | | `OPENCODE_FAKE_VCS` | string | مزود VCS وهمي لأغراض الاختبار | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | تعطيل التحقق من وقت الملف لتحسين الأداء | | `OPENCODE_CLIENT` | string | معرّف العميل (الافتراضي `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | تفعيل أدوات بحث الويب من Exa | | `OPENCODE_SERVER_PASSWORD` | string | تفعيل المصادقة الأساسية لخادمي `serve`/`web` | diff --git a/packages/web/src/content/docs/bs/cli.mdx b/packages/web/src/content/docs/bs/cli.mdx index 979066acbc..118b81ba4e 100644 --- a/packages/web/src/content/docs/bs/cli.mdx +++ b/packages/web/src/content/docs/bs/cli.mdx @@ -571,7 +571,6 @@ OpenCode se može konfigurirati pomoću varijabli 
okruženja. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Onemogući učitavanje `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Onemogući dohvaćanje modela iz udaljenih izvora | | `OPENCODE_FAKE_VCS` | string | Lažni VCS provajder za potrebe testiranja | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Onemogući provjeru vremena datoteke radi optimizacije | | `OPENCODE_CLIENT` | string | Identifikator klijenta (zadano na `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Omogući Exa alate za web pretraživanje | | `OPENCODE_SERVER_PASSWORD` | string | Omogući osnovnu autentifikaciju za `serve`/`web` | diff --git a/packages/web/src/content/docs/cli.mdx b/packages/web/src/content/docs/cli.mdx index 579038ad03..786b9d3d94 100644 --- a/packages/web/src/content/docs/cli.mdx +++ b/packages/web/src/content/docs/cli.mdx @@ -575,7 +575,6 @@ OpenCode can be configured using environment variables. | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Disable fetching models from remote sources | | `OPENCODE_DISABLE_MOUSE` | boolean | Disable mouse capture in the TUI | | `OPENCODE_FAKE_VCS` | string | Fake VCS provider for testing purposes | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Disable file time checking for optimization | | `OPENCODE_CLIENT` | string | Client identifier (defaults to `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Enable Exa web search tools | | `OPENCODE_SERVER_PASSWORD` | string | Enable basic auth for `serve`/`web` | diff --git a/packages/web/src/content/docs/da/cli.mdx b/packages/web/src/content/docs/da/cli.mdx index 40c6645e67..45c4f08e3f 100644 --- a/packages/web/src/content/docs/da/cli.mdx +++ b/packages/web/src/content/docs/da/cli.mdx @@ -574,7 +574,6 @@ OpenCode kan konfigureres ved hjælp af miljøvariabler. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Deaktiver indlæsning af `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Deaktivering af modeller fra eksterne kilder | | `OPENCODE_FAKE_VCS` | string | Falsk VCS-udbyder til testformål | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Deaktiver filtidskontrol for optimering | | `OPENCODE_CLIENT` | string | Klient-id (standard til `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Aktiver Exa-websøgeværktøjer | | `OPENCODE_SERVER_PASSWORD` | string | Aktiver grundlæggende godkendelse for `serve`/`web` | diff --git a/packages/web/src/content/docs/de/cli.mdx b/packages/web/src/content/docs/de/cli.mdx index cb1b974e10..43a1189d60 100644 --- a/packages/web/src/content/docs/de/cli.mdx +++ b/packages/web/src/content/docs/de/cli.mdx @@ -573,7 +573,6 @@ OpenCode kann mithilfe von Umgebungsvariablen konfiguriert werden. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolescher Wert | Deaktivieren Sie das Laden von `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolescher Wert | Deaktivieren Sie das Abrufen von Modellen aus Remote-Quellen | | `OPENCODE_FAKE_VCS` | Zeichenfolge | Gefälschter VCS-Anbieter zu Testzwecken | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolescher Wert | Dateizeitprüfung zur Optimierung deaktivieren | | `OPENCODE_CLIENT` | Zeichenfolge | Client-ID (standardmäßig `cli`) | | `OPENCODE_ENABLE_EXA` | boolescher Wert | Exa-Websuchtools aktivieren | | `OPENCODE_SERVER_PASSWORD` | Zeichenfolge | Aktivieren Sie die Basisauthentifizierung für `serve`/`web` | diff --git a/packages/web/src/content/docs/es/cli.mdx b/packages/web/src/content/docs/es/cli.mdx index 658be27084..5c86474a61 100644 --- a/packages/web/src/content/docs/es/cli.mdx +++ b/packages/web/src/content/docs/es/cli.mdx @@ -573,7 +573,6 @@ OpenCode se puede configurar mediante variables de entorno. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | booleano | Deshabilitar la carga `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | booleano | Deshabilitar la recuperación de modelos desde fuentes remotas | | `OPENCODE_FAKE_VCS` | cadena | Proveedor de VCS falso para fines de prueba | -| `OPENCODE_DISABLE_FILETIME_CHECK` | booleano | Deshabilite la verificación del tiempo del archivo para optimizarlo | | `OPENCODE_CLIENT` | cadena | Identificador de cliente (por defecto `cli`) | | `OPENCODE_ENABLE_EXA` | booleano | Habilitar las herramientas de búsqueda web de Exa | | `OPENCODE_SERVER_PASSWORD` | cadena | Habilite la autenticación básica para `serve`/`web` | diff --git a/packages/web/src/content/docs/fr/cli.mdx b/packages/web/src/content/docs/fr/cli.mdx index 2c763618e4..cffa748ad2 100644 --- a/packages/web/src/content/docs/fr/cli.mdx +++ b/packages/web/src/content/docs/fr/cli.mdx @@ -574,7 +574,6 @@ OpenCode peut être configuré à l'aide de variables d'environnement. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | booléen | Désactiver le chargement de `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | booléen | Désactiver la récupération de modèles à partir de sources distantes | | `OPENCODE_FAKE_VCS` | chaîne | Faux fournisseur VCS à des fins de test | -| `OPENCODE_DISABLE_FILETIME_CHECK` | booléen | Désactiver la vérification de l'heure des fichiers pour l'optimisation | | `OPENCODE_CLIENT` | chaîne | Identifiant du client (par défaut `cli`) | | `OPENCODE_ENABLE_EXA` | booléen | Activer les outils de recherche Web Exa | | `OPENCODE_SERVER_PASSWORD` | chaîne | Activer l'authentification de base pour `serve`/`web` | diff --git a/packages/web/src/content/docs/it/cli.mdx b/packages/web/src/content/docs/it/cli.mdx index 46d7da1495..952dfba090 100644 --- a/packages/web/src/content/docs/it/cli.mdx +++ b/packages/web/src/content/docs/it/cli.mdx @@ -574,7 +574,6 @@ OpenCode può essere configurato tramite variabili d'ambiente. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Disabilita caricamento di `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Disabilita fetch dei modelli da fonti remote | | `OPENCODE_FAKE_VCS` | string | Provider VCS finto per scopi di test | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Disabilita controllo file time per ottimizzazione | | `OPENCODE_CLIENT` | string | Identificatore client (default `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Abilita gli strumenti di web search Exa | | `OPENCODE_SERVER_PASSWORD` | string | Abilita basic auth per `serve`/`web` | diff --git a/packages/web/src/content/docs/ja/cli.mdx b/packages/web/src/content/docs/ja/cli.mdx index f690c7d7e9..82a8852ea5 100644 --- a/packages/web/src/content/docs/ja/cli.mdx +++ b/packages/web/src/content/docs/ja/cli.mdx @@ -573,7 +573,6 @@ OpenCode は環境変数を使用して構成できます。 | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | ブール値 | `.claude/skills` のロードを無効にする | | `OPENCODE_DISABLE_MODELS_FETCH` | ブール値 | リモートソースからのモデルの取得を無効にする | | `OPENCODE_FAKE_VCS` | 文字列 | テスト目的の偽の VCS プロバイダー | -| `OPENCODE_DISABLE_FILETIME_CHECK` | ブール値 | 最適化のためにファイル時間チェックを無効にする | | `OPENCODE_CLIENT` | 文字列 | クライアント識別子 (デフォルトは `cli`) | | `OPENCODE_ENABLE_EXA` | ブール値 | Exa Web 検索ツールを有効にする | | `OPENCODE_SERVER_PASSWORD` | 文字列 | `serve`/`web` の基本認証を有効にする | diff --git a/packages/web/src/content/docs/ko/cli.mdx b/packages/web/src/content/docs/ko/cli.mdx index 0562ab8afd..b0ce10567e 100644 --- a/packages/web/src/content/docs/ko/cli.mdx +++ b/packages/web/src/content/docs/ko/cli.mdx @@ -573,7 +573,6 @@ OpenCode는 환경 변수로도 구성할 수 있습니다. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | `.claude/skills` 로드 비활성화 | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | 원격 소스에서 모델 목록 가져오기 비활성화 | | `OPENCODE_FAKE_VCS` | string | 테스트용 가짜 VCS provider | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | 최적화를 위한 파일 시간 검사 비활성화 | | `OPENCODE_CLIENT` | string | 클라이언트 식별자(기본값: `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Exa 웹 검색 도구 활성화 | | `OPENCODE_SERVER_PASSWORD` | string | `serve`/`web` 기본 인증 활성화 | diff --git a/packages/web/src/content/docs/nb/cli.mdx b/packages/web/src/content/docs/nb/cli.mdx index 8b6d283e10..8312a1a7c5 100644 --- a/packages/web/src/content/docs/nb/cli.mdx +++ b/packages/web/src/content/docs/nb/cli.mdx @@ -574,7 +574,6 @@ OpenCode kan konfigureres ved hjelp av miljøvariabler. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolsk | Deaktiver innlasting av `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolsk | Deaktiver henting av modeller fra eksterne kilder | | `OPENCODE_FAKE_VCS` | streng | Falsk VCS-leverandør for testformål | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolsk | Deaktiver filtidskontroll for optimalisering | | `OPENCODE_CLIENT` | streng | Klientidentifikator (standard til `cli`) | | `OPENCODE_ENABLE_EXA` | boolsk | Aktiver Exa-nettsøkeverktøy | | `OPENCODE_SERVER_PASSWORD` | streng | Aktiver grunnleggende autentisering for `serve`/`web` | diff --git a/packages/web/src/content/docs/pl/cli.mdx b/packages/web/src/content/docs/pl/cli.mdx index 6cdc67a48f..3931cd6467 100644 --- a/packages/web/src/content/docs/pl/cli.mdx +++ b/packages/web/src/content/docs/pl/cli.mdx @@ -574,7 +574,6 @@ OpenCode można skonfigurować za pomocą zmiennych środowiskowych. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Wyłącz ładowanie `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Wyłącz pobieranie modeli ze źródeł zewnętrznych | | `OPENCODE_FAKE_VCS` | string | Fałszywy dostawca VCS do celów testowych | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Wyłącz sprawdzanie czasu modyfikacji plików (optymalizacja) | | `OPENCODE_CLIENT` | string | Identyfikator klienta (domyślnie `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Włącz narzędzie wyszukiwania internetowego Exa | | `OPENCODE_SERVER_PASSWORD` | string | Włącz uwierzytelnianie podstawowe dla `serve`/`web` | diff --git a/packages/web/src/content/docs/pt-br/cli.mdx b/packages/web/src/content/docs/pt-br/cli.mdx index 32c50d7c0a..78190b3c5d 100644 --- a/packages/web/src/content/docs/pt-br/cli.mdx +++ b/packages/web/src/content/docs/pt-br/cli.mdx @@ -573,7 +573,6 @@ O opencode pode ser configurado usando variáveis de ambiente. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Desabilitar carregamento de `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Desabilitar busca de modelos de fontes remotas | | `OPENCODE_FAKE_VCS` | string | Provedor VCS falso para fins de teste | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Desabilitar verificação de tempo de arquivo para otimização | | `OPENCODE_CLIENT` | string | Identificador do cliente (padrão é `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Habilitar ferramentas de busca web Exa | | `OPENCODE_SERVER_PASSWORD` | string | Habilitar autenticação básica para `serve`/`web` | diff --git a/packages/web/src/content/docs/ru/cli.mdx b/packages/web/src/content/docs/ru/cli.mdx index a98111530f..f5aeee256f 100644 --- a/packages/web/src/content/docs/ru/cli.mdx +++ b/packages/web/src/content/docs/ru/cli.mdx @@ -574,7 +574,6 @@ opencode можно настроить с помощью переменных с | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | логическое значение | Отключить загрузку `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | 
логическое значение | Отключить получение моделей из удаленных источников | | `OPENCODE_FAKE_VCS` | строка | Поддельный поставщик VCS для целей тестирования | -| `OPENCODE_DISABLE_FILETIME_CHECK` | логическое значение | Отключить проверку времени файла для оптимизации | | `OPENCODE_CLIENT` | строка | Идентификатор клиента (по умолчанию `cli`) | | `OPENCODE_ENABLE_EXA` | логическое значение | Включить инструменты веб-поиска Exa | | `OPENCODE_SERVER_PASSWORD` | строка | Включить базовую аутентификацию для `serve`/`web` | diff --git a/packages/web/src/content/docs/th/cli.mdx b/packages/web/src/content/docs/th/cli.mdx index 2f75a96a7e..4b2db9d988 100644 --- a/packages/web/src/content/docs/th/cli.mdx +++ b/packages/web/src/content/docs/th/cli.mdx @@ -575,7 +575,6 @@ OpenCode สามารถกำหนดค่าโดยใช้ตัว | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | Boolean | ปิดใช้งานการนำเข้า `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | Boolean | ปิดใช้งานการดึงรายการโมเดลจากระยะไกล | | `OPENCODE_FAKE_VCS` | String | เปิดใช้งาน VCS จำลองสำหรับการทดสอบ | -| `OPENCODE_DISABLE_FILETIME_CHECK` | Boolean | ปิดใช้งานการตรวจสอบเวลาแก้ไขไฟล์ | | `OPENCODE_CLIENT` | String | ตัวระบุไคลเอนต์ (ค่าเริ่มต้นคือ `cli`) | | `OPENCODE_ENABLE_EXA` | Boolean | เปิดใช้งานการใช้ Exa แทน ls หากมี | | `OPENCODE_SERVER_PASSWORD` | String | รหัสผ่านสำหรับการตรวจสอบสิทธิ์พื้นฐาน `serve`/`web` | diff --git a/packages/web/src/content/docs/tr/cli.mdx b/packages/web/src/content/docs/tr/cli.mdx index 41600b5bf0..75ecca9926 100644 --- a/packages/web/src/content/docs/tr/cli.mdx +++ b/packages/web/src/content/docs/tr/cli.mdx @@ -574,7 +574,6 @@ opencode ortam değişkenleri kullanılarak yapılandırılabilir. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | `.claude/skills` yüklemesini devre dışı bırak | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Uzak kaynaklardan model getirmeyi devre dışı bırakın | | `OPENCODE_FAKE_VCS` | string | Test amaçlı sahte VCS sağlayıcısı | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Optimizasyon için dosya süresi kontrolünü devre dışı bırakın | | `OPENCODE_CLIENT` | string | Client kimliği (varsayılan: `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Exa web arama araçlarını etkinleştir | | `OPENCODE_SERVER_PASSWORD` | string | `serve`/`web` için temel kimlik doğrulamayı etkinleştirin | diff --git a/packages/web/src/content/docs/zh-cn/cli.mdx b/packages/web/src/content/docs/zh-cn/cli.mdx index 0c54d3d7b1..c0cff134a5 100644 --- a/packages/web/src/content/docs/zh-cn/cli.mdx +++ b/packages/web/src/content/docs/zh-cn/cli.mdx @@ -574,7 +574,6 @@ OpenCode 可以通过环境变量进行配置。 | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | 禁用加载 `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | 禁用从远程源获取模型 | | `OPENCODE_FAKE_VCS` | string | 用于测试目的的模拟 VCS 提供商 | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | 禁用文件时间检查优化 | | `OPENCODE_CLIENT` | string | 客户端标识符(默认为 `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | 启用 Exa 网络搜索工具 | | `OPENCODE_SERVER_PASSWORD` | string | 为 `serve`/`web` 启用基本认证 | diff --git a/packages/web/src/content/docs/zh-tw/cli.mdx b/packages/web/src/content/docs/zh-tw/cli.mdx index 5de2b96375..4df9d13fdd 100644 --- a/packages/web/src/content/docs/zh-tw/cli.mdx +++ b/packages/web/src/content/docs/zh-tw/cli.mdx @@ -574,7 +574,6 @@ OpenCode 可以透過環境變數進行設定。 | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | 停用載入 `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | 停用從遠端來源擷取模型 | | `OPENCODE_FAKE_VCS` | string | 用於測試目的的模擬 VCS 供應商 | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | 停用檔案時間檢查最佳化 | | `OPENCODE_CLIENT` | string | 用戶端識別碼(預設為 `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | 啟用 Exa 網路搜尋工具 | | `OPENCODE_SERVER_PASSWORD` | string 
| 為 `serve`/`web` 啟用基本認證 | From f0caeb9b25ae4f57232a152a4e1c8bdd823e2f65 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 04:32:17 +0000 Subject: [PATCH 129/201] chore: generate --- packages/web/src/content/docs/pl/cli.mdx | 52 ++++++++++++------------ 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/web/src/content/docs/pl/cli.mdx b/packages/web/src/content/docs/pl/cli.mdx index 3931cd6467..e175870cbf 100644 --- a/packages/web/src/content/docs/pl/cli.mdx +++ b/packages/web/src/content/docs/pl/cli.mdx @@ -553,32 +553,32 @@ Interfejs CLI OpenCode przyjmuje następujące flagi globalne dla każdego polec OpenCode można skonfigurować za pomocą zmiennych środowiskowych. -| Zmienna | Typ | Opis | -| ------------------------------------- | ------- | ----------------------------------------------------------- | -| `OPENCODE_AUTO_SHARE` | boolean | Automatycznie udostępniaj sesje | -| `OPENCODE_GIT_BASH_PATH` | string | Ścieżka do pliku wykonywalnego Git Bash w systemie Windows | -| `OPENCODE_CONFIG` | string | Ścieżka do pliku konfiguracyjnego | -| `OPENCODE_TUI_CONFIG` | string | Ścieżka do pliku konfiguracyjnego TUI | -| `OPENCODE_CONFIG_DIR` | string | Ścieżka do katalogu konfiguracyjnego | -| `OPENCODE_CONFIG_CONTENT` | string | Treść konfiguracji JSON (inline) | -| `OPENCODE_DISABLE_AUTOUPDATE` | boolean | Wyłącz automatyczne sprawdzanie aktualizacji | -| `OPENCODE_DISABLE_PRUNE` | boolean | Wyłącz czyszczenie starych wyników (pruning) | -| `OPENCODE_DISABLE_TERMINAL_TITLE` | boolean | Wyłącz automatyczne ustawianie tytułu terminala | -| `OPENCODE_PERMISSION` | string | Konfiguracja uprawnień w JSON (inline) | -| `OPENCODE_DISABLE_DEFAULT_PLUGINS` | boolean | Wyłącz domyślne wtyczki | -| `OPENCODE_DISABLE_LSP_DOWNLOAD` | boolean | Wyłącz automatyczne pobieranie serwerów LSP | -| `OPENCODE_ENABLE_EXPERIMENTAL_MODELS` | boolean | Włącz modele eksperymentalne | -| `OPENCODE_DISABLE_AUTOCOMPACT` | boolean | Wyłącz 
automatyczne kompaktowanie kontekstu | -| `OPENCODE_DISABLE_CLAUDE_CODE` | boolean | Wyłącz integrację z `.claude` (prompt + skills) | -| `OPENCODE_DISABLE_CLAUDE_CODE_PROMPT` | boolean | Wyłącz czytanie `~/.claude/CLAUDE.md` | -| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Wyłącz ładowanie `.claude/skills` | -| `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Wyłącz pobieranie modeli ze źródeł zewnętrznych | -| `OPENCODE_FAKE_VCS` | string | Fałszywy dostawca VCS do celów testowych | -| `OPENCODE_CLIENT` | string | Identyfikator klienta (domyślnie `cli`) | -| `OPENCODE_ENABLE_EXA` | boolean | Włącz narzędzie wyszukiwania internetowego Exa | -| `OPENCODE_SERVER_PASSWORD` | string | Włącz uwierzytelnianie podstawowe dla `serve`/`web` | -| `OPENCODE_SERVER_USERNAME` | string | Nazwa użytkownika do autoryzacji (domyślnie `opencode`) | -| `OPENCODE_MODELS_URL` | string | Niestandardowy adres URL do pobierania konfiguracji modeli | +| Zmienna | Typ | Opis | +| ------------------------------------- | ------- | ---------------------------------------------------------- | +| `OPENCODE_AUTO_SHARE` | boolean | Automatycznie udostępniaj sesje | +| `OPENCODE_GIT_BASH_PATH` | string | Ścieżka do pliku wykonywalnego Git Bash w systemie Windows | +| `OPENCODE_CONFIG` | string | Ścieżka do pliku konfiguracyjnego | +| `OPENCODE_TUI_CONFIG` | string | Ścieżka do pliku konfiguracyjnego TUI | +| `OPENCODE_CONFIG_DIR` | string | Ścieżka do katalogu konfiguracyjnego | +| `OPENCODE_CONFIG_CONTENT` | string | Treść konfiguracji JSON (inline) | +| `OPENCODE_DISABLE_AUTOUPDATE` | boolean | Wyłącz automatyczne sprawdzanie aktualizacji | +| `OPENCODE_DISABLE_PRUNE` | boolean | Wyłącz czyszczenie starych wyników (pruning) | +| `OPENCODE_DISABLE_TERMINAL_TITLE` | boolean | Wyłącz automatyczne ustawianie tytułu terminala | +| `OPENCODE_PERMISSION` | string | Konfiguracja uprawnień w JSON (inline) | +| `OPENCODE_DISABLE_DEFAULT_PLUGINS` | boolean | Wyłącz domyślne wtyczki | +| 
`OPENCODE_DISABLE_LSP_DOWNLOAD` | boolean | Wyłącz automatyczne pobieranie serwerów LSP | +| `OPENCODE_ENABLE_EXPERIMENTAL_MODELS` | boolean | Włącz modele eksperymentalne | +| `OPENCODE_DISABLE_AUTOCOMPACT` | boolean | Wyłącz automatyczne kompaktowanie kontekstu | +| `OPENCODE_DISABLE_CLAUDE_CODE` | boolean | Wyłącz integrację z `.claude` (prompt + skills) | +| `OPENCODE_DISABLE_CLAUDE_CODE_PROMPT` | boolean | Wyłącz czytanie `~/.claude/CLAUDE.md` | +| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Wyłącz ładowanie `.claude/skills` | +| `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Wyłącz pobieranie modeli ze źródeł zewnętrznych | +| `OPENCODE_FAKE_VCS` | string | Fałszywy dostawca VCS do celów testowych | +| `OPENCODE_CLIENT` | string | Identyfikator klienta (domyślnie `cli`) | +| `OPENCODE_ENABLE_EXA` | boolean | Włącz narzędzie wyszukiwania internetowego Exa | +| `OPENCODE_SERVER_PASSWORD` | string | Włącz uwierzytelnianie podstawowe dla `serve`/`web` | +| `OPENCODE_SERVER_USERNAME` | string | Nazwa użytkownika do autoryzacji (domyślnie `opencode`) | +| `OPENCODE_MODELS_URL` | string | Niestandardowy adres URL do pobierania konfiguracji modeli | --- From 72d7cb717d5b2994a525257cd7fccf0533ca34ee Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 00:42:45 -0400 Subject: [PATCH 130/201] remove accidental commit of daytona plugin (#23030) --- .opencode/opencode.jsonc | 1 - bun.lock | 333 +-------------------------------------- daytona.ts | 197 ----------------------- package.json | 1 - 4 files changed, 8 insertions(+), 524 deletions(-) delete mode 100644 daytona.ts diff --git a/.opencode/opencode.jsonc b/.opencode/opencode.jsonc index 91b7abdbc6..8380f7f719 100644 --- a/.opencode/opencode.jsonc +++ b/.opencode/opencode.jsonc @@ -10,7 +10,6 @@ "packages/opencode/migration/*": "deny", }, }, - "plugin": ["../daytona.ts"], "mcp": {}, "tools": { "github-triage": false, diff --git a/bun.lock b/bun.lock index 7c562c4d4b..c22a2ff270 100644 --- a/bun.lock +++ 
b/bun.lock @@ -6,7 +6,6 @@ "name": "opencode", "dependencies": { "@aws-sdk/client-s3": "3.933.0", - "@daytona/sdk": "0.167.0", "@opencode-ai/plugin": "workspace:*", "@opencode-ai/script": "workspace:*", "@opencode-ai/sdk": "workspace:*", @@ -865,8 +864,6 @@ "@aws-sdk/credential-providers": ["@aws-sdk/credential-providers@3.993.0", "", { "dependencies": { "@aws-sdk/client-cognito-identity": "3.993.0", "@aws-sdk/core": "^3.973.11", "@aws-sdk/credential-provider-cognito-identity": "^3.972.3", "@aws-sdk/credential-provider-env": "^3.972.9", "@aws-sdk/credential-provider-http": "^3.972.11", "@aws-sdk/credential-provider-ini": "^3.972.9", "@aws-sdk/credential-provider-login": "^3.972.9", "@aws-sdk/credential-provider-node": "^3.972.10", "@aws-sdk/credential-provider-process": "^3.972.9", "@aws-sdk/credential-provider-sso": "^3.972.9", "@aws-sdk/credential-provider-web-identity": "^3.972.9", "@aws-sdk/nested-clients": "3.993.0", "@aws-sdk/types": "^3.973.1", "@smithy/config-resolver": "^4.4.6", "@smithy/core": "^3.23.2", "@smithy/credential-provider-imds": "^4.2.8", "@smithy/node-config-provider": "^4.3.8", "@smithy/property-provider": "^4.2.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-1M/nukgPSLqe9krzOKHnE8OylUaKAiokAV3xRLdeExVHcRE7WG5uzCTKWTj1imKvPjDqXq/FWhlbbdWIn7xIwA=="], - "@aws-sdk/lib-storage": ["@aws-sdk/lib-storage@3.1031.0", "", { "dependencies": { "@smithy/middleware-endpoint": "^4.4.30", "@smithy/protocol-http": "^5.3.14", "@smithy/smithy-client": "^4.12.11", "@smithy/types": "^4.14.1", "buffer": "5.6.0", "events": "3.3.0", "stream-browserify": "3.0.0", "tslib": "^2.6.2" }, "peerDependencies": { "@aws-sdk/client-s3": "^3.1031.0" } }, "sha512-QC6MwdeXWZt9wnOV6/rgxSnVUcDeoHr5TCnCm0UPuSQOwuLX1/2QJa6oo7PdfdHg+i3G+VOnmG8QZ3tYxvBGRA=="], - "@aws-sdk/middleware-bucket-endpoint": ["@aws-sdk/middleware-bucket-endpoint@3.930.0", "", { "dependencies": { "@aws-sdk/types": "3.930.0", "@aws-sdk/util-arn-parser": "3.893.0", "@smithy/node-config-provider": 
"^4.3.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-cnCLWeKPYgvV4yRYPFH6pWMdUByvu2cy2BAlfsPpvnm4RaVioztyvxmQj5PmVN5fvWs5w/2d6U7le8X9iye2sA=="], "@aws-sdk/middleware-expect-continue": ["@aws-sdk/middleware-expect-continue@3.930.0", "", { "dependencies": { "@aws-sdk/types": "3.930.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-5HEQ+JU4DrLNWeY27wKg/jeVa8Suy62ivJHOSUf6e6hZdVIMx0h/kXS1fHEQNNiLu2IzSEP/bFXsKBaW7x7s0g=="], @@ -1051,12 +1048,6 @@ "@ctrl/tinycolor": ["@ctrl/tinycolor@4.2.0", "", {}, "sha512-kzyuwOAQnXJNLS9PSyrk0CWk35nWJW/zl/6KvnTBMFK65gm7U1/Z5BqjxeapjZCIhQcM/DsrEmcbRwDyXyXK4A=="], - "@daytona/api-client": ["@daytona/api-client@0.167.0", "", { "dependencies": { "axios": "^1.6.1" } }, "sha512-2H1NhSu5GVVfg2oU9Mgt3VZRz7OUqACjBSb9GNNyC7uGPCnQh2C93Imqj2wfyUx/XF346hYpk+Nee+/NGqn3KA=="], - - "@daytona/sdk": ["@daytona/sdk@0.167.0", "", { "dependencies": { "@aws-sdk/client-s3": "^3.787.0", "@aws-sdk/lib-storage": "^3.798.0", "@daytona/api-client": "0.167.0", "@daytona/toolbox-api-client": "0.167.0", "@iarna/toml": "^2.2.5", "@opentelemetry/api": "^1.9.0", "@opentelemetry/exporter-trace-otlp-http": "^0.207.0", "@opentelemetry/instrumentation-http": "^0.207.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-node": "^0.207.0", "@opentelemetry/sdk-trace-base": "^2.2.0", "@opentelemetry/semantic-conventions": "^1.37.0", "axios": "^1.13.5", "busboy": "^1.0.0", "dotenv": "^17.0.1", "expand-tilde": "^2.0.2", "fast-glob": "^3.3.0", "form-data": "^4.0.4", "isomorphic-ws": "^5.0.0", "pathe": "^2.0.3", "shell-quote": "^1.8.2", "tar": "^7.5.11" } }, "sha512-Xsb1bJ52tahpCiq2pEJdFVqqxLQoXO8laa2BFJFrSyLnGBMJXMaEormcgZfoOC9J6mVo9oOqTABLuoT+4DP11A=="], - - "@daytona/toolbox-api-client": ["@daytona/toolbox-api-client@0.167.0", "", { "dependencies": { "axios": "^1.6.1" } }, 
"sha512-FzSh6UVA8ptktNrRhIJCVfZMOcbXlVazTXej+YQ1rEj7L/PnGCQlBbT296xYj94C3wXQUTIxTZVkWzB4vcDTxw=="], - "@develar/schema-utils": ["@develar/schema-utils@2.6.5", "", { "dependencies": { "ajv": "^6.12.0", "ajv-keywords": "^3.4.1" } }, "sha512-0cp4PsWQ/9avqTVMCtZ+GirikIA36ikvjtHweU4/j8yLtgObI0+JUPhYFScgwlteveGB1rt3Cm8UhN04XayDig=="], "@dimforge/rapier2d-simd-compat": ["@dimforge/rapier2d-simd-compat@0.17.3", "", {}, "sha512-bijvwWz6NHsNj5e5i1vtd3dU2pDhthSaTUZSh14DUGGKJfw8eMnlWZsxwHBxB/a3AXVNDjL9abuHw1k9FGR+jg=="], @@ -1207,10 +1198,6 @@ "@graphql-typed-document-node/core": ["@graphql-typed-document-node/core@3.2.0", "", { "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ=="], - "@grpc/grpc-js": ["@grpc/grpc-js@1.14.3", "", { "dependencies": { "@grpc/proto-loader": "^0.8.0", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA=="], - - "@grpc/proto-loader": ["@grpc/proto-loader@0.8.0", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.5.3", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ=="], - "@happy-dom/global-registrator": ["@happy-dom/global-registrator@20.0.11", "", { "dependencies": { "@types/node": "^20.0.0", "happy-dom": "^20.0.11" } }, "sha512-GqNqiShBT/lzkHTMC/slKBrvN0DsD4Di8ssBk4aDaVgEn+2WMzE6DXxq701ndSXj7/0cJ8mNT71pM7Bnrr6JRw=="], "@hey-api/codegen-core": ["@hey-api/codegen-core@0.5.5", "", { "dependencies": { "@hey-api/types": "0.1.2", "ansi-colors": "4.1.3", "c12": "3.3.3", "color-support": "1.1.3" }, "peerDependencies": { "typescript": ">=5.5.3" } }, 
"sha512-f2ZHucnA2wBGAY8ipB4wn/mrEYW+WUxU2huJmUvfDO6AE2vfILSHeF3wCO39Pz4wUYPoAWZByaauftLrOfC12Q=="], @@ -1229,8 +1216,6 @@ "@hono/zod-validator": ["@hono/zod-validator@0.4.2", "", { "peerDependencies": { "hono": ">=3.9.0", "zod": "^3.19.1" } }, "sha512-1rrlBg+EpDPhzOV4hT9pxr5+xDVmKuz6YJl+la7VCwK6ass5ldyKm5fD+umJdV2zhHD6jROoCCv8NbTwyfhT0g=="], - "@iarna/toml": ["@iarna/toml@2.2.5", "", {}, "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg=="], - "@ibm/plex": ["@ibm/plex@6.4.1", "", { "dependencies": { "@ibm/telemetry-js": "^1.5.1" } }, "sha512-fnsipQywHt3zWvsnlyYKMikcVI7E2fEwpiPnIHFqlbByXVfQfANAAeJk1IV4mNnxhppUIDlhU0TzwYwL++Rn2g=="], "@ibm/telemetry-js": ["@ibm/telemetry-js@1.11.0", "", { "bin": { "ibmtelemetry": "dist/collect.js" } }, "sha512-RO/9j+URJnSfseWg9ZkEX9p+a3Ousd33DBU7rOafoZB08RqdzxFVYJ2/iM50dkBuD0o7WX7GYt1sLbNgCoE+pA=="], @@ -1361,8 +1346,6 @@ "@js-joda/core": ["@js-joda/core@5.7.0", "", {}, "sha512-WBu4ULVVxySLLzK1Ppq+OdfP+adRS4ntmDQT915rzDJ++i95gc2jZkM5B6LWEAwN3lGXpfie3yPABozdD3K3Vg=="], - "@js-sdsl/ordered-map": ["@js-sdsl/ordered-map@4.4.2", "", {}, "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="], - "@js-temporal/polyfill": ["@js-temporal/polyfill@0.5.1", "", { "dependencies": { "jsbi": "^4.3.0" } }, "sha512-hloP58zRVCRSpgDxmqCWJNlizAlUgJFqG2ypq79DCvyv9tHjRYMDOcPFjzfl/A1/YxDvRCZz8wvZvmapQnKwFQ=="], "@jsdevtools/ono": ["@jsdevtools/ono@7.1.3", "", {}, "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg=="], @@ -1591,56 +1574,24 @@ "@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="], - "@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-lAb0jQRVyleQQGiuuvCOTDVspc14nx6XJjP4FspJ1sNARo3Regq4ZZbrc3rN4b1TYSuUCvgH+UXUPug4SLOqEQ=="], + 
"@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.214.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-40lSJeqYO8Uz2Yj7u94/SJWE/wONa7rmMKjI1ZcIjgf3MHNHv1OZUCrCETGuaRF62d5pQD1wKIW+L4lmSMTzZA=="], "@opentelemetry/context-async-hooks": ["@opentelemetry/context-async-hooks@2.6.1", "", { "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-XHzhwRNkBpeP8Fs/qjGrAf9r9PRv67wkJQ/7ZPaBQQ68DYlTBBx5MF9LvPx7mhuXcDessKK2b+DcxqwpgkcivQ=="], "@opentelemetry/core": ["@opentelemetry/core@2.6.1", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-8xHSGWpJP9wBxgBpnqGL0R3PbdWQndL1Qp50qrg71+B28zK5OQmUgcDKLJgzyAAV38t4tOyLMGDD60LneR5W8g=="], - "@opentelemetry/exporter-logs-otlp-grpc": ["@opentelemetry/exporter-logs-otlp-grpc@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-grpc-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/sdk-logs": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-K92RN+kQGTMzFDsCzsYNGqOsXRUnko/Ckk+t/yPJao72MewOLgBUTWVHhebgkNfRCYqDz1v3K0aPT9OJkemvgg=="], - - "@opentelemetry/exporter-logs-otlp-http": ["@opentelemetry/exporter-logs-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/sdk-logs": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-JpOh7MguEUls8eRfkVVW3yRhClo5b9LqwWTOg8+i4gjr/+8eiCtquJnC7whvpTIGyff06cLZ2NsEj+CVP3Mjeg=="], - - "@opentelemetry/exporter-logs-otlp-proto": ["@opentelemetry/exporter-logs-otlp-proto@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", 
"@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-RQJEV/K6KPbQrIUbsrRkEe0ufks1o5OGLHy6jbDD8tRjeCsbFHWfg99lYBRqBV33PYZJXsigqMaAbjWGTFYzLw=="], - - "@opentelemetry/exporter-metrics-otlp-grpc": ["@opentelemetry/exporter-metrics-otlp-grpc@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/exporter-metrics-otlp-http": "0.207.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-grpc-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-6flX89W54gkwmqYShdcTBR1AEF5C1Ob0O8pDgmLPikTKyEv27lByr9yBmO5WrP0+5qJuNPHrLfgFQFYi6npDGA=="], - - "@opentelemetry/exporter-metrics-otlp-http": ["@opentelemetry/exporter-metrics-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-fG8FAJmvXOrKXGIRN8+y41U41IfVXxPRVwyB05LoMqYSjugx/FSBkMZUZXUT/wclTdmBKtS5MKoi0bEKkmRhSw=="], - - "@opentelemetry/exporter-metrics-otlp-proto": ["@opentelemetry/exporter-metrics-otlp-proto@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/exporter-metrics-otlp-http": "0.207.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, 
"sha512-kDBxiTeQjaRlUQzS1COT9ic+et174toZH6jxaVuVAvGqmxOkgjpLOjrI5ff8SMMQE69r03L3Ll3nPKekLopLwg=="], - - "@opentelemetry/exporter-prometheus": ["@opentelemetry/exporter-prometheus@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-Y5p1s39FvIRmU+F1++j7ly8/KSqhMmn6cMfpQqiDCqDjdDHwUtSq0XI0WwL3HYGnZeaR/VV4BNmsYQJ7GAPrhw=="], - - "@opentelemetry/exporter-trace-otlp-grpc": ["@opentelemetry/exporter-trace-otlp-grpc@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-grpc-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-7u2ZmcIx6D4KG/+5np4X2qA0o+O0K8cnUDhR4WI/vr5ZZ0la9J9RG+tkSjC7Yz+2XgL6760gSIM7/nyd3yaBLA=="], - "@opentelemetry/exporter-trace-otlp-http": ["@opentelemetry/exporter-trace-otlp-http@0.214.0", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/otlp-exporter-base": "0.214.0", "@opentelemetry/otlp-transformer": "0.214.0", "@opentelemetry/resources": "2.6.1", "@opentelemetry/sdk-trace-base": "2.6.1" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-kIN8nTBMgV2hXzV/a20BCFilPZdAIMYYJGSgfMMRm/Xa+07y5hRDS2Vm12A/z8Cdu3Sq++ZvJfElokX2rkgGgw=="], - "@opentelemetry/exporter-trace-otlp-proto": ["@opentelemetry/exporter-trace-otlp-proto@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, 
"sha512-ruUQB4FkWtxHjNmSXjrhmJZFvyMm+tBzHyMm7YPQshApy4wvZUTcrpPyP/A/rCl/8M4BwoVIZdiwijMdbZaq4w=="], - - "@opentelemetry/exporter-zipkin": ["@opentelemetry/exporter-zipkin@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": "^1.0.0" } }, "sha512-VV4QzhGCT7cWrGasBWxelBjqbNBbyHicWWS/66KoZoe9BzYwFB72SH2/kkc4uAviQlO8iwv2okIJy+/jqqEHTg=="], - - "@opentelemetry/instrumentation": ["@opentelemetry/instrumentation@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "import-in-the-middle": "^2.0.0", "require-in-the-middle": "^8.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-y6eeli9+TLKnznrR8AZlQMSJT7wILpXH+6EYq5Vf/4Ao+huI7EedxQHwRgVUOMLFbe7VFDvHJrX9/f4lcwnJsA=="], - - "@opentelemetry/instrumentation-http": ["@opentelemetry/instrumentation-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/instrumentation": "0.207.0", "@opentelemetry/semantic-conventions": "^1.29.0", "forwarded-parse": "2.1.2" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-FC4i5hVixTzuhg4SV2ycTEAYx+0E2hm+GwbdoVPSA6kna0pPVI4etzaA9UkpJ9ussumQheFXP6rkGIaFJjMxsw=="], - - "@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-transformer": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-4RQluMVVGMrHok/3SVeSJ6EnRNkA2MINcX88sh+d/7DjGUrewW/WT88IsMEci0wUM+5ykTpPPNbEOoW+jwHnbw=="], - - "@opentelemetry/otlp-grpc-exporter-base": ["@opentelemetry/otlp-grpc-exporter-base@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": 
"^1.3.0" } }, "sha512-eKFjKNdsPed4q9yYqeI5gBTLjXxDM/8jwhiC0icw3zKxHVGBySoDsed5J5q/PGY/3quzenTr3FiTxA3NiNT+nw=="], + "@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.214.0", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/otlp-transformer": "0.214.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-u1Gdv0/E9wP+apqWf7Wv2npXmgJtxsW2XL0TEv9FZloTZRuMBKmu8cYVXwS4Hm3q/f/3FuCnPTgiwYvIqRSpRg=="], "@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.214.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.214.0", "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1", "@opentelemetry/sdk-logs": "0.214.0", "@opentelemetry/sdk-metrics": "2.6.1", "@opentelemetry/sdk-trace-base": "2.6.1", "protobufjs": "^7.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-DSaYcuBRh6uozfsWN3R8HsN0yDhCuWP7tOFdkUOVaWD1KVJg8m4qiLUsg/tNhTLS9HUYUcwNpwL2eroLtsZZ/w=="], - "@opentelemetry/propagator-b3": ["@opentelemetry/propagator-b3@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-9CrbTLFi5Ee4uepxg2qlpQIozoJuoAZU5sKMx0Mn7Oh+p7UrgCiEV6C02FOxxdYVRRFQVCinYR8Kf6eMSQsIsw=="], - - "@opentelemetry/propagator-jaeger": ["@opentelemetry/propagator-jaeger@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FfeOHOrdhiNzecoB1jZKp2fybqmqMPJUXe2ZOydP7QzmTPYcfPeuaclTLYVhK3HyJf71kt8sTl92nV4YIaLaKA=="], - "@opentelemetry/resources": ["@opentelemetry/resources@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A=="], "@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.214.0", "", { 
"dependencies": { "@opentelemetry/api-logs": "0.214.0", "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-zf6acnScjhsaBUU22zXZ/sLWim1dfhUAbGXdMmHmNG3LfBnQ3DKsOCITb2IZwoUsNNMTogqFKBnlIPPftUgGwA=="], "@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-9t9hJHX15meBy2NmTJxL+NJfXmnausR2xUDvE19XQce0Qi/GBtDGamU8nS1RMbdgDmhgpm3VaOu2+fiS/SfTpQ=="], - "@opentelemetry/sdk-node": ["@opentelemetry/sdk-node@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/exporter-logs-otlp-grpc": "0.207.0", "@opentelemetry/exporter-logs-otlp-http": "0.207.0", "@opentelemetry/exporter-logs-otlp-proto": "0.207.0", "@opentelemetry/exporter-metrics-otlp-grpc": "0.207.0", "@opentelemetry/exporter-metrics-otlp-http": "0.207.0", "@opentelemetry/exporter-metrics-otlp-proto": "0.207.0", "@opentelemetry/exporter-prometheus": "0.207.0", "@opentelemetry/exporter-trace-otlp-grpc": "0.207.0", "@opentelemetry/exporter-trace-otlp-http": "0.207.0", "@opentelemetry/exporter-trace-otlp-proto": "0.207.0", "@opentelemetry/exporter-zipkin": "2.2.0", "@opentelemetry/instrumentation": "0.207.0", "@opentelemetry/propagator-b3": "2.2.0", "@opentelemetry/propagator-jaeger": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "@opentelemetry/sdk-trace-node": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-hnRsX/M8uj0WaXOBvFenQ8XsE8FLVh2uSnn1rkWu4mx+qu7EKGUZvZng6y/95cyzsqOfiaDDr08Ek4jppkIDNg=="], - "@opentelemetry/sdk-trace-base": 
["@opentelemetry/sdk-trace-base@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-r86ut4T1e8vNwB35CqCcKd45yzqH6/6Wzvpk2/cZB8PsPLlZFTvrh8yfOS3CYZYcUmAx4hHTZJ8AO8Dj8nrdhw=="], "@opentelemetry/sdk-trace-node": ["@opentelemetry/sdk-trace-node@2.6.1", "", { "dependencies": { "@opentelemetry/context-async-hooks": "2.6.1", "@opentelemetry/core": "2.6.1", "@opentelemetry/sdk-trace-base": "2.6.1" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-Hh2i4FwHWRFhnO2Q/p6svMxy8MPsNCG0uuzUY3glqm0rwM0nQvbTO1dXSp9OqQoTKXcQzaz9q1f65fsurmOhNw=="], @@ -2555,8 +2506,6 @@ "acorn": ["acorn@8.16.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw=="], - "acorn-import-attributes": ["acorn-import-attributes@1.9.5", "", { "peerDependencies": { "acorn": "^8" } }, "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ=="], - "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], "acorn-walk": ["acorn-walk@8.3.2", "", {}, "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A=="], @@ -2779,8 +2728,6 @@ "bundle-name": ["bundle-name@4.1.0", "", { "dependencies": { "run-applescript": "^7.0.0" } }, "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q=="], - "busboy": ["busboy@1.6.0", "", { "dependencies": { "streamsearch": "^1.1.0" } }, "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA=="], - "bytes": ["bytes@3.1.2", "", {}, 
"sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], "c12": ["c12@3.3.3", "", { "dependencies": { "chokidar": "^5.0.0", "confbox": "^0.2.2", "defu": "^6.1.4", "dotenv": "^17.2.3", "exsolve": "^1.0.8", "giget": "^2.0.0", "jiti": "^2.6.1", "ohash": "^2.0.11", "pathe": "^2.0.3", "perfect-debounce": "^2.0.0", "pkg-types": "^2.3.0", "rc9": "^2.1.2" }, "peerDependencies": { "magicast": "*" }, "optionalPeers": ["magicast"] }, "sha512-750hTRvgBy5kcMNPdh95Qo+XUBeGo8C7nsKSmedDmaQI+E0r82DwHeM6vBewDe4rGFbnxoa4V9pw+sPh5+Iz8Q=="], @@ -2841,8 +2788,6 @@ "citty": ["citty@0.1.6", "", { "dependencies": { "consola": "^3.2.3" } }, "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ=="], - "cjs-module-lexer": ["cjs-module-lexer@2.2.0", "", {}, "sha512-4bHTS2YuzUvtoLjdy+98ykbNB5jS0+07EvFNXerqZQJ89F7DI6ET7OQo/HJuW6K0aVsKA9hj9/RVb2kQVOrPDQ=="], - "classnames": ["classnames@2.3.2", "", {}, "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw=="], "clean-css": ["clean-css@5.3.3", "", { "dependencies": { "source-map": "~0.6.0" } }, "sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg=="], @@ -3055,7 +3000,7 @@ "dot-prop": ["dot-prop@8.0.2", "", { "dependencies": { "type-fest": "^3.8.0" } }, "sha512-xaBe6ZT4DHPkg0k4Ytbvn5xoxgpG0jOS1dYxSOwAHPuNLjP3/OzN0gH55SrLqpx8cBfSaVt91lXYkApjb+nYdQ=="], - "dotenv": ["dotenv@17.4.2", "", {}, "sha512-nI4U3TottKAcAD9LLud4Cb7b2QztQMUEfHbvhTH09bqXTxnSie8WnjPALV/WMCrJZ6UV/qHJ6L03OqO3LcdYZw=="], + "dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="], "dotenv-expand": ["dotenv-expand@11.0.7", "", { "dependencies": { "dotenv": "^16.4.5" } }, "sha512-zIHwmZPRshsCdpMDyVsqGmgyP0yT8GAgXUnkdAoJisxvf33k7yO6OuoKmcTGuXPWSsm8Oh88nZicRLA9Y0rUeA=="], @@ -3207,8 +3152,6 @@ "exit-hook": ["exit-hook@2.2.1", "", {}, 
"sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw=="], - "expand-tilde": ["expand-tilde@2.0.2", "", { "dependencies": { "homedir-polyfill": "^1.0.1" } }, "sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw=="], - "expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="], "exponential-backoff": ["exponential-backoff@3.1.3", "", {}, "sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA=="], @@ -3311,8 +3254,6 @@ "forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="], - "forwarded-parse": ["forwarded-parse@2.1.2", "", {}, "sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw=="], - "fraction.js": ["fraction.js@5.3.4", "", {}, "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ=="], "framer-motion": ["framer-motion@8.5.5", "", { "dependencies": { "@motionone/dom": "^10.15.3", "hey-listen": "^1.0.8", "tslib": "^2.4.0" }, "optionalDependencies": { "@emotion/is-prop-valid": "^0.8.2" }, "peerDependencies": { "react": "^18.0.0", "react-dom": "^18.0.0" } }, "sha512-5IDx5bxkjWHWUF3CVJoSyUVOtrbAxtzYBBowRE2uYI/6VYhkEBD+rbTHEGuUmbGHRj6YqqSfoG7Aa1cLyWCrBA=="], @@ -3469,8 +3410,6 @@ "hey-listen": ["hey-listen@1.0.8", "", {}, "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q=="], - "homedir-polyfill": ["homedir-polyfill@1.0.3", "", { "dependencies": { "parse-passwd": "^1.0.0" } }, "sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA=="], - "hono": ["hono@4.10.7", "", {}, "sha512-icXIITfw/07Q88nLSkB9aiUrd8rYzSweK681Kjo/TSggaGbOX4RRyxxm71v+3PC8C/j+4rlxGeoTRxQDkaJkUw=="], "hono-openapi": ["hono-openapi@1.1.2", "", { 
"peerDependencies": { "@hono/standard-validator": "^0.2.0", "@standard-community/standard-json": "^0.3.5", "@standard-community/standard-openapi": "^0.2.9", "@types/json-schema": "^7.0.15", "hono": "^4.8.3", "openapi-types": "^12.1.3" }, "optionalPeers": ["@hono/standard-validator", "hono"] }, "sha512-toUcO60MftRBxqcVyxsHNYs2m4vf4xkQaiARAucQx3TiBPDtMNNkoh+C4I1vAretQZiGyaLOZNWn1YxfSyUA5g=="], @@ -3525,8 +3464,6 @@ "immer": ["immer@11.1.4", "", {}, "sha512-XREFCPo6ksxVzP4E0ekD5aMdf8WMwmdNaz6vuvxgI40UaEiu6q3p8X52aU6GdyvLY3XXX/8R7JOTXStz/nBbRw=="], - "import-in-the-middle": ["import-in-the-middle@2.0.6", "", { "dependencies": { "acorn": "^8.15.0", "acorn-import-attributes": "^1.9.5", "cjs-module-lexer": "^2.2.0", "module-details-from-path": "^1.0.4" } }, "sha512-3vZV3jX0XRFW3EJDTwzWoZa+RH1b8eTTx6YOCjglrLyPuepwoBti1k3L2dKwdCUrnVEfc5CuRuGstaC/uQJJaw=="], - "import-local": ["import-local@3.2.0", "", { "dependencies": { "pkg-dir": "^4.2.0", "resolve-cwd": "^3.0.0" }, "bin": { "import-local-fixture": "fixtures/cli.js" } }, "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA=="], "import-meta-resolve": ["import-meta-resolve@4.2.0", "", {}, "sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg=="], @@ -3789,8 +3726,6 @@ "lodash": ["lodash@4.18.1", "", {}, "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q=="], - "lodash.camelcase": ["lodash.camelcase@4.3.0", "", {}, "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="], - "lodash.defaults": ["lodash.defaults@4.2.0", "", {}, "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="], "lodash.escaperegexp": ["lodash.escaperegexp@4.1.2", "", {}, "sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw=="], @@ -4021,8 +3956,6 @@ "mkdirp": ["mkdirp@0.5.6", "", { "dependencies": { 
"minimist": "^1.2.6" }, "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw=="], - "module-details-from-path": ["module-details-from-path@1.0.4", "", {}, "sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w=="], - "morphdom": ["morphdom@2.7.8", "", {}, "sha512-D/fR4xgGUyVRbdMGU6Nejea1RFzYxYtyurG4Fbv2Fi/daKlWKuXGLOdXtl+3eIwL110cI2hz1ZojGICjjFLgTg=="], "motion": ["motion@12.34.5", "", { "dependencies": { "framer-motion": "^12.34.5", "tslib": "^2.4.0" }, "peerDependencies": { "@emotion/is-prop-valid": "*", "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0" }, "optionalPeers": ["@emotion/is-prop-valid", "react", "react-dom"] }, "sha512-N06NLJ9IeBHeielRqIvYvjPfXuRdyTxa+9++BgpGa+hY2D7TcMkI6QzV3jaRuv0aZRXgMa7cPy9YcBUBisPzAQ=="], @@ -4229,8 +4162,6 @@ "parse-latin": ["parse-latin@7.0.0", "", { "dependencies": { "@types/nlcst": "^2.0.0", "@types/unist": "^3.0.0", "nlcst-to-string": "^4.0.0", "unist-util-modify-children": "^4.0.0", "unist-util-visit-children": "^3.0.0", "vfile": "^6.0.0" } }, "sha512-mhHgobPPua5kZ98EF4HWiH167JWBfl4pvAIXXdbaVohtK7a6YBOy56kvhCqduqyo/f3yrHFWmqmiMg/BkBkYYQ=="], - "parse-passwd": ["parse-passwd@1.0.0", "", {}, "sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q=="], - "parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="], "parse5-htmlparser2-tree-adapter": ["parse5-htmlparser2-tree-adapter@7.1.0", "", { "dependencies": { "domhandler": "^5.0.3", "parse5": "^7.0.0" } }, "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g=="], @@ -4511,8 +4442,6 @@ "require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="], - 
"require-in-the-middle": ["require-in-the-middle@8.0.1", "", { "dependencies": { "debug": "^4.3.5", "module-details-from-path": "^1.0.3" } }, "sha512-QT7FVMXfWOYFbeRBF6nu+I6tr2Tf3u0q8RIEjNob/heKY/nh7drD/k7eeMFmSQgnTtCzLDcCu/XEnpW2wk4xCQ=="], - "resedit": ["resedit@1.7.2", "", { "dependencies": { "pe-library": "^0.4.1" } }, "sha512-vHjcY2MlAITJhC0eRD/Vv8Vlgmu9Sd3LX9zZvtGzU5ZImdTN3+d6e/4mnTyV8vEbyf1sgNIrWxhWlrys52OkEA=="], "reselect": ["reselect@4.1.8", "", {}, "sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ=="], @@ -4755,12 +4684,8 @@ "storybook-solidjs-vite": ["storybook-solidjs-vite@10.0.12", "", { "dependencies": { "@joshwooding/vite-plugin-react-docgen-typescript": "^0.7.0", "@storybook/builder-vite": "^10.3.1", "@storybook/global": "^5.0.0", "vite-plugin-solid": "^2.11.11" }, "peerDependencies": { "solid-js": "^1.9.0", "storybook": "^0.0.0-0 || ^10.0.0", "typescript": "^4.0.0 || ^5.0.0 || ^6.0.0", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0" }, "optionalPeers": ["typescript"] }, "sha512-KfKhJRdxbhFLHkBzLKSEk5sO2M/+KV9cdpki5Xdl5pwNP8kcoQnZ3b/okZk8dMRV6x19j86bKc7zDfc5bPSMwA=="], - "stream-browserify": ["stream-browserify@3.0.0", "", { "dependencies": { "inherits": "~2.0.4", "readable-stream": "^3.5.0" } }, "sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA=="], - "stream-replace-string": ["stream-replace-string@2.0.0", "", {}, "sha512-TlnjJ1C0QrmxRNrON00JvaFFlNh5TTG00APw23j74ET7gkQpTASi6/L2fuiav8pzK715HXtUeClpBTw2NPSn6w=="], - "streamsearch": ["streamsearch@1.1.0", "", {}, "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg=="], - "streamx": ["streamx@2.25.0", "", { "dependencies": { "events-universal": "^1.0.0", "fast-fifo": "^1.3.2", "text-decoder": "^1.1.0" } }, "sha512-0nQuG6jf1w+wddNEEXCF4nTg3LtufWINB5eFEN+5TNZW7KWJp6x87+JFL43vaAUPyCfH1wID+mNVyW6OHtFamg=="], "string-width": ["string-width@7.2.0", "", { "dependencies": { 
"emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], @@ -5175,7 +5100,7 @@ "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="], - "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], + "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], "yaml": ["yaml@2.8.3", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg=="], @@ -5411,16 +5336,6 @@ "@aws-sdk/credential-providers/@aws-sdk/types": ["@aws-sdk/types@3.973.7", "", { "dependencies": { "@smithy/types": "^4.14.0", "tslib": "^2.6.2" } }, "sha512-reXRwoJ6CfChoqAsBszUYajAF8Z2LRE+CRcKocvFSMpIiLOtYU3aJ9trmn6VVPAzbbY5LXF+FfmUslbXk1SYFg=="], - "@aws-sdk/lib-storage/@smithy/middleware-endpoint": ["@smithy/middleware-endpoint@4.4.30", "", { "dependencies": { "@smithy/core": "^3.23.15", "@smithy/middleware-serde": "^4.2.18", "@smithy/node-config-provider": "^4.3.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-middleware": "^4.2.14", "tslib": "^2.6.2" } }, "sha512-qS2XqhKeXmdZ4nEQ4cOxIczSP/Y91wPAHYuRwmWDCh975B7/57uxsm5d6sisnUThn2u2FwzMdJNM7AbO1YPsPg=="], - - "@aws-sdk/lib-storage/@smithy/protocol-http": ["@smithy/protocol-http@5.3.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-dN5F8kHx8RNU0r+pCwNmFZyz6ChjMkzShy/zup6MtkRmmix4vZzJdW+di7x//b1LiynIev88FM18ie+wwPcQtQ=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client": ["@smithy/smithy-client@4.12.11", "", { "dependencies": { "@smithy/core": "^3.23.15", "@smithy/middleware-endpoint": "^4.4.30", 
"@smithy/middleware-stack": "^4.2.14", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/util-stream": "^4.5.23", "tslib": "^2.6.2" } }, "sha512-wzz/Wa1CH/Tlhxh0s4DQPEcXSxSVfJ59AZcUh9Gu0c6JTlKuwGf4o/3P2TExv0VbtPFt8odIBG+eQGK2+vTECg=="], - - "@aws-sdk/lib-storage/@smithy/types": ["@smithy/types@4.14.1", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-59b5HtSVrVR/eYNei3BUj3DCPKD/G7EtDDe7OEJE7i7FtQFugYo6MxbotS8mVJkLNVf8gYaAlEBwwtJ9HzhWSg=="], - - "@aws-sdk/lib-storage/buffer": ["buffer@5.6.0", "", { "dependencies": { "base64-js": "^1.0.2", "ieee754": "^1.1.4" } }, "sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw=="], - "@aws-sdk/middleware-flexible-checksums/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], "@aws-sdk/middleware-sdk-s3/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], @@ -5483,8 +5398,6 @@ "@cspotcode/source-map-support/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.9", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ=="], - "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http": ["@opentelemetry/exporter-trace-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, 
"sha512-HSRBzXHIC7C8UfPQdu15zEEoBGv0yWkhEwxqgPCHVUKUQ9NLHVGXkVrf65Uaj7UwmAkC1gQfkuVYvLlD//AnUQ=="], - "@develar/schema-utils/ajv": ["ajv@6.14.0", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw=="], "@dot/log/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], @@ -5529,8 +5442,6 @@ "@gitlab/opencode-gitlab-auth/open": ["open@10.2.0", "", { "dependencies": { "default-browser": "^5.2.1", "define-lazy-prop": "^3.0.0", "is-inside-container": "^1.0.0", "wsl-utils": "^0.1.0" } }, "sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA=="], - "@grpc/proto-loader/yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="], - "@hey-api/openapi-ts/open": ["open@11.0.0", "", { "dependencies": { "default-browser": "^5.4.0", "define-lazy-prop": "^3.0.0", "is-in-ssh": "^1.0.0", "is-inside-container": "^1.0.0", "powershell-utils": "^0.1.0", "wsl-utils": "^0.3.0" } }, "sha512-smsWv2LzFjP03xmvFoJ331ss6h+jixfA4UUV/Bsiyuu4YJPfN+FIQGOIiv4w9/+MoHkfkJ22UIaQWRVFRfH6Vw=="], "@hey-api/openapi-ts/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], @@ -5671,106 +5582,16 @@ "@opencode-ai/web/@shikijs/transformers": ["@shikijs/transformers@3.20.0", "", { "dependencies": { "@shikijs/core": "3.20.0", "@shikijs/types": "3.20.0" } }, 
"sha512-PrHHMRr3Q5W1qB/42kJW6laqFyWdhrPF2hNR9qjOm1xcSiAO3hAHo7HaVyHE6pMyevmy3i51O8kuGGXC78uK3g=="], - "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", 
"@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, 
"sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": 
{ "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - 
"@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/exporter-prometheus/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-prometheus/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - 
"@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.214.0", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/otlp-transformer": "0.214.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-u1Gdv0/E9wP+apqWf7Wv2npXmgJtxsW2XL0TEv9FZloTZRuMBKmu8cYVXwS4Hm3q/f/3FuCnPTgiwYvIqRSpRg=="], - "@opentelemetry/exporter-trace-otlp-http/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], - "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, 
"sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/exporter-zipkin/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/exporter-zipkin/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/instrumentation-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/otlp-exporter-base/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - 
"@opentelemetry/otlp-exporter-base/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@opentelemetry/otlp-transformer/@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.214.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-40lSJeqYO8Uz2Yj7u94/SJWE/wONa7rmMKjI1ZcIjgf3MHNHv1OZUCrCETGuaRF62d5pQD1wKIW+L4lmSMTzZA=="], - "@opentelemetry/otlp-transformer/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, 
"sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], - "@opentelemetry/propagator-b3/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/propagator-jaeger/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - "@opentelemetry/resources/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - "@opentelemetry/sdk-logs/@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.214.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-40lSJeqYO8Uz2Yj7u94/SJWE/wONa7rmMKjI1ZcIjgf3MHNHv1OZUCrCETGuaRF62d5pQD1wKIW+L4lmSMTzZA=="], - "@opentelemetry/sdk-logs/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], "@opentelemetry/sdk-metrics/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, 
"sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], - "@opentelemetry/sdk-node/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@opentelemetry/sdk-node/@opentelemetry/exporter-trace-otlp-http": ["@opentelemetry/exporter-trace-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-HSRBzXHIC7C8UfPQdu15zEEoBGv0yWkhEwxqgPCHVUKUQ9NLHVGXkVrf65Uaj7UwmAkC1gQfkuVYvLlD//AnUQ=="], - - "@opentelemetry/sdk-node/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/sdk-node/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/sdk-node/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, 
"sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/sdk-node/@opentelemetry/sdk-trace-node": ["@opentelemetry/sdk-trace-node@2.2.0", "", { "dependencies": { "@opentelemetry/context-async-hooks": "2.2.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-+OaRja3f0IqGG2kptVeYsrZQK9nKRSpfFrKtRBq4uh6nIB8bTBgaGvYQrQoRrQWQMA5dK5yLhDMDc0dvYvCOIQ=="], - "@opentelemetry/sdk-trace-base/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], "@opentui/solid/@babel/core": ["@babel/core@7.28.0", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.0", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.6", "@babel/parser": "^7.28.0", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.0", "@babel/types": "^7.28.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ=="], @@ -5901,8 +5722,6 @@ "app-builder-lib/ci-info": ["ci-info@4.3.1", "", {}, "sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA=="], - "app-builder-lib/dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="], - "app-builder-lib/hosted-git-info": ["hosted-git-info@4.1.0", "", { "dependencies": { "lru-cache": "^6.0.0" } }, 
"sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA=="], "app-builder-lib/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], @@ -5949,6 +5768,8 @@ "c12/chokidar": ["chokidar@5.0.0", "", { "dependencies": { "readdirp": "^5.0.0" } }, "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw=="], + "c12/dotenv": ["dotenv@17.4.2", "", {}, "sha512-nI4U3TottKAcAD9LLud4Cb7b2QztQMUEfHbvhTH09bqXTxnSie8WnjPALV/WMCrJZ6UV/qHJ6L03OqO3LcdYZw=="], + "clone-response/mimic-response": ["mimic-response@1.0.1", "", {}, "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ=="], "compress-commons/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="], @@ -5979,8 +5800,6 @@ "dot-prop/type-fest": ["type-fest@3.13.1", "", {}, "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g=="], - "dotenv-expand/dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="], - "editorconfig/commander": ["commander@10.0.1", "", {}, "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug=="], "editorconfig/minimatch": ["minimatch@9.0.9", "", { "dependencies": { "brace-expansion": "^2.0.2" } }, "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg=="], @@ -6215,8 +6034,6 @@ "storybook-solidjs-vite/vite-plugin-solid": ["vite-plugin-solid@2.11.12", "", { "dependencies": { "@babel/core": "^7.23.3", "@types/babel__core": "^7.20.4", "babel-preset-solid": "^1.8.4", "merge-anything": "^5.1.7", "solid-refresh": "^0.6.3", "vitefu": "^1.0.4" }, "peerDependencies": { 
"@testing-library/jest-dom": "^5.16.6 || ^5.17.0 || ^6.*", "solid-js": "^1.7.2", "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0" }, "optionalPeers": ["@testing-library/jest-dom"] }, "sha512-FgjPcx2OwX9h6f28jli7A4bG7PP3te8uyakE5iqsmpq3Jqi1TWLgSroC9N6cMfGRU2zXsl4Q6ISvTr2VL0QHpA=="], - "stream-browserify/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - "string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], "string-width-cjs/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], @@ -6225,6 +6042,8 @@ "sucrase/commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="], + "tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], + "terser/commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="], "tiny-async-pool/semver": ["semver@5.7.2", "", { "bin": { "semver": "bin/semver" } }, "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g=="], @@ -6481,26 +6300,6 @@ "@aws-sdk/credential-providers/@aws-sdk/core/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core": ["@smithy/core@3.23.15", "", { 
"dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-stream": "^4.5.23", "@smithy/util-utf8": "^4.2.2", "@smithy/uuid": "^1.1.2", "tslib": "^2.6.2" } }, "sha512-E7GVCgsQttzfujEZb6Qep005wWf4xiL4x06apFEtzQMWYBPggZh/0cnOxPficw5cuK/YjjkehKoIN4YUaSh0UQ=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/middleware-serde": ["@smithy/middleware-serde@4.2.18", "", { "dependencies": { "@smithy/core": "^3.23.15", "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-M6CSgnp3v4tYz9ynj2JHbA60woBZcGqEwNjTKjBsNHPV26R1ZX52+0wW8WsZU18q45jD0tw2wL22S17Ze9LpEw=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/node-config-provider": ["@smithy/node-config-provider@4.3.14", "", { "dependencies": { "@smithy/property-provider": "^4.2.14", "@smithy/shared-ini-file-loader": "^4.4.9", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-S+gFjyo/weSVL0P1b9Ts8C/CwIfNCgUPikk3sl6QVsfE/uUuO+QsF+NsE/JkpvWqqyz1wg7HFdiaZuj5CoBMRg=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/shared-ini-file-loader": ["@smithy/shared-ini-file-loader@4.4.9", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-495/V2I15SHgedSJoDPD23JuSfKAp726ZI1V0wtjB07Wh7q/0tri/0e0DLefZCHgxZonrGKt/OCTpAtP1wE1kQ=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/url-parser": ["@smithy/url-parser@4.2.14", "", { "dependencies": { "@smithy/querystring-parser": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-p06BiBigJ8bTA3MgnOfCtDUWnAMY0YfedO/GRpmc7p+wg3KW8vbXy1xwSu5ASy0wV7rRYtlfZOIKH4XqfhjSQQ=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/util-middleware": ["@smithy/util-middleware@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, 
"sha512-1Su2vj9RYNDEv/V+2E+jXkkwGsgR7dc4sfHn9Z7ruzQHJIEni9zzw5CauvRXlFJfmgcqYP8fWa0dkh2Q2YaQyw=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core": ["@smithy/core@3.23.15", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/types": "^4.14.1", "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-middleware": "^4.2.14", "@smithy/util-stream": "^4.5.23", "@smithy/util-utf8": "^4.2.2", "@smithy/uuid": "^1.1.2", "tslib": "^2.6.2" } }, "sha512-E7GVCgsQttzfujEZb6Qep005wWf4xiL4x06apFEtzQMWYBPggZh/0cnOxPficw5cuK/YjjkehKoIN4YUaSh0UQ=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/middleware-stack": ["@smithy/middleware-stack@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-2dvkUKLuFdKsCRmOE4Mn63co0Djtsm+JMh0bYZQupN1pJwMeE8FmQmRLLzzEMN0dnNi7CDCYYH8F0EVwWiPBeA=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream": ["@smithy/util-stream@4.5.23", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.17", "@smithy/node-http-handler": "^4.5.3", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-N6on1+ngJ3RznZOnDWNveIwnTSlqxNnXuNAh7ez889ZZaRdXoNRTXKgmYOLe6dB0gCmAVtuRScE1hymQFl4hpg=="], - - "@aws-sdk/lib-storage/buffer/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], - "@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.17", "", { "dependencies": { "@smithy/types": "^4.14.0", "fast-xml-parser": "5.5.8", "tslib": "^2.6.2" } }, "sha512-Ra7hjqAZf1OXRRMueB13qex7mFJRDK/pgCvdSFemXBT8KCGnQDPoKzHY1SjN+TjJVmnpSF14W5tJ1vDamFu+Gg=="], "@aws-sdk/nested-clients/@aws-sdk/middleware-user-agent/@aws-sdk/util-endpoints": 
["@aws-sdk/util-endpoints@3.996.6", "", { "dependencies": { "@aws-sdk/types": "^3.973.7", "@smithy/types": "^4.14.0", "@smithy/url-parser": "^4.2.13", "@smithy/util-endpoints": "^3.3.4", "tslib": "^2.6.2" } }, "sha512-2nUQ+2ih7CShuKHpGSIYvvAIOHy52dOZguYG36zptBukhw6iFwcvGfG0tes0oZFWQqEWvgZe9HLWaNlvXGdOrg=="], @@ -6537,12 +6336,6 @@ "@babel/helper-compilation-targets/lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="], - "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], - - "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - "@develar/schema-utils/ajv/json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, 
"sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], "@electron/asar/minimatch/brace-expansion": ["brace-expansion@1.1.14", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g=="], @@ -6585,10 +6378,6 @@ "@gitlab/opencode-gitlab-auth/open/wsl-utils": ["wsl-utils@0.1.0", "", { "dependencies": { "is-wsl": "^3.1.0" } }, "sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw=="], - "@grpc/proto-loader/yargs/cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="], - - "@grpc/proto-loader/yargs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], - "@jsx-email/cli/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.19.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA=="], "@jsx-email/cli/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.19.12", "", { "os": "android", "cpu": "arm" }, "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w=="], @@ -6767,52 +6556,6 @@ "@opencode-ai/web/@shikijs/transformers/@shikijs/types": ["@shikijs/types@3.20.0", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-lhYAATn10nkZcBQ0BlzSbJA3wcmL5MXUUF8d2Zzon6saZDlToKaiRX60n2+ZaHJCmXEcZRWNzn+k9vplr8Jhsw=="], - "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", 
{ "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/exporter-logs-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - 
"@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/exporter-metrics-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/exporter-metrics-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": 
"0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/exporter-metrics-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - 
- "@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/otlp-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/otlp-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/otlp-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { 
"@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - - "@opentelemetry/otlp-grpc-exporter-base/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], - - "@opentelemetry/sdk-node/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="], - - "@opentelemetry/sdk-node/@opentelemetry/sdk-trace-node/@opentelemetry/context-async-hooks": ["@opentelemetry/context-async-hooks@2.2.0", "", { "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-qRkLWiUEZNAmYapZ7KGS5C4OmBLcP/H2foXeOEaowYCR0wi89fHejrfYfbuLVCMLp/dWZXKvQusdbUEZjERfwQ=="], - "@opentui/solid/@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": 
"bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], "@pierre/diffs/@shikijs/transformers/@shikijs/core": ["@shikijs/core@3.20.0", "", { "dependencies": { "@shikijs/types": "3.20.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4", "hast-util-to-html": "^9.0.5" } }, "sha512-f2ED7HYV4JEk827mtMDwe/yQ25pRiXZmtHjWF8uzZKuKiEsJR7Ce1nuQ+HhV9FzDcbIo4ObBCD9GPTzNuy9S1g=="], @@ -6937,10 +6680,6 @@ "lazystream/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="], - "minipass-flush/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - - "minipass-pipeline/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - "motion/framer-motion/motion-dom": ["motion-dom@12.38.0", "", { "dependencies": { "motion-utils": "^12.36.0" } }, "sha512-pdkHLD8QYRp8VfiNLb8xIBJis1byQ9gPT3Jnh2jqfFtAsWUA3dEepDlsWe/xMpO8McV+VdpKVcp+E+TGJEtOoA=="], "motion/framer-motion/motion-utils": ["motion-utils@12.36.0", "", {}, "sha512-eHWisygbiwVvf6PZ1vhaHCLamvkSbPIeAYxWUuL3a2PD/TROgE7FvfHWTIH4vMl798QLfMw15nRqIaRDXTlYRg=="], @@ -6963,8 +6702,6 @@ "opencontrol/@modelcontextprotocol/sdk/zod-to-json-schema": ["zod-to-json-schema@3.25.2", "", { "peerDependencies": { "zod": "^3.25.28 || ^4" } }, "sha512-O/PgfnpT1xKSDeQYSCfRI5Gy3hPf91mKVDuYLUHZJMiDFptvP41MSnWofm8dnCm0256ZNfZIM7DSzuSMAFnjHA=="], - "openid-client/lru-cache/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - "ora/bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, 
"sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], "ora/bl/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], @@ -7125,34 +6862,10 @@ "@aws-sdk/credential-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.5.8", "", { "dependencies": { "fast-xml-builder": "^1.1.4", "path-expression-matcher": "^1.2.0", "strnum": "^2.2.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ=="], - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream": ["@smithy/util-stream@4.5.23", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.17", "@smithy/node-http-handler": "^4.5.3", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-N6on1+ngJ3RznZOnDWNveIwnTSlqxNnXuNAh7ez889ZZaRdXoNRTXKgmYOLe6dB0gCmAVtuRScE1hymQFl4hpg=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/node-config-provider/@smithy/property-provider": ["@smithy/property-provider@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-WuM31CgfsnQ/10i7NYr0PyxqknD72Y5uMfUMVSniPjbEPceiTErb4eIqJQ+pdxNEAUEWrewrGjIRjVbVHsxZiQ=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/url-parser/@smithy/querystring-parser": 
["@smithy/querystring-parser@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-hr+YyqBD23GVvRxGGrcc/oOeNlK3PzT5Fu4dzrDXxzS1LpFiuL2PQQqKPs87M79aW7ziMs+nvB3qdw77SqE7Lw=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core/@smithy/url-parser": ["@smithy/url-parser@4.2.14", "", { "dependencies": { "@smithy/querystring-parser": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-p06BiBigJ8bTA3MgnOfCtDUWnAMY0YfedO/GRpmc7p+wg3KW8vbXy1xwSu5ASy0wV7rRYtlfZOIKH4XqfhjSQQ=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core/@smithy/util-middleware": ["@smithy/util-middleware@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-1Su2vj9RYNDEv/V+2E+jXkkwGsgR7dc4sfHn9Z7ruzQHJIEni9zzw5CauvRXlFJfmgcqYP8fWa0dkh2Q2YaQyw=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/fetch-http-handler": ["@smithy/fetch-http-handler@5.3.17", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/querystring-builder": "^4.2.14", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "tslib": "^2.6.2" } }, "sha512-bXOvQzaSm6MnmLaWA1elgfQcAtN4UP3vXqV97bHuoOrHQOJiLT3ds6o9eo5bqd0TJfRFpzdGnDQdW3FACiAVdw=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/node-http-handler": ["@smithy/node-http-handler@4.5.3", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/querystring-builder": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-lc5jFL++x17sPhIwMWJ3YOnqmSjw/2Po6VLDlUIXvxVWRuJwRXnJ4jOBBLB0cfI5BB5ehIl02Fxr1PDvk/kxDw=="], - - 
"@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="], - "@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.5.8", "", { "dependencies": { "fast-xml-builder": "^1.1.4", "path-expression-matcher": "^1.2.0", "strnum": "^2.2.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ=="], "@aws-sdk/token-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.5.8", "", { "dependencies": { "fast-xml-builder": "^1.1.4", "path-expression-matcher": "^1.2.0", "strnum": "^2.2.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ=="], - "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="], - - "@daytona/sdk/@opentelemetry/exporter-trace-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], - "@electron/asar/minimatch/brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, 
"sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], "@electron/rebuild/node-gyp/make-fetch-happen/@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="], @@ -7177,14 +6890,6 @@ "@electron/universal/minimatch/brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], - "@grpc/proto-loader/yargs/cliui/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - - "@grpc/proto-loader/yargs/cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], - - "@grpc/proto-loader/yargs/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], - - "@grpc/proto-loader/yargs/string-width/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - "@jsx-email/cli/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], "@jsx-email/cli/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, 
"sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="], @@ -7271,8 +6976,6 @@ "app-builder-lib/@electron/get/fs-extra/universalify": ["universalify@0.1.2", "", {}, "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="], - "app-builder-lib/hosted-git-info/lru-cache/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - "archiver-utils/glob/jackspeak/@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], "archiver-utils/glob/minimatch/brace-expansion": ["brace-expansion@2.1.0", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w=="], @@ -7389,16 +7092,6 @@ "@aws-sdk/credential-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.2.3", "", {}, "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg=="], - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream/@smithy/fetch-http-handler": ["@smithy/fetch-http-handler@5.3.17", "", { "dependencies": { "@smithy/protocol-http": "^5.3.14", "@smithy/querystring-builder": "^4.2.14", "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "tslib": "^2.6.2" } }, "sha512-bXOvQzaSm6MnmLaWA1elgfQcAtN4UP3vXqV97bHuoOrHQOJiLT3ds6o9eo5bqd0TJfRFpzdGnDQdW3FACiAVdw=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream/@smithy/node-http-handler": ["@smithy/node-http-handler@4.5.3", "", { "dependencies": { 
"@smithy/protocol-http": "^5.3.14", "@smithy/querystring-builder": "^4.2.14", "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-lc5jFL++x17sPhIwMWJ3YOnqmSjw/2Po6VLDlUIXvxVWRuJwRXnJ4jOBBLB0cfI5BB5ehIl02Fxr1PDvk/kxDw=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core/@smithy/url-parser/@smithy/querystring-parser": ["@smithy/querystring-parser@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "tslib": "^2.6.2" } }, "sha512-hr+YyqBD23GVvRxGGrcc/oOeNlK3PzT5Fu4dzrDXxzS1LpFiuL2PQQqKPs87M79aW7ziMs+nvB3qdw77SqE7Lw=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/fetch-http-handler/@smithy/querystring-builder": ["@smithy/querystring-builder@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A=="], - - "@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/node-http-handler/@smithy/querystring-builder": ["@smithy/querystring-builder@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A=="], - "@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.2.3", "", {}, "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg=="], "@aws-sdk/token-providers/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.2.3", "", {}, "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg=="], @@ -7417,10 +7110,6 @@ "@electron/rebuild/yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - "@grpc/proto-loader/yargs/cliui/strip-ansi/ansi-regex": 
["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - - "@grpc/proto-loader/yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - "@jsx-email/cli/tailwindcss/chokidar/readdirp/picomatch": ["picomatch@2.3.2", "", {}, "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA=="], "@solidjs/start/shiki/@shikijs/engine-javascript/oniguruma-to-es/regex": ["regex@5.1.1", "", { "dependencies": { "regex-utilities": "^2.3.0" } }, "sha512-dN5I359AVGPnwzJm2jN1k0W9LPZ+ePvoOeVMMfqIMFz53sSwXkxaJoxr50ptnsC771lK95BnTrVSZxq0b9yCGw=="], @@ -7459,10 +7148,6 @@ "@aws-sdk/credential-provider-cognito-identity/@aws-sdk/nested-clients/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.2.3", "", {}, "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg=="], - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream/@smithy/fetch-http-handler/@smithy/querystring-builder": ["@smithy/querystring-builder@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A=="], - - "@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream/@smithy/node-http-handler/@smithy/querystring-builder": ["@smithy/querystring-builder@4.2.14", "", { "dependencies": { "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A=="], - "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { 
"@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/minimatch": ["minimatch@9.0.9", "", { "dependencies": { "brace-expansion": "^2.0.2" } }, "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg=="], @@ -7483,8 +7168,6 @@ "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/minimatch/brace-expansion": ["brace-expansion@2.1.0", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w=="], - "@electron/rebuild/node-gyp/make-fetch-happen/minipass-fetch/minipass-sized/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/jackspeak/@isaacs/cliui/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], "@electron/rebuild/node-gyp/make-fetch-happen/cacache/glob/jackspeak/@isaacs/cliui/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], diff --git a/daytona.ts b/daytona.ts deleted file mode 100644 index 4007b77aa6..0000000000 --- a/daytona.ts +++ /dev/null @@ -1,197 +0,0 @@ -import type { Daytona, Sandbox } from "@daytonaio/sdk" -import type { Plugin } from "@opencode-ai/plugin" -import { join } from "node:path" -import { fileURLToPath } from "node:url" -import { tmpdir } from "node:os" -import { access, copyFile, mkdir } from "node:fs/promises" - -let client: Promise | undefined - 
-let daytona = function daytona(): Promise { - if (client == null) { - client = import("@daytonaio/sdk").then( - ({ Daytona }) => - new Daytona({ - apiKey: "dtn_d63c206564ef49d4104ec2cd755e561bb3665beed8fd7d7ab2c5f7a2186965f0", - }), - ) - } - return client -} - -const preview = new Map() -const repo = "/home/daytona/workspace/repo" -const root = "/home/daytona/workspace" -const localbin = "/home/daytona/opencode" -const installbin = "/home/daytona/.opencode/bin/opencode" -const health = "http://127.0.0.1:3096/global/health" - -const local = fileURLToPath( - new URL("./packages/opencode/dist/opencode-linux-x64-baseline/bin/opencode", import.meta.url), -) - -async function exists(file: string) { - return access(file) - .then(() => true) - .catch(() => false) -} - -function sh(value: string) { - return `'${value.replace(/'/g, `"'"'"`)}'` -} - -// Internally Daytona uses axios, which tries to overwrite stack -// traces when a failure happens. That path fails in Bun, however, so -// when something goes wrong you only see a very obscure error. 
-async function withSandbox(name: string, fn: (sandbox: Sandbox) => Promise) { - const stack = Error.captureStackTrace - // @ts-expect-error temporary compatibility hack for Daytona's axios stack handling in Bun - Error.captureStackTrace = undefined - try { - return await fn(await (await daytona()).get(name)) - } finally { - Error.captureStackTrace = stack - } -} - -export const DaytonaWorkspacePlugin: Plugin = async ({ experimental_workspace, worktree, project }) => { - experimental_workspace.register("daytona", { - name: "Daytona", - description: "Create a remote Daytona workspace", - configure(config) { - return config - }, - async create(config, env) { - const temp = join(tmpdir(), `opencode-daytona-${Date.now()}`) - - console.log("creating sandbox...") - - const sandbox = await ( - await daytona() - ).create({ - name: config.name, - snapshot: "daytona-large", - envVars: env, - }) - - console.log("creating ssh...") - - const ssh = await withSandbox(config.name, (sandbox) => sandbox.createSshAccess()) - console.log("daytona:", ssh.sshCommand) - - const run = async (command: string) => { - console.log("sandbox:", command) - const result = await sandbox.process.executeCommand(command) - if (result.result) process.stdout.write(result.result) - if (result.exitCode === 0) return result - throw new Error(result.result || `sandbox command failed: ${command}`) - } - - const wait = async () => { - for (let i = 0; i < 60; i++) { - const result = await sandbox.process.executeCommand(`curl -fsS ${sh(health)}`) - if (result.exitCode === 0) { - if (result.result) process.stdout.write(result.result) - return - } - console.log(`waiting for server (${i + 1}/60)`) - await Bun.sleep(1000) - } - - const log = await sandbox.process.executeCommand(`test -f /tmp/opencode.log && cat /tmp/opencode.log || true`) - throw new Error(log.result || "daytona workspace server did not become ready in time") - } - - const dir = join(temp, "repo") - const tar = join(temp, "repo.tgz") - const 
source = `file://${worktree}` - await mkdir(temp, { recursive: true }) - const args = ["clone", "--depth", "1", "--no-local"] - if (config.branch) args.push("--branch", config.branch) - args.push(source, dir) - - console.log("git cloning...") - - const clone = Bun.spawn(["git", ...args], { - cwd: tmpdir(), - stdout: "pipe", - stderr: "pipe", - }) - const code = await clone.exited - if (code !== 0) throw new Error(await new Response(clone.stderr).text()) - - const configPackage = join(worktree, ".opencode", "package.json") - // if (await exists(configPackage)) { - // console.log("copying config package...") - // await mkdir(join(dir, ".opencode"), { recursive: true }) - // await copyFile(configPackage, join(dir, ".opencode", "package.json")) - // } - - console.log("tarring...") - - const packed = Bun.spawn(["tar", "-czf", tar, "-C", temp, "repo"], { - stdout: "ignore", - stderr: "pipe", - }) - if ((await packed.exited) !== 0) throw new Error(await new Response(packed.stderr).text()) - - console.log("uploading files...") - - await sandbox.fs.uploadFile(tar, "repo.tgz") - - const have = await exists(local) - console.log("local", local) - if (have) { - console.log("uploading local binary...") - await sandbox.fs.uploadFile(local, "opencode") - } - - console.log("bootstrapping workspace...") - await run(`rm -rf ${sh(repo)} && mkdir -p ${sh(root)} && tar -xzf \"$HOME/repo.tgz\" -C \"$HOME/workspace\"`) - - if (have) { - await run(`chmod +x ${sh(localbin)}`) - } else { - await run( - `mkdir -p \"$HOME/.opencode/bin\" && OPENCODE_INSTALL_DIR=\"$HOME/.opencode/bin\" curl -fsSL https://opencode.ai/install | bash`, - ) - } - - await run(`printf \"%s\\n\" ${sh(project.id)} > ${sh(`${repo}/.git/opencode`)}`) - - console.log("starting server...") - await run( - `cd ${sh(repo)} && exe=${sh(localbin)} && if [ ! 
-x \"$exe\" ]; then exe=${sh(installbin)}; fi && nohup env \"$exe\" serve --hostname 0.0.0.0 --port 3096 >/tmp/opencode.log 2>&1 undefined) - if (!sandbox) return - await (await daytona()).delete(sandbox) - preview.delete(config.name) - }, - async target(config) { - let link = preview.get(config.name) - if (!link) { - link = await withSandbox(config.name, (sandbox) => sandbox.getPreviewLink(3096)) - preview.set(config.name, link) - } - return { - type: "remote", - url: link.url, - headers: { - "x-daytona-preview-token": link.token, - "x-daytona-skip-preview-warning": "true", - "x-opencode-directory": repo, - }, - } - }, - }) - - return {} -} - -export default DaytonaWorkspacePlugin diff --git a/package.json b/package.json index 09548bf4e0..5fecc09922 100644 --- a/package.json +++ b/package.json @@ -95,7 +95,6 @@ }, "dependencies": { "@aws-sdk/client-s3": "3.933.0", - "@daytona/sdk": "0.167.0", "@opencode-ai/plugin": "workspace:*", "@opencode-ai/script": "workspace:*", "@opencode-ai/sdk": "workspace:*", From e7f8f7fa3bceab49b1606de72d969be63c3e8785 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Fri, 17 Apr 2026 01:14:08 -0400 Subject: [PATCH 131/201] fix crash on experimental --- packages/opencode/src/flag/flag.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/flag/flag.ts b/packages/opencode/src/flag/flag.ts index a1c46288a2..72c8931f5b 100644 --- a/packages/opencode/src/flag/flag.ts +++ b/packages/opencode/src/flag/flag.ts @@ -81,7 +81,7 @@ export const Flag = { OPENCODE_STRICT_CONFIG_DEPS: truthy("OPENCODE_STRICT_CONFIG_DEPS"), OPENCODE_WORKSPACE_ID: process.env["OPENCODE_WORKSPACE_ID"], - OPENCODE_EXPERIMENTAL_HTTPAPI: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_HTTPAPI"), + OPENCODE_EXPERIMENTAL_HTTPAPI: truthy("OPENCODE_EXPERIMENTAL_HTTPAPI"), OPENCODE_EXPERIMENTAL_WORKSPACES: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES"), // Evaluated at access time (not module load) because tests, the 
CLI, and From 7605acff650db0d41d80429b662b5c0725d89675 Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 02:06:20 -0400 Subject: [PATCH 132/201] refactor(core): move server routes around to clarify workspacing (#23031) --- .../src/control-plane/workspace-context.ts | 4 +- .../src/server/{ => routes}/control/index.ts | 6 +- .../{instance => routes/control}/workspace.ts | 10 +- .../src/server/{instance => routes}/global.ts | 0 .../server/{ => routes}/instance/config.ts | 10 +- .../src/server/{ => routes}/instance/event.ts | 2 +- .../{ => routes}/instance/experimental.ts | 30 +- .../src/server/{ => routes}/instance/file.ts | 12 +- .../{ => routes}/instance/httpapi/config.ts | 0 .../instance/httpapi/permission.ts | 0 .../{ => routes}/instance/httpapi/project.ts | 0 .../{ => routes}/instance/httpapi/provider.ts | 0 .../{ => routes}/instance/httpapi/question.ts | 0 .../{ => routes}/instance/httpapi/server.ts | 0 .../src/server/{ => routes}/instance/index.ts | 19 +- .../src/server/{ => routes}/instance/mcp.ts | 12 +- .../{ => routes}/instance/permission.ts | 4 +- .../server/{ => routes}/instance/project.ts | 12 +- .../server/{ => routes}/instance/provider.ts | 16 +- .../src/server/{ => routes}/instance/pty.ts | 4 +- .../server/{ => routes}/instance/question.ts | 6 +- .../server/{ => routes}/instance/session.ts | 26 +- .../src/server/{ => routes}/instance/sync.ts | 2 +- .../src/server/{ => routes}/instance/trace.ts | 2 +- .../src/server/{ => routes}/instance/tui.ts | 10 +- .../src/server/{ui/index.ts => routes/ui.ts} | 0 packages/opencode/src/server/server.ts | 72 +- .../{instance/middleware.ts => workspace.ts} | 43 +- .../test/server/session-messages.test.ts | 13 - packages/sdk/js/src/v2/gen/sdk.gen.ts | 858 ++++++++--------- packages/sdk/js/src/v2/gen/types.gen.ts | 362 ++++---- packages/sdk/openapi.json | 870 +++++++++--------- 32 files changed, 1196 insertions(+), 1209 deletions(-) rename packages/opencode/src/server/{ => routes}/control/index.ts (96%) 
rename packages/opencode/src/server/{instance => routes/control}/workspace.ts (95%) rename packages/opencode/src/server/{instance => routes}/global.ts (100%) rename packages/opencode/src/server/{ => routes}/instance/config.ts (92%) rename packages/opencode/src/server/{ => routes}/instance/event.ts (97%) rename packages/opencode/src/server/{ => routes}/instance/experimental.ts (94%) rename packages/opencode/src/server/{ => routes}/instance/file.ts (95%) rename packages/opencode/src/server/{ => routes}/instance/httpapi/config.ts (100%) rename packages/opencode/src/server/{ => routes}/instance/httpapi/permission.ts (100%) rename packages/opencode/src/server/{ => routes}/instance/httpapi/project.ts (100%) rename packages/opencode/src/server/{ => routes}/instance/httpapi/provider.ts (100%) rename packages/opencode/src/server/{ => routes}/instance/httpapi/question.ts (100%) rename packages/opencode/src/server/{ => routes}/instance/httpapi/server.ts (100%) rename packages/opencode/src/server/{ => routes}/instance/index.ts (95%) rename packages/opencode/src/server/{ => routes}/instance/mcp.ts (96%) rename packages/opencode/src/server/{ => routes}/instance/permission.ts (96%) rename packages/opencode/src/server/{ => routes}/instance/project.ts (92%) rename packages/opencode/src/server/{ => routes}/instance/provider.ts (93%) rename packages/opencode/src/server/{ => routes}/instance/pty.ts (98%) rename packages/opencode/src/server/{ => routes}/instance/question.ts (96%) rename packages/opencode/src/server/{ => routes}/instance/session.ts (98%) rename packages/opencode/src/server/{ => routes}/instance/sync.ts (98%) rename packages/opencode/src/server/{ => routes}/instance/trace.ts (93%) rename packages/opencode/src/server/{ => routes}/instance/tui.ts (98%) rename packages/opencode/src/server/{ui/index.ts => routes/ui.ts} (100%) rename packages/opencode/src/server/{instance/middleware.ts => workspace.ts} (79%) diff --git 
a/packages/opencode/src/control-plane/workspace-context.ts b/packages/opencode/src/control-plane/workspace-context.ts index 3d4fa5baef..85ef596e7a 100644 --- a/packages/opencode/src/control-plane/workspace-context.ts +++ b/packages/opencode/src/control-plane/workspace-context.ts @@ -2,13 +2,13 @@ import { LocalContext } from "../util" import type { WorkspaceID } from "../control-plane/schema" export interface WorkspaceContext { - workspaceID: WorkspaceID + workspaceID: WorkspaceID | undefined } const context = LocalContext.create("instance") export const WorkspaceContext = { - async provide(input: { workspaceID: WorkspaceID; fn: () => R }): Promise { + async provide(input: { workspaceID?: WorkspaceID; fn: () => R }): Promise { return context.provide({ workspaceID: input.workspaceID }, () => input.fn()) }, diff --git a/packages/opencode/src/server/control/index.ts b/packages/opencode/src/server/routes/control/index.ts similarity index 96% rename from packages/opencode/src/server/control/index.ts rename to packages/opencode/src/server/routes/control/index.ts index 737f958d6b..3fd60636ff 100644 --- a/packages/opencode/src/server/control/index.ts +++ b/packages/opencode/src/server/routes/control/index.ts @@ -6,13 +6,12 @@ import { ProviderID } from "@/provider/schema" import { Hono } from "hono" import { describeRoute, resolver, validator, openAPIRouteHandler } from "hono-openapi" import z from "zod" -import { errors } from "../error" -import { GlobalRoutes } from "../instance/global" +import { errors } from "../../error" +import { WorkspaceRoutes } from "./workspace" export function ControlPlaneRoutes(): Hono { const app = new Hono() return app - .route("/global", GlobalRoutes()) .put( "/auth/:providerID", describeRoute({ @@ -159,4 +158,5 @@ export function ControlPlaneRoutes(): Hono { return c.json(true) }, ) + .route("/experimental/workspace", WorkspaceRoutes()) } diff --git a/packages/opencode/src/server/instance/workspace.ts 
b/packages/opencode/src/server/routes/control/workspace.ts similarity index 95% rename from packages/opencode/src/server/instance/workspace.ts rename to packages/opencode/src/server/routes/control/workspace.ts index 59369ef8e7..9ff747b68a 100644 --- a/packages/opencode/src/server/instance/workspace.ts +++ b/packages/opencode/src/server/routes/control/workspace.ts @@ -1,11 +1,11 @@ import { Hono } from "hono" import { describeRoute, resolver, validator } from "hono-openapi" import z from "zod" -import { listAdaptors } from "../../control-plane/adaptors" -import { Workspace } from "../../control-plane/workspace" -import { Instance } from "../../project/instance" -import { errors } from "../error" -import { lazy } from "../../util/lazy" +import { listAdaptors } from "@/control-plane/adaptors" +import { Workspace } from "@/control-plane/workspace" +import { Instance } from "@/project/instance" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" import { Log } from "@/util" import { errorData } from "@/util/error" diff --git a/packages/opencode/src/server/instance/global.ts b/packages/opencode/src/server/routes/global.ts similarity index 100% rename from packages/opencode/src/server/instance/global.ts rename to packages/opencode/src/server/routes/global.ts diff --git a/packages/opencode/src/server/instance/config.ts b/packages/opencode/src/server/routes/instance/config.ts similarity index 92% rename from packages/opencode/src/server/instance/config.ts rename to packages/opencode/src/server/routes/instance/config.ts index 15c393fe5a..235f5682e2 100644 --- a/packages/opencode/src/server/instance/config.ts +++ b/packages/opencode/src/server/routes/instance/config.ts @@ -1,11 +1,11 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" -import { Config } from "../../config" -import { Provider } from "../../provider" -import { errors } from "../error" -import { lazy } from "../../util/lazy" 
-import { AppRuntime } from "../../effect/app-runtime" +import { Config } from "@/config" +import { Provider } from "@/provider" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" +import { AppRuntime } from "@/effect/app-runtime" import { jsonRequest } from "./trace" export const ConfigRoutes = lazy(() => diff --git a/packages/opencode/src/server/instance/event.ts b/packages/opencode/src/server/routes/instance/event.ts similarity index 97% rename from packages/opencode/src/server/instance/event.ts rename to packages/opencode/src/server/routes/instance/event.ts index 103d3d7cfb..1d883bd883 100644 --- a/packages/opencode/src/server/instance/event.ts +++ b/packages/opencode/src/server/routes/instance/event.ts @@ -5,7 +5,7 @@ import { streamSSE } from "hono/streaming" import { Log } from "@/util" import { BusEvent } from "@/bus/bus-event" import { Bus } from "@/bus" -import { AsyncQueue } from "../../util/queue" +import { AsyncQueue } from "@/util/queue" const log = Log.create({ service: "server" }) diff --git a/packages/opencode/src/server/instance/experimental.ts b/packages/opencode/src/server/routes/instance/experimental.ts similarity index 94% rename from packages/opencode/src/server/instance/experimental.ts rename to packages/opencode/src/server/routes/instance/experimental.ts index 6fe99a8c3b..f7ecc8255b 100644 --- a/packages/opencode/src/server/instance/experimental.ts +++ b/packages/opencode/src/server/routes/instance/experimental.ts @@ -1,22 +1,21 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" -import { ProviderID, ModelID } from "../../provider/schema" -import { ToolRegistry } from "../../tool" -import { Worktree } from "../../worktree" -import { Instance } from "../../project/instance" -import { Project } from "../../project" -import { MCP } from "../../mcp" -import { Session } from "../../session" -import { Config } from "../../config" -import { ConsoleState } from 
"../../config/console-state" -import { Account } from "../../account/account" -import { AccountID, OrgID } from "../../account/schema" -import { AppRuntime } from "../../effect/app-runtime" -import { errors } from "../error" -import { lazy } from "../../util/lazy" +import { ProviderID, ModelID } from "@/provider/schema" +import { ToolRegistry } from "@/tool" +import { Worktree } from "@/worktree" +import { Instance } from "@/project/instance" +import { Project } from "@/project" +import { MCP } from "@/mcp" +import { Session } from "@/session" +import { Config } from "@/config" +import { ConsoleState } from "@/config/console-state" +import { Account } from "@/account/account" +import { AccountID, OrgID } from "@/account/schema" +import { AppRuntime } from "@/effect/app-runtime" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" import { Effect, Option } from "effect" -import { WorkspaceRoutes } from "./workspace" import { Agent } from "@/agent/agent" const ConsoleOrgOption = z.object({ @@ -231,7 +230,6 @@ export const ExperimentalRoutes = lazy(() => ) }, ) - .route("/workspace", WorkspaceRoutes()) .post( "/worktree", describeRoute({ diff --git a/packages/opencode/src/server/instance/file.ts b/packages/opencode/src/server/routes/instance/file.ts similarity index 95% rename from packages/opencode/src/server/instance/file.ts rename to packages/opencode/src/server/routes/instance/file.ts index db5e227770..a82e5687d8 100644 --- a/packages/opencode/src/server/instance/file.ts +++ b/packages/opencode/src/server/routes/instance/file.ts @@ -2,12 +2,12 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import { Effect } from "effect" import z from "zod" -import { AppRuntime } from "../../effect/app-runtime" -import { File } from "../../file" -import { Ripgrep } from "../../file/ripgrep" -import { LSP } from "../../lsp" -import { Instance } from "../../project/instance" -import { lazy } from 
"../../util/lazy" +import { AppRuntime } from "@/effect/app-runtime" +import { File } from "@/file" +import { Ripgrep } from "@/file/ripgrep" +import { LSP } from "@/lsp" +import { Instance } from "@/project/instance" +import { lazy } from "@/util/lazy" export const FileRoutes = lazy(() => new Hono() diff --git a/packages/opencode/src/server/instance/httpapi/config.ts b/packages/opencode/src/server/routes/instance/httpapi/config.ts similarity index 100% rename from packages/opencode/src/server/instance/httpapi/config.ts rename to packages/opencode/src/server/routes/instance/httpapi/config.ts diff --git a/packages/opencode/src/server/instance/httpapi/permission.ts b/packages/opencode/src/server/routes/instance/httpapi/permission.ts similarity index 100% rename from packages/opencode/src/server/instance/httpapi/permission.ts rename to packages/opencode/src/server/routes/instance/httpapi/permission.ts diff --git a/packages/opencode/src/server/instance/httpapi/project.ts b/packages/opencode/src/server/routes/instance/httpapi/project.ts similarity index 100% rename from packages/opencode/src/server/instance/httpapi/project.ts rename to packages/opencode/src/server/routes/instance/httpapi/project.ts diff --git a/packages/opencode/src/server/instance/httpapi/provider.ts b/packages/opencode/src/server/routes/instance/httpapi/provider.ts similarity index 100% rename from packages/opencode/src/server/instance/httpapi/provider.ts rename to packages/opencode/src/server/routes/instance/httpapi/provider.ts diff --git a/packages/opencode/src/server/instance/httpapi/question.ts b/packages/opencode/src/server/routes/instance/httpapi/question.ts similarity index 100% rename from packages/opencode/src/server/instance/httpapi/question.ts rename to packages/opencode/src/server/routes/instance/httpapi/question.ts diff --git a/packages/opencode/src/server/instance/httpapi/server.ts b/packages/opencode/src/server/routes/instance/httpapi/server.ts similarity index 100% rename from 
packages/opencode/src/server/instance/httpapi/server.ts rename to packages/opencode/src/server/routes/instance/httpapi/server.ts diff --git a/packages/opencode/src/server/instance/index.ts b/packages/opencode/src/server/routes/instance/index.ts similarity index 95% rename from packages/opencode/src/server/instance/index.ts rename to packages/opencode/src/server/routes/instance/index.ts index cfcaffc596..017541b8fc 100644 --- a/packages/opencode/src/server/instance/index.ts +++ b/packages/opencode/src/server/routes/instance/index.ts @@ -3,15 +3,15 @@ import { Hono } from "hono" import type { UpgradeWebSocket } from "hono/ws" import { Context, Effect } from "effect" import z from "zod" -import { Format } from "../../format" +import { Format } from "@/format" import { TuiRoutes } from "./tui" -import { Instance } from "../../project/instance" -import { Vcs } from "../../project" -import { Agent } from "../../agent/agent" -import { Skill } from "../../skill" -import { Global } from "../../global" -import { LSP } from "../../lsp" -import { Command } from "../../command" +import { Instance } from "@/project/instance" +import { Vcs } from "@/project" +import { Agent } from "@/agent/agent" +import { Skill } from "@/skill" +import { Global } from "@/global" +import { LSP } from "@/lsp" +import { Command } from "@/command" import { QuestionRoutes } from "./question" import { PermissionRoutes } from "./permission" import { Flag } from "@/flag/flag" @@ -26,11 +26,10 @@ import { ExperimentalRoutes } from "./experimental" import { ProviderRoutes } from "./provider" import { EventRoutes } from "./event" import { SyncRoutes } from "./sync" -import { WorkspaceRouterMiddleware } from "./middleware" import { AppRuntime } from "@/effect/app-runtime" export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { - const app = new Hono().use(WorkspaceRouterMiddleware(upgrade)) + const app = new Hono() if (Flag.OPENCODE_EXPERIMENTAL_HTTPAPI) { const handler = 
ExperimentalHttpApiServer.webHandler().handler diff --git a/packages/opencode/src/server/instance/mcp.ts b/packages/opencode/src/server/routes/instance/mcp.ts similarity index 96% rename from packages/opencode/src/server/instance/mcp.ts rename to packages/opencode/src/server/routes/instance/mcp.ts index f6e6f1eddb..197185bde0 100644 --- a/packages/opencode/src/server/instance/mcp.ts +++ b/packages/opencode/src/server/routes/instance/mcp.ts @@ -1,12 +1,12 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" -import { MCP } from "../../mcp" -import { Config } from "../../config" -import { ConfigMCP } from "../../config/mcp" -import { AppRuntime } from "../../effect/app-runtime" -import { errors } from "../error" -import { lazy } from "../../util/lazy" +import { MCP } from "@/mcp" +import { Config } from "@/config" +import { ConfigMCP } from "@/config/mcp" +import { AppRuntime } from "@/effect/app-runtime" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" import { Effect } from "effect" export const McpRoutes = lazy(() => diff --git a/packages/opencode/src/server/instance/permission.ts b/packages/opencode/src/server/routes/instance/permission.ts similarity index 96% rename from packages/opencode/src/server/instance/permission.ts rename to packages/opencode/src/server/routes/instance/permission.ts index b8c2244140..c3f9c82011 100644 --- a/packages/opencode/src/server/instance/permission.ts +++ b/packages/opencode/src/server/routes/instance/permission.ts @@ -4,8 +4,8 @@ import z from "zod" import { AppRuntime } from "@/effect/app-runtime" import { Permission } from "@/permission" import { PermissionID } from "@/permission/schema" -import { errors } from "../error" -import { lazy } from "../../util/lazy" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" export const PermissionRoutes = lazy(() => new Hono() diff --git 
a/packages/opencode/src/server/instance/project.ts b/packages/opencode/src/server/routes/instance/project.ts similarity index 92% rename from packages/opencode/src/server/instance/project.ts rename to packages/opencode/src/server/routes/instance/project.ts index 95b5862fd5..060542c4b4 100644 --- a/packages/opencode/src/server/instance/project.ts +++ b/packages/opencode/src/server/routes/instance/project.ts @@ -1,13 +1,13 @@ import { Hono } from "hono" import { describeRoute, validator } from "hono-openapi" import { resolver } from "hono-openapi" -import { Instance } from "../../project/instance" -import { Project } from "../../project" +import { Instance } from "@/project/instance" +import { Project } from "@/project" import z from "zod" -import { ProjectID } from "../../project/schema" -import { errors } from "../error" -import { lazy } from "../../util/lazy" -import { InstanceBootstrap } from "../../project/bootstrap" +import { ProjectID } from "@/project/schema" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" +import { InstanceBootstrap } from "@/project/bootstrap" import { AppRuntime } from "@/effect/app-runtime" export const ProjectRoutes = lazy(() => diff --git a/packages/opencode/src/server/instance/provider.ts b/packages/opencode/src/server/routes/instance/provider.ts similarity index 93% rename from packages/opencode/src/server/instance/provider.ts rename to packages/opencode/src/server/routes/instance/provider.ts index a81ae00d59..57aa895e3d 100644 --- a/packages/opencode/src/server/instance/provider.ts +++ b/packages/opencode/src/server/routes/instance/provider.ts @@ -1,15 +1,15 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" -import { Config } from "../../config" -import { Provider } from "../../provider" -import { ModelsDev } from "../../provider" -import { ProviderAuth } from "../../provider" -import { ProviderID } from "../../provider/schema" -import { 
AppRuntime } from "../../effect/app-runtime" +import { Config } from "@/config" +import { Provider } from "@/provider" +import { ModelsDev } from "@/provider" +import { ProviderAuth } from "@/provider" +import { ProviderID } from "@/provider/schema" +import { AppRuntime } from "@/effect/app-runtime" import { mapValues } from "remeda" -import { errors } from "../error" -import { lazy } from "../../util/lazy" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" import { Effect } from "effect" export const ProviderRoutes = lazy(() => diff --git a/packages/opencode/src/server/instance/pty.ts b/packages/opencode/src/server/routes/instance/pty.ts similarity index 98% rename from packages/opencode/src/server/instance/pty.ts rename to packages/opencode/src/server/routes/instance/pty.ts index 7943725120..b3f71c235c 100644 --- a/packages/opencode/src/server/instance/pty.ts +++ b/packages/opencode/src/server/routes/instance/pty.ts @@ -6,8 +6,8 @@ import z from "zod" import { AppRuntime } from "@/effect/app-runtime" import { Pty } from "@/pty" import { PtyID } from "@/pty/schema" -import { NotFoundError } from "../../storage" -import { errors } from "../error" +import { NotFoundError } from "@/storage" +import { errors } from "../../error" export function PtyRoutes(upgradeWebSocket: UpgradeWebSocket) { return new Hono() diff --git a/packages/opencode/src/server/instance/question.ts b/packages/opencode/src/server/routes/instance/question.ts similarity index 96% rename from packages/opencode/src/server/instance/question.ts rename to packages/opencode/src/server/routes/instance/question.ts index 0f61a18672..9b8f461e39 100644 --- a/packages/opencode/src/server/instance/question.ts +++ b/packages/opencode/src/server/routes/instance/question.ts @@ -2,11 +2,11 @@ import { Hono } from "hono" import { describeRoute, validator } from "hono-openapi" import { resolver } from "hono-openapi" import { QuestionID } from "@/question/schema" -import { Question } from 
"../../question" +import { Question } from "@/question" import { AppRuntime } from "@/effect/app-runtime" import z from "zod" -import { errors } from "../error" -import { lazy } from "../../util/lazy" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" const Reply = z.object({ answers: Question.Answer.zod diff --git a/packages/opencode/src/server/instance/session.ts b/packages/opencode/src/server/routes/instance/session.ts similarity index 98% rename from packages/opencode/src/server/instance/session.ts rename to packages/opencode/src/server/routes/instance/session.ts index 1511e99e8d..ae6185abb8 100644 --- a/packages/opencode/src/server/instance/session.ts +++ b/packages/opencode/src/server/routes/instance/session.ts @@ -3,28 +3,28 @@ import { stream } from "hono/streaming" import { describeRoute, validator, resolver } from "hono-openapi" import { SessionID, MessageID, PartID } from "@/session/schema" import z from "zod" -import { Session } from "../../session" -import { MessageV2 } from "../../session/message-v2" -import { SessionPrompt } from "../../session/prompt" +import { Session } from "@/session" +import { MessageV2 } from "@/session/message-v2" +import { SessionPrompt } from "@/session/prompt" import { SessionRunState } from "@/session/run-state" -import { SessionCompaction } from "../../session/compaction" -import { SessionRevert } from "../../session/revert" +import { SessionCompaction } from "@/session/compaction" +import { SessionRevert } from "@/session/revert" import { SessionShare } from "@/share" import { SessionStatus } from "@/session/status" import { SessionSummary } from "@/session/summary" -import { Todo } from "../../session/todo" +import { Todo } from "@/session/todo" import { Effect } from "effect" -import { AppRuntime } from "../../effect/app-runtime" -import { Agent } from "../../agent/agent" +import { AppRuntime } from "@/effect/app-runtime" +import { Agent } from "@/agent/agent" import { Snapshot } from 
"@/snapshot" -import { Command } from "../../command" -import { Log } from "../../util" +import { Command } from "@/command" +import { Log } from "@/util" import { Permission } from "@/permission" import { PermissionID } from "@/permission/schema" import { ModelID, ProviderID } from "@/provider/schema" -import { errors } from "../error" -import { lazy } from "../../util/lazy" -import { Bus } from "../../bus" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" +import { Bus } from "@/bus" import { NamedError } from "@opencode-ai/shared/util/error" import { jsonRequest } from "./trace" diff --git a/packages/opencode/src/server/instance/sync.ts b/packages/opencode/src/server/routes/instance/sync.ts similarity index 98% rename from packages/opencode/src/server/instance/sync.ts rename to packages/opencode/src/server/routes/instance/sync.ts index ac43b638eb..c6a067997b 100644 --- a/packages/opencode/src/server/instance/sync.ts +++ b/packages/opencode/src/server/routes/instance/sync.ts @@ -6,7 +6,7 @@ import { Database, asc, and, not, or, lte, eq } from "@/storage" import { EventTable } from "@/sync/event.sql" import { lazy } from "@/util/lazy" import { Log } from "@/util" -import { errors } from "../error" +import { errors } from "../../error" const ReplayEvent = z.object({ id: z.string(), diff --git a/packages/opencode/src/server/instance/trace.ts b/packages/opencode/src/server/routes/instance/trace.ts similarity index 93% rename from packages/opencode/src/server/instance/trace.ts rename to packages/opencode/src/server/routes/instance/trace.ts index b3adbb4c80..3e1f72d8b2 100644 --- a/packages/opencode/src/server/instance/trace.ts +++ b/packages/opencode/src/server/routes/instance/trace.ts @@ -1,6 +1,6 @@ import type { Context } from "hono" import { Effect } from "effect" -import { AppRuntime } from "../../effect/app-runtime" +import { AppRuntime } from "@/effect/app-runtime" type AppEnv = Parameters[0] extends Effect.Effect ? 
R : never diff --git a/packages/opencode/src/server/instance/tui.ts b/packages/opencode/src/server/routes/instance/tui.ts similarity index 98% rename from packages/opencode/src/server/instance/tui.ts rename to packages/opencode/src/server/routes/instance/tui.ts index 0073ef98c9..2f856c3488 100644 --- a/packages/opencode/src/server/instance/tui.ts +++ b/packages/opencode/src/server/routes/instance/tui.ts @@ -1,13 +1,13 @@ import { Hono, type Context } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" -import { Bus } from "../../bus" -import { Session } from "../../session" +import { Bus } from "@/bus" +import { Session } from "@/session" import { TuiEvent } from "@/cli/cmd/tui/event" import { AppRuntime } from "@/effect/app-runtime" -import { AsyncQueue } from "../../util/queue" -import { errors } from "../error" -import { lazy } from "../../util/lazy" +import { AsyncQueue } from "@/util/queue" +import { errors } from "../../error" +import { lazy } from "@/util/lazy" const TuiRequest = z.object({ path: z.string(), diff --git a/packages/opencode/src/server/ui/index.ts b/packages/opencode/src/server/routes/ui.ts similarity index 100% rename from packages/opencode/src/server/ui/index.ts rename to packages/opencode/src/server/routes/ui.ts diff --git a/packages/opencode/src/server/server.ts b/packages/opencode/src/server/server.ts index 892a99a77c..2201c75b4c 100644 --- a/packages/opencode/src/server/server.ts +++ b/packages/opencode/src/server/server.ts @@ -1,16 +1,25 @@ import { generateSpecs } from "hono-openapi" import { Hono } from "hono" +import type { MiddlewareHandler } from "hono" import { adapter } from "#hono" -import { MDNS } from "./mdns" import { lazy } from "@/util/lazy" -import { AuthMiddleware, CompressionMiddleware, CorsMiddleware, ErrorMiddleware, LoggerMiddleware } from "./middleware" -import { FenceMiddleware } from "./fence" -import { InstanceRoutes } from "./instance" -import { initProjectors } from 
"./projectors" import { Log } from "@/util" import { Flag } from "@/flag/flag" -import { ControlPlaneRoutes } from "./control" -import { UIRoutes } from "./ui" +import { Instance } from "@/project/instance" +import { InstanceBootstrap } from "@/project/bootstrap" +import { AppRuntime } from "@/effect/app-runtime" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { WorkspaceID } from "@/control-plane/schema" +import { WorkspaceContext } from "@/control-plane/workspace-context" +import { MDNS } from "./mdns" +import { AuthMiddleware, CompressionMiddleware, CorsMiddleware, ErrorMiddleware, LoggerMiddleware } from "./middleware" +import { FenceMiddleware } from "./fence" +import { initProjectors } from "./projectors" +import { InstanceRoutes } from "./routes/instance" +import { ControlPlaneRoutes } from "./routes/control" +import { UIRoutes } from "./routes/ui" +import { GlobalRoutes } from "./routes/global" +import { WorkspaceRouterMiddleware } from "./workspace" // @ts-ignore This global is needed to prevent ai-sdk from logging warnings to stdout https://github.com/vercel/ai/blob/2dc67e0ef538307f21368db32d5a12345d98831b/packages/ai/src/logger/log-warnings.ts#L85 globalThis.AI_SDK_LOG_WARNINGS = false @@ -30,18 +39,48 @@ export const Default = lazy(() => create({})) function create(opts: { cors?: string[] }) { const app = new Hono() + .onError(ErrorMiddleware) + .use(AuthMiddleware) + .use(LoggerMiddleware) + .use(CompressionMiddleware) + .use(CorsMiddleware(opts)) + .route("/global", GlobalRoutes()) + const runtime = adapter.create(app) + function InstanceMiddleware(workspaceID?: WorkspaceID): MiddlewareHandler { + return async (c, next) => { + const raw = c.req.query("directory") || c.req.header("x-opencode-directory") || process.cwd() + const directory = AppFileSystem.resolve( + (() => { + try { + return decodeURIComponent(raw) + } catch { + return raw + } + })(), + ) + + return WorkspaceContext.provide({ + workspaceID, + async fn() { + return 
Instance.provide({ + directory, + init: () => AppRuntime.runPromise(InstanceBootstrap), + async fn() { + return next() + }, + }) + }, + }) + } + } + if (Flag.OPENCODE_WORKSPACE_ID) { return { app: app - .onError(ErrorMiddleware) - .use(AuthMiddleware) - .use(LoggerMiddleware) - .use(CompressionMiddleware) - .use(CorsMiddleware(opts)) + .use(InstanceMiddleware(Flag.OPENCODE_WORKSPACE_ID ? WorkspaceID.make(Flag.OPENCODE_WORKSPACE_ID) : undefined)) .use(FenceMiddleware) - .route("/", ControlPlaneRoutes()) .route("/", InstanceRoutes(runtime.upgradeWebSocket)), runtime, } @@ -49,12 +88,9 @@ function create(opts: { cors?: string[] }) { return { app: app - .onError(ErrorMiddleware) - .use(AuthMiddleware) - .use(LoggerMiddleware) - .use(CompressionMiddleware) - .use(CorsMiddleware(opts)) + .use(InstanceMiddleware()) .route("/", ControlPlaneRoutes()) + .use(WorkspaceRouterMiddleware(runtime.upgradeWebSocket)) .route("/", InstanceRoutes(runtime.upgradeWebSocket)) .route("/", UIRoutes()), runtime, diff --git a/packages/opencode/src/server/instance/middleware.ts b/packages/opencode/src/server/workspace.ts similarity index 79% rename from packages/opencode/src/server/instance/middleware.ts rename to packages/opencode/src/server/workspace.ts index 7b66072c23..c141d10956 100644 --- a/packages/opencode/src/server/instance/middleware.ts +++ b/packages/opencode/src/server/workspace.ts @@ -2,17 +2,16 @@ import type { MiddlewareHandler } from "hono" import type { UpgradeWebSocket } from "hono/ws" import { getAdaptor } from "@/control-plane/adaptors" import { WorkspaceID } from "@/control-plane/schema" +import { WorkspaceContext } from "@/control-plane/workspace-context" import { Workspace } from "@/control-plane/workspace" -import { ServerProxy } from "../proxy" -import { Instance } from "@/project/instance" -import { InstanceBootstrap } from "@/project/bootstrap" import { Flag } from "@/flag/flag" +import { InstanceBootstrap } from "@/project/bootstrap" +import { Instance } from 
"@/project/instance" import { Session } from "@/session" import { SessionID } from "@/session/schema" -import { WorkspaceContext } from "@/control-plane/workspace-context" import { AppRuntime } from "@/effect/app-runtime" import { Log } from "@/util" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { ServerProxy } from "./proxy" type Rule = { method?: string; path: string; exact?: boolean; action: "local" | "forward" } @@ -51,45 +50,13 @@ export function WorkspaceRouterMiddleware(upgrade: UpgradeWebSocket): Middleware const log = Log.create({ service: "workspace-router" }) return async (c, next) => { - const raw = c.req.query("directory") || c.req.header("x-opencode-directory") || process.cwd() - const directory = AppFileSystem.resolve( - (() => { - try { - return decodeURIComponent(raw) - } catch { - return raw - } - })(), - ) - const url = new URL(c.req.url) const sessionWorkspaceID = await getSessionWorkspace(url) const workspaceID = sessionWorkspaceID || url.searchParams.get("workspace") if (!workspaceID || url.pathname.startsWith("/console") || Flag.OPENCODE_WORKSPACE_ID) { - if (Flag.OPENCODE_WORKSPACE_ID) { - return WorkspaceContext.provide({ - workspaceID: WorkspaceID.make(Flag.OPENCODE_WORKSPACE_ID), - async fn() { - return Instance.provide({ - directory, - init: () => AppRuntime.runPromise(InstanceBootstrap), - async fn() { - return next() - }, - }) - }, - }) - } - - return Instance.provide({ - directory, - init: () => AppRuntime.runPromise(InstanceBootstrap), - async fn() { - return next() - }, - }) + return next() } const workspace = await Workspace.get(WorkspaceID.make(workspaceID)) diff --git a/packages/opencode/test/server/session-messages.test.ts b/packages/opencode/test/server/session-messages.test.ts index 50b7658969..23e8b50145 100644 --- a/packages/opencode/test/server/session-messages.test.ts +++ b/packages/opencode/test/server/session-messages.test.ts @@ -165,16 +165,3 @@ describe("session messages endpoint", () => { ) 
}) }) - -describe("session.prompt_async error handling", () => { - test("prompt_async route has error handler for detached prompt call", async () => { - const src = await Bun.file(new URL("../../src/server/instance/session.ts", import.meta.url)).text() - const start = src.indexOf('"/:sessionID/prompt_async"') - const end = src.indexOf('"/:sessionID/command"', start) - expect(start).toBeGreaterThan(-1) - expect(end).toBeGreaterThan(start) - const route = src.slice(start, end) - expect(route).toContain(".catch(") - expect(route).toContain("Bus.publish(Session.Event.Error") - }) -}) diff --git a/packages/sdk/js/src/v2/gen/sdk.gen.ts b/packages/sdk/js/src/v2/gen/sdk.gen.ts index d7bf43f506..f484147a40 100644 --- a/packages/sdk/js/src/v2/gen/sdk.gen.ts +++ b/packages/sdk/js/src/v2/gen/sdk.gen.ts @@ -510,6 +510,430 @@ export class App extends HeyApiClient { } } +export class Adaptor extends HeyApiClient { + /** + * List workspace adaptors + * + * List all available workspace adaptors for the current project. + */ + public list( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/workspace/adaptor", + ...options, + ...params, + }) + } +} + +export class Workspace extends HeyApiClient { + /** + * List workspaces + * + * List all workspaces. + */ + public list( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/workspace", + ...options, + ...params, + }) + } + + /** + * Create workspace + * + * Create a workspace for the current project. 
+ */ + public create( + parameters?: { + directory?: string + workspace?: string + id?: string + type?: string + branch?: string | null + extra?: unknown | null + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + { in: "body", key: "id" }, + { in: "body", key: "type" }, + { in: "body", key: "branch" }, + { in: "body", key: "extra" }, + ], + }, + ], + ) + return (options?.client ?? this.client).post< + ExperimentalWorkspaceCreateResponses, + ExperimentalWorkspaceCreateErrors, + ThrowOnError + >({ + url: "/experimental/workspace", + ...options, + ...params, + headers: { + "Content-Type": "application/json", + ...options?.headers, + ...params.headers, + }, + }) + } + + /** + * Workspace status + * + * Get connection status for workspaces in the current project. + */ + public status( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/workspace/status", + ...options, + ...params, + }) + } + + /** + * Remove workspace + * + * Remove an existing workspace. + */ + public remove( + parameters: { + id: string + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "path", key: "id" }, + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? 
this.client).delete< + ExperimentalWorkspaceRemoveResponses, + ExperimentalWorkspaceRemoveErrors, + ThrowOnError + >({ + url: "/experimental/workspace/{id}", + ...options, + ...params, + }) + } + + /** + * Restore session into workspace + * + * Replay a session's sync events into the target workspace in batches. + */ + public sessionRestore( + parameters: { + id: string + directory?: string + workspace?: string + sessionID?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "path", key: "id" }, + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + { in: "body", key: "sessionID" }, + ], + }, + ], + ) + return (options?.client ?? this.client).post< + ExperimentalWorkspaceSessionRestoreResponses, + ExperimentalWorkspaceSessionRestoreErrors, + ThrowOnError + >({ + url: "/experimental/workspace/{id}/session-restore", + ...options, + ...params, + headers: { + "Content-Type": "application/json", + ...options?.headers, + ...params.headers, + }, + }) + } + + private _adaptor?: Adaptor + get adaptor(): Adaptor { + return (this._adaptor ??= new Adaptor({ client: this.client })) + } +} + +export class Console extends HeyApiClient { + /** + * Get active Console provider metadata + * + * Get the active Console org name and the set of provider IDs managed by that Console org. + */ + public get( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/console", + ...options, + ...params, + }) + } + + /** + * List switchable Console orgs + * + * Get the available Console orgs across logged-in accounts, including the current active org. 
+ */ + public listOrgs( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/console/orgs", + ...options, + ...params, + }) + } + + /** + * Switch active Console org + * + * Persist a new active Console account/org selection for the current local OpenCode state. + */ + public switchOrg( + parameters?: { + directory?: string + workspace?: string + accountID?: string + orgID?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + { in: "body", key: "accountID" }, + { in: "body", key: "orgID" }, + ], + }, + ], + ) + return (options?.client ?? this.client).post({ + url: "/experimental/console/switch", + ...options, + ...params, + headers: { + "Content-Type": "application/json", + ...options?.headers, + ...params.headers, + }, + }) + } +} + +export class Session extends HeyApiClient { + /** + * List sessions + * + * Get a list of all OpenCode sessions across projects, sorted by most recently updated. Archived sessions are excluded by default. + */ + public list( + parameters?: { + directory?: string + workspace?: string + roots?: boolean + start?: number + cursor?: number + search?: string + limit?: number + archived?: boolean + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + { in: "query", key: "roots" }, + { in: "query", key: "start" }, + { in: "query", key: "cursor" }, + { in: "query", key: "search" }, + { in: "query", key: "limit" }, + { in: "query", key: "archived" }, + ], + }, + ], + ) + return (options?.client ?? 
this.client).get({ + url: "/experimental/session", + ...options, + ...params, + }) + } +} + +export class Resource extends HeyApiClient { + /** + * Get MCP resources + * + * Get all available MCP resources from connected servers. Optionally filter by name. + */ + public list( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/resource", + ...options, + ...params, + }) + } +} + +export class Experimental extends HeyApiClient { + private _workspace?: Workspace + get workspace(): Workspace { + return (this._workspace ??= new Workspace({ client: this.client })) + } + + private _console?: Console + get console(): Console { + return (this._console ??= new Console({ client: this.client })) + } + + private _session?: Session + get session(): Session { + return (this._session ??= new Session({ client: this.client })) + } + + private _resource?: Resource + get resource(): Resource { + return (this._resource ??= new Resource({ client: this.client })) + } +} + export class Project extends HeyApiClient { /** * List all projects @@ -972,430 +1396,6 @@ export class Config2 extends HeyApiClient { } } -export class Console extends HeyApiClient { - /** - * Get active Console provider metadata - * - * Get the active Console org name and the set of provider IDs managed by that Console org. - */ - public get( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? 
this.client).get({ - url: "/experimental/console", - ...options, - ...params, - }) - } - - /** - * List switchable Console orgs - * - * Get the available Console orgs across logged-in accounts, including the current active org. - */ - public listOrgs( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? this.client).get({ - url: "/experimental/console/orgs", - ...options, - ...params, - }) - } - - /** - * Switch active Console org - * - * Persist a new active Console account/org selection for the current local OpenCode state. - */ - public switchOrg( - parameters?: { - directory?: string - workspace?: string - accountID?: string - orgID?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - { in: "body", key: "accountID" }, - { in: "body", key: "orgID" }, - ], - }, - ], - ) - return (options?.client ?? this.client).post({ - url: "/experimental/console/switch", - ...options, - ...params, - headers: { - "Content-Type": "application/json", - ...options?.headers, - ...params.headers, - }, - }) - } -} - -export class Adaptor extends HeyApiClient { - /** - * List workspace adaptors - * - * List all available workspace adaptors for the current project. - */ - public list( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? 
this.client).get({ - url: "/experimental/workspace/adaptor", - ...options, - ...params, - }) - } -} - -export class Workspace extends HeyApiClient { - /** - * List workspaces - * - * List all workspaces. - */ - public list( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? this.client).get({ - url: "/experimental/workspace", - ...options, - ...params, - }) - } - - /** - * Create workspace - * - * Create a workspace for the current project. - */ - public create( - parameters?: { - directory?: string - workspace?: string - id?: string - type?: string - branch?: string | null - extra?: unknown | null - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - { in: "body", key: "id" }, - { in: "body", key: "type" }, - { in: "body", key: "branch" }, - { in: "body", key: "extra" }, - ], - }, - ], - ) - return (options?.client ?? this.client).post< - ExperimentalWorkspaceCreateResponses, - ExperimentalWorkspaceCreateErrors, - ThrowOnError - >({ - url: "/experimental/workspace", - ...options, - ...params, - headers: { - "Content-Type": "application/json", - ...options?.headers, - ...params.headers, - }, - }) - } - - /** - * Workspace status - * - * Get connection status for workspaces in the current project. - */ - public status( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? 
this.client).get({ - url: "/experimental/workspace/status", - ...options, - ...params, - }) - } - - /** - * Remove workspace - * - * Remove an existing workspace. - */ - public remove( - parameters: { - id: string - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "path", key: "id" }, - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? this.client).delete< - ExperimentalWorkspaceRemoveResponses, - ExperimentalWorkspaceRemoveErrors, - ThrowOnError - >({ - url: "/experimental/workspace/{id}", - ...options, - ...params, - }) - } - - /** - * Restore session into workspace - * - * Replay a session's sync events into the target workspace in batches. - */ - public sessionRestore( - parameters: { - id: string - directory?: string - workspace?: string - sessionID?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "path", key: "id" }, - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - { in: "body", key: "sessionID" }, - ], - }, - ], - ) - return (options?.client ?? this.client).post< - ExperimentalWorkspaceSessionRestoreResponses, - ExperimentalWorkspaceSessionRestoreErrors, - ThrowOnError - >({ - url: "/experimental/workspace/{id}/session-restore", - ...options, - ...params, - headers: { - "Content-Type": "application/json", - ...options?.headers, - ...params.headers, - }, - }) - } - - private _adaptor?: Adaptor - get adaptor(): Adaptor { - return (this._adaptor ??= new Adaptor({ client: this.client })) - } -} - -export class Session extends HeyApiClient { - /** - * List sessions - * - * Get a list of all OpenCode sessions across projects, sorted by most recently updated. Archived sessions are excluded by default. 
- */ - public list( - parameters?: { - directory?: string - workspace?: string - roots?: boolean - start?: number - cursor?: number - search?: string - limit?: number - archived?: boolean - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - { in: "query", key: "roots" }, - { in: "query", key: "start" }, - { in: "query", key: "cursor" }, - { in: "query", key: "search" }, - { in: "query", key: "limit" }, - { in: "query", key: "archived" }, - ], - }, - ], - ) - return (options?.client ?? this.client).get({ - url: "/experimental/session", - ...options, - ...params, - }) - } -} - -export class Resource extends HeyApiClient { - /** - * Get MCP resources - * - * Get all available MCP resources from connected servers. Optionally filter by name. - */ - public list( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? 
this.client).get({ - url: "/experimental/resource", - ...options, - ...params, - }) - } -} - -export class Experimental extends HeyApiClient { - private _console?: Console - get console(): Console { - return (this._console ??= new Console({ client: this.client })) - } - - private _workspace?: Workspace - get workspace(): Workspace { - return (this._workspace ??= new Workspace({ client: this.client })) - } - - private _session?: Session - get session(): Session { - return (this._session ??= new Session({ client: this.client })) - } - - private _resource?: Resource - get resource(): Resource { - return (this._resource ??= new Resource({ client: this.client })) - } -} - export class Tool extends HeyApiClient { /** * List tool IDs @@ -4314,6 +4314,11 @@ export class OpencodeClient extends HeyApiClient { return (this._app ??= new App({ client: this.client })) } + private _experimental?: Experimental + get experimental(): Experimental { + return (this._experimental ??= new Experimental({ client: this.client })) + } + private _project?: Project get project(): Project { return (this._project ??= new Project({ client: this.client })) @@ -4329,11 +4334,6 @@ export class OpencodeClient extends HeyApiClient { return (this._config ??= new Config2({ client: this.client })) } - private _experimental?: Experimental - get experimental(): Experimental { - return (this._experimental ??= new Experimental({ client: this.client })) - } - private _tool?: Tool get tool(): Tool { return (this._tool ??= new Tool({ client: this.client })) diff --git a/packages/sdk/js/src/v2/gen/types.gen.ts b/packages/sdk/js/src/v2/gen/types.gen.ts index 25c3cfa669..839dae8b22 100644 --- a/packages/sdk/js/src/v2/gen/types.gen.ts +++ b/packages/sdk/js/src/v2/gen/types.gen.ts @@ -1706,6 +1706,16 @@ export type WellKnownAuth = { export type Auth = OAuth | ApiAuth | WellKnownAuth +export type Workspace = { + id: string + type: string + name: string + branch: string | null + directory: string | null + extra: 
unknown | null + projectID: string +} + export type NotFoundError = { name: "NotFoundError" data: { @@ -1808,16 +1818,6 @@ export type ToolListItem = { export type ToolList = Array -export type Workspace = { - id: string - type: string - name: string - branch: string | null - directory: string | null - extra: unknown | null - projectID: string -} - export type Worktree = { name: string branch: string @@ -2394,6 +2394,177 @@ export type AppLogResponses = { export type AppLogResponse = AppLogResponses[keyof AppLogResponses] +export type ExperimentalWorkspaceAdaptorListData = { + body?: never + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace/adaptor" +} + +export type ExperimentalWorkspaceAdaptorListResponses = { + /** + * Workspace adaptors + */ + 200: Array<{ + type: string + name: string + description: string + }> +} + +export type ExperimentalWorkspaceAdaptorListResponse = + ExperimentalWorkspaceAdaptorListResponses[keyof ExperimentalWorkspaceAdaptorListResponses] + +export type ExperimentalWorkspaceListData = { + body?: never + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace" +} + +export type ExperimentalWorkspaceListResponses = { + /** + * Workspaces + */ + 200: Array +} + +export type ExperimentalWorkspaceListResponse = + ExperimentalWorkspaceListResponses[keyof ExperimentalWorkspaceListResponses] + +export type ExperimentalWorkspaceCreateData = { + body?: { + id?: string + type: string + branch: string | null + extra: unknown | null + } + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace" +} + +export type ExperimentalWorkspaceCreateErrors = { + /** + * Bad request + */ + 400: BadRequestError +} + +export type ExperimentalWorkspaceCreateError = + ExperimentalWorkspaceCreateErrors[keyof ExperimentalWorkspaceCreateErrors] + +export type ExperimentalWorkspaceCreateResponses = { + /** + * Workspace 
created + */ + 200: Workspace +} + +export type ExperimentalWorkspaceCreateResponse = + ExperimentalWorkspaceCreateResponses[keyof ExperimentalWorkspaceCreateResponses] + +export type ExperimentalWorkspaceStatusData = { + body?: never + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace/status" +} + +export type ExperimentalWorkspaceStatusResponses = { + /** + * Workspace status + */ + 200: Array<{ + workspaceID: string + status: "connected" | "connecting" | "disconnected" | "error" + error?: string + }> +} + +export type ExperimentalWorkspaceStatusResponse = + ExperimentalWorkspaceStatusResponses[keyof ExperimentalWorkspaceStatusResponses] + +export type ExperimentalWorkspaceRemoveData = { + body?: never + path: { + id: string + } + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace/{id}" +} + +export type ExperimentalWorkspaceRemoveErrors = { + /** + * Bad request + */ + 400: BadRequestError +} + +export type ExperimentalWorkspaceRemoveError = + ExperimentalWorkspaceRemoveErrors[keyof ExperimentalWorkspaceRemoveErrors] + +export type ExperimentalWorkspaceRemoveResponses = { + /** + * Workspace removed + */ + 200: Workspace +} + +export type ExperimentalWorkspaceRemoveResponse = + ExperimentalWorkspaceRemoveResponses[keyof ExperimentalWorkspaceRemoveResponses] + +export type ExperimentalWorkspaceSessionRestoreData = { + body?: { + sessionID: string + } + path: { + id: string + } + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace/{id}/session-restore" +} + +export type ExperimentalWorkspaceSessionRestoreErrors = { + /** + * Bad request + */ + 400: BadRequestError +} + +export type ExperimentalWorkspaceSessionRestoreError = + ExperimentalWorkspaceSessionRestoreErrors[keyof ExperimentalWorkspaceSessionRestoreErrors] + +export type ExperimentalWorkspaceSessionRestoreResponses = { + /** + * Session replay started + */ + 200: { + total: number 
+ } +} + +export type ExperimentalWorkspaceSessionRestoreResponse = + ExperimentalWorkspaceSessionRestoreResponses[keyof ExperimentalWorkspaceSessionRestoreResponses] + export type ProjectListData = { body?: never path?: never @@ -2883,177 +3054,6 @@ export type ToolListResponses = { export type ToolListResponse = ToolListResponses[keyof ToolListResponses] -export type ExperimentalWorkspaceAdaptorListData = { - body?: never - path?: never - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace/adaptor" -} - -export type ExperimentalWorkspaceAdaptorListResponses = { - /** - * Workspace adaptors - */ - 200: Array<{ - type: string - name: string - description: string - }> -} - -export type ExperimentalWorkspaceAdaptorListResponse = - ExperimentalWorkspaceAdaptorListResponses[keyof ExperimentalWorkspaceAdaptorListResponses] - -export type ExperimentalWorkspaceListData = { - body?: never - path?: never - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace" -} - -export type ExperimentalWorkspaceListResponses = { - /** - * Workspaces - */ - 200: Array -} - -export type ExperimentalWorkspaceListResponse = - ExperimentalWorkspaceListResponses[keyof ExperimentalWorkspaceListResponses] - -export type ExperimentalWorkspaceCreateData = { - body?: { - id?: string - type: string - branch: string | null - extra: unknown | null - } - path?: never - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace" -} - -export type ExperimentalWorkspaceCreateErrors = { - /** - * Bad request - */ - 400: BadRequestError -} - -export type ExperimentalWorkspaceCreateError = - ExperimentalWorkspaceCreateErrors[keyof ExperimentalWorkspaceCreateErrors] - -export type ExperimentalWorkspaceCreateResponses = { - /** - * Workspace created - */ - 200: Workspace -} - -export type ExperimentalWorkspaceCreateResponse = - ExperimentalWorkspaceCreateResponses[keyof ExperimentalWorkspaceCreateResponses] - 
-export type ExperimentalWorkspaceStatusData = { - body?: never - path?: never - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace/status" -} - -export type ExperimentalWorkspaceStatusResponses = { - /** - * Workspace status - */ - 200: Array<{ - workspaceID: string - status: "connected" | "connecting" | "disconnected" | "error" - error?: string - }> -} - -export type ExperimentalWorkspaceStatusResponse = - ExperimentalWorkspaceStatusResponses[keyof ExperimentalWorkspaceStatusResponses] - -export type ExperimentalWorkspaceRemoveData = { - body?: never - path: { - id: string - } - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace/{id}" -} - -export type ExperimentalWorkspaceRemoveErrors = { - /** - * Bad request - */ - 400: BadRequestError -} - -export type ExperimentalWorkspaceRemoveError = - ExperimentalWorkspaceRemoveErrors[keyof ExperimentalWorkspaceRemoveErrors] - -export type ExperimentalWorkspaceRemoveResponses = { - /** - * Workspace removed - */ - 200: Workspace -} - -export type ExperimentalWorkspaceRemoveResponse = - ExperimentalWorkspaceRemoveResponses[keyof ExperimentalWorkspaceRemoveResponses] - -export type ExperimentalWorkspaceSessionRestoreData = { - body?: { - sessionID: string - } - path: { - id: string - } - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace/{id}/session-restore" -} - -export type ExperimentalWorkspaceSessionRestoreErrors = { - /** - * Bad request - */ - 400: BadRequestError -} - -export type ExperimentalWorkspaceSessionRestoreError = - ExperimentalWorkspaceSessionRestoreErrors[keyof ExperimentalWorkspaceSessionRestoreErrors] - -export type ExperimentalWorkspaceSessionRestoreResponses = { - /** - * Session replay started - */ - 200: { - total: number - } -} - -export type ExperimentalWorkspaceSessionRestoreResponse = - ExperimentalWorkspaceSessionRestoreResponses[keyof ExperimentalWorkspaceSessionRestoreResponses] - 
export type WorktreeRemoveData = { body?: WorktreeRemoveInput path?: never diff --git a/packages/sdk/openapi.json b/packages/sdk/openapi.json index 9193b11ad9..cf14026eae 100644 --- a/packages/sdk/openapi.json +++ b/packages/sdk/openapi.json @@ -415,6 +415,394 @@ ] } }, + "/experimental/workspace/adaptor": { + "get": { + "operationId": "experimental.workspace.adaptor.list", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "List workspace adaptors", + "description": "List all available workspace adaptors for the current project.", + "responses": { + "200": { + "description": "Workspace adaptors", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + } + }, + "required": ["type", "name", "description"] + } + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.adaptor.list({\n ...\n})" + } + ] + } + }, + "/experimental/workspace": { + "post": { + "operationId": "experimental.workspace.create", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "Create workspace", + "description": "Create a workspace for the current project.", + "responses": { + "200": { + "description": "Workspace created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Workspace" + } + } + } + }, + "400": { + "description": "Bad request", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/BadRequestError" + } + } + } + } + }, + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "id": { + "type": "string", + "pattern": "^wrk.*" + }, + "type": { + "type": "string" + }, + "branch": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "extra": { + "anyOf": [ + {}, + { + "type": "null" + } + ] + } + }, + "required": ["type", "branch", "extra"] + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.create({\n ...\n})" + } + ] + }, + "get": { + "operationId": "experimental.workspace.list", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "List workspaces", + "description": "List all workspaces.", + "responses": { + "200": { + "description": "Workspaces", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Workspace" + } + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.list({\n ...\n})" + } + ] + } + }, + "/experimental/workspace/status": { + "get": { + "operationId": "experimental.workspace.status", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "Workspace status", + "description": "Get connection status for workspaces in the current project.", + "responses": { + "200": { + "description": "Workspace status", + "content": { + "application/json": { + "schema": { + "type": 
"array", + "items": { + "type": "object", + "properties": { + "workspaceID": { + "type": "string", + "pattern": "^wrk.*" + }, + "status": { + "type": "string", + "enum": ["connected", "connecting", "disconnected", "error"] + }, + "error": { + "type": "string" + } + }, + "required": ["workspaceID", "status"] + } + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.status({\n ...\n})" + } + ] + } + }, + "/experimental/workspace/{id}": { + "delete": { + "operationId": "experimental.workspace.remove", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + }, + { + "in": "path", + "name": "id", + "schema": { + "type": "string", + "pattern": "^wrk.*" + }, + "required": true + } + ], + "summary": "Remove workspace", + "description": "Remove an existing workspace.", + "responses": { + "200": { + "description": "Workspace removed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Workspace" + } + } + } + }, + "400": { + "description": "Bad request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BadRequestError" + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.remove({\n ...\n})" + } + ] + } + }, + "/experimental/workspace/{id}/session-restore": { + "post": { + "operationId": "experimental.workspace.sessionRestore", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + }, + { + "in": "path", + "name": "id", + "schema": { + "type": 
"string", + "pattern": "^wrk.*" + }, + "required": true + } + ], + "summary": "Restore session into workspace", + "description": "Replay a session's sync events into the target workspace in batches.", + "responses": { + "200": { + "description": "Session replay started", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "minimum": 0, + "maximum": 9007199254740991 + } + }, + "required": ["total"] + } + } + } + }, + "400": { + "description": "Bad request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BadRequestError" + } + } + } + } + }, + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "sessionID": { + "type": "string", + "pattern": "^ses.*" + } + }, + "required": ["sessionID"] + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.sessionRestore({\n ...\n})" + } + ] + } + }, "/project": { "get": { "operationId": "project.list", @@ -1501,394 +1889,6 @@ ] } }, - "/experimental/workspace/adaptor": { - "get": { - "operationId": "experimental.workspace.adaptor.list", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - } - ], - "summary": "List workspace adaptors", - "description": "List all available workspace adaptors for the current project.", - "responses": { - "200": { - "description": "Workspace adaptors", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "description": { - "type": "string" - } - }, - "required": ["type", "name", "description"] - } - } - } - } - } - 
}, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.adaptor.list({\n ...\n})" - } - ] - } - }, - "/experimental/workspace": { - "post": { - "operationId": "experimental.workspace.create", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - } - ], - "summary": "Create workspace", - "description": "Create a workspace for the current project.", - "responses": { - "200": { - "description": "Workspace created", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Workspace" - } - } - } - }, - "400": { - "description": "Bad request", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BadRequestError" - } - } - } - } - }, - "requestBody": { - "content": { - "application/json": { - "schema": { - "type": "object", - "properties": { - "id": { - "type": "string", - "pattern": "^wrk.*" - }, - "type": { - "type": "string" - }, - "branch": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "extra": { - "anyOf": [ - {}, - { - "type": "null" - } - ] - } - }, - "required": ["type", "branch", "extra"] - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.create({\n ...\n})" - } - ] - }, - "get": { - "operationId": "experimental.workspace.list", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - } - ], - "summary": "List workspaces", - "description": "List all workspaces.", - "responses": { - "200": { - "description": "Workspaces", - 
"content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Workspace" - } - } - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.list({\n ...\n})" - } - ] - } - }, - "/experimental/workspace/status": { - "get": { - "operationId": "experimental.workspace.status", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - } - ], - "summary": "Workspace status", - "description": "Get connection status for workspaces in the current project.", - "responses": { - "200": { - "description": "Workspace status", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "object", - "properties": { - "workspaceID": { - "type": "string", - "pattern": "^wrk.*" - }, - "status": { - "type": "string", - "enum": ["connected", "connecting", "disconnected", "error"] - }, - "error": { - "type": "string" - } - }, - "required": ["workspaceID", "status"] - } - } - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.status({\n ...\n})" - } - ] - } - }, - "/experimental/workspace/{id}": { - "delete": { - "operationId": "experimental.workspace.remove", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - }, - { - "in": "path", - "name": "id", - "schema": { - "type": "string", - "pattern": "^wrk.*" - }, - "required": true - } - ], - "summary": "Remove workspace", - "description": "Remove an existing workspace.", - "responses": { - "200": { - 
"description": "Workspace removed", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Workspace" - } - } - } - }, - "400": { - "description": "Bad request", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BadRequestError" - } - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.remove({\n ...\n})" - } - ] - } - }, - "/experimental/workspace/{id}/session-restore": { - "post": { - "operationId": "experimental.workspace.sessionRestore", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - }, - { - "in": "path", - "name": "id", - "schema": { - "type": "string", - "pattern": "^wrk.*" - }, - "required": true - } - ], - "summary": "Restore session into workspace", - "description": "Replay a session's sync events into the target workspace in batches.", - "responses": { - "200": { - "description": "Session replay started", - "content": { - "application/json": { - "schema": { - "type": "object", - "properties": { - "total": { - "type": "integer", - "minimum": 0, - "maximum": 9007199254740991 - } - }, - "required": ["total"] - } - } - } - }, - "400": { - "description": "Bad request", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BadRequestError" - } - } - } - } - }, - "requestBody": { - "content": { - "application/json": { - "schema": { - "type": "object", - "properties": { - "sessionID": { - "type": "string", - "pattern": "^ses.*" - } - }, - "required": ["sessionID"] - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait 
client.experimental.workspace.sessionRestore({\n ...\n})" - } - ] - } - }, "/experimental/worktree": { "post": { "operationId": "worktree.create", @@ -12003,6 +12003,53 @@ } ] }, + "Workspace": { + "type": "object", + "properties": { + "id": { + "type": "string", + "pattern": "^wrk.*" + }, + "type": { + "type": "string" + }, + "name": { + "type": "string" + }, + "branch": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "directory": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "extra": { + "anyOf": [ + {}, + { + "type": "null" + } + ] + }, + "projectID": { + "type": "string" + } + }, + "required": ["id", "type", "name", "branch", "directory", "extra", "projectID"] + }, "NotFoundError": { "type": "object", "properties": { @@ -12309,53 +12356,6 @@ "$ref": "#/components/schemas/ToolListItem" } }, - "Workspace": { - "type": "object", - "properties": { - "id": { - "type": "string", - "pattern": "^wrk.*" - }, - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "branch": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "directory": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "extra": { - "anyOf": [ - {}, - { - "type": "null" - } - ] - }, - "projectID": { - "type": "string" - } - }, - "required": ["id", "type", "name", "branch", "directory", "extra", "projectID"] - }, "Worktree": { "type": "object", "properties": { From 65b2a10e97d34e6b8a832a69b5a75f90f21e076c Mon Sep 17 00:00:00 2001 From: Dax Date: Fri, 17 Apr 2026 02:12:41 -0400 Subject: [PATCH 133/201] fade in prompt metadata transitions (#23037) --- packages/opencode/src/cli/cmd/tui/app.tsx | 18 ++++---- .../cli/cmd/tui/component/prompt/index.tsx | 44 ++++++++++++++++--- .../src/cli/cmd/tui/context/route.tsx | 13 +++--- .../opencode/src/cli/cmd/tui/context/sync.tsx | 1 + .../opencode/src/cli/cmd/tui/util/signal.ts | 33 +++++++++++++- 5 files changed, 87 insertions(+), 
22 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/app.tsx b/packages/opencode/src/cli/cmd/tui/app.tsx index 7e883ec0e3..74eca9a0f2 100644 --- a/packages/opencode/src/cli/cmd/tui/app.tsx +++ b/packages/opencode/src/cli/cmd/tui/app.tsx @@ -148,7 +148,16 @@ export function tui(input: { - + Promise }) { }) local.model.set({ providerID, modelID }, { recent: true }) } - // Handle --session without --fork immediately (fork is handled in createEffect below) - if (args.sessionID && !args.fork) { - route.navigate({ - type: "session", - sessionID: args.sessionID, - }) - } }) }) diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index 82cdefebcb..08540e62e4 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -1,5 +1,15 @@ -import { BoxRenderable, TextareaRenderable, MouseEvent, PasteEvent, decodePasteBytes } from "@opentui/core" -import { createEffect, createMemo, onMount, createSignal, onCleanup, on, Show, Switch, Match } from "solid-js" +import { BoxRenderable, RGBA, TextareaRenderable, MouseEvent, PasteEvent, decodePasteBytes } from "@opentui/core" +import { + createEffect, + createMemo, + onMount, + createSignal, + onCleanup, + on, + Show, + Switch, + Match, +} from "solid-js" import "opentui-spinner/solid" import path from "path" import { fileURLToPath } from "url" @@ -35,6 +45,7 @@ import { DialogProvider as DialogProviderConnect } from "../dialog-provider" import { DialogAlert } from "../../ui/dialog-alert" import { useToast } from "../../ui/toast" import { useKV } from "../../context/kv" +import { createFadeIn } from "../../util/signal" import { useTextareaKeybindings } from "../textarea-keybindings" import { DialogSkill } from "../dialog-skill" import { useArgs } from "@tui/context/args" @@ -75,6 +86,10 @@ function randomIndex(count: number) { return Math.floor(Math.random() * 
count) } +function fadeColor(color: RGBA, alpha: number) { + return RGBA.fromValues(color.r, color.g, color.b, color.a * alpha) +} + let stashed: { prompt: PromptInfo; cursor: number } | undefined export function Prompt(props: PromptProps) { @@ -97,6 +112,7 @@ export function Prompt(props: PromptProps) { const renderer = useRenderer() const { theme, syntax } = useTheme() const kv = useKV() + const animationsEnabled = createMemo(() => kv.get("animations_enabled", true)) const list = createMemo(() => props.placeholders?.normal ?? []) const shell = createMemo(() => props.placeholders?.shell ?? []) const [auto, setAuto] = createSignal() @@ -858,6 +874,13 @@ export function Prompt(props: PromptProps) { return !!current }) + const agentMetaAlpha = createFadeIn(() => !!local.agent.current(), animationsEnabled) + const modelMetaAlpha = createFadeIn(() => !!local.agent.current() && store.mode === "normal", animationsEnabled) + const variantMetaAlpha = createFadeIn( + () => !!local.agent.current() && store.mode === "normal" && showVariant(), + animationsEnabled, + ) + const placeholderText = createMemo(() => { if (props.showPlaceholder === false) return undefined if (store.mode === "shell") { @@ -1133,17 +1156,24 @@ export function Prompt(props: PromptProps) { }> {(agent) => ( <> - {store.mode === "shell" ? "Shell" : Locale.titlecase(agent().name)} + + {store.mode === "shell" ? 
"Shell" : Locale.titlecase(agent().name)}{" "} + - + {local.model.parsed().model} - {currentProviderLabel()} + {currentProviderLabel()} - · + · - {local.model.variant.current()} + + {local.model.variant.current()} + diff --git a/packages/opencode/src/cli/cmd/tui/context/route.tsx b/packages/opencode/src/cli/cmd/tui/context/route.tsx index 6db8247592..35be17801b 100644 --- a/packages/opencode/src/cli/cmd/tui/context/route.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/route.tsx @@ -23,13 +23,14 @@ export type Route = HomeRoute | SessionRoute | PluginRoute export const { use: useRoute, provider: RouteProvider } = createSimpleContext({ name: "Route", - init: () => { + init: (props: { initialRoute?: Route }) => { const [store, setStore] = createStore( - process.env["OPENCODE_ROUTE"] - ? JSON.parse(process.env["OPENCODE_ROUTE"]) - : { - type: "home", - }, + props.initialRoute ?? + (process.env["OPENCODE_ROUTE"] + ? JSON.parse(process.env["OPENCODE_ROUTE"]) + : { + type: "home", + }), ) return { diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 57326e3a1a..d2a7e5c4d0 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -467,6 +467,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return store.status }, get ready() { + return true if (process.env.OPENCODE_FAST_BOOT) return true return store.status !== "loading" }, diff --git a/packages/opencode/src/cli/cmd/tui/util/signal.ts b/packages/opencode/src/cli/cmd/tui/util/signal.ts index 15b57886d6..1c7cc0008d 100644 --- a/packages/opencode/src/cli/cmd/tui/util/signal.ts +++ b/packages/opencode/src/cli/cmd/tui/util/signal.ts @@ -1,7 +1,38 @@ -import { createSignal, type Accessor } from "solid-js" +import { createEffect, createSignal, on, onCleanup, type Accessor } from "solid-js" import { debounce, type Scheduled } from "@solid-primitives/scheduled" 
export function createDebouncedSignal(value: T, ms: number): [Accessor, Scheduled<[value: T]>] { const [get, set] = createSignal(value) return [get, debounce((v: T) => set(() => v), ms)] } + +export function createFadeIn(show: Accessor, enabled: Accessor) { + const [alpha, setAlpha] = createSignal(show() ? 1 : 0) + + createEffect( + on([show, enabled], ([visible, animate], previous) => { + if (!visible) { + setAlpha(0) + return + } + + if (!animate || !previous) { + setAlpha(1) + return + } + + const start = performance.now() + setAlpha(0) + + const timer = setInterval(() => { + const progress = Math.min((performance.now() - start) / 160, 1) + setAlpha(progress * progress * (3 - 2 * progress)) + if (progress >= 1) clearInterval(timer) + }, 16) + + onCleanup(() => clearInterval(timer)) + }), + ) + + return alpha +} From 81f0885879f1e89b21774fc1fc6c603bd0ecd967 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 06:13:42 +0000 Subject: [PATCH 134/201] chore: generate --- .../src/cli/cmd/tui/component/prompt/index.tsx | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index 08540e62e4..06e5a0884e 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -1,15 +1,5 @@ import { BoxRenderable, RGBA, TextareaRenderable, MouseEvent, PasteEvent, decodePasteBytes } from "@opentui/core" -import { - createEffect, - createMemo, - onMount, - createSignal, - onCleanup, - on, - Show, - Switch, - Match, -} from "solid-js" +import { createEffect, createMemo, onMount, createSignal, onCleanup, on, Show, Switch, Match } from "solid-js" import "opentui-spinner/solid" import path from "path" import { fileURLToPath } from "url" From d9950598d0da16fc0f8e6c289050a9d3da055af7 Mon Sep 17 00:00:00 2001 From: Dax Date: Fri, 17 Apr 2026 
02:44:01 -0400 Subject: [PATCH 135/201] core: migrate config loading to Effect framework (#23032) --- packages/opencode/src/cli/cmd/tui/app.tsx | 2 + .../src/cli/cmd/tui/config/tui-migrate.ts | 6 +- .../opencode/src/cli/cmd/tui/config/tui.ts | 27 +- packages/opencode/src/config/config.ts | 457 +++++++++--------- packages/opencode/src/config/paths.ts | 46 +- 5 files changed, 275 insertions(+), 263 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/app.tsx b/packages/opencode/src/cli/cmd/tui/app.tsx index 74eca9a0f2..f8ffd27dc8 100644 --- a/packages/opencode/src/cli/cmd/tui/app.tsx +++ b/packages/opencode/src/cli/cmd/tui/app.tsx @@ -135,7 +135,9 @@ export function tui(input: { await TuiPluginRuntime.dispose() } + console.log("starting renderer") const renderer = await createCliRenderer(rendererConfig(input.config)) + console.log("renderer started") await render(() => { return ( diff --git a/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts b/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts index 9323dd979a..a7f50ddf9d 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui-migrate.ts @@ -132,8 +132,10 @@ async function backupAndStripLegacy(file: string, source: string) { } async function opencodeFiles(input: { directories: string[]; cwd: string }) { - const project = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? 
[] : await ConfigPaths.projectFiles("opencode", input.cwd) - const files = [...project, ...ConfigPaths.fileInDirectory(Global.Path.config, "opencode")] + const files = [ + ...ConfigPaths.fileInDirectory(Global.Path.config, "opencode"), + ...(await Filesystem.findUp(["opencode.json", "opencode.jsonc"], input.cwd, undefined, { rootFirst: true })), + ] for (const dir of unique(input.directories)) { files.push(...ConfigPaths.fileInDirectory(dir, "opencode")) } diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index 1a5e49badb..a5c9ae0430 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -89,15 +89,13 @@ async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { acc.result.plugin_origins = plugins } -async function loadState(ctx: { directory: string }) { +const loadState = Effect.fn("TuiConfig.loadState")(function* (ctx: { directory: string }) { // Every config dir we may read from: global config dir, any `.opencode` // folders between cwd and home, and OPENCODE_CONFIG_DIR. - const directories = await ConfigPaths.directories(ctx.directory) - // One-time migration: extract tui keys (theme/keybinds/tui) from existing - // opencode.json files into sibling tui.json files. - await migrateTuiConfig({ directories, cwd: ctx.directory }) + const directories = yield* ConfigPaths.directories(ctx.directory) + yield* Effect.promise(() => migrateTuiConfig({ directories, cwd: ctx.directory })) - const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) + const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : yield* ConfigPaths.files("tui", ctx.directory) const acc: Acc = { result: {}, @@ -105,18 +103,19 @@ async function loadState(ctx: { directory: string }) { // 1. Global tui config (lowest precedence). 
for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { - await mergeFile(acc, file, ctx) + yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie) } // 2. Explicit OPENCODE_TUI_CONFIG override, if set. if (Flag.OPENCODE_TUI_CONFIG) { - await mergeFile(acc, Flag.OPENCODE_TUI_CONFIG, ctx) - log.debug("loaded custom tui config", { path: Flag.OPENCODE_TUI_CONFIG }) + const configFile = Flag.OPENCODE_TUI_CONFIG + yield* Effect.promise(() => mergeFile(acc, configFile, ctx)).pipe(Effect.orDie) + log.debug("loaded custom tui config", { path: configFile }) } // 3. Project tui files, applied root-first so the closest file wins. for (const file of projectFiles) { - await mergeFile(acc, file, ctx) + yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie) } // 4. `.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while @@ -127,7 +126,7 @@ async function loadState(ctx: { directory: string }) { for (const dir of dirs) { if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { - await mergeFile(acc, file, ctx) + yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie) } } @@ -146,14 +145,14 @@ async function loadState(ctx: { directory: string }) { config: acc.result, dirs: acc.result.plugin?.length ? 
dirs : [], } -} +}) export const layer = Layer.effect( Service, Effect.gen(function* () { const directory = yield* CurrentWorkingDirectory const npm = yield* Npm.Service - const data = yield* Effect.promise(() => loadState({ directory })) + const data = yield* loadState({ directory }) const deps = yield* Effect.forEach( data.dirs, (dir) => @@ -176,7 +175,7 @@ export const layer = Layer.effect( }).pipe(Effect.withSpan("TuiConfig.layer")), ) -export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) +export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer), Layer.provide(AppFileSystem.defaultLayer)) const { runPromise } = makeRuntime(Service, defaultLayer) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 7598aa92f1..ebd4a41fcb 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -413,260 +413,261 @@ export const layer = Layer.effect( } }) - const loadInstanceState = Effect.fn("Config.loadInstanceState")(function* (ctx: InstanceContext) { - const auth = yield* authSvc.all().pipe(Effect.orDie) + const loadInstanceState = Effect.fn("Config.loadInstanceState")( + function* (ctx: InstanceContext) { + const auth = yield* authSvc.all().pipe(Effect.orDie) - let result: Info = {} - const consoleManagedProviders = new Set() - let activeOrgName: string | undefined + let result: Info = {} + const consoleManagedProviders = new Set() + let activeOrgName: string | undefined - const pluginScopeForSource = Effect.fnUntraced(function* (source: string) { - if (source.startsWith("http://") || source.startsWith("https://")) return "global" - if (source === "OPENCODE_CONFIG_CONTENT") return "local" - if (yield* InstanceRef.use((ctx) => Effect.succeed(Instance.containsPath(source, ctx)))) return "local" - return "global" - }) - - const mergePluginOrigins = Effect.fnUntraced(function* ( - source: string, - // mergePluginOrigins receives raw Specs from one config 
source, before provenance for this merge step - // is attached. - list: ConfigPlugin.Spec[] | undefined, - // Scope can be inferred from the source path, but some callers already know whether the config should - // behave as global or local and can pass that explicitly. - kind?: ConfigPlugin.Scope, - ) { - if (!list?.length) return - const hit = kind ?? (yield* pluginScopeForSource(source)) - // Merge newly seen plugin origins with previously collected ones, then dedupe by plugin identity while - // keeping the winning source/scope metadata for downstream installs, writes, and diagnostics. - const plugins = ConfigPlugin.deduplicatePluginOrigins([ - ...(result.plugin_origins ?? []), - ...list.map((spec) => ({ spec, source, scope: hit })), - ]) - result.plugin = plugins.map((item) => item.spec) - result.plugin_origins = plugins - }) - - const merge = (source: string, next: Info, kind?: ConfigPlugin.Scope) => { - result = mergeConfigConcatArrays(result, next) - return mergePluginOrigins(source, next.plugin, kind) - } - - for (const [key, value] of Object.entries(auth)) { - if (value.type === "wellknown") { - const url = key.replace(/\/+$/, "") - process.env[value.key] = value.token - log.debug("fetching remote config", { url: `${url}/.well-known/opencode` }) - const response = yield* Effect.promise(() => fetch(`${url}/.well-known/opencode`)) - if (!response.ok) { - throw new Error(`failed to fetch remote config from ${url}: ${response.status}`) - } - const wellknown = (yield* Effect.promise(() => response.json())) as { config?: Record } - const remoteConfig = wellknown.config ?? 
{} - if (!remoteConfig.$schema) remoteConfig.$schema = "https://opencode.ai/config.json" - const source = `${url}/.well-known/opencode` - const next = yield* loadConfig(JSON.stringify(remoteConfig), { - dir: path.dirname(source), - source, - }) - yield* merge(source, next, "global") - log.debug("loaded remote config from well-known", { url }) - } - } - - const global = yield* getGlobal() - yield* merge(Global.Path.config, global, "global") - - if (Flag.OPENCODE_CONFIG) { - yield* merge(Flag.OPENCODE_CONFIG, yield* loadFile(Flag.OPENCODE_CONFIG)) - log.debug("loaded custom config", { path: Flag.OPENCODE_CONFIG }) - } - - if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { - for (const file of yield* Effect.promise(() => - ConfigPaths.projectFiles("opencode", ctx.directory, ctx.worktree), - )) { - yield* merge(file, yield* loadFile(file), "local") - } - } - - result.agent = result.agent || {} - result.mode = result.mode || {} - result.plugin = result.plugin || [] - - const directories = yield* Effect.promise(() => ConfigPaths.directories(ctx.directory, ctx.worktree)) - - if (Flag.OPENCODE_CONFIG_DIR) { - log.debug("loading config from OPENCODE_CONFIG_DIR", { path: Flag.OPENCODE_CONFIG_DIR }) - } - - const deps: Fiber.Fiber[] = [] - - for (const dir of directories) { - if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) { - for (const file of ["opencode.json", "opencode.jsonc"]) { - const source = path.join(dir, file) - log.debug(`loading config from ${source}`) - yield* merge(source, yield* loadFile(source)) - result.agent ??= {} - result.mode ??= {} - result.plugin ??= [] - } - } - - yield* ensureGitignore(dir).pipe(Effect.orDie) - - const dep = yield* npmSvc - .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], - }) - .pipe( - Effect.exit, - Effect.tap((exit) => - Exit.isFailure(exit) - ? 
Effect.sync(() => { - log.warn("background dependency install failed", { dir, error: String(exit.cause) }) - }) - : Effect.void, - ), - Effect.asVoid, - Effect.forkDetach, - ) - deps.push(dep) - - result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) - result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.load(dir))) - result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.loadMode(dir))) - // Auto-discovered plugins under `.opencode/plugin(s)` are already local files, so ConfigPlugin.load - // returns normalized Specs and we only need to attach origin metadata here. - const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) - yield* mergePluginOrigins(dir, list) - } - - if (process.env.OPENCODE_CONFIG_CONTENT) { - const source = "OPENCODE_CONFIG_CONTENT" - const next = yield* loadConfig(process.env.OPENCODE_CONFIG_CONTENT, { - dir: ctx.directory, - source, + const pluginScopeForSource = Effect.fnUntraced(function* (source: string) { + if (source.startsWith("http://") || source.startsWith("https://")) return "global" + if (source === "OPENCODE_CONFIG_CONTENT") return "local" + if (yield* InstanceRef.use((ctx) => Effect.succeed(Instance.containsPath(source, ctx)))) return "local" + return "global" }) - yield* merge(source, next, "local") - log.debug("loaded custom config from OPENCODE_CONFIG_CONTENT") - } - const activeAccount = Option.getOrUndefined( - yield* accountSvc.active().pipe(Effect.catch(() => Effect.succeed(Option.none()))), - ) - if (activeAccount?.active_org_id) { - const accountID = activeAccount.id - const orgID = activeAccount.active_org_id - const url = activeAccount.url - yield* Effect.gen(function* () { - const [configOpt, tokenOpt] = yield* Effect.all( - [accountSvc.config(accountID, orgID), accountSvc.token(accountID)], - { concurrency: 2 }, - ) - if (Option.isSome(tokenOpt)) { - process.env["OPENCODE_CONSOLE_TOKEN"] = 
tokenOpt.value - yield* env.set("OPENCODE_CONSOLE_TOKEN", tokenOpt.value) - } + const mergePluginOrigins = Effect.fnUntraced(function* ( + source: string, + // mergePluginOrigins receives raw Specs from one config source, before provenance for this merge step + // is attached. + list: ConfigPlugin.Spec[] | undefined, + // Scope can be inferred from the source path, but some callers already know whether the config should + // behave as global or local and can pass that explicitly. + kind?: ConfigPlugin.Scope, + ) { + if (!list?.length) return + const hit = kind ?? (yield* pluginScopeForSource(source)) + // Merge newly seen plugin origins with previously collected ones, then dedupe by plugin identity while + // keeping the winning source/scope metadata for downstream installs, writes, and diagnostics. + const plugins = ConfigPlugin.deduplicatePluginOrigins([ + ...(result.plugin_origins ?? []), + ...list.map((spec) => ({ spec, source, scope: hit })), + ]) + result.plugin = plugins.map((item) => item.spec) + result.plugin_origins = plugins + }) - if (Option.isSome(configOpt)) { - const source = `${url}/api/config` - const next = yield* loadConfig(JSON.stringify(configOpt.value), { + const merge = (source: string, next: Info, kind?: ConfigPlugin.Scope) => { + result = mergeConfigConcatArrays(result, next) + return mergePluginOrigins(source, next.plugin, kind) + } + + for (const [key, value] of Object.entries(auth)) { + if (value.type === "wellknown") { + const url = key.replace(/\/+$/, "") + process.env[value.key] = value.token + log.debug("fetching remote config", { url: `${url}/.well-known/opencode` }) + const response = yield* Effect.promise(() => fetch(`${url}/.well-known/opencode`)) + if (!response.ok) { + throw new Error(`failed to fetch remote config from ${url}: ${response.status}`) + } + const wellknown = (yield* Effect.promise(() => response.json())) as { config?: Record } + const remoteConfig = wellknown.config ?? 
{} + if (!remoteConfig.$schema) remoteConfig.$schema = "https://opencode.ai/config.json" + const source = `${url}/.well-known/opencode` + const next = yield* loadConfig(JSON.stringify(remoteConfig), { dir: path.dirname(source), source, }) - for (const providerID of Object.keys(next.provider ?? {})) { - consoleManagedProviders.add(providerID) - } yield* merge(source, next, "global") + log.debug("loaded remote config from well-known", { url }) } - }).pipe( - Effect.withSpan("Config.loadActiveOrgConfig"), - Effect.catch((err) => { - log.debug("failed to fetch remote account config", { - error: err instanceof Error ? err.message : String(err), + } + + const global = yield* getGlobal() + yield* merge(Global.Path.config, global, "global") + + if (Flag.OPENCODE_CONFIG) { + yield* merge(Flag.OPENCODE_CONFIG, yield* loadFile(Flag.OPENCODE_CONFIG)) + log.debug("loaded custom config", { path: Flag.OPENCODE_CONFIG }) + } + + if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { + for (const file of yield* ConfigPaths.files("opencode", ctx.directory, ctx.worktree).pipe(Effect.orDie)) { + yield* merge(file, yield* loadFile(file), "local") + } + } + + result.agent = result.agent || {} + result.mode = result.mode || {} + result.plugin = result.plugin || [] + + const directories = yield* ConfigPaths.directories(ctx.directory, ctx.worktree) + + if (Flag.OPENCODE_CONFIG_DIR) { + log.debug("loading config from OPENCODE_CONFIG_DIR", { path: Flag.OPENCODE_CONFIG_DIR }) + } + + const deps: Fiber.Fiber[] = [] + + for (const dir of directories) { + if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) { + for (const file of ["opencode.json", "opencode.jsonc"]) { + const source = path.join(dir, file) + log.debug(`loading config from ${source}`) + yield* merge(source, yield* loadFile(source)) + result.agent ??= {} + result.mode ??= {} + result.plugin ??= [] + } + } + + yield* ensureGitignore(dir).pipe(Effect.orDie) + + const dep = yield* npmSvc + .install(dir, { + add: 
["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], }) - return Effect.void - }), - ) - } + .pipe( + Effect.exit, + Effect.tap((exit) => + Exit.isFailure(exit) + ? Effect.sync(() => { + log.warn("background dependency install failed", { dir, error: String(exit.cause) }) + }) + : Effect.void, + ), + Effect.asVoid, + Effect.forkDetach, + ) + deps.push(dep) - const managedDir = ConfigManaged.managedConfigDir() - if (existsSync(managedDir)) { - for (const file of ["opencode.json", "opencode.jsonc"]) { - const source = path.join(managedDir, file) - yield* merge(source, yield* loadFile(source), "global") + result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) + result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.load(dir))) + result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.loadMode(dir))) + // Auto-discovered plugins under `.opencode/plugin(s)` are already local files, so ConfigPlugin.load + // returns normalized Specs and we only need to attach origin metadata here. 
+ const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) + yield* mergePluginOrigins(dir, list) } - } - // macOS managed preferences (.mobileconfig deployed via MDM) override everything - const managed = yield* Effect.promise(() => ConfigManaged.readManagedPreferences()) - if (managed) { - result = mergeConfigConcatArrays( - result, - yield* loadConfig(managed.text, { - dir: path.dirname(managed.source), - source: managed.source, - }), + if (process.env.OPENCODE_CONFIG_CONTENT) { + const source = "OPENCODE_CONFIG_CONTENT" + const next = yield* loadConfig(process.env.OPENCODE_CONFIG_CONTENT, { + dir: ctx.directory, + source, + }) + yield* merge(source, next, "local") + log.debug("loaded custom config from OPENCODE_CONFIG_CONTENT") + } + + const activeAccount = Option.getOrUndefined( + yield* accountSvc.active().pipe(Effect.catch(() => Effect.succeed(Option.none()))), ) - } + if (activeAccount?.active_org_id) { + const accountID = activeAccount.id + const orgID = activeAccount.active_org_id + const url = activeAccount.url + yield* Effect.gen(function* () { + const [configOpt, tokenOpt] = yield* Effect.all( + [accountSvc.config(accountID, orgID), accountSvc.token(accountID)], + { concurrency: 2 }, + ) + if (Option.isSome(tokenOpt)) { + process.env["OPENCODE_CONSOLE_TOKEN"] = tokenOpt.value + yield* env.set("OPENCODE_CONSOLE_TOKEN", tokenOpt.value) + } - for (const [name, mode] of Object.entries(result.mode ?? {})) { - result.agent = mergeDeep(result.agent ?? {}, { - [name]: { - ...mode, - mode: "primary" as const, - }, - }) - } + if (Option.isSome(configOpt)) { + const source = `${url}/api/config` + const next = yield* loadConfig(JSON.stringify(configOpt.value), { + dir: path.dirname(source), + source, + }) + for (const providerID of Object.keys(next.provider ?? 
{})) { + consoleManagedProviders.add(providerID) + } + yield* merge(source, next, "global") + } + }).pipe( + Effect.withSpan("Config.loadActiveOrgConfig"), + Effect.catch((err) => { + log.debug("failed to fetch remote account config", { + error: err instanceof Error ? err.message : String(err), + }) + return Effect.void + }), + ) + } - if (Flag.OPENCODE_PERMISSION) { - result.permission = mergeDeep(result.permission ?? {}, JSON.parse(Flag.OPENCODE_PERMISSION)) - } - - if (result.tools) { - const perms: Record = {} - for (const [tool, enabled] of Object.entries(result.tools)) { - const action: ConfigPermission.Action = enabled ? "allow" : "deny" - if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { - perms.edit = action - continue + const managedDir = ConfigManaged.managedConfigDir() + if (existsSync(managedDir)) { + for (const file of ["opencode.json", "opencode.jsonc"]) { + const source = path.join(managedDir, file) + yield* merge(source, yield* loadFile(source), "global") } - perms[tool] = action } - result.permission = mergeDeep(perms, result.permission ?? {}) - } - if (!result.username) result.username = os.userInfo().username + // macOS managed preferences (.mobileconfig deployed via MDM) override everything + const managed = yield* Effect.promise(() => ConfigManaged.readManagedPreferences()) + if (managed) { + result = mergeConfigConcatArrays( + result, + yield* loadConfig(managed.text, { + dir: path.dirname(managed.source), + source: managed.source, + }), + ) + } - if (result.autoshare === true && !result.share) { - result.share = "auto" - } + for (const [name, mode] of Object.entries(result.mode ?? {})) { + result.agent = mergeDeep(result.agent ?? 
{}, { + [name]: { + ...mode, + mode: "primary" as const, + }, + }) + } - if (Flag.OPENCODE_DISABLE_AUTOCOMPACT) { - result.compaction = { ...result.compaction, auto: false } - } - if (Flag.OPENCODE_DISABLE_PRUNE) { - result.compaction = { ...result.compaction, prune: false } - } + if (Flag.OPENCODE_PERMISSION) { + result.permission = mergeDeep(result.permission ?? {}, JSON.parse(Flag.OPENCODE_PERMISSION)) + } - return { - config: result, - directories, - deps, - consoleState: { - consoleManagedProviders: Array.from(consoleManagedProviders), - activeOrgName, - switchableOrgCount: 0, - }, - } - }) + if (result.tools) { + const perms: Record = {} + for (const [tool, enabled] of Object.entries(result.tools)) { + const action: ConfigPermission.Action = enabled ? "allow" : "deny" + if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { + perms.edit = action + continue + } + perms[tool] = action + } + result.permission = mergeDeep(perms, result.permission ?? {}) + } + + if (!result.username) result.username = os.userInfo().username + + if (result.autoshare === true && !result.share) { + result.share = "auto" + } + + if (Flag.OPENCODE_DISABLE_AUTOCOMPACT) { + result.compaction = { ...result.compaction, auto: false } + } + if (Flag.OPENCODE_DISABLE_PRUNE) { + result.compaction = { ...result.compaction, prune: false } + } + + return { + config: result, + directories, + deps, + consoleState: { + consoleManagedProviders: Array.from(consoleManagedProviders), + activeOrgName, + switchableOrgCount: 0, + }, + } + }, + Effect.provideService(AppFileSystem.Service, fs), + ) const state = yield* InstanceState.make( Effect.fn("Config.state")(function* (ctx) { - return yield* loadInstanceState(ctx) + return yield* loadInstanceState(ctx).pipe(Effect.orDie) }), ) diff --git a/packages/opencode/src/config/paths.ts b/packages/opencode/src/config/paths.ts index dcf0c940f2..db4b914f76 100644 --- a/packages/opencode/src/config/paths.ts +++ 
b/packages/opencode/src/config/paths.ts @@ -6,33 +6,41 @@ import { Flag } from "@/flag/flag" import { Global } from "@/global" import { unique } from "remeda" import { JsonError } from "./error" +import * as Effect from "effect/Effect" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" -export async function projectFiles(name: string, directory: string, worktree?: string) { - return Filesystem.findUp([`${name}.json`, `${name}.jsonc`], directory, worktree, { rootFirst: true }) -} +export const files = Effect.fn("ConfigPaths.projectFiles")(function* ( + name: string, + directory: string, + worktree?: string, +) { + const afs = yield* AppFileSystem.Service + return (yield* afs.up({ + targets: [`${name}.jsonc`, `${name}.json`], + start: directory, + stop: worktree, + })).toReversed() +}) -export async function directories(directory: string, worktree?: string) { +export const directories = Effect.fn("ConfigPaths.directories")(function* (directory: string, worktree?: string) { + const afs = yield* AppFileSystem.Service return unique([ Global.Path.config, ...(!Flag.OPENCODE_DISABLE_PROJECT_CONFIG - ? await Array.fromAsync( - Filesystem.up({ - targets: [".opencode"], - start: directory, - stop: worktree, - }), - ) + ? yield* afs.up({ + targets: [".opencode"], + start: directory, + stop: worktree, + }) : []), - ...(await Array.fromAsync( - Filesystem.up({ - targets: [".opencode"], - start: Global.Path.home, - stop: Global.Path.home, - }), - )), + ...(yield* afs.up({ + targets: [".opencode"], + start: Global.Path.home, + stop: Global.Path.home, + })), ...(Flag.OPENCODE_CONFIG_DIR ? 
[Flag.OPENCODE_CONFIG_DIR] : []), ]) -} +}) export function fileInDirectory(dir: string, name: string) { return [path.join(dir, `${name}.json`), path.join(dir, `${name}.jsonc`)] From a7265307355e7efed9e276da6625c96458009db6 Mon Sep 17 00:00:00 2001 From: Brendan Allan <14191578+Brendonovich@users.noreply.github.com> Date: Fri, 17 Apr 2026 15:26:14 +0800 Subject: [PATCH 136/201] fix(app): workspace loading and persist ready state (#23046) --- packages/app/src/context/global-sync.tsx | 1 - .../src/context/global-sync/child-store.ts | 1 - packages/app/src/context/global-sync/types.ts | 1 - packages/app/src/pages/layout.tsx | 362 +++++++++--------- .../src/pages/layout/sidebar-workspace.tsx | 12 +- packages/app/src/utils/persist.ts | 2 +- 6 files changed, 186 insertions(+), 193 deletions(-) diff --git a/packages/app/src/context/global-sync.tsx b/packages/app/src/context/global-sync.tsx index 1359b07b4e..1a672639b5 100644 --- a/packages/app/src/context/global-sync.tsx +++ b/packages/app/src/context/global-sync.tsx @@ -264,7 +264,6 @@ function createGlobalSync() { children.pin(directory) const promise = Promise.resolve().then(async () => { const child = children.ensureChild(directory) - child[1]("bootstrapPromise", promise!) 
const cache = children.vcsCache.get(directory) if (!cache) return const sdk = sdkFor(directory) diff --git a/packages/app/src/context/global-sync/child-store.ts b/packages/app/src/context/global-sync/child-store.ts index 6788e8cc59..3fe67e4fbe 100644 --- a/packages/app/src/context/global-sync/child-store.ts +++ b/packages/app/src/context/global-sync/child-store.ts @@ -182,7 +182,6 @@ export function createChildStoreManager(input: { limit: 5, message: {}, part: {}, - bootstrapPromise: Promise.resolve(), }) children[directory] = child disposers.set(directory, dispose) diff --git a/packages/app/src/context/global-sync/types.ts b/packages/app/src/context/global-sync/types.ts index 28b3705d15..e3ec83c5ee 100644 --- a/packages/app/src/context/global-sync/types.ts +++ b/packages/app/src/context/global-sync/types.ts @@ -72,7 +72,6 @@ export type State = { part: { [messageID: string]: Part[] } - bootstrapPromise: Promise } export type VcsCache = { diff --git a/packages/app/src/pages/layout.tsx b/packages/app/src/pages/layout.tsx index 12a2bf763a..6f6b3c5557 100644 --- a/packages/app/src/pages/layout.tsx +++ b/packages/app/src/pages/layout.tsx @@ -13,7 +13,7 @@ import { type Accessor, } from "solid-js" import { makeEventListener } from "@solid-primitives/event-listener" -import { useNavigate, useParams } from "@solidjs/router" +import { useLocation, useNavigate, useParams } from "@solidjs/router" import { useLayout, LocalProject } from "@/context/layout" import { useGlobalSync } from "@/context/global-sync" import { Persist, persisted } from "@/utils/persist" @@ -127,6 +127,7 @@ export default function Layout(props: ParentProps) { const theme = useTheme() const language = useLanguage() const initialDirectory = decode64(params.dir) + const location = useLocation() const route = createMemo(() => { const slug = params.dir if (!slug) return { slug, dir: "" } @@ -576,7 +577,7 @@ export default function Layout(props: ParentProps) { return projects.find((p) => p.worktree === root) 
}) - + const [autoselecting] = createResource(async () => { await ready.promise await layout.ready.promise @@ -2102,196 +2103,198 @@ export default function Layout(props: ParentProps) { } > - <> -
-
-
- { - const item = project() - if (!item) return - void renameProject(item, next) - }} - class="text-14-medium text-text-strong truncate" - displayClass="text-14-medium text-text-strong truncate" - stopPropagation - /> + {(project) => ( + <> +
+
+
+ { + const item = project() + if (!item) return + void renameProject(item, next) + }} + class="text-14-medium text-text-strong truncate" + displayClass="text-14-medium text-text-strong truncate" + stopPropagation + /> - - - {worktree().replace(homedir(), "~")} - - + + + {worktree().replace(homedir(), "~")} + + +
+ + + + + + { + const item = project() + if (!item) return + showEditProjectDialog(item) + }} + > + {language.t("common.edit")} + + { + const item = project() + if (!item) return + toggleProjectWorkspaces(item) + }} + > + + {workspacesEnabled() + ? language.t("sidebar.workspaces.disable") + : language.t("sidebar.workspaces.enable")} + + + + + {language.t("sidebar.project.clearNotifications")} + + + + { + const dir = worktree() + if (!dir) return + closeProject(dir) + }} + > + {language.t("common.close")} + + + +
- - - - - - { - const item = project() - if (!item) return - showEditProjectDialog(item) - }} - > - {language.t("common.edit")} - - { - const item = project() - if (!item) return - toggleProjectWorkspaces(item) - }} - > - - {workspacesEnabled() - ? language.t("sidebar.workspaces.disable") - : language.t("sidebar.workspaces.enable")} - - - - - {language.t("sidebar.project.clearNotifications")} - - - - { - const dir = worktree() - if (!dir) return - closeProject(dir) - }} - > - {language.t("common.close")} - - - -
-
-
- + +
+ +
+
+ +
+ + } + > <>
-
- +
+ + + +
{ + if (!panelProps.mobile) scrollContainerRef = el + }} + class="size-full flex flex-col py-2 gap-4 overflow-y-auto no-scrollbar [overflow-anchor:none]" + > + + + {(directory) => ( + + )} + + +
+ + store.activeWorkspace} + workspaceLabel={workspaceLabel} + /> + +
- } - > - <> -
- -
-
- - - -
{ - if (!panelProps.mobile) scrollContainerRef = el - }} - class="size-full flex flex-col py-2 gap-4 overflow-y-auto no-scrollbar [overflow-anchor:none]" - > - - - {(directory) => ( - - )} - - -
- - store.activeWorkspace} - workspaceLabel={workspaceLabel} - /> - -
-
- - -
- +
+
+ + )}
) - const [loading] = createResource( - () => route()?.store?.[0]?.bootstrapPromise, - (p) => p, - ) - return (
- {(autoselecting(), loading()) ?? ""} + {autoselecting() ?? ""}
diff --git a/packages/app/src/pages/layout/sidebar-workspace.tsx b/packages/app/src/pages/layout/sidebar-workspace.tsx index c1836fa8a4..0202cfc3be 100644 --- a/packages/app/src/pages/layout/sidebar-workspace.tsx +++ b/packages/app/src/pages/layout/sidebar-workspace.tsx @@ -317,12 +317,11 @@ export const SortableWorkspace = (props: { }) const open = createMemo(() => props.ctx.workspaceExpanded(props.directory, local())) const boot = createMemo(() => open() || active()) - const booted = createMemo((prev) => prev || workspaceStore.status === "complete", false) const count = createMemo(() => sessions()?.length ?? 0) const hasMore = createMemo(() => workspaceStore.sessionTotal > count()) + const query = useQuery(() => ({ ...loadSessionsQuery(props.project.worktree) })) const busy = createMemo(() => props.ctx.isBusy(props.directory)) - const wasBusy = createMemo((prev) => prev || busy(), false) - const loading = createMemo(() => open() && !booted() && count() === 0 && !wasBusy()) + const loading = () => query.isLoading const touch = createMediaQuery("(hover: none)") const showNew = createMemo(() => !loading() && (touch() || count() === 0 || (active() && !params.id))) const loadMore = async () => { @@ -427,7 +426,7 @@ export const SortableWorkspace = (props: { mobile={props.mobile} ctx={props.ctx} showNew={showNew} - loading={loading} + loading={() => query.isLoading && count() === 0} sessions={sessions} hasMore={hasMore} loadMore={loadMore} @@ -453,11 +452,10 @@ export const LocalWorkspace = (props: { }) const slug = createMemo(() => base64Encode(props.project.worktree)) const sessions = createMemo(() => sortedRootSessions(workspace().store, props.sortNow())) - const booted = createMemo((prev) => prev || workspace().store.status === "complete", false) const count = createMemo(() => sessions()?.length ?? 
0) const query = useQuery(() => ({ ...loadSessionsQuery(props.project.worktree) })) - const loading = createMemo(() => query.isPending && count() === 0) const hasMore = createMemo(() => workspace().store.sessionTotal > count()) + const loading = () => query.isLoading && count() === 0 const loadMore = async () => { workspace().setStore("limit", (limit) => (limit ?? 0) + 5) await globalSync.project.loadSessions(props.project.worktree) @@ -473,7 +471,7 @@ export const LocalWorkspace = (props: { mobile={props.mobile} ctx={props.ctx} showNew={() => false} - loading={() => query.isLoading} + loading={loading} sessions={sessions} hasMore={hasMore} loadMore={loadMore} diff --git a/packages/app/src/utils/persist.ts b/packages/app/src/utils/persist.ts index dce0e94c3b..0cac30cb1e 100644 --- a/packages/app/src/utils/persist.ts +++ b/packages/app/src/utils/persist.ts @@ -469,7 +469,7 @@ export function persisted( state, setState, init, - Object.assign(() => ready() === true, { + Object.assign(() => (ready.loading ? false : ready.latest === true), { promise: init instanceof Promise ? 
init : undefined, }), ] From c57c5315c17e1648d6699abd25db8ee195f44de1 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 07:27:13 +0000 Subject: [PATCH 137/201] chore: generate --- packages/app/src/pages/layout.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/app/src/pages/layout.tsx b/packages/app/src/pages/layout.tsx index 6f6b3c5557..3d3bd5e97b 100644 --- a/packages/app/src/pages/layout.tsx +++ b/packages/app/src/pages/layout.tsx @@ -577,7 +577,7 @@ export default function Layout(props: ParentProps) { return projects.find((p) => p.worktree === root) }) - + const [autoselecting] = createResource(async () => { await ready.promise await layout.ready.promise From ec3ac0c4b0a78d0f4de268a1d54401182497fabc Mon Sep 17 00:00:00 2001 From: Sebastian Date: Fri, 17 Apr 2026 14:29:46 +0200 Subject: [PATCH 138/201] upgrade opentui to 0.1.100 (#22928) --- bun.lock | 28 ++++++++++++++-------------- packages/opencode/package.json | 4 ++-- packages/plugin/package.json | 8 ++++---- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/bun.lock b/bun.lock index c22a2ff270..ba6c196241 100644 --- a/bun.lock +++ b/bun.lock @@ -365,8 +365,8 @@ "@opentelemetry/exporter-trace-otlp-http": "0.214.0", "@opentelemetry/sdk-trace-base": "2.6.1", "@opentelemetry/sdk-trace-node": "2.6.1", - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "0.1.100", + "@opentui/solid": "0.1.100", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", @@ -465,16 +465,16 @@ "zod": "catalog:", }, "devDependencies": { - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "0.1.100", + "@opentui/solid": "0.1.100", "@tsconfig/node22": "catalog:", "@types/node": "catalog:", "@typescript/native-preview": "catalog:", "typescript": "catalog:", }, "peerDependencies": { - "@opentui/core": ">=0.1.99", - "@opentui/solid": ">=0.1.99", + "@opentui/core": ">=0.1.100", + 
"@opentui/solid": ">=0.1.100", }, "optionalPeers": [ "@opentui/core", @@ -1598,21 +1598,21 @@ "@opentelemetry/semantic-conventions": ["@opentelemetry/semantic-conventions@1.40.0", "", {}, "sha512-cifvXDhcqMwwTlTK04GBNeIe7yyo28Mfby85QXFe1Yk8nmi36Ab/5UQwptOx84SsoGNRg+EVSjwzfSZMy6pmlw=="], - "@opentui/core": ["@opentui/core@0.1.99", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.99", "@opentui/core-darwin-x64": "0.1.99", "@opentui/core-linux-arm64": "0.1.99", "@opentui/core-linux-x64": "0.1.99", "@opentui/core-win32-arm64": "0.1.99", "@opentui/core-win32-x64": "0.1.99", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-I3+AEgGzqNWIpWX9g2WOscSPwtQDNOm4KlBjxBWCZjLxkF07u77heWXF7OiAdhKLtNUW6TFiyt6yznqAZPdG3A=="], + "@opentui/core": ["@opentui/core@0.1.100", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.100", "@opentui/core-darwin-x64": "0.1.100", "@opentui/core-linux-arm64": "0.1.100", "@opentui/core-linux-x64": "0.1.100", "@opentui/core-win32-arm64": "0.1.100", "@opentui/core-win32-x64": "0.1.100", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-g6Ft3CcOVpytMzq2AZrnHHeMrmvYeLVCsy/8LqZ30VnPv0zfJ+f1TVi/EFrcl4m0GRPdy6yBOVOMcIAWHSZvtg=="], - "@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.99", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bzVrqeX2vb5iWrc/ftOUOqeUY8XO+qSgoTwj5TXHuwagavgwD3Hpeyjx8+icnTTeM4pao0som1WR9xfye6/X5Q=="], + "@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.100", "", { "os": "darwin", 
"cpu": "arm64" }, "sha512-cY70nNjkh53tys4iQ0FDST3CQfN4Rp8zOrT6EW0dH/51KZq2Rg3EhxDpc+qu4zASadR5uuU5i61g8lYyVGeGgw=="], - "@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.99", "", { "os": "darwin", "cpu": "x64" }, "sha512-VE4FrXBYpkxnvkqcCV1a8aN9jyyMJMihVW+V2NLCtp+4yQsj0AapG5TiUSN76XnmSZRptxDy5rBmEempeoIZbg=="], + "@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.100", "", { "os": "darwin", "cpu": "x64" }, "sha512-RUJa4MPX5BWwXuc5nE+Zc+md8+ITYp5X0ourM4+ReaIIW9pTxSDcIPBba9pkJb3PIADyulUPMmmy5OX+umDJhQ=="], - "@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.99", "", { "os": "linux", "cpu": "arm64" }, "sha512-viXQsbpS7yHjYkl7+am32JdvG96QU9lvHh1UiZtpOxcNUUqiYmA2ZwZFPD2Bi54jNyj5l2hjH6YkD3DzE2FEWA=="], + "@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.100", "", { "os": "linux", "cpu": "arm64" }, "sha512-Hznr39cSXg+2sQd+WcTsk67BjyqrZvuTK6f94Uu1ULYZJYCE4KdyNU2NzJSK6ooyosSWXPDsb4kwetTlfypMZg=="], - "@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.99", "", { "os": "linux", "cpu": "x64" }, "sha512-WLoEFINOSp0tZSR9y4LUuGc7n4Y7H1wcpjUPzQ9vChkYDXrfZltEanzoDWbDcQ4kZQW5tHVC7LrZHpAsRLwFZg=="], + "@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.100", "", { "os": "linux", "cpu": "x64" }, "sha512-mssQIwH2DU1aMGSUnTJgahMeAEfF82n2WKeTGabTRkPrcxD/6ML+JFiV9l8/8YA880fBD2Kh1XXGEhN2ZGB5UA=="], - "@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.99", "", { "os": "win32", "cpu": "arm64" }, "sha512-yWMOLWCEO8HdrctU1dMkgZC8qGkiO4Dwr4/e11tTvVpRmYhDsP/IR89ZjEEtOwnKwFOFuB/MxvflqaEWVQ2g5Q=="], + "@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.100", "", { "os": "win32", "cpu": "arm64" }, "sha512-EuLA6+kiIyW/hbo7k56QTc9/QGxg2bYHQ+XPn1UWPf6tmjzbDUwzhw+y52CNJQZpTXtAQSNtxfiYIbigGBd4TQ=="], - "@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.99", "", { "os": "win32", "cpu": "x64" }, "sha512-aYRlsL2w8YRL6vPd7/hrqlNVkXU3QowWb01TOvAcHS8UAsXaGFUr47kSDyjxDi1wg1MzmVduCfsC7T3NoThV1w=="], + "@opentui/core-win32-x64": 
["@opentui/core-win32-x64@0.1.100", "", { "os": "win32", "cpu": "x64" }, "sha512-RyiqbKQ15olR8hK4VsPKL3gHrOIIpqEXhUP0jzXJdQQNusmQKlxHyk5EY3R6hi52IgqjjGxwG+ocVeRb4/VTRg=="], - "@opentui/solid": ["@opentui/solid@0.1.99", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.99", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.10", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.11" } }, "sha512-DrqqO4h2V88FmeIP2cErYkMU0ZK5MrUsZw3w6IzZpoXyyiL4/9qpWzUq+CXx+r16VP2iGxDJwGKUmtFAzUch2Q=="], + "@opentui/solid": ["@opentui/solid@0.1.100", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.100", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.10", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.11" } }, "sha512-7ADpXOIXtdNzp/r0eQdUuO3kBof7YdcJhQBj/Fv1ckNKoq1XScMuxV2kIK5IzU1Jr3PC4mDxRbek9bMvLJEVjw=="], "@oslojs/asn1": ["@oslojs/asn1@1.0.0", "", { "dependencies": { "@oslojs/binary": "1.0.0" } }, "sha512-zw/wn0sj0j0QKbIXfIlnEcTviaCzYOY3V5rAyjR6YtOByFtJiT574+8p9Wlach0lZH9fddD4yb9laEAIl4vXQA=="], diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 1dabd91b8d..51e1058839 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -122,8 +122,8 @@ "@opentelemetry/exporter-trace-otlp-http": "0.214.0", "@opentelemetry/sdk-trace-base": "2.6.1", "@opentelemetry/sdk-trace-node": "2.6.1", - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "0.1.100", + "@opentui/solid": "0.1.100", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", diff --git a/packages/plugin/package.json b/packages/plugin/package.json index 6f9a0ea1dc..3eb55979ae 100644 --- a/packages/plugin/package.json +++ b/packages/plugin/package.json @@ -22,8 +22,8 @@ "zod": "catalog:" }, 
"peerDependencies": { - "@opentui/core": ">=0.1.99", - "@opentui/solid": ">=0.1.99" + "@opentui/core": ">=0.1.100", + "@opentui/solid": ">=0.1.100" }, "peerDependenciesMeta": { "@opentui/core": { @@ -34,8 +34,8 @@ } }, "devDependencies": { - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "0.1.100", + "@opentui/solid": "0.1.100", "@tsconfig/node22": "catalog:", "@types/node": "catalog:", "typescript": "catalog:", From e78d75a003acfdbd9263753913d1bee395ef652a Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 13:07:11 +0000 Subject: [PATCH 139/201] chore: update nix node_modules hashes --- nix/hashes.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nix/hashes.json b/nix/hashes.json index 54fd991eca..c19a837e84 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,8 +1,8 @@ { "nodeModules": { - "x86_64-linux": "sha256-OPbZUo/fQv2Xsf+NEZV08GLBMN/DXovhRvn2JkesFtY=", - "aarch64-linux": "sha256-WK7xlVLuirKDN5LaqjBn7qpv5bYVtYHZw0qRNKX4xXg=", - "aarch64-darwin": "sha256-BAoAdeLQ+lXDD7Klxoxij683OVVug8KXEMRUqIQAjc8=", - "x86_64-darwin": "sha256-ZOBwNR2gZgc5f+y3VIBBT4qZpeZfg7Of6AaGDOfqsG8=" + "x86_64-linux": "sha256-yGb+EPlFNDptIi4yFdJ0z7fhAyfOCRXu0GpNxrOnLVA=", + "aarch64-linux": "sha256-h8oBwLB6LnFN4xGB0/ocvxRbBMwewrEck91ADihTUCk=", + "aarch64-darwin": "sha256-/0IOgoihi4OR2AhDDfstLn2DcY/261v/KRQV4Pwhbmk=", + "x86_64-darwin": "sha256-rPkqF+led93s1plBbhFpszrnzF2H+EUz8QlfbUkSHvM=" } } From 06ae43920b3f2384187135cbd71248a8f49ba98f Mon Sep 17 00:00:00 2001 From: opencode Date: Fri, 17 Apr 2026 13:37:06 +0000 Subject: [PATCH 140/201] release: v1.4.8 --- bun.lock | 32 +++++++++++++------------- packages/app/package.json | 2 +- packages/console/app/package.json | 2 +- packages/console/core/package.json | 2 +- packages/console/function/package.json | 2 +- packages/console/mail/package.json | 2 +- packages/desktop-electron/package.json | 2 +- packages/desktop/package.json | 2 +- 
packages/enterprise/package.json | 2 +- packages/extensions/zed/extension.toml | 12 +++++----- packages/function/package.json | 2 +- packages/opencode/package.json | 2 +- packages/plugin/package.json | 2 +- packages/sdk/js/package.json | 2 +- packages/shared/package.json | 2 +- packages/slack/package.json | 2 +- packages/ui/package.json | 2 +- packages/web/package.json | 2 +- sdks/vscode/package.json | 2 +- 19 files changed, 39 insertions(+), 39 deletions(-) diff --git a/bun.lock b/bun.lock index ba6c196241..b6f071c460 100644 --- a/bun.lock +++ b/bun.lock @@ -29,7 +29,7 @@ }, "packages/app": { "name": "@opencode-ai/app", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -83,7 +83,7 @@ }, "packages/console/app": { "name": "@opencode-ai/console-app", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@cloudflare/vite-plugin": "1.15.2", "@ibm/plex": "6.4.1", @@ -117,7 +117,7 @@ }, "packages/console/core": { "name": "@opencode-ai/console-core", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@aws-sdk/client-sts": "3.782.0", "@jsx-email/render": "1.1.1", @@ -144,7 +144,7 @@ }, "packages/console/function": { "name": "@opencode-ai/console-function", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@ai-sdk/anthropic": "3.0.64", "@ai-sdk/openai": "3.0.48", @@ -168,7 +168,7 @@ }, "packages/console/mail": { "name": "@opencode-ai/console-mail", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", @@ -192,7 +192,7 @@ }, "packages/desktop": { "name": "@opencode-ai/desktop", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@opencode-ai/app": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -225,7 +225,7 @@ }, "packages/desktop-electron": { "name": "@opencode-ai/desktop-electron", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "effect": "catalog:", 
"electron-context-menu": "4.1.2", @@ -268,7 +268,7 @@ }, "packages/enterprise": { "name": "@opencode-ai/enterprise", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@opencode-ai/shared": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -297,7 +297,7 @@ }, "packages/function": { "name": "@opencode-ai/function", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@octokit/auth-app": "8.0.1", "@octokit/rest": "catalog:", @@ -313,7 +313,7 @@ }, "packages/opencode": { "name": "opencode", - "version": "1.4.7", + "version": "1.4.8", "bin": { "opencode": "./bin/opencode", }, @@ -458,7 +458,7 @@ }, "packages/plugin": { "name": "@opencode-ai/plugin", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@opencode-ai/sdk": "workspace:*", "effect": "catalog:", @@ -493,7 +493,7 @@ }, "packages/sdk/js": { "name": "@opencode-ai/sdk", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "cross-spawn": "catalog:", }, @@ -508,7 +508,7 @@ }, "packages/shared": { "name": "@opencode-ai/shared", - "version": "1.4.7", + "version": "1.4.8", "bin": { "opencode": "./bin/opencode", }, @@ -532,7 +532,7 @@ }, "packages/slack": { "name": "@opencode-ai/slack", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@opencode-ai/sdk": "workspace:*", "@slack/bolt": "^3.17.1", @@ -567,7 +567,7 @@ }, "packages/ui": { "name": "@opencode-ai/ui", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -616,7 +616,7 @@ }, "packages/web": { "name": "@opencode-ai/web", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@astrojs/cloudflare": "12.6.3", "@astrojs/markdown-remark": "6.3.1", diff --git a/packages/app/package.json b/packages/app/package.json index 2941637d08..56ca71f892 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/app", - "version": "1.4.7", + "version": "1.4.8", "description": "", "type": 
"module", "exports": { diff --git a/packages/console/app/package.json b/packages/console/app/package.json index 8783f3fd05..528427ae80 100644 --- a/packages/console/app/package.json +++ b/packages/console/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-app", - "version": "1.4.7", + "version": "1.4.8", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/console/core/package.json b/packages/console/core/package.json index cdefd0e609..1b4292e9ad 100644 --- a/packages/console/core/package.json +++ b/packages/console/core/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/console-core", - "version": "1.4.7", + "version": "1.4.8", "private": true, "type": "module", "license": "MIT", diff --git a/packages/console/function/package.json b/packages/console/function/package.json index 898c540bac..f3de599f1c 100644 --- a/packages/console/function/package.json +++ b/packages/console/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-function", - "version": "1.4.7", + "version": "1.4.8", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": "module", diff --git a/packages/console/mail/package.json b/packages/console/mail/package.json index 46ff28b7d1..b13ad8520e 100644 --- a/packages/console/mail/package.json +++ b/packages/console/mail/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-mail", - "version": "1.4.7", + "version": "1.4.8", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", diff --git a/packages/desktop-electron/package.json b/packages/desktop-electron/package.json index e1f69b5b20..ed36f62827 100644 --- a/packages/desktop-electron/package.json +++ b/packages/desktop-electron/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop-electron", "private": true, - "version": "1.4.7", + "version": "1.4.8", "type": "module", "license": "MIT", "homepage": "https://opencode.ai", diff --git 
a/packages/desktop/package.json b/packages/desktop/package.json index d8eea4ea36..ce0ad5d4ab 100644 --- a/packages/desktop/package.json +++ b/packages/desktop/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop", "private": true, - "version": "1.4.7", + "version": "1.4.8", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/enterprise/package.json b/packages/enterprise/package.json index 12a72e647f..f667de3822 100644 --- a/packages/enterprise/package.json +++ b/packages/enterprise/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/enterprise", - "version": "1.4.7", + "version": "1.4.8", "private": true, "type": "module", "license": "MIT", diff --git a/packages/extensions/zed/extension.toml b/packages/extensions/zed/extension.toml index d164534cf7..31facbe3b5 100644 --- a/packages/extensions/zed/extension.toml +++ b/packages/extensions/zed/extension.toml @@ -1,7 +1,7 @@ id = "opencode" name = "OpenCode" description = "The open source coding agent." -version = "1.4.7" +version = "1.4.8" schema_version = 1 authors = ["Anomaly"] repository = "https://github.com/anomalyco/opencode" @@ -11,26 +11,26 @@ name = "OpenCode" icon = "./icons/opencode.svg" [agent_servers.opencode.targets.darwin-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-arm64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-darwin-arm64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.darwin-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-darwin-x64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-arm64.tar.gz" +archive = 
"https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-linux-arm64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-x64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-linux-x64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.windows-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-windows-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-windows-x64.zip" cmd = "./opencode.exe" args = ["acp"] diff --git a/packages/function/package.json b/packages/function/package.json index 36a9ddc321..877c9dcd14 100644 --- a/packages/function/package.json +++ b/packages/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/function", - "version": "1.4.7", + "version": "1.4.8", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": "module", diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 51e1058839..83d6e526fd 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.7", + "version": "1.4.8", "name": "opencode", "type": "module", "license": "MIT", diff --git a/packages/plugin/package.json b/packages/plugin/package.json index 3eb55979ae..0e51002b42 100644 --- a/packages/plugin/package.json +++ b/packages/plugin/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/plugin", - "version": "1.4.7", + "version": "1.4.8", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/sdk/js/package.json b/packages/sdk/js/package.json index 53a5893143..06213308f1 100644 --- a/packages/sdk/js/package.json +++ b/packages/sdk/js/package.json @@ -1,7 
+1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/sdk", - "version": "1.4.7", + "version": "1.4.8", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/shared/package.json b/packages/shared/package.json index 9dec6bdb6c..794c537da8 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.7", + "version": "1.4.8", "name": "@opencode-ai/shared", "type": "module", "license": "MIT", diff --git a/packages/slack/package.json b/packages/slack/package.json index a23500241e..d9865aa35c 100644 --- a/packages/slack/package.json +++ b/packages/slack/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/slack", - "version": "1.4.7", + "version": "1.4.8", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/ui/package.json b/packages/ui/package.json index cd559041cc..f05ce03826 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/ui", - "version": "1.4.7", + "version": "1.4.8", "type": "module", "license": "MIT", "exports": { diff --git a/packages/web/package.json b/packages/web/package.json index a53ef51932..a1d138319f 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -2,7 +2,7 @@ "name": "@opencode-ai/web", "type": "module", "license": "MIT", - "version": "1.4.7", + "version": "1.4.8", "scripts": { "dev": "astro dev", "dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev", diff --git a/sdks/vscode/package.json b/sdks/vscode/package.json index c499f679fe..07a3682c67 100644 --- a/sdks/vscode/package.json +++ b/sdks/vscode/package.json @@ -2,7 +2,7 @@ "name": "opencode", "displayName": "opencode", "description": "opencode for VS Code", - "version": "1.4.7", + "version": "1.4.8", "publisher": "sst-dev", "repository": { "type": "git", From fffc496f41ffc2be3149dbd2faf9a577be0a390a Mon Sep 17 00:00:00 2001 From: 
Dax Raad Date: Fri, 17 Apr 2026 09:46:35 -0400 Subject: [PATCH 141/201] remove log --- packages/opencode/src/cli/cmd/tui/app.tsx | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/app.tsx b/packages/opencode/src/cli/cmd/tui/app.tsx index f8ffd27dc8..74eca9a0f2 100644 --- a/packages/opencode/src/cli/cmd/tui/app.tsx +++ b/packages/opencode/src/cli/cmd/tui/app.tsx @@ -135,9 +135,7 @@ export function tui(input: { await TuiPluginRuntime.dispose() } - console.log("starting renderer") const renderer = await createCliRenderer(rendererConfig(input.config)) - console.log("renderer started") await render(() => { return ( From 0f80c827ed2f0ee32f8dfb8812605ed64ab47254 Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 09:52:10 -0400 Subject: [PATCH 142/201] feat(core): exponential backoff of workspace reconnect (#23083) --- .../tui/component/dialog-workspace-create.tsx | 4 + .../opencode/src/control-plane/workspace.ts | 119 ++++++++++-------- 2 files changed, 68 insertions(+), 55 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx index ad5cd45782..7ea513edee 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx @@ -229,6 +229,10 @@ export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) = }) const result = await sdk.client.experimental.workspace.create({ type, branch: null }).catch((err) => { + toast.show({ + message: "Creating workspace failed", + variant: "error", + }) log.error("workspace create request failed", { type, error: errorData(err), diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index 3af11707e8..fd22d3af04 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ 
b/packages/opencode/src/control-plane/workspace.ts @@ -34,7 +34,6 @@ export type Info = z.infer export const ConnectionStatus = z.object({ workspaceID: WorkspaceID.zod, status: z.enum(["connected", "connecting", "disconnected", "error"]), - error: z.string().optional(), }) export type ConnectionStatus = z.infer @@ -345,10 +344,10 @@ const connections = new Map() const aborts = new Map() const TIMEOUT = 5000 -function setStatus(id: WorkspaceID, status: ConnectionStatus["status"], error?: string) { +function setStatus(id: WorkspaceID, status: ConnectionStatus["status"]) { const prev = connections.get(id) - if (prev?.status === status && prev?.error === error) return - const next = { workspaceID: id, status, error } + if (prev?.status === status) return + const next = { workspaceID: id, status } connections.set(id, next) if (status === "error") { @@ -425,68 +424,78 @@ function route(url: string | URL, path: string) { return next } -async function syncWorkspace(space: Info, signal: AbortSignal) { +async function connectSSE(url: URL | string, headers: HeadersInit | undefined, signal: AbortSignal) { + const res = await fetch(route(url, "/global/event"), { + method: "GET", + headers, + signal, + }) + + if (!res.ok) throw new Error(`Workspace sync HTTP failure: ${res.status}`) + if (!res.body) throw new Error("No response body from global sync") + + return res.body +} + +async function syncWorkspaceLoop(space: Info, signal: AbortSignal) { + const adaptor = await getAdaptor(space.projectID, space.type) + const target = await adaptor.target(space) + + if (target.type === "local") return null + + let attempt = 0 + while (!signal.aborted) { log.info("connecting to global sync", { workspace: space.name }) setStatus(space.id, "connecting") - const adaptor = await getAdaptor(space.projectID, space.type) - const target = await adaptor.target(space) - - if (target.type === "local") return - - const res = await fetch(route(target.url, "/global/event"), { - method: "GET", - headers: 
target.headers, - signal, - }).catch((err: unknown) => { - setStatus(space.id, "error", err instanceof Error ? err.message : String(err)) - + let stream + try { + stream = await connectSSE(target.url, target.headers, signal) + } catch (err) { + setStatus(space.id, "error") log.info("failed to connect to global sync", { workspace: space.name, - error: err, + err, }) - return undefined - }) - - if (!res || !res.ok || !res.body) { - const error = !res ? "No response from global sync" : `Global sync HTTP ${res.status}` - log.info("failed to connect to global sync", { workspace: space.name, error }) - setStatus(space.id, "error", error) - await sleep(1000) - continue } - log.info("global sync connected", { workspace: space.name }) - setStatus(space.id, "connected") + if (stream) { + attempt = 0 - await parseSSE(res.body, signal, (evt: any) => { - try { - if (!("payload" in evt)) return + log.info("global sync connected", { workspace: space.name }) + setStatus(space.id, "connected") - if (evt.payload.type === "sync") { - SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) + await parseSSE(stream, signal, (evt: any) => { + try { + if (!("payload" in evt)) return + + if (evt.payload.type === "sync") { + SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) + } + + GlobalBus.emit("event", { + directory: evt.directory, + project: evt.project, + workspace: space.id, + payload: evt.payload, + }) + } catch (err) { + log.info("failed to replay global event", { + workspaceID: space.id, + error: err, + }) } + }) - GlobalBus.emit("event", { - directory: evt.directory, - project: evt.project, - workspace: space.id, - payload: evt.payload, - }) - } catch (err) { - log.info("failed to replay global event", { - workspaceID: space.id, - error: err, - }) - } - }) + log.info("disconnected from global sync: " + space.id) + setStatus(space.id, "disconnected") + } - log.info("disconnected from global sync: " + space.id) - setStatus(space.id, "disconnected") - 
- // TODO: Implement exponential backoff - await sleep(1000) + // Back off reconnect attempts up to 2 minutes while the workspace + // stays unavailable. + await sleep(Math.min(120_000, 1_000 * 2 ** attempt)) + attempt += 1 } } @@ -498,7 +507,7 @@ async function startSync(space: Info) { if (target.type === "local") { void Filesystem.exists(target.directory).then((exists) => { - setStatus(space.id, exists ? "connected" : "error", exists ? undefined : "directory does not exist") + setStatus(space.id, exists ? "connected" : "error") }) return } @@ -510,10 +519,10 @@ async function startSync(space: Info) { const abort = new AbortController() aborts.set(space.id, abort) - void syncWorkspace(space, abort.signal).catch((error) => { + void syncWorkspaceLoop(space, abort.signal).catch((error) => { aborts.delete(space.id) - setStatus(space.id, "error", String(error)) + setStatus(space.id, "error") log.warn("workspace listener failed", { workspaceID: space.id, error, From cb425ac927cde685ef9b6e38a62ad71c408a47df Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 13:53:11 +0000 Subject: [PATCH 143/201] chore: generate --- packages/sdk/js/src/v2/gen/types.gen.ts | 2 -- packages/sdk/openapi.json | 6 ------ 2 files changed, 8 deletions(-) diff --git a/packages/sdk/js/src/v2/gen/types.gen.ts b/packages/sdk/js/src/v2/gen/types.gen.ts index 839dae8b22..460c2bcdfa 100644 --- a/packages/sdk/js/src/v2/gen/types.gen.ts +++ b/packages/sdk/js/src/v2/gen/types.gen.ts @@ -535,7 +535,6 @@ export type EventWorkspaceStatus = { properties: { workspaceID: string status: "connected" | "connecting" | "disconnected" | "error" - error?: string } } @@ -2490,7 +2489,6 @@ export type ExperimentalWorkspaceStatusResponses = { 200: Array<{ workspaceID: string status: "connected" | "connecting" | "disconnected" | "error" - error?: string }> } diff --git a/packages/sdk/openapi.json b/packages/sdk/openapi.json index cf14026eae..7bdf025bbe 100644 --- a/packages/sdk/openapi.json +++ 
b/packages/sdk/openapi.json @@ -639,9 +639,6 @@ "status": { "type": "string", "enum": ["connected", "connecting", "disconnected", "error"] - }, - "error": { - "type": "string" } }, "required": ["workspaceID", "status"] @@ -8852,9 +8849,6 @@ "status": { "type": "string", "enum": ["connected", "connecting", "disconnected", "error"] - }, - "error": { - "type": "string" } }, "required": ["workspaceID", "status"] From 3707e4a49cb97639408a9e0da7cf148ca5ce8834 Mon Sep 17 00:00:00 2001 From: Frank Date: Fri, 17 Apr 2026 09:54:44 -0400 Subject: [PATCH 144/201] zen: routing logic --- .../app/src/routes/zen/util/handler.ts | 24 +- .../src/routes/zen/util/modelTpmLimiter.ts | 49 + .../migration.sql | 6 + .../snapshot.json | 2615 +++++++++++++++++ packages/console/core/src/model.ts | 15 +- packages/console/core/src/schema/ip.sql.ts | 10 + packages/shared/sst-env.d.ts | 10 + 7 files changed, 2723 insertions(+), 6 deletions(-) create mode 100644 packages/console/app/src/routes/zen/util/modelTpmLimiter.ts create mode 100644 packages/console/core/migrations/20260417071612_tidy_diamondback/migration.sql create mode 100644 packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json create mode 100644 packages/shared/sst-env.d.ts diff --git a/packages/console/app/src/routes/zen/util/handler.ts b/packages/console/app/src/routes/zen/util/handler.ts index d1c5985a81..2e576eaf68 100644 --- a/packages/console/app/src/routes/zen/util/handler.ts +++ b/packages/console/app/src/routes/zen/util/handler.ts @@ -45,6 +45,7 @@ import { LiteData } from "@opencode-ai/console-core/lite.js" import { Resource } from "@opencode-ai/console-resource" import { i18n, type Key } from "~/i18n" import { localeFromRequest } from "~/lib/language" +import { createModelTpmLimiter } from "./modelTpmLimiter" type ZenData = Awaited> type RetryOptions = { @@ -121,6 +122,8 @@ export async function handler( const authInfo = await authenticate(modelInfo, zenApiKey) const billingSource = 
validateBilling(authInfo, modelInfo) logger.metric({ source: billingSource }) + const modelTpmLimiter = createModelTpmLimiter(modelInfo.providers) + const modelTpmLimits = await modelTpmLimiter?.check() const retriableRequest = async (retry: RetryOptions = { excludeProviders: [], retryCount: 0 }) => { const providerInfo = selectProvider( @@ -133,6 +136,7 @@ export async function handler( trialProviders, retry, stickyProvider, + modelTpmLimits, ) validateModelSettings(billingSource, authInfo) updateProviderKey(authInfo, providerInfo) @@ -229,6 +233,7 @@ export async function handler( const usageInfo = providerInfo.normalizeUsage(json.usage) const costInfo = calculateCost(modelInfo, usageInfo) await trialLimiter?.track(usageInfo) + await modelTpmLimiter?.track(providerInfo.id, providerInfo.model, usageInfo) await trackUsage(sessionId, billingSource, authInfo, modelInfo, providerInfo, usageInfo, costInfo) await reload(billingSource, authInfo, costInfo) json.cost = calculateOccurredCost(billingSource, costInfo) @@ -278,6 +283,7 @@ export async function handler( const usageInfo = providerInfo.normalizeUsage(usage) const costInfo = calculateCost(modelInfo, usageInfo) await trialLimiter?.track(usageInfo) + await modelTpmLimiter?.track(providerInfo.id, providerInfo.model, usageInfo) await trackUsage(sessionId, billingSource, authInfo, modelInfo, providerInfo, usageInfo, costInfo) await reload(billingSource, authInfo, costInfo) const cost = calculateOccurredCost(billingSource, costInfo) @@ -433,12 +439,16 @@ export async function handler( trialProviders: string[] | undefined, retry: RetryOptions, stickyProvider: string | undefined, + modelTpmLimits: Record | undefined, ) { const modelProvider = (() => { + // Byok is top priority b/c if user set their own API key, we should use it + // instead of using the sticky provider for the same session if (authInfo?.provider?.credentials) { return modelInfo.providers.find((provider) => provider.id === modelInfo.byokProvider) } + // 
Always use the same provider for the same session if (stickyProvider) { const provider = modelInfo.providers.find((provider) => provider.id === stickyProvider) if (provider) return provider @@ -451,10 +461,20 @@ export async function handler( } if (retry.retryCount !== MAX_FAILOVER_RETRIES) { - const providers = modelInfo.providers + const allProviders = modelInfo.providers .filter((provider) => !provider.disabled) + .filter((provider) => provider.weight !== 0) .filter((provider) => !retry.excludeProviders.includes(provider.id)) - .flatMap((provider) => Array(provider.weight ?? 1).fill(provider)) + .filter((provider) => { + if (!provider.tpmLimit) return true + const usage = modelTpmLimits?.[`${provider.id}/${provider.model}`] ?? 0 + return usage < provider.tpmLimit * 1_000_000 + }) + + const topPriority = Math.min(...allProviders.map((p) => p.priority)) + const providers = allProviders + .filter((p) => p.priority <= topPriority) + .flatMap((provider) => Array(provider.weight).fill(provider)) // Use the last 4 characters of session ID to select a provider const identifier = sessionId.length ? 
sessionId : ip diff --git a/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts b/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts new file mode 100644 index 0000000000..eeb89da5f2 --- /dev/null +++ b/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts @@ -0,0 +1,49 @@ +import { and, Database, eq, inArray, sql } from "@opencode-ai/console-core/drizzle/index.js" +import { ModelRateLimitTable } from "@opencode-ai/console-core/schema/ip.sql.js" +import { UsageInfo } from "./provider/provider" + +export function createModelTpmLimiter(providers: { id: string; model: string; tpmLimit?: number }[]) { + const keys = providers.filter((p) => p.tpmLimit).map((p) => `${p.id}/${p.model}`) + if (keys.length === 0) return + + const yyyyMMddHHmm = new Date(Date.now()) + .toISOString() + .replace(/[^0-9]/g, "") + .substring(0, 12) + + return { + check: async () => { + const data = await Database.use((tx) => + tx + .select() + .from(ModelRateLimitTable) + .where(and(inArray(ModelRateLimitTable.key, keys), eq(ModelRateLimitTable.interval, yyyyMMddHHmm))), + ) + + // convert to map of model to count + return data.reduce( + (acc, curr) => { + acc[curr.key] = curr.count + return acc + }, + {} as Record, + ) + }, + track: async (id: string, model: string, usageInfo: UsageInfo) => { + const usage = + usageInfo.inputTokens + + usageInfo.outputTokens + + (usageInfo.reasoningTokens ?? 0) + + (usageInfo.cacheReadTokens ?? 0) + + (usageInfo.cacheWrite5mTokens ?? 0) + + (usageInfo.cacheWrite1hTokens ?? 
0) + if (usage <= 0) return + await Database.use((tx) => + tx + .insert(ModelRateLimitTable) + .values({ key: `${id}/${model}`, interval: yyyyMMddHHmm, count: usage }) + .onDuplicateKeyUpdate({ set: { count: sql`${ModelRateLimitTable.count} + ${usage}` } }), + ) + }, + } +} diff --git a/packages/console/core/migrations/20260417071612_tidy_diamondback/migration.sql b/packages/console/core/migrations/20260417071612_tidy_diamondback/migration.sql new file mode 100644 index 0000000000..41a4efe68e --- /dev/null +++ b/packages/console/core/migrations/20260417071612_tidy_diamondback/migration.sql @@ -0,0 +1,6 @@ +CREATE TABLE `model_rate_limit` ( + `key` varchar(255) NOT NULL, + `interval` varchar(40) NOT NULL, + `count` int NOT NULL, + CONSTRAINT PRIMARY KEY(`key`,`interval`) +); diff --git a/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json b/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json new file mode 100644 index 0000000000..169d7dbb4f --- /dev/null +++ b/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json @@ -0,0 +1,2615 @@ +{ + "version": "6", + "dialect": "mysql", + "id": "93c492af-c95b-4213-9fc2-38c3dd10374d", + "prevIds": [ + "a09a925d-6cdd-4e7c-b8b1-11c259928b4c" + ], + "ddl": [ + { + "name": "account", + "entityType": "tables" + }, + { + "name": "auth", + "entityType": "tables" + }, + { + "name": "benchmark", + "entityType": "tables" + }, + { + "name": "billing", + "entityType": "tables" + }, + { + "name": "lite", + "entityType": "tables" + }, + { + "name": "payment", + "entityType": "tables" + }, + { + "name": "subscription", + "entityType": "tables" + }, + { + "name": "usage", + "entityType": "tables" + }, + { + "name": "ip_rate_limit", + "entityType": "tables" + }, + { + "name": "ip", + "entityType": "tables" + }, + { + "name": "key_rate_limit", + "entityType": "tables" + }, + { + "name": "model_rate_limit", + "entityType": "tables" + }, + { + "name": "key", + 
"entityType": "tables" + }, + { + "name": "model", + "entityType": "tables" + }, + { + "name": "provider", + "entityType": "tables" + }, + { + "name": "user", + "entityType": "tables" + }, + { + "name": "workspace", + "entityType": "tables" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "account" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "account" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "account" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "account" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "auth" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "auth" + }, + { + "type": 
"timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "auth" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "auth" + }, + { + "type": "enum('email','github','google')", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "provider", + "entityType": "columns", + "table": "auth" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "subject", + "entityType": "columns", + "table": "auth" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "account_id", + "entityType": "columns", + "table": "auth" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": 
"benchmark" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "varchar(64)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "model", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "varchar(64)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "agent", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "mediumtext", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "result", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": 
"columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "customer_id", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "payment_method_id", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(32)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "payment_method_type", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(4)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + 
"generated": null, + "name": "payment_method_last4", + "entityType": "columns", + "table": "billing" + }, + { + "type": "bigint", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "balance", + "entityType": "columns", + "table": "billing" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_limit", + "entityType": "columns", + "table": "billing" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_usage", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_monthly_usage_updated", + "entityType": "columns", + "table": "billing" + }, + { + "type": "boolean", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "reload", + "entityType": "columns", + "table": "billing" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "reload_trigger", + "entityType": "columns", + "table": "billing" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": 
"reload_amount", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "reload_error", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_reload_error", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_reload_locked_till", + "entityType": "columns", + "table": "billing" + }, + { + "type": "json", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "subscription", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(28)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "subscription_id", + "entityType": "columns", + "table": "billing" + }, + { + "type": "enum('20','100','200')", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "subscription_plan", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + 
"generated": null, + "name": "time_subscription_booked", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_subscription_selected", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(28)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "lite_subscription_id", + "entityType": "columns", + "table": "billing" + }, + { + "type": "json", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "lite", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "lite" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": 
null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "lite" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "user_id", + "entityType": "columns", + "table": "lite" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "rolling_usage", + "entityType": "columns", + "table": "lite" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "weekly_usage", + "entityType": "columns", + "table": "lite" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_usage", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_rolling_updated", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": 
null, + "generated": null, + "name": "time_weekly_updated", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_monthly_updated", + "entityType": "columns", + "table": "lite" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "payment" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "payment" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "payment" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + 
"onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "customer_id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "invoice_id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "payment_id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "bigint", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "amount", + "entityType": "columns", + "table": "payment" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_refunded", + "entityType": "columns", + "table": "payment" + }, + { + "type": "json", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "enrichment", + "entityType": "columns", + "table": "payment" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": 
null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "user_id", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "rolling_usage", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "fixed_usage", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": 
null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_rolling_updated", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_fixed_updated", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "usage" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "usage" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "usage" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(255)", + 
"notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "model", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "provider", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "input_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "output_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "reasoning_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "cache_read_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "cache_write_5m_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + 
"onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "cache_write_1h_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "bigint", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "cost", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(30)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "key_id", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(30)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "usage" + }, + { + "type": "json", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "enrichment", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(45)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "ip", + "entityType": "columns", + "table": "ip_rate_limit" + }, + { + "type": "varchar(10)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "interval", + "entityType": "columns", + "table": "ip_rate_limit" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + 
"charSet": null, + "collation": null, + "generated": null, + "name": "count", + "entityType": "columns", + "table": "ip_rate_limit" + }, + { + "type": "varchar(45)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "ip", + "entityType": "columns", + "table": "ip" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "ip" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "ip" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "ip" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "usage", + "entityType": "columns", + "table": "ip" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "key", + "entityType": "columns", + "table": "key_rate_limit" + }, + { + "type": "varchar(40)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + 
"charSet": null, + "collation": null, + "generated": null, + "name": "interval", + "entityType": "columns", + "table": "key_rate_limit" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "count", + "entityType": "columns", + "table": "key_rate_limit" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "key", + "entityType": "columns", + "table": "model_rate_limit" + }, + { + "type": "varchar(40)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "interval", + "entityType": "columns", + "table": "model_rate_limit" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "count", + "entityType": "columns", + "table": "model_rate_limit" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "key" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "key" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": 
null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "key" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "key" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "key" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "key" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "key", + "entityType": "columns", + "table": "key" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "user_id", + "entityType": "columns", + "table": "key" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_used", + "entityType": "columns", + "table": "key" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + 
"generated": null, + "name": "id", + "entityType": "columns", + "table": "model" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "model" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "model" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "model" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "model" + }, + { + "type": "varchar(64)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "model", + "entityType": "columns", + "table": "model" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "provider" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": 
null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "provider" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "provider" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "provider" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "provider" + }, + { + "type": "varchar(64)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "provider", + "entityType": "columns", + "table": "provider" + }, + { + "type": "text", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "credentials", + "entityType": "columns", + "table": "provider" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "user" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + 
"charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "user" + }, + { + "type": "varchar(30)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "account_id", + "entityType": "columns", + "table": "user" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "user" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + 
"onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_seen", + "entityType": "columns", + "table": "user" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "color", + "entityType": "columns", + "table": "user" + }, + { + "type": "enum('admin','member')", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "role", + "entityType": "columns", + "table": "user" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_limit", + "entityType": "columns", + "table": "user" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_usage", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_monthly_usage_updated", + "entityType": "columns", + "table": "user" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + 
"collation": null, + "generated": null, + "name": "slug", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "workspace" + }, + { + "columns": [ + "id" + ], + "name": "PRIMARY", + "table": "account", + "entityType": "pks" + }, + { + "columns": [ + "id" + ], + "name": "PRIMARY", + "table": "auth", + "entityType": "pks" + }, + { + "columns": [ + "id" + ], + "name": "PRIMARY", + "table": "benchmark", + "entityType": "pks" + }, + { + "columns": [ + "workspace_id", + "id" + ], + "name": "PRIMARY", + "table": "billing", + "entityType": "pks" + }, + { + "columns": [ + "workspace_id", + "id" + ], + "name": "PRIMARY", + "table": "lite", + "entityType": "pks" + }, + { + "columns": [ + "workspace_id", + "id" + ], + "name": "PRIMARY", + "table": "payment", + "entityType": "pks" + }, + { + "columns": [ + "workspace_id", 
+ "id" + ], + "name": "PRIMARY", + "table": "subscription", + "entityType": "pks" + }, + { + "columns": [ + "workspace_id", + "id" + ], + "name": "PRIMARY", + "table": "usage", + "entityType": "pks" + }, + { + "columns": [ + "ip", + "interval" + ], + "name": "PRIMARY", + "table": "ip_rate_limit", + "entityType": "pks" + }, + { + "columns": [ + "ip" + ], + "name": "PRIMARY", + "table": "ip", + "entityType": "pks" + }, + { + "columns": [ + "key", + "interval" + ], + "name": "PRIMARY", + "table": "key_rate_limit", + "entityType": "pks" + }, + { + "columns": [ + "key", + "interval" + ], + "name": "PRIMARY", + "table": "model_rate_limit", + "entityType": "pks" + }, + { + "columns": [ + "workspace_id", + "id" + ], + "name": "PRIMARY", + "table": "key", + "entityType": "pks" + }, + { + "columns": [ + "workspace_id", + "id" + ], + "name": "PRIMARY", + "table": "model", + "entityType": "pks" + }, + { + "columns": [ + "workspace_id", + "id" + ], + "name": "PRIMARY", + "table": "provider", + "entityType": "pks" + }, + { + "columns": [ + "workspace_id", + "id" + ], + "name": "PRIMARY", + "table": "user", + "entityType": "pks" + }, + { + "columns": [ + "id" + ], + "name": "PRIMARY", + "table": "workspace", + "entityType": "pks" + }, + { + "columns": [ + { + "value": "provider", + "isExpression": false + }, + { + "value": "subject", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "provider", + "entityType": "indexes", + "table": "auth" + }, + { + "columns": [ + { + "value": "account_id", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "account_id", + "entityType": "indexes", + "table": "auth" + }, + { + "columns": [ + { + "value": "time_created", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": 
"time_created", + "entityType": "indexes", + "table": "benchmark" + }, + { + "columns": [ + { + "value": "customer_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_customer_id", + "entityType": "indexes", + "table": "billing" + }, + { + "columns": [ + { + "value": "subscription_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_subscription_id", + "entityType": "indexes", + "table": "billing" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "user_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "workspace_user_id", + "entityType": "indexes", + "table": "lite" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "user_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "workspace_user_id", + "entityType": "indexes", + "table": "subscription" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "time_created", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "usage_time_created", + "entityType": "indexes", + "table": "usage" + }, + { + "columns": [ + { + "value": "key", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_key", + "entityType": "indexes", + "table": "key" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "model", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + 
"algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "model_workspace_model", + "entityType": "indexes", + "table": "model" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "provider", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "workspace_provider", + "entityType": "indexes", + "table": "provider" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "account_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "user_account_id", + "entityType": "indexes", + "table": "user" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "email", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "user_email", + "entityType": "indexes", + "table": "user" + }, + { + "columns": [ + { + "value": "account_id", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_account_id", + "entityType": "indexes", + "table": "user" + }, + { + "columns": [ + { + "value": "email", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_email", + "entityType": "indexes", + "table": "user" + }, + { + "columns": [ + { + "value": "slug", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "slug", + "entityType": "indexes", + "table": "workspace" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/packages/console/core/src/model.ts b/packages/console/core/src/model.ts index 
3d614d3034..6281382d65 100644 --- a/packages/console/core/src/model.ts +++ b/packages/console/core/src/model.ts @@ -34,6 +34,8 @@ export namespace ZenData { z.object({ id: z.string(), model: z.string(), + priority: z.number().optional(), + tpmLimit: z.number().optional(), weight: z.number().optional(), disabled: z.boolean().optional(), storeModel: z.string().optional(), @@ -123,10 +125,16 @@ export namespace ZenData { ), models: (() => { const normalize = (model: z.infer) => { - const composite = model.providers.find((p) => compositeProviders[p.id].length > 1) + const providers = model.providers.map((p) => ({ + ...p, + priority: p.priority ?? Infinity, + weight: p.weight ?? 1, + })) + const composite = providers.find((p) => compositeProviders[p.id].length > 1) if (!composite) return { trialProvider: model.trialProvider ? [model.trialProvider] : undefined, + providers, } const weightMulti = compositeProviders[composite.id].length @@ -137,17 +145,16 @@ export namespace ZenData { if (model.trialProvider === composite.id) return compositeProviders[composite.id].map((p) => p.id) return [model.trialProvider] })(), - providers: model.providers.flatMap((p) => + providers: providers.flatMap((p) => p.id === composite.id ? compositeProviders[p.id].map((sub) => ({ ...p, id: sub.id, - weight: p.weight ?? 1, })) : [ { ...p, - weight: (p.weight ?? 
1) * weightMulti, + weight: p.weight * weightMulti, }, ], ), diff --git a/packages/console/core/src/schema/ip.sql.ts b/packages/console/core/src/schema/ip.sql.ts index a840a78c19..830842e64d 100644 --- a/packages/console/core/src/schema/ip.sql.ts +++ b/packages/console/core/src/schema/ip.sql.ts @@ -30,3 +30,13 @@ export const KeyRateLimitTable = mysqlTable( }, (table) => [primaryKey({ columns: [table.key, table.interval] })], ) + +export const ModelRateLimitTable = mysqlTable( + "model_rate_limit", + { + key: varchar("key", { length: 255 }).notNull(), + interval: varchar("interval", { length: 40 }).notNull(), + count: int("count").notNull(), + }, + (table) => [primaryKey({ columns: [table.key, table.interval] })], +) diff --git a/packages/shared/sst-env.d.ts b/packages/shared/sst-env.d.ts new file mode 100644 index 0000000000..64441936d7 --- /dev/null +++ b/packages/shared/sst-env.d.ts @@ -0,0 +1,10 @@ +/* This file is auto-generated by SST. Do not edit. */ +/* tslint:disable */ +/* eslint-disable */ +/* deno-fmt-ignore-file */ +/* biome-ignore-all lint: auto-generated */ + +/// + +import "sst" +export {} \ No newline at end of file From cc063d4c32a9d9800a27b4e73fc46781e2c2f08b Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 13:56:17 +0000 Subject: [PATCH 145/201] chore: generate --- .../snapshot.json | 86 ++++--------------- 1 file changed, 19 insertions(+), 67 deletions(-) diff --git a/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json b/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json index 169d7dbb4f..2152bfa76f 100644 --- a/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json +++ b/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json @@ -2,9 +2,7 @@ "version": "6", "dialect": "mysql", "id": "93c492af-c95b-4213-9fc2-38c3dd10374d", - "prevIds": [ - "a09a925d-6cdd-4e7c-b8b1-11c259928b4c" - ], + "prevIds": 
["a09a925d-6cdd-4e7c-b8b1-11c259928b4c"], "ddl": [ { "name": "account", @@ -2175,149 +2173,103 @@ "table": "workspace" }, { - "columns": [ - "id" - ], + "columns": ["id"], "name": "PRIMARY", "table": "account", "entityType": "pks" }, { - "columns": [ - "id" - ], + "columns": ["id"], "name": "PRIMARY", "table": "auth", "entityType": "pks" }, { - "columns": [ - "id" - ], + "columns": ["id"], "name": "PRIMARY", "table": "benchmark", "entityType": "pks" }, { - "columns": [ - "workspace_id", - "id" - ], + "columns": ["workspace_id", "id"], "name": "PRIMARY", "table": "billing", "entityType": "pks" }, { - "columns": [ - "workspace_id", - "id" - ], + "columns": ["workspace_id", "id"], "name": "PRIMARY", "table": "lite", "entityType": "pks" }, { - "columns": [ - "workspace_id", - "id" - ], + "columns": ["workspace_id", "id"], "name": "PRIMARY", "table": "payment", "entityType": "pks" }, { - "columns": [ - "workspace_id", - "id" - ], + "columns": ["workspace_id", "id"], "name": "PRIMARY", "table": "subscription", "entityType": "pks" }, { - "columns": [ - "workspace_id", - "id" - ], + "columns": ["workspace_id", "id"], "name": "PRIMARY", "table": "usage", "entityType": "pks" }, { - "columns": [ - "ip", - "interval" - ], + "columns": ["ip", "interval"], "name": "PRIMARY", "table": "ip_rate_limit", "entityType": "pks" }, { - "columns": [ - "ip" - ], + "columns": ["ip"], "name": "PRIMARY", "table": "ip", "entityType": "pks" }, { - "columns": [ - "key", - "interval" - ], + "columns": ["key", "interval"], "name": "PRIMARY", "table": "key_rate_limit", "entityType": "pks" }, { - "columns": [ - "key", - "interval" - ], + "columns": ["key", "interval"], "name": "PRIMARY", "table": "model_rate_limit", "entityType": "pks" }, { - "columns": [ - "workspace_id", - "id" - ], + "columns": ["workspace_id", "id"], "name": "PRIMARY", "table": "key", "entityType": "pks" }, { - "columns": [ - "workspace_id", - "id" - ], + "columns": ["workspace_id", "id"], "name": "PRIMARY", "table": "model", 
"entityType": "pks" }, { - "columns": [ - "workspace_id", - "id" - ], + "columns": ["workspace_id", "id"], "name": "PRIMARY", "table": "provider", "entityType": "pks" }, { - "columns": [ - "workspace_id", - "id" - ], + "columns": ["workspace_id", "id"], "name": "PRIMARY", "table": "user", "entityType": "pks" }, { - "columns": [ - "id" - ], + "columns": ["id"], "name": "PRIMARY", "table": "workspace", "entityType": "pks" @@ -2612,4 +2564,4 @@ } ], "renames": [] -} \ No newline at end of file +} From 7e39c9b95001ce93d2f3124fe12fb307ea5427be Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Fri, 17 Apr 2026 10:43:17 -0400 Subject: [PATCH 146/201] back to opentui 0.1.99 --- bun.lock | 26 ++++++++++++++++++++++++-- packages/opencode/package.json | 4 ++-- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/bun.lock b/bun.lock index b6f071c460..e7154c3d1b 100644 --- a/bun.lock +++ b/bun.lock @@ -365,8 +365,8 @@ "@opentelemetry/exporter-trace-otlp-http": "0.214.0", "@opentelemetry/sdk-trace-base": "2.6.1", "@opentelemetry/sdk-trace-node": "2.6.1", - "@opentui/core": "0.1.100", - "@opentui/solid": "0.1.100", + "@opentui/core": "0.1.99", + "@opentui/solid": "0.1.99", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", @@ -5926,6 +5926,10 @@ "opencode/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], + "opencode/@opentui/core": ["@opentui/core@0.1.99", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.99", "@opentui/core-darwin-x64": "0.1.99", "@opentui/core-linux-arm64": "0.1.99", 
"@opentui/core-linux-x64": "0.1.99", "@opentui/core-win32-arm64": "0.1.99", "@opentui/core-win32-x64": "0.1.99", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-I3+AEgGzqNWIpWX9g2WOscSPwtQDNOm4KlBjxBWCZjLxkF07u77heWXF7OiAdhKLtNUW6TFiyt6yznqAZPdG3A=="], + + "opencode/@opentui/solid": ["@opentui/solid@0.1.99", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.99", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.10", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.11" } }, "sha512-DrqqO4h2V88FmeIP2cErYkMU0ZK5MrUsZw3w6IzZpoXyyiL4/9qpWzUq+CXx+r16VP2iGxDJwGKUmtFAzUch2Q=="], + "opencode/minimatch": ["minimatch@10.0.3", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw=="], "opencode-gitlab-auth/open": ["open@10.2.0", "", { "dependencies": { "default-browser": "^5.2.1", "define-lazy-prop": "^3.0.0", "is-inside-container": "^1.0.0", "wsl-utils": "^0.1.0" } }, "sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA=="], @@ -6690,6 +6694,22 @@ "opencode-poe-auth/open/wsl-utils": ["wsl-utils@0.1.0", "", { "dependencies": { "is-wsl": "^3.1.0" } }, "sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw=="], + "opencode/@opentui/core/@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.99", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bzVrqeX2vb5iWrc/ftOUOqeUY8XO+qSgoTwj5TXHuwagavgwD3Hpeyjx8+icnTTeM4pao0som1WR9xfye6/X5Q=="], + + "opencode/@opentui/core/@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.99", "", { "os": "darwin", "cpu": "x64" }, "sha512-VE4FrXBYpkxnvkqcCV1a8aN9jyyMJMihVW+V2NLCtp+4yQsj0AapG5TiUSN76XnmSZRptxDy5rBmEempeoIZbg=="], + + 
"opencode/@opentui/core/@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.99", "", { "os": "linux", "cpu": "arm64" }, "sha512-viXQsbpS7yHjYkl7+am32JdvG96QU9lvHh1UiZtpOxcNUUqiYmA2ZwZFPD2Bi54jNyj5l2hjH6YkD3DzE2FEWA=="], + + "opencode/@opentui/core/@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.99", "", { "os": "linux", "cpu": "x64" }, "sha512-WLoEFINOSp0tZSR9y4LUuGc7n4Y7H1wcpjUPzQ9vChkYDXrfZltEanzoDWbDcQ4kZQW5tHVC7LrZHpAsRLwFZg=="], + + "opencode/@opentui/core/@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.99", "", { "os": "win32", "cpu": "arm64" }, "sha512-yWMOLWCEO8HdrctU1dMkgZC8qGkiO4Dwr4/e11tTvVpRmYhDsP/IR89ZjEEtOwnKwFOFuB/MxvflqaEWVQ2g5Q=="], + + "opencode/@opentui/core/@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.99", "", { "os": "win32", "cpu": "x64" }, "sha512-aYRlsL2w8YRL6vPd7/hrqlNVkXU3QowWb01TOvAcHS8UAsXaGFUr47kSDyjxDi1wg1MzmVduCfsC7T3NoThV1w=="], + + "opencode/@opentui/solid/@babel/core": ["@babel/core@7.28.0", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.0", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.6", "@babel/parser": "^7.28.0", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.0", "@babel/types": "^7.28.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ=="], + + "opencode/@opentui/solid/babel-preset-solid": ["babel-preset-solid@1.9.10", "", { "dependencies": { "babel-plugin-jsx-dom-expressions": "^0.40.3" }, "peerDependencies": { "@babel/core": "^7.0.0", "solid-js": "^1.9.10" }, "optionalPeers": ["solid-js"] }, "sha512-HCelrgua/Y+kqO8RyL04JBWS/cVdrtUv/h45GntgQY+cJl4eBcKkCDV3TdMjtKx1nXwRaR9QXslM/Npm1dxdZQ=="], + "opencontrol/@modelcontextprotocol/sdk/express": ["express@5.2.1", "", { 
"dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.1", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "depd": "^2.0.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw=="], "opencontrol/@modelcontextprotocol/sdk/express-rate-limit": ["express-rate-limit@7.5.1", "", { "peerDependencies": { "express": ">= 4.11" } }, "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw=="], @@ -7022,6 +7042,8 @@ "js-beautify/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], + "opencode/@opentui/solid/@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], + "opencontrol/@modelcontextprotocol/sdk/express/accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="], "opencontrol/@modelcontextprotocol/sdk/express/body-parser": ["body-parser@2.2.2", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.3", "http-errors": "^2.0.0", "iconv-lite": "^0.7.0", "on-finished": "^2.4.1", "qs": "^6.14.1", "raw-body": "^3.0.1", "type-is": "^2.0.1" } }, 
"sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA=="], diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 83d6e526fd..6edc8bd2bb 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -122,8 +122,8 @@ "@opentelemetry/exporter-trace-otlp-http": "0.214.0", "@opentelemetry/sdk-trace-base": "2.6.1", "@opentelemetry/sdk-trace-node": "2.6.1", - "@opentui/core": "0.1.100", - "@opentui/solid": "0.1.100", + "@opentui/core": "0.1.99", + "@opentui/solid": "0.1.99", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", From 10c4ab9a3d63dc55ecba89985c3fd23517e769fd Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Fri, 17 Apr 2026 10:51:02 -0400 Subject: [PATCH 147/201] roll back opentui --- bun.lock | 48 ++++++++++------------------------ package.json | 2 ++ packages/opencode/package.json | 4 +-- packages/plugin/package.json | 4 +-- 4 files changed, 20 insertions(+), 38 deletions(-) diff --git a/bun.lock b/bun.lock index e7154c3d1b..776c5fb81e 100644 --- a/bun.lock +++ b/bun.lock @@ -365,8 +365,8 @@ "@opentelemetry/exporter-trace-otlp-http": "0.214.0", "@opentelemetry/sdk-trace-base": "2.6.1", "@opentelemetry/sdk-trace-node": "2.6.1", - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "catalog:", + "@opentui/solid": "catalog:", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", @@ -465,8 +465,8 @@ "zod": "catalog:", }, "devDependencies": { - "@opentui/core": "0.1.100", - "@opentui/solid": "0.1.100", + "@opentui/core": "catalog:", + "@opentui/solid": "catalog:", "@tsconfig/node22": "catalog:", "@types/node": "catalog:", "@typescript/native-preview": "catalog:", @@ -675,6 +675,8 @@ "@npmcli/arborist": "9.4.0", "@octokit/rest": "22.0.0", "@openauthjs/openauth": "0.0.0-20250322224806", + "@opentui/core": "0.1.99", + "@opentui/solid": "0.1.99", "@pierre/diffs": 
"1.1.0-beta.18", "@playwright/test": "1.59.1", "@solid-primitives/storage": "4.3.3", @@ -1598,21 +1600,21 @@ "@opentelemetry/semantic-conventions": ["@opentelemetry/semantic-conventions@1.40.0", "", {}, "sha512-cifvXDhcqMwwTlTK04GBNeIe7yyo28Mfby85QXFe1Yk8nmi36Ab/5UQwptOx84SsoGNRg+EVSjwzfSZMy6pmlw=="], - "@opentui/core": ["@opentui/core@0.1.100", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.100", "@opentui/core-darwin-x64": "0.1.100", "@opentui/core-linux-arm64": "0.1.100", "@opentui/core-linux-x64": "0.1.100", "@opentui/core-win32-arm64": "0.1.100", "@opentui/core-win32-x64": "0.1.100", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-g6Ft3CcOVpytMzq2AZrnHHeMrmvYeLVCsy/8LqZ30VnPv0zfJ+f1TVi/EFrcl4m0GRPdy6yBOVOMcIAWHSZvtg=="], + "@opentui/core": ["@opentui/core@0.1.99", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.99", "@opentui/core-darwin-x64": "0.1.99", "@opentui/core-linux-arm64": "0.1.99", "@opentui/core-linux-x64": "0.1.99", "@opentui/core-win32-arm64": "0.1.99", "@opentui/core-win32-x64": "0.1.99", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-I3+AEgGzqNWIpWX9g2WOscSPwtQDNOm4KlBjxBWCZjLxkF07u77heWXF7OiAdhKLtNUW6TFiyt6yznqAZPdG3A=="], - "@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.100", "", { "os": "darwin", "cpu": "arm64" }, "sha512-cY70nNjkh53tys4iQ0FDST3CQfN4Rp8zOrT6EW0dH/51KZq2Rg3EhxDpc+qu4zASadR5uuU5i61g8lYyVGeGgw=="], + "@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.99", "", { "os": 
"darwin", "cpu": "arm64" }, "sha512-bzVrqeX2vb5iWrc/ftOUOqeUY8XO+qSgoTwj5TXHuwagavgwD3Hpeyjx8+icnTTeM4pao0som1WR9xfye6/X5Q=="], - "@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.100", "", { "os": "darwin", "cpu": "x64" }, "sha512-RUJa4MPX5BWwXuc5nE+Zc+md8+ITYp5X0ourM4+ReaIIW9pTxSDcIPBba9pkJb3PIADyulUPMmmy5OX+umDJhQ=="], + "@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.99", "", { "os": "darwin", "cpu": "x64" }, "sha512-VE4FrXBYpkxnvkqcCV1a8aN9jyyMJMihVW+V2NLCtp+4yQsj0AapG5TiUSN76XnmSZRptxDy5rBmEempeoIZbg=="], - "@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.100", "", { "os": "linux", "cpu": "arm64" }, "sha512-Hznr39cSXg+2sQd+WcTsk67BjyqrZvuTK6f94Uu1ULYZJYCE4KdyNU2NzJSK6ooyosSWXPDsb4kwetTlfypMZg=="], + "@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.99", "", { "os": "linux", "cpu": "arm64" }, "sha512-viXQsbpS7yHjYkl7+am32JdvG96QU9lvHh1UiZtpOxcNUUqiYmA2ZwZFPD2Bi54jNyj5l2hjH6YkD3DzE2FEWA=="], - "@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.100", "", { "os": "linux", "cpu": "x64" }, "sha512-mssQIwH2DU1aMGSUnTJgahMeAEfF82n2WKeTGabTRkPrcxD/6ML+JFiV9l8/8YA880fBD2Kh1XXGEhN2ZGB5UA=="], + "@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.99", "", { "os": "linux", "cpu": "x64" }, "sha512-WLoEFINOSp0tZSR9y4LUuGc7n4Y7H1wcpjUPzQ9vChkYDXrfZltEanzoDWbDcQ4kZQW5tHVC7LrZHpAsRLwFZg=="], - "@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.100", "", { "os": "win32", "cpu": "arm64" }, "sha512-EuLA6+kiIyW/hbo7k56QTc9/QGxg2bYHQ+XPn1UWPf6tmjzbDUwzhw+y52CNJQZpTXtAQSNtxfiYIbigGBd4TQ=="], + "@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.99", "", { "os": "win32", "cpu": "arm64" }, "sha512-yWMOLWCEO8HdrctU1dMkgZC8qGkiO4Dwr4/e11tTvVpRmYhDsP/IR89ZjEEtOwnKwFOFuB/MxvflqaEWVQ2g5Q=="], - "@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.100", "", { "os": "win32", "cpu": "x64" }, "sha512-RyiqbKQ15olR8hK4VsPKL3gHrOIIpqEXhUP0jzXJdQQNusmQKlxHyk5EY3R6hi52IgqjjGxwG+ocVeRb4/VTRg=="], + 
"@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.99", "", { "os": "win32", "cpu": "x64" }, "sha512-aYRlsL2w8YRL6vPd7/hrqlNVkXU3QowWb01TOvAcHS8UAsXaGFUr47kSDyjxDi1wg1MzmVduCfsC7T3NoThV1w=="], - "@opentui/solid": ["@opentui/solid@0.1.100", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.100", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.10", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.11" } }, "sha512-7ADpXOIXtdNzp/r0eQdUuO3kBof7YdcJhQBj/Fv1ckNKoq1XScMuxV2kIK5IzU1Jr3PC4mDxRbek9bMvLJEVjw=="], + "@opentui/solid": ["@opentui/solid@0.1.99", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.99", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.10", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.11" } }, "sha512-DrqqO4h2V88FmeIP2cErYkMU0ZK5MrUsZw3w6IzZpoXyyiL4/9qpWzUq+CXx+r16VP2iGxDJwGKUmtFAzUch2Q=="], "@oslojs/asn1": ["@oslojs/asn1@1.0.0", "", { "dependencies": { "@oslojs/binary": "1.0.0" } }, "sha512-zw/wn0sj0j0QKbIXfIlnEcTviaCzYOY3V5rAyjR6YtOByFtJiT574+8p9Wlach0lZH9fddD4yb9laEAIl4vXQA=="], @@ -5926,10 +5928,6 @@ "opencode/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], - "opencode/@opentui/core": ["@opentui/core@0.1.99", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.99", "@opentui/core-darwin-x64": "0.1.99", "@opentui/core-linux-arm64": "0.1.99", "@opentui/core-linux-x64": "0.1.99", 
"@opentui/core-win32-arm64": "0.1.99", "@opentui/core-win32-x64": "0.1.99", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-I3+AEgGzqNWIpWX9g2WOscSPwtQDNOm4KlBjxBWCZjLxkF07u77heWXF7OiAdhKLtNUW6TFiyt6yznqAZPdG3A=="], - - "opencode/@opentui/solid": ["@opentui/solid@0.1.99", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.99", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.10", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.11" } }, "sha512-DrqqO4h2V88FmeIP2cErYkMU0ZK5MrUsZw3w6IzZpoXyyiL4/9qpWzUq+CXx+r16VP2iGxDJwGKUmtFAzUch2Q=="], - "opencode/minimatch": ["minimatch@10.0.3", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw=="], "opencode-gitlab-auth/open": ["open@10.2.0", "", { "dependencies": { "default-browser": "^5.2.1", "define-lazy-prop": "^3.0.0", "is-inside-container": "^1.0.0", "wsl-utils": "^0.1.0" } }, "sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA=="], @@ -6694,22 +6692,6 @@ "opencode-poe-auth/open/wsl-utils": ["wsl-utils@0.1.0", "", { "dependencies": { "is-wsl": "^3.1.0" } }, "sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw=="], - "opencode/@opentui/core/@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.99", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bzVrqeX2vb5iWrc/ftOUOqeUY8XO+qSgoTwj5TXHuwagavgwD3Hpeyjx8+icnTTeM4pao0som1WR9xfye6/X5Q=="], - - "opencode/@opentui/core/@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.99", "", { "os": "darwin", "cpu": "x64" }, "sha512-VE4FrXBYpkxnvkqcCV1a8aN9jyyMJMihVW+V2NLCtp+4yQsj0AapG5TiUSN76XnmSZRptxDy5rBmEempeoIZbg=="], - - "opencode/@opentui/core/@opentui/core-linux-arm64": 
["@opentui/core-linux-arm64@0.1.99", "", { "os": "linux", "cpu": "arm64" }, "sha512-viXQsbpS7yHjYkl7+am32JdvG96QU9lvHh1UiZtpOxcNUUqiYmA2ZwZFPD2Bi54jNyj5l2hjH6YkD3DzE2FEWA=="], - - "opencode/@opentui/core/@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.99", "", { "os": "linux", "cpu": "x64" }, "sha512-WLoEFINOSp0tZSR9y4LUuGc7n4Y7H1wcpjUPzQ9vChkYDXrfZltEanzoDWbDcQ4kZQW5tHVC7LrZHpAsRLwFZg=="], - - "opencode/@opentui/core/@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.99", "", { "os": "win32", "cpu": "arm64" }, "sha512-yWMOLWCEO8HdrctU1dMkgZC8qGkiO4Dwr4/e11tTvVpRmYhDsP/IR89ZjEEtOwnKwFOFuB/MxvflqaEWVQ2g5Q=="], - - "opencode/@opentui/core/@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.99", "", { "os": "win32", "cpu": "x64" }, "sha512-aYRlsL2w8YRL6vPd7/hrqlNVkXU3QowWb01TOvAcHS8UAsXaGFUr47kSDyjxDi1wg1MzmVduCfsC7T3NoThV1w=="], - - "opencode/@opentui/solid/@babel/core": ["@babel/core@7.28.0", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.0", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.6", "@babel/parser": "^7.28.0", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.0", "@babel/types": "^7.28.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ=="], - - "opencode/@opentui/solid/babel-preset-solid": ["babel-preset-solid@1.9.10", "", { "dependencies": { "babel-plugin-jsx-dom-expressions": "^0.40.3" }, "peerDependencies": { "@babel/core": "^7.0.0", "solid-js": "^1.9.10" }, "optionalPeers": ["solid-js"] }, "sha512-HCelrgua/Y+kqO8RyL04JBWS/cVdrtUv/h45GntgQY+cJl4eBcKkCDV3TdMjtKx1nXwRaR9QXslM/Npm1dxdZQ=="], - "opencontrol/@modelcontextprotocol/sdk/express": ["express@5.2.1", "", { "dependencies": { "accepts": "^2.0.0", "body-parser": 
"^2.2.1", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "depd": "^2.0.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw=="], "opencontrol/@modelcontextprotocol/sdk/express-rate-limit": ["express-rate-limit@7.5.1", "", { "peerDependencies": { "express": ">= 4.11" } }, "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw=="], @@ -7042,8 +7024,6 @@ "js-beautify/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - "opencode/@opentui/solid/@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - "opencontrol/@modelcontextprotocol/sdk/express/accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="], "opencontrol/@modelcontextprotocol/sdk/express/body-parser": ["body-parser@2.2.2", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.3", "http-errors": "^2.0.0", "iconv-lite": "^0.7.0", "on-finished": "^2.4.1", "qs": "^6.14.1", "raw-body": "^3.0.1", "type-is": "^2.0.1" } }, 
"sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA=="], diff --git a/package.json b/package.json index 5fecc09922..f1ad03afc8 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,8 @@ "@types/cross-spawn": "6.0.6", "@octokit/rest": "22.0.0", "@hono/zod-validator": "0.4.2", + "@opentui/core": "0.1.99", + "@opentui/solid": "0.1.99", "ulid": "3.0.1", "@kobalte/core": "0.13.11", "@types/luxon": "3.7.1", diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 6edc8bd2bb..7d9bfaccdd 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -122,8 +122,8 @@ "@opentelemetry/exporter-trace-otlp-http": "0.214.0", "@opentelemetry/sdk-trace-base": "2.6.1", "@opentelemetry/sdk-trace-node": "2.6.1", - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "catalog:", + "@opentui/solid": "catalog:", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", diff --git a/packages/plugin/package.json b/packages/plugin/package.json index 0e51002b42..d4958fa453 100644 --- a/packages/plugin/package.json +++ b/packages/plugin/package.json @@ -34,8 +34,8 @@ } }, "devDependencies": { - "@opentui/core": "0.1.100", - "@opentui/solid": "0.1.100", + "@opentui/core": "catalog:", + "@opentui/solid": "catalog:", "@tsconfig/node22": "catalog:", "@types/node": "catalog:", "typescript": "catalog:", From 20103eb97be978deec529b718e15678875462098 Mon Sep 17 00:00:00 2001 From: Frank Date: Fri, 17 Apr 2026 10:53:43 -0400 Subject: [PATCH 148/201] sync --- packages/console/app/src/routes/zen/util/modelTpmLimiter.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts b/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts index eeb89da5f2..9a834a1a5b 100644 --- a/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts +++ 
b/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts @@ -30,6 +30,8 @@ export function createModelTpmLimiter(providers: { id: string; model: string; tp ) }, track: async (id: string, model: string, usageInfo: UsageInfo) => { + const key = `${id}/${model}` + if (!keys.includes(key)) return const usage = usageInfo.inputTokens + usageInfo.outputTokens + @@ -41,7 +43,7 @@ export function createModelTpmLimiter(providers: { id: string; model: string; tp await Database.use((tx) => tx .insert(ModelRateLimitTable) - .values({ key: `${id}/${model}`, interval: yyyyMMddHHmm, count: usage }) + .values({ key, interval: yyyyMMddHHmm, count: usage }) .onDuplicateKeyUpdate({ set: { count: sql`${ModelRateLimitTable.count} + ${usage}` } }), ) }, From 2415820ecdacf1c8a7c94572297515036207238e Mon Sep 17 00:00:00 2001 From: Brendan Allan <14191578+Brendonovich@users.noreply.github.com> Date: Fri, 17 Apr 2026 23:13:59 +0800 Subject: [PATCH 149/201] fix: conditionally show file tree in beta channel (#23099) --- packages/app/src/pages/session/session-side-panel.tsx | 7 ++++++- packages/app/src/pages/session/use-session-commands.tsx | 5 ++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/app/src/pages/session/session-side-panel.tsx b/packages/app/src/pages/session/session-side-panel.tsx index 06cbec48b5..99197f0a70 100644 --- a/packages/app/src/pages/session/session-side-panel.tsx +++ b/packages/app/src/pages/session/session-side-panel.tsx @@ -52,7 +52,12 @@ export function SessionSidePanel(props: { const { sessionKey, tabs, view } = useSessionLayout() const isDesktop = createMediaQuery("(min-width: 768px)") - const shown = createMemo(() => platform.platform !== "desktop" || settings.general.showFileTree()) + const shown = createMemo( + () => + platform.platform !== "desktop" || + import.meta.env.VITE_OPENCODE_CHANNEL !== "beta" || + settings.general.showFileTree(), + ) const reviewOpen = createMemo(() => isDesktop() && view().reviewPanel.opened()) const 
fileOpen = createMemo(() => isDesktop() && shown() && layout.fileTree.opened()) diff --git a/packages/app/src/pages/session/use-session-commands.tsx b/packages/app/src/pages/session/use-session-commands.tsx index 9bbeb10bde..d649aeb0cb 100644 --- a/packages/app/src/pages/session/use-session-commands.tsx +++ b/packages/app/src/pages/session/use-session-commands.tsx @@ -70,7 +70,10 @@ export const useSessionCommands = (actions: SessionCommandContext) => { }) const activeFileTab = tabState.activeFileTab const closableTab = tabState.closableTab - const shown = () => platform.platform !== "desktop" || settings.general.showFileTree() + const shown = () => + platform.platform !== "desktop" || + import.meta.env.VITE_OPENCODE_CHANNEL !== "beta" || + settings.general.showFileTree() const idle = { type: "idle" as const } const status = () => sync.data.session_status[params.id ?? ""] ?? idle From 8fbbca5f4bf6a7a971ce49d7ac2c8122767f5308 Mon Sep 17 00:00:00 2001 From: OpeOginni <107570612+OpeOginni@users.noreply.github.com> Date: Fri, 17 Apr 2026 17:25:12 +0200 Subject: [PATCH 150/201] fix(opencode): rescrict github copilot opus 4.7 variants to "medium" (#23097) --- packages/opencode/src/provider/transform.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 0ebd8bbf59..284fb0fcad 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -587,6 +587,12 @@ export function variants(model: Provider.Model): Record [effort, { reasoningEffort: effort }])) + } + } + if (adaptiveEfforts) { return Object.fromEntries( adaptiveEfforts.map((effort) => [ From ac5b395c5d709378592ea6b3ab8c9fe5061b53d5 Mon Sep 17 00:00:00 2001 From: Jen Person Date: Fri, 17 Apr 2026 17:25:42 +0200 Subject: [PATCH 151/201] docs: adding Mistral to docs as a provider (it is already a provider, just docs update) #23070 (#23072) --- 
packages/web/src/content/docs/providers.mdx | 27 +++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/packages/web/src/content/docs/providers.mdx b/packages/web/src/content/docs/providers.mdx index bd7e10f928..84e60b6ef3 100644 --- a/packages/web/src/content/docs/providers.mdx +++ b/packages/web/src/content/docs/providers.mdx @@ -1316,6 +1316,33 @@ To use Kimi K2 from Moonshot AI: --- +### Mistral AI + +1. Head over to the [Mistral AI console](https://console.mistral.ai/), create an account, and generate an API key. + +2. Run the `/connect` command and search for **Mistral AI**. + + ```txt + /connect + ``` + +3. Enter your Mistral API key. + + ```txt + ┌ API key + │ + │ + └ enter + ``` + +4. Run the `/models` command to select a model like _Mistral Medium_. + + ```txt + /models + ``` + +--- + ### Nebius Token Factory 1. Head over to the [Nebius Token Factory console](https://tokenfactory.nebius.com/), create an account, and click **Add Key**. From 3a4b49095c5273383ea581dfbc31628d80ac43a2 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 15:26:45 +0000 Subject: [PATCH 152/201] chore: generate --- packages/web/src/content/docs/providers.mdx | 30 ++++++++++----------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/web/src/content/docs/providers.mdx b/packages/web/src/content/docs/providers.mdx index 84e60b6ef3..163dcdcf2b 100644 --- a/packages/web/src/content/docs/providers.mdx +++ b/packages/web/src/content/docs/providers.mdx @@ -1320,26 +1320,26 @@ To use Kimi K2 from Moonshot AI: 1. Head over to the [Mistral AI console](https://console.mistral.ai/), create an account, and generate an API key. -2. Run the `/connect` command and search for **Mistral AI**. +2. Run the `/connect` command and search for **Mistral AI**. - ```txt - /connect - ``` + ```txt + /connect + ``` -3. Enter your Mistral API key. +3. Enter your Mistral API key. 
- ```txt - ┌ API key - │ - │ - └ enter - ``` + ```txt + ┌ API key + │ + │ + └ enter + ``` -4. Run the `/models` command to select a model like _Mistral Medium_. +4. Run the `/models` command to select a model like _Mistral Medium_. - ```txt - /models - ``` + ```txt + /models + ``` --- From 3fe602cda3e34761256c53254accc46abdff1c17 Mon Sep 17 00:00:00 2001 From: Ismail Ghallou Date: Fri, 17 Apr 2026 17:29:31 +0200 Subject: [PATCH 153/201] feat: add LLM Gateway provider (#7847) Co-authored-by: Claude Opus 4.5 Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Co-authored-by: Aiden Cline --- packages/opencode/src/provider/provider.ts | 11 +++ packages/opencode/src/provider/transform.ts | 4 +- packages/opencode/test/preload.ts | 1 + .../ui/src/components/provider-icons/types.ts | 1 + packages/web/src/content/docs/providers.mdx | 68 +++++++++++++++++++ 5 files changed, 83 insertions(+), 2 deletions(-) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 711481d80a..558bcb75a5 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -390,6 +390,17 @@ function custom(dep: CustomDep): Record { }, } }), + llmgateway: () => + Effect.succeed({ + autoload: false, + options: { + headers: { + "HTTP-Referer": "https://opencode.ai/", + "X-Title": "opencode", + "X-Source": "opencode", + }, + }, + }), openrouter: () => Effect.succeed({ autoload: false, diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 284fb0fcad..1b6b0918b1 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -807,7 +807,7 @@ export function options(input: { result["promptCacheKey"] = input.sessionID } - if (input.model.api.npm === "@openrouter/ai-sdk-provider") { + if (input.model.api.npm === "@openrouter/ai-sdk-provider" || input.model.api.npm === 
"@llmgateway/ai-sdk-provider") { result["usage"] = { include: true, } @@ -944,7 +944,7 @@ export function smallOptions(model: Provider.Model) { } return { thinkingConfig: { thinkingBudget: 0 } } } - if (model.providerID === "openrouter") { + if (model.providerID === "openrouter" || model.providerID === "llmgateway") { if (model.api.id.includes("google")) { return { reasoning: { enabled: false } } } diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index a2592286ad..58dc2b0b48 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -62,6 +62,7 @@ delete process.env["AWS_PROFILE"] delete process.env["AWS_REGION"] delete process.env["AWS_BEARER_TOKEN_BEDROCK"] delete process.env["OPENROUTER_API_KEY"] +delete process.env["LLM_GATEWAY_API_KEY"] delete process.env["GROQ_API_KEY"] delete process.env["MISTRAL_API_KEY"] delete process.env["PERPLEXITY_API_KEY"] diff --git a/packages/ui/src/components/provider-icons/types.ts b/packages/ui/src/components/provider-icons/types.ts index f9ddfdf0e9..5a97287509 100644 --- a/packages/ui/src/components/provider-icons/types.ts +++ b/packages/ui/src/components/provider-icons/types.ts @@ -32,6 +32,7 @@ export const iconNames = [ "perplexity", "ovhcloud", "openrouter", + "llmgateway", "opencode", "opencode-go", "openai", diff --git a/packages/web/src/content/docs/providers.mdx b/packages/web/src/content/docs/providers.mdx index 163dcdcf2b..bad9e1ebbc 100644 --- a/packages/web/src/content/docs/providers.mdx +++ b/packages/web/src/content/docs/providers.mdx @@ -1577,6 +1577,74 @@ OpenCode Zen is a list of tested and verified models provided by the OpenCode te --- +### LLM Gateway + +1. Head over to the [LLM Gateway dashboard](https://llmgateway.io/dashboard), click **Create API Key**, and copy the key. + +2. Run the `/connect` command and search for LLM Gateway. + + ```txt + /connect + ``` + +3. Enter the API key for the provider. 
+ + ```txt + ┌ API key + │ + │ + └ enter + ``` + +4. Many LLM Gateway models are preloaded by default, run the `/models` command to select the one you want. + + ```txt + /models + ``` + + You can also add additional models through your opencode config. + + ```json title="opencode.json" {6} + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "llmgateway": { + "models": { + "somecoolnewmodel": {} + } + } + } + } + ``` + +5. You can also customize them through your opencode config. Here's an example of specifying a provider + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "llmgateway": { + "models": { + "glm-4.7": { + "name": "GLM 4.7" + }, + "gpt-5.2": { + "name": "GPT-5.2" + }, + "gemini-2.5-pro": { + "name": "Gemini 2.5 Pro" + }, + "claude-3-5-sonnet-20241022": { + "name": "Claude 3.5 Sonnet" + } + } + } + } + } + ``` + +--- + ### SAP AI Core SAP AI Core provides access to 40+ models from OpenAI, Anthropic, Google, Amazon, Meta, Mistral, and AI21 through a unified platform. 
From 38cd3979f20b4d7842268b501568bf930b8e867a Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 15:31:50 +0000 Subject: [PATCH 154/201] chore: update nix node_modules hashes --- nix/hashes.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nix/hashes.json b/nix/hashes.json index c19a837e84..54fd991eca 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,8 +1,8 @@ { "nodeModules": { - "x86_64-linux": "sha256-yGb+EPlFNDptIi4yFdJ0z7fhAyfOCRXu0GpNxrOnLVA=", - "aarch64-linux": "sha256-h8oBwLB6LnFN4xGB0/ocvxRbBMwewrEck91ADihTUCk=", - "aarch64-darwin": "sha256-/0IOgoihi4OR2AhDDfstLn2DcY/261v/KRQV4Pwhbmk=", - "x86_64-darwin": "sha256-rPkqF+led93s1plBbhFpszrnzF2H+EUz8QlfbUkSHvM=" + "x86_64-linux": "sha256-OPbZUo/fQv2Xsf+NEZV08GLBMN/DXovhRvn2JkesFtY=", + "aarch64-linux": "sha256-WK7xlVLuirKDN5LaqjBn7qpv5bYVtYHZw0qRNKX4xXg=", + "aarch64-darwin": "sha256-BAoAdeLQ+lXDD7Klxoxij683OVVug8KXEMRUqIQAjc8=", + "x86_64-darwin": "sha256-ZOBwNR2gZgc5f+y3VIBBT4qZpeZfg7Of6AaGDOfqsG8=" } } From 551216a452836a033e35bf88f64845bc966eef8f Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Fri, 17 Apr 2026 11:31:49 -0400 Subject: [PATCH 155/201] fix incorrect light mode in ghostty --- packages/opencode/src/cli/cmd/tui/context/theme.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/context/theme.tsx b/packages/opencode/src/cli/cmd/tui/context/theme.tsx index 679be8f254..04670429da 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/theme.tsx @@ -397,7 +397,7 @@ export const { use: useTheme, provider: ThemeProvider } = createSimpleContext({ if (store.lock) return apply(mode) } - renderer.on(CliRenderEvents.THEME_MODE, handle) + // renderer.on(CliRenderEvents.THEME_MODE, handle) const refresh = () => { renderer.clearPaletteCache() From a27d3c162335946d3e6ed8a6a5a621006a88511e Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Fri, 17 Apr 
2026 11:41:24 -0400 Subject: [PATCH 156/201] tui: fix session resumption with --session-id flag to navigate after app initialization Previously when passing a session ID directly, the route was set during initial render which could cause navigation issues before the router was fully ready. Now the session navigation happens after initialization completes, ensuring the TUI properly loads the requested session when users resume with --session-id. --- packages/opencode/src/cli/cmd/tui/app.tsx | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/app.tsx b/packages/opencode/src/cli/cmd/tui/app.tsx index 74eca9a0f2..585e1ed232 100644 --- a/packages/opencode/src/cli/cmd/tui/app.tsx +++ b/packages/opencode/src/cli/cmd/tui/app.tsx @@ -150,7 +150,7 @@ export function tui(input: { Promise }) { }) local.model.set({ providerID, modelID }, { recent: true }) } + if (args.sessionID && !args.fork) { + route.navigate({ + type: "session", + sessionID: args.sessionID, + }) + } }) }) From 803d9eb7ad5f4dfd832d7506a7cad83ded52253e Mon Sep 17 00:00:00 2001 From: opencode Date: Fri, 17 Apr 2026 16:19:46 +0000 Subject: [PATCH 157/201] release: v1.4.9 --- bun.lock | 32 +++++++++++++------------- packages/app/package.json | 2 +- packages/console/app/package.json | 2 +- packages/console/core/package.json | 2 +- packages/console/function/package.json | 2 +- packages/console/mail/package.json | 2 +- packages/desktop-electron/package.json | 2 +- packages/desktop/package.json | 2 +- packages/enterprise/package.json | 2 +- packages/extensions/zed/extension.toml | 12 +++++----- packages/function/package.json | 2 +- packages/opencode/package.json | 2 +- packages/plugin/package.json | 2 +- packages/sdk/js/package.json | 2 +- packages/shared/package.json | 2 +- packages/slack/package.json | 2 +- packages/ui/package.json | 2 +- packages/web/package.json | 2 +- sdks/vscode/package.json | 2 +- 19 files changed, 39 insertions(+), 39 deletions(-) diff 
--git a/bun.lock b/bun.lock index 776c5fb81e..f86e22af26 100644 --- a/bun.lock +++ b/bun.lock @@ -29,7 +29,7 @@ }, "packages/app": { "name": "@opencode-ai/app", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -83,7 +83,7 @@ }, "packages/console/app": { "name": "@opencode-ai/console-app", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@cloudflare/vite-plugin": "1.15.2", "@ibm/plex": "6.4.1", @@ -117,7 +117,7 @@ }, "packages/console/core": { "name": "@opencode-ai/console-core", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@aws-sdk/client-sts": "3.782.0", "@jsx-email/render": "1.1.1", @@ -144,7 +144,7 @@ }, "packages/console/function": { "name": "@opencode-ai/console-function", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@ai-sdk/anthropic": "3.0.64", "@ai-sdk/openai": "3.0.48", @@ -168,7 +168,7 @@ }, "packages/console/mail": { "name": "@opencode-ai/console-mail", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", @@ -192,7 +192,7 @@ }, "packages/desktop": { "name": "@opencode-ai/desktop", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@opencode-ai/app": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -225,7 +225,7 @@ }, "packages/desktop-electron": { "name": "@opencode-ai/desktop-electron", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "effect": "catalog:", "electron-context-menu": "4.1.2", @@ -268,7 +268,7 @@ }, "packages/enterprise": { "name": "@opencode-ai/enterprise", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@opencode-ai/shared": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -297,7 +297,7 @@ }, "packages/function": { "name": "@opencode-ai/function", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@octokit/auth-app": "8.0.1", "@octokit/rest": "catalog:", @@ -313,7 +313,7 @@ }, 
"packages/opencode": { "name": "opencode", - "version": "1.4.8", + "version": "1.4.9", "bin": { "opencode": "./bin/opencode", }, @@ -458,7 +458,7 @@ }, "packages/plugin": { "name": "@opencode-ai/plugin", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@opencode-ai/sdk": "workspace:*", "effect": "catalog:", @@ -493,7 +493,7 @@ }, "packages/sdk/js": { "name": "@opencode-ai/sdk", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "cross-spawn": "catalog:", }, @@ -508,7 +508,7 @@ }, "packages/shared": { "name": "@opencode-ai/shared", - "version": "1.4.8", + "version": "1.4.9", "bin": { "opencode": "./bin/opencode", }, @@ -532,7 +532,7 @@ }, "packages/slack": { "name": "@opencode-ai/slack", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@opencode-ai/sdk": "workspace:*", "@slack/bolt": "^3.17.1", @@ -567,7 +567,7 @@ }, "packages/ui": { "name": "@opencode-ai/ui", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -616,7 +616,7 @@ }, "packages/web": { "name": "@opencode-ai/web", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@astrojs/cloudflare": "12.6.3", "@astrojs/markdown-remark": "6.3.1", diff --git a/packages/app/package.json b/packages/app/package.json index 56ca71f892..0f4ae4228b 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/app", - "version": "1.4.8", + "version": "1.4.9", "description": "", "type": "module", "exports": { diff --git a/packages/console/app/package.json b/packages/console/app/package.json index 528427ae80..63b6a5c414 100644 --- a/packages/console/app/package.json +++ b/packages/console/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-app", - "version": "1.4.8", + "version": "1.4.9", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/console/core/package.json b/packages/console/core/package.json index 
1b4292e9ad..e5351c1a87 100644 --- a/packages/console/core/package.json +++ b/packages/console/core/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/console-core", - "version": "1.4.8", + "version": "1.4.9", "private": true, "type": "module", "license": "MIT", diff --git a/packages/console/function/package.json b/packages/console/function/package.json index f3de599f1c..ef84eae470 100644 --- a/packages/console/function/package.json +++ b/packages/console/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-function", - "version": "1.4.8", + "version": "1.4.9", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": "module", diff --git a/packages/console/mail/package.json b/packages/console/mail/package.json index b13ad8520e..439beb15c8 100644 --- a/packages/console/mail/package.json +++ b/packages/console/mail/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-mail", - "version": "1.4.8", + "version": "1.4.9", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", diff --git a/packages/desktop-electron/package.json b/packages/desktop-electron/package.json index ed36f62827..b698a08896 100644 --- a/packages/desktop-electron/package.json +++ b/packages/desktop-electron/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop-electron", "private": true, - "version": "1.4.8", + "version": "1.4.9", "type": "module", "license": "MIT", "homepage": "https://opencode.ai", diff --git a/packages/desktop/package.json b/packages/desktop/package.json index ce0ad5d4ab..1d9d24bd32 100644 --- a/packages/desktop/package.json +++ b/packages/desktop/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop", "private": true, - "version": "1.4.8", + "version": "1.4.9", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/enterprise/package.json b/packages/enterprise/package.json index f667de3822..02f1ad83ca 100644 --- 
a/packages/enterprise/package.json +++ b/packages/enterprise/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/enterprise", - "version": "1.4.8", + "version": "1.4.9", "private": true, "type": "module", "license": "MIT", diff --git a/packages/extensions/zed/extension.toml b/packages/extensions/zed/extension.toml index 31facbe3b5..ffcc975d1b 100644 --- a/packages/extensions/zed/extension.toml +++ b/packages/extensions/zed/extension.toml @@ -1,7 +1,7 @@ id = "opencode" name = "OpenCode" description = "The open source coding agent." -version = "1.4.8" +version = "1.4.9" schema_version = 1 authors = ["Anomaly"] repository = "https://github.com/anomalyco/opencode" @@ -11,26 +11,26 @@ name = "OpenCode" icon = "./icons/opencode.svg" [agent_servers.opencode.targets.darwin-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-darwin-arm64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-darwin-arm64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.darwin-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-darwin-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-darwin-x64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-linux-arm64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-linux-arm64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-linux-x64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-linux-x64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.windows-x86_64] -archive = 
"https://github.com/anomalyco/opencode/releases/download/v1.4.8/opencode-windows-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-windows-x64.zip" cmd = "./opencode.exe" args = ["acp"] diff --git a/packages/function/package.json b/packages/function/package.json index 877c9dcd14..2ac49a9228 100644 --- a/packages/function/package.json +++ b/packages/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/function", - "version": "1.4.8", + "version": "1.4.9", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": "module", diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 7d9bfaccdd..3ce3b4192a 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.8", + "version": "1.4.9", "name": "opencode", "type": "module", "license": "MIT", diff --git a/packages/plugin/package.json b/packages/plugin/package.json index d4958fa453..a1aa6470dc 100644 --- a/packages/plugin/package.json +++ b/packages/plugin/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/plugin", - "version": "1.4.8", + "version": "1.4.9", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/sdk/js/package.json b/packages/sdk/js/package.json index 06213308f1..27d9188151 100644 --- a/packages/sdk/js/package.json +++ b/packages/sdk/js/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/sdk", - "version": "1.4.8", + "version": "1.4.9", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/shared/package.json b/packages/shared/package.json index 794c537da8..383b26d6ed 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.8", + 
"version": "1.4.9", "name": "@opencode-ai/shared", "type": "module", "license": "MIT", diff --git a/packages/slack/package.json b/packages/slack/package.json index d9865aa35c..75974ed39e 100644 --- a/packages/slack/package.json +++ b/packages/slack/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/slack", - "version": "1.4.8", + "version": "1.4.9", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/ui/package.json b/packages/ui/package.json index f05ce03826..525cf20935 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/ui", - "version": "1.4.8", + "version": "1.4.9", "type": "module", "license": "MIT", "exports": { diff --git a/packages/web/package.json b/packages/web/package.json index a1d138319f..d3a2a25e4c 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -2,7 +2,7 @@ "name": "@opencode-ai/web", "type": "module", "license": "MIT", - "version": "1.4.8", + "version": "1.4.9", "scripts": { "dev": "astro dev", "dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev", diff --git a/sdks/vscode/package.json b/sdks/vscode/package.json index 07a3682c67..a3ed76c883 100644 --- a/sdks/vscode/package.json +++ b/sdks/vscode/package.json @@ -2,7 +2,7 @@ "name": "opencode", "displayName": "opencode", "description": "opencode for VS Code", - "version": "1.4.8", + "version": "1.4.9", "publisher": "sst-dev", "repository": { "type": "git", From 1a5913316850b256e497b899e29f7995302dcfa0 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Fri, 17 Apr 2026 12:00:22 -0400 Subject: [PATCH 158/201] Improve light mode dark mode copy --- packages/opencode/src/cli/cmd/tui/app.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/app.tsx b/packages/opencode/src/cli/cmd/tui/app.tsx index 585e1ed232..a58ff05648 100644 --- a/packages/opencode/src/cli/cmd/tui/app.tsx +++ b/packages/opencode/src/cli/cmd/tui/app.tsx @@ -606,7 +606,7 @@ function 
App(props: { onSnapshot?: () => Promise }) { category: "System", }, { - title: "Toggle theme mode", + title: mode() === "dark" ? "Switch to light mode" : "Switch to dark mode", value: "theme.switch_mode", onSelect: (dialog) => { setMode(mode() === "dark" ? "light" : "dark") From 0d582f9d3f9b20d5443fdee72aaf653778bf885e Mon Sep 17 00:00:00 2001 From: Vladimir Glafirov Date: Fri, 17 Apr 2026 18:22:43 +0200 Subject: [PATCH 159/201] chore: bump gitlab-ai-provider to 6.6.0 (#23057) --- bun.lock | 4 ++-- packages/opencode/package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bun.lock b/bun.lock index f86e22af26..2442fcd709 100644 --- a/bun.lock +++ b/bun.lock @@ -386,7 +386,7 @@ "drizzle-orm": "catalog:", "effect": "catalog:", "fuzzysort": "3.1.0", - "gitlab-ai-provider": "6.4.2", + "gitlab-ai-provider": "6.6.0", "glob": "13.0.5", "google-auth-library": "10.5.0", "gray-matter": "4.0.3", @@ -3314,7 +3314,7 @@ "github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="], - "gitlab-ai-provider": ["gitlab-ai-provider@6.4.2", "", { "dependencies": { "@anthropic-ai/sdk": "^0.71.0", "@anycable/core": "^0.9.2", "graphql-request": "^6.1.0", "isomorphic-ws": "^5.0.0", "openai": "^6.16.0", "socket.io-client": "^4.8.1", "vscode-jsonrpc": "^8.2.1", "zod": "^3.25.76" }, "peerDependencies": { "@ai-sdk/provider": ">=3.0.0", "@ai-sdk/provider-utils": ">=4.0.0" } }, "sha512-Wyw6uslCuipBOr/NYwAtpgXEUJj68iJY5aekad2DjePN99JetKVQBqkLgAy9PZp2EA4OuscfRQu9qKIBN/evNw=="], + "gitlab-ai-provider": ["gitlab-ai-provider@6.6.0", "", { "dependencies": { "@anthropic-ai/sdk": "^0.71.0", "@anycable/core": "^0.9.2", "graphql-request": "^6.1.0", "isomorphic-ws": "^5.0.0", "openai": "^6.16.0", "socket.io-client": "^4.8.1", "vscode-jsonrpc": "^8.2.1", "zod": "^3.25.76" }, "peerDependencies": { "@ai-sdk/provider": ">=3.0.0", "@ai-sdk/provider-utils": ">=4.0.0" } }, 
"sha512-jUxYnKA4XQaPc3wxACDZ8bPDXO0Mzx7cZaBDxbT2uGgLqtGZmSi+9tVNIg7louSS+s/ioVra3SoUz3iOFVhKPA=="], "glob": ["glob@13.0.5", "", { "dependencies": { "minimatch": "^10.2.1", "minipass": "^7.1.2", "path-scurry": "^2.0.0" } }, "sha512-BzXxZg24Ibra1pbQ/zE7Kys4Ua1ks7Bn6pKLkVPZ9FZe4JQS6/Q7ef3LG1H+k7lUf5l4T3PLSyYyYJVYUvfgTw=="], diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 3ce3b4192a..ace30e9bcb 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -143,7 +143,7 @@ "drizzle-orm": "catalog:", "effect": "catalog:", "fuzzysort": "3.1.0", - "gitlab-ai-provider": "6.4.2", + "gitlab-ai-provider": "6.6.0", "glob": "13.0.5", "google-auth-library": "10.5.0", "gray-matter": "4.0.3", From fde3d9133bdf53d5bab9cf44f7f9ec4a23ae4fb4 Mon Sep 17 00:00:00 2001 From: rasdani <73563550+rasdani@users.noreply.github.com> Date: Fri, 17 Apr 2026 09:28:23 -0700 Subject: [PATCH 160/201] fix(opencode): pass `EXA_API_KEY` to `websearch` tool to avoid rate limits (#16362) Co-authored-by: Dax Raad Co-authored-by: Aiden Cline Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> --- packages/opencode/src/tool/mcp-exa.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/tool/mcp-exa.ts b/packages/opencode/src/tool/mcp-exa.ts index 638d68c245..3340d84efd 100644 --- a/packages/opencode/src/tool/mcp-exa.ts +++ b/packages/opencode/src/tool/mcp-exa.ts @@ -1,7 +1,9 @@ import { Duration, Effect, Schema } from "effect" import { HttpClient, HttpClientRequest } from "effect/unstable/http" -const URL = "https://mcp.exa.ai/mcp" +const URL = process.env.EXA_API_KEY + ? 
`https://mcp.exa.ai/mcp?exaApiKey=${encodeURIComponent(process.env.EXA_API_KEY)}` + : "https://mcp.exa.ai/mcp" const McpResult = Schema.Struct({ result: Schema.Struct({ From c491161c0c6b69fe95672bba395a13b506940512 Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Fri, 17 Apr 2026 11:40:24 -0500 Subject: [PATCH 161/201] chore: bump @ai-sdk/anthropic to 3.0.71 and dependents (#23120) --- bun.lock | 16 ++++++++-------- packages/opencode/package.json | 6 +++--- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/bun.lock b/bun.lock index 2442fcd709..c78c9ae48d 100644 --- a/bun.lock +++ b/bun.lock @@ -322,15 +322,15 @@ "@actions/github": "6.0.1", "@agentclientprotocol/sdk": "0.16.1", "@ai-sdk/alibaba": "1.0.17", - "@ai-sdk/amazon-bedrock": "4.0.94", - "@ai-sdk/anthropic": "3.0.70", + "@ai-sdk/amazon-bedrock": "4.0.95", + "@ai-sdk/anthropic": "3.0.71", "@ai-sdk/azure": "3.0.49", "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", "@ai-sdk/gateway": "3.0.102", "@ai-sdk/google": "3.0.63", - "@ai-sdk/google-vertex": "4.0.111", + "@ai-sdk/google-vertex": "4.0.112", "@ai-sdk/groq": "3.0.31", "@ai-sdk/mistral": "3.0.27", "@ai-sdk/openai": "3.0.53", @@ -740,7 +740,7 @@ "@ai-sdk/alibaba": ["@ai-sdk/alibaba@1.0.17", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZbE+U5bWz2JBc5DERLowx5+TKbjGBE93LqKZAWvuEn7HOSQMraxFMZuc0ST335QZJAyfBOzh7m1mPQ+y7EaaoA=="], - "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.94", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-XKE7wAjXejsIfNQvn3onvGUByhGHVM6W+xlL+1DAQLmjEb+ue4sOJIRehJ96rEvTXVVHRVyA6bSXx7ayxXfn5A=="], + "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.95", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.71", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-qJKWEy+cNx3bLSJi/XpIVhv0P8KO0JFB1SvEroNWN8gKm820SIglBmXS10DTeXJdM5PPbQX4i/wJj5BHEk2LRQ=="], "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-rwLi/Rsuj2pYniQXIrvClHvXDzgM4UQHHnvHTWEF14efnlKclG/1ghpNC+adsRujAbCTr6gRsSbDE2vEqriV7g=="], @@ -764,7 +764,7 @@ "@ai-sdk/google": ["@ai-sdk/google@3.0.63", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-RfOZWVMYSPu2sPRfGajrauWAZ9BSaRopSn+AszkKWQ1MFj8nhaXvCqRHB5pBQUaHTfZKagvOmMpNfa/s3gPLgQ=="], - "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.111", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/google": "3.0.64", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-5gILpAWWI5idfal/MfoH3tlQeSnOJ9jfL8JB8m2fdc3ue/9xoXkYDpXpDL/nyJImFjMCi6eR0Fpvlo/IKEWDIg=="], + "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.112", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.71", "@ai-sdk/google": "3.0.64", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-cSfHCkM+9ZrFtQWIN1WlV93JPD+isGSdFxKj7u1L9m2aLVZajlXdcE41GL9hMt7ld7bZYE4NnZ+4VLxBAHE+Eg=="], "@ai-sdk/groq": ["@ai-sdk/groq@3.0.31", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XbbugpnFmXGu2TlXiq8KUJskP6/VVbuFcnFIGDzDIB/Chg6XHsNnqrTF80Zxkh0Pd3+NvbM+2Uqrtsndk6bDAg=="], @@ -5154,7 +5154,7 @@ "@ai-sdk/alibaba/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], - "@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.71", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-bUWOzrzR0gJKJO/PLGMR4uH2dqEgqGhrsCV+sSpk4KtOEnUQlfjZI/F7BFlqSvVpFbjdgYRRLysAeEZpJ6S1lg=="], "@ai-sdk/amazon-bedrock/@smithy/eventstream-codec": ["@smithy/eventstream-codec@4.2.13", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@smithy/types": "^4.14.0", "@smithy/util-hex-encoding": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-vYahwBAtRaAcFbOmE9aLr12z7RiHYDSLcnogSdxfm7kKfsNa3wH+NU5r7vTeB5rKvLsWyPjVX8iH94brP7umiQ=="], @@ -5172,7 +5172,7 @@ "@ai-sdk/fireworks/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], - "@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.71", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-bUWOzrzR0gJKJO/PLGMR4uH2dqEgqGhrsCV+sSpk4KtOEnUQlfjZI/F7BFlqSvVpFbjdgYRRLysAeEZpJ6S1lg=="], "@ai-sdk/google-vertex/@ai-sdk/google": ["@ai-sdk/google@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-CbR82EgGPNrj/6q0HtclwuCqe0/pDShyv3nWDP/A9DroujzWXnLMlUJVrgPOsg4b40zQCwwVs2XSKCxvt/4QaA=="], @@ -5922,7 +5922,7 @@ "nypm/tinyexec": ["tinyexec@1.1.1", "", {}, "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg=="], - "opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.71", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-bUWOzrzR0gJKJO/PLGMR4uH2dqEgqGhrsCV+sSpk4KtOEnUQlfjZI/F7BFlqSvVpFbjdgYRRLysAeEZpJ6S1lg=="], "opencode/@ai-sdk/openai": ["@ai-sdk/openai@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { 
"zod": "^3.25.76 || ^4.1.8" } }, "sha512-Wld+Rbc05KaUn08uBt06eEuwcgalcIFtIl32Yp+GxuZXUQwOb6YeAuq+C6da4ch6BurFoqEaLemJVwjBb7x+PQ=="], diff --git a/packages/opencode/package.json b/packages/opencode/package.json index ace30e9bcb..cadb1fa38d 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -79,15 +79,15 @@ "@actions/github": "6.0.1", "@agentclientprotocol/sdk": "0.16.1", "@ai-sdk/alibaba": "1.0.17", - "@ai-sdk/amazon-bedrock": "4.0.94", - "@ai-sdk/anthropic": "3.0.70", + "@ai-sdk/amazon-bedrock": "4.0.95", + "@ai-sdk/anthropic": "3.0.71", "@ai-sdk/azure": "3.0.49", "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", "@ai-sdk/gateway": "3.0.102", "@ai-sdk/google": "3.0.63", - "@ai-sdk/google-vertex": "4.0.111", + "@ai-sdk/google-vertex": "4.0.112", "@ai-sdk/groq": "3.0.31", "@ai-sdk/mistral": "3.0.27", "@ai-sdk/openai": "3.0.53", From 13dfe569efda341cb7b6d4d163e4aee471e65043 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 12:45:29 -0400 Subject: [PATCH 162/201] tui: fix agent cycling and prompt metadata polish (#23115) --- .../cli/cmd/tui/component/dialog-command.tsx | 1 + .../cli/cmd/tui/component/prompt/index.tsx | 24 ++++++++++++------- .../src/cli/cmd/tui/context/local.tsx | 4 +++- .../opencode/src/cli/cmd/tui/util/signal.ts | 7 ++++-- 4 files changed, 24 insertions(+), 12 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-command.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-command.tsx index f42ba15ec0..49bf42c63e 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-command.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-command.tsx @@ -63,6 +63,7 @@ function init() { useKeyboard((evt) => { if (suspended()) return if (dialog.stack.length > 0) return + if (evt.defaultPrevented) return for (const option of entries()) { if (!isEnabled(option)) continue if (option.keybind && keybind.match(option.keybind, evt)) { diff 
--git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index 06e5a0884e..98527bf912 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -5,7 +5,7 @@ import path from "path" import { fileURLToPath } from "url" import { Filesystem } from "@/util" import { useLocal } from "@tui/context/local" -import { useTheme } from "@tui/context/theme" +import { tint, useTheme } from "@tui/context/theme" import { EmptyBorder, SplitBorder } from "@tui/component/border" import { useSDK } from "@tui/context/sdk" import { useRoute } from "@tui/context/route" @@ -463,19 +463,25 @@ export function Prompt(props: PromptProps) { createEffect(() => { if (!input || input.isDestroyed) return if (props.visible === false || dialog.stack.length > 0) { - input.blur() + if (input.focused) input.blur() return } // Slot/plugin updates can remount the background prompt while a dialog is open. // Keep focus with the dialog and let the prompt reclaim it after the dialog closes. - input.focus() + if (!input.focused) input.focus() }) createEffect(() => { if (!input || input.isDestroyed) return + const capture = + store.mode === "normal" + ? auto()?.visible + ? (["escape", "navigate", "submit", "tab"] as const) + : (["tab"] as const) + : undefined input.traits = { - capture: auto()?.visible ? ["escape", "navigate", "submit", "tab"] : undefined, + capture, suspend: !!props.disabled || store.mode === "shell", status: store.mode === "shell" ? 
"SHELL" : undefined, } @@ -870,6 +876,7 @@ export function Prompt(props: PromptProps) { () => !!local.agent.current() && store.mode === "normal" && showVariant(), animationsEnabled, ) + const borderHighlight = createMemo(() => tint(theme.border, highlight(), agentMetaAlpha())) const placeholderText = createMemo(() => { if (props.showPlaceholder === false) return undefined @@ -931,7 +938,7 @@ export function Prompt(props: PromptProps) { (anchor = r)} visible={props.visible !== false}> }> {(agent) => ( <> - - {store.mode === "shell" ? "Shell" : Locale.titlecase(agent().name)}{" "} - + {store.mode === "shell" ? "Shell" : Locale.titlecase(agent().name)} + · { - let next = agents().findIndex((x) => x.name === agentStore.current) + direction + const current = this.current() + if (!current) return + let next = agents().findIndex((x) => x.name === current.name) + direction if (next < 0) next = agents().length - 1 if (next >= agents().length) next = 0 const value = agents()[next] diff --git a/packages/opencode/src/cli/cmd/tui/util/signal.ts b/packages/opencode/src/cli/cmd/tui/util/signal.ts index 1c7cc0008d..7d20ae04ba 100644 --- a/packages/opencode/src/cli/cmd/tui/util/signal.ts +++ b/packages/opencode/src/cli/cmd/tui/util/signal.ts @@ -8,20 +8,23 @@ export function createDebouncedSignal(value: T, ms: number): [Accessor, Sc export function createFadeIn(show: Accessor, enabled: Accessor) { const [alpha, setAlpha] = createSignal(show() ? 
1 : 0) + let revealed = show() createEffect( - on([show, enabled], ([visible, animate], previous) => { + on([show, enabled], ([visible, animate]) => { if (!visible) { setAlpha(0) return } - if (!animate || !previous) { + if (!animate || revealed) { + revealed = true setAlpha(1) return } const start = performance.now() + revealed = true setAlpha(0) const timer = setInterval(() => { From ce0cfb0ea5c86af77b97fc41e9076721f601c052 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 16:46:34 +0000 Subject: [PATCH 163/201] chore: generate --- packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index 98527bf912..cf26ec1950 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -1153,7 +1153,9 @@ export function Prompt(props: PromptProps) { }> {(agent) => ( <> - {store.mode === "shell" ? "Shell" : Locale.titlecase(agent().name)} + + {store.mode === "shell" ? "Shell" : Locale.titlecase(agent().name)} + · From 797953c88d29319deb8e003594f31fe6d2e9b8b3 Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 13:01:22 -0400 Subject: [PATCH 164/201] when generating sdk only format sdk, much faster (#23122) --- script/format.ts | 4 +++- script/generate.ts | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/script/format.ts b/script/format.ts index 996de9ad04..37ab2197d0 100755 --- a/script/format.ts +++ b/script/format.ts @@ -2,4 +2,6 @@ import { $ } from "bun" -await $`bun run prettier --ignore-unknown --write .` +const dir = Bun.argv[2] ?? "." 
+ +await $`bun run prettier --ignore-unknown --write ${dir}` diff --git a/script/generate.ts b/script/generate.ts index 8fc251d89d..a99d3e77ef 100755 --- a/script/generate.ts +++ b/script/generate.ts @@ -6,4 +6,4 @@ await $`bun ./packages/sdk/js/script/build.ts` await $`bun dev generate > ../sdk/openapi.json`.cwd("packages/opencode") -await $`./script/format.ts` +await $`./script/format.ts ./packages/sdk` From fcb473ff64f0767461c27db8942ce41df3e115d3 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 17:25:44 +0000 Subject: [PATCH 165/201] chore: update nix node_modules hashes --- nix/hashes.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nix/hashes.json b/nix/hashes.json index 54fd991eca..5fe1189579 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,8 +1,8 @@ { "nodeModules": { - "x86_64-linux": "sha256-OPbZUo/fQv2Xsf+NEZV08GLBMN/DXovhRvn2JkesFtY=", - "aarch64-linux": "sha256-WK7xlVLuirKDN5LaqjBn7qpv5bYVtYHZw0qRNKX4xXg=", - "aarch64-darwin": "sha256-BAoAdeLQ+lXDD7Klxoxij683OVVug8KXEMRUqIQAjc8=", - "x86_64-darwin": "sha256-ZOBwNR2gZgc5f+y3VIBBT4qZpeZfg7Of6AaGDOfqsG8=" + "x86_64-linux": "sha256-60KBy/mySuIKbBD5aCS4ZAQqnwZ4PjLMAqZH7gpKCFY=", + "aarch64-linux": "sha256-ROd9OfdCBQZntoAr32O3YVl7ljRgYvJma25U+jHwcts=", + "aarch64-darwin": "sha256-MjMfR73ZZLXtIXfuzqpjvD5RxmIRi9HA1jWXPvagU6w=", + "x86_64-darwin": "sha256-1BADHsSdMxJUbQ4DR/Ww4ZTt18H365lETJs7Fy7fsLc=" } } From a8c78fc005a9a1cff3622a68f65b1df550cb2ccc Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 13:30:09 -0400 Subject: [PATCH 166/201] fix(core): add historical sync on workspace connect (#23121) --- .../opencode/src/cli/cmd/tui/context/sdk.tsx | 28 +++++- .../opencode/src/control-plane/workspace.ts | 97 ++++++++++++++++--- packages/opencode/src/server/proxy.ts | 7 -- .../src/server/routes/instance/sync.ts | 26 ++++- .../test/workspace/workspace-restore.test.ts | 5 +- packages/sdk/js/src/v2/gen/sdk.gen.ts | 33 ++++++- 
packages/sdk/js/src/v2/gen/types.gen.ts | 19 ++++ packages/sdk/openapi.json | 43 +++++++- 8 files changed, 234 insertions(+), 24 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/context/sdk.tsx b/packages/opencode/src/cli/cmd/tui/context/sdk.tsx index 14d3062886..6a240ceef8 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sdk.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sdk.tsx @@ -2,6 +2,7 @@ import { createOpencodeClient } from "@opencode-ai/sdk/v2" import type { GlobalEvent } from "@opencode-ai/sdk/v2" import { createSimpleContext } from "./helper" import { createGlobalEmitter } from "@solid-primitives/event-bus" +import { Flag } from "@/flag/flag" import { batch, onCleanup, onMount } from "solid-js" export type EventSource = { @@ -39,6 +40,8 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ let queue: GlobalEvent[] = [] let timer: Timer | undefined let last = 0 + const retryDelay = 1000 + const maxRetryDelay = 30000 const flush = () => { if (queue.length === 0) return @@ -73,9 +76,20 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ const ctrl = new AbortController() sse = ctrl ;(async () => { + let attempt = 0 while (true) { if (abort.signal.aborted || ctrl.signal.aborted) break - const events = await sdk.global.event({ signal: ctrl.signal }) + + const events = await sdk.global.event({ + signal: ctrl.signal, + sseMaxRetryAttempts: 0, + }) + + if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { + // Start syncing workspaces, it's important to do this after + // we've started listening to events + await sdk.sync.start().catch(() => {}) + } for await (const event of events.stream) { if (ctrl.signal.aborted) break @@ -84,6 +98,12 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ if (timer) clearTimeout(timer) if (queue.length > 0) flush() + attempt += 1 + if (abort.signal.aborted || ctrl.signal.aborted) break + + // Exponential backoff + const backoff = 
Math.min(retryDelay * 2 ** (attempt - 1), maxRetryDelay) + await new Promise((resolve) => setTimeout(resolve, backoff)) } })().catch(() => {}) } @@ -92,6 +112,12 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ if (props.events) { const unsub = await props.events.subscribe(handleEvent) onCleanup(unsub) + + if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { + // Start syncing workspaces, it's important to do this after + // we've started listening to events + await sdk.sync.start().catch(() => {}) + } } else { startSSE() } diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index fd22d3af04..d678ad7526 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -7,7 +7,7 @@ import { BusEvent } from "@/bus/bus-event" import { GlobalBus } from "@/bus/global" import { Auth } from "@/auth" import { SyncEvent } from "@/sync" -import { EventTable } from "@/sync/event.sql" +import { EventSequenceTable, EventTable } from "@/sync/event.sql" import { Flag } from "@/flag/flag" import { Log } from "@/util" import { Filesystem } from "@/util" @@ -23,8 +23,8 @@ import { SessionTable } from "@/session/session.sql" import { SessionID } from "@/session/schema" import { errorData } from "@/util/error" import { AppRuntime } from "@/effect/app-runtime" -import { EventSequenceTable } from "@/sync/event.sql" import { waitEvent } from "./util" +import { WorkspaceContext } from "./workspace-context" export const Info = WorkspaceInfo.meta({ ref: "Workspace", @@ -297,22 +297,13 @@ export function list(project: Project.Info) { db.select().from(WorkspaceTable).where(eq(WorkspaceTable.project_id, project.id)).all(), ) const spaces = rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id)) - - for (const space of spaces) startSync(space) return spaces } -function lookup(id: WorkspaceID) { +export const get = fn(WorkspaceID.zod, async (id) => { const 
row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) if (!row) return return fromRow(row) -} - -export const get = fn(WorkspaceID.zod, async (id) => { - const space = lookup(id) - if (!space) return - startSync(space) - return space }) export const remove = fn(WorkspaceID.zod, async (id) => { @@ -437,6 +428,70 @@ async function connectSSE(url: URL | string, headers: HeadersInit | undefined, s return res.body } +async function syncHistory(space: Info, url: URL | string, headers: HeadersInit | undefined, signal: AbortSignal) { + const sessionIDs = Database.use((db) => + db + .select({ id: SessionTable.id }) + .from(SessionTable) + .where(eq(SessionTable.workspace_id, space.id)) + .all() + .map((row) => row.id), + ) + const state = sessionIDs.length + ? Object.fromEntries( + Database.use((db) => + db.select().from(EventSequenceTable).where(inArray(EventSequenceTable.aggregate_id, sessionIDs)).all(), + ).map((row) => [row.aggregate_id, row.seq]), + ) + : {} + + log.info("syncing workspace history", { + workspaceID: space.id, + sessions: sessionIDs.length, + known: Object.keys(state).length, + }) + + const requestHeaders = new Headers(headers) + requestHeaders.set("content-type", "application/json") + + const res = await fetch(route(url, "/sync/history"), { + method: "POST", + headers: requestHeaders, + body: JSON.stringify(state), + signal, + }) + + if (!res.ok) { + const body = await res.text() + throw new Error(`Workspace history HTTP failure: ${res.status} ${body}`) + } + + const events = await res.json() + + return WorkspaceContext.provide({ + workspaceID: space.id, + fn: () => { + for (const event of events) { + SyncEvent.replay( + { + id: event.id, + aggregateID: event.aggregate_id, + seq: event.seq, + type: event.type, + data: event.data, + }, + { publish: true }, + ) + } + }, + }) + + log.info("workspace history synced", { + workspaceID: space.id, + events: events.length, + }) +} + async function 
syncWorkspaceLoop(space: Info, signal: AbortSignal) { const adaptor = await getAdaptor(space.projectID, space.type) const target = await adaptor.target(space) @@ -452,7 +507,9 @@ async function syncWorkspaceLoop(space: Info, signal: AbortSignal) { let stream try { stream = await connectSSE(target.url, target.headers, signal) + await syncHistory(space, target.url, target.headers, signal) } catch (err) { + stream = null setStatus(space.id, "error") log.info("failed to connect to global sync", { workspace: space.name, @@ -469,6 +526,7 @@ async function syncWorkspaceLoop(space: Info, signal: AbortSignal) { await parseSSE(stream, signal, (evt: any) => { try { if (!("payload" in evt)) return + if (evt.payload.type === "server.heartbeat") return if (evt.payload.type === "sync") { SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) @@ -536,4 +594,19 @@ function stopSync(id: WorkspaceID) { connections.delete(id) } +export function startWorkspaceSyncing(projectID: ProjectID) { + const spaces = Database.use((db) => + db + .select({ workspace: WorkspaceTable }) + .from(WorkspaceTable) + .innerJoin(SessionTable, eq(SessionTable.workspace_id, WorkspaceTable.id)) + .where(eq(WorkspaceTable.project_id, projectID)) + .all(), + ) + + for (const row of new Map(spaces.map((row) => [row.workspace.id, row.workspace])).values()) { + void startSync(fromRow(row)) + } +} + export * as Workspace from "./workspace" diff --git a/packages/opencode/src/server/proxy.ts b/packages/opencode/src/server/proxy.ts index 9c1fd1f288..19a623cb0c 100644 --- a/packages/opencode/src/server/proxy.ts +++ b/packages/opencode/src/server/proxy.ts @@ -130,13 +130,6 @@ export async function http(url: string | URL, extra: HeadersInit | undefined, re const done = sync ? 
Fence.wait(workspaceID, sync, req.signal) : Promise.resolve() return done.then(async () => { - console.log("proxy http response", { - method: req.method, - request: req.url, - url: String(url), - status: res.status, - statusText: res.statusText, - }) return new Response(res.body, { status: res.status, statusText: res.statusText, diff --git a/packages/opencode/src/server/routes/instance/sync.ts b/packages/opencode/src/server/routes/instance/sync.ts index c6a067997b..b124cd875d 100644 --- a/packages/opencode/src/server/routes/instance/sync.ts +++ b/packages/opencode/src/server/routes/instance/sync.ts @@ -6,6 +6,8 @@ import { Database, asc, and, not, or, lte, eq } from "@/storage" import { EventTable } from "@/sync/event.sql" import { lazy } from "@/util/lazy" import { Log } from "@/util" +import { startWorkspaceSyncing } from "@/control-plane/workspace" +import { Instance } from "@/project/instance" import { errors } from "../../error" const ReplayEvent = z.object({ @@ -20,6 +22,28 @@ const log = Log.create({ service: "server.sync" }) export const SyncRoutes = lazy(() => new Hono() + .post( + "/start", + describeRoute({ + summary: "Start workspace sync", + description: "Start sync loops for workspaces in the current project that have active sessions.", + operationId: "sync.start", + responses: { + 200: { + description: "Workspace sync started", + content: { + "application/json": { + schema: resolver(z.boolean()), + }, + }, + }, + }, + }), + async (c) => { + startWorkspaceSyncing(Instance.project.id) + return c.json(true) + }, + ) .post( "/replay", describeRoute({ @@ -75,7 +99,7 @@ export const SyncRoutes = lazy(() => }) }, ) - .get( + .post( "/history", describeRoute({ summary: "List sync events", diff --git a/packages/opencode/test/workspace/workspace-restore.test.ts b/packages/opencode/test/workspace/workspace-restore.test.ts index 429eeaf9dd..ad6ac2c5fd 100644 --- a/packages/opencode/test/workspace/workspace-restore.test.ts +++ 
b/packages/opencode/test/workspace/workspace-restore.test.ts @@ -141,9 +141,12 @@ describe("Workspace.sessionRestore", () => { Object.assign( async (input: URL | RequestInfo, init?: BunFetchRequestInit | RequestInit) => { const url = new URL(typeof input === "string" || input instanceof URL ? input : input.url) - if (url.pathname !== "/base/sync/replay") { + if (url.pathname === "/base/global/event") { return eventStreamResponse() } + if (url.pathname === "/base/sync/history") { + return Response.json([]) + } const body = JSON.parse(String(init?.body)) posts.push({ path: url.pathname, diff --git a/packages/sdk/js/src/v2/gen/sdk.gen.ts b/packages/sdk/js/src/v2/gen/sdk.gen.ts index f484147a40..6248eb8e4d 100644 --- a/packages/sdk/js/src/v2/gen/sdk.gen.ts +++ b/packages/sdk/js/src/v2/gen/sdk.gen.ts @@ -163,6 +163,7 @@ import type { SyncHistoryListResponses, SyncReplayErrors, SyncReplayResponses, + SyncStartResponses, TextPartInput, ToolIdsErrors, ToolIdsResponses, @@ -3038,7 +3039,7 @@ export class History extends HeyApiClient { }, ], ) - return (options?.client ?? this.client).get({ + return (options?.client ?? this.client).post({ url: "/sync/history", ...options, ...params, @@ -3052,6 +3053,36 @@ export class History extends HeyApiClient { } export class Sync extends HeyApiClient { + /** + * Start workspace sync + * + * Start sync loops for workspaces in the current project that have active sessions. + */ + public start( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? 
this.client).post({ + url: "/sync/start", + ...options, + ...params, + }) + } + /** * Replay sync events * diff --git a/packages/sdk/js/src/v2/gen/types.gen.ts b/packages/sdk/js/src/v2/gen/types.gen.ts index 460c2bcdfa..5698cba54f 100644 --- a/packages/sdk/js/src/v2/gen/types.gen.ts +++ b/packages/sdk/js/src/v2/gen/types.gen.ts @@ -4502,6 +4502,25 @@ export type ProviderOauthCallbackResponses = { export type ProviderOauthCallbackResponse = ProviderOauthCallbackResponses[keyof ProviderOauthCallbackResponses] +export type SyncStartData = { + body?: never + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/sync/start" +} + +export type SyncStartResponses = { + /** + * Workspace sync started + */ + 200: boolean +} + +export type SyncStartResponse = SyncStartResponses[keyof SyncStartResponses] + export type SyncReplayData = { body?: { directory: string diff --git a/packages/sdk/openapi.json b/packages/sdk/openapi.json index 7bdf025bbe..3b811f2fa9 100644 --- a/packages/sdk/openapi.json +++ b/packages/sdk/openapi.json @@ -5224,6 +5224,47 @@ ] } }, + "/sync/start": { + "post": { + "operationId": "sync.start", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "Start workspace sync", + "description": "Start sync loops for workspaces in the current project that have active sessions.", + "responses": { + "200": { + "description": "Workspace sync started", + "content": { + "application/json": { + "schema": { + "type": "boolean" + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.sync.start({\n ...\n})" + } + ] + } + }, "/sync/replay": { "post": { "operationId": "sync.replay", @@ -5328,7 +5369,7 @@ } }, "/sync/history": { - "get": { + "post": { "operationId": 
"sync.history.list", "parameters": [ { From 4c30a78cd9623fe8f3a7c27860a7b8a0cc760e39 Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 13:33:11 -0400 Subject: [PATCH 167/201] fix: revert sdk generation script change (#23133) --- script/format.ts | 4 +--- script/generate.ts | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/script/format.ts b/script/format.ts index 37ab2197d0..996de9ad04 100755 --- a/script/format.ts +++ b/script/format.ts @@ -2,6 +2,4 @@ import { $ } from "bun" -const dir = Bun.argv[2] ?? "." - -await $`bun run prettier --ignore-unknown --write ${dir}` +await $`bun run prettier --ignore-unknown --write .` diff --git a/script/generate.ts b/script/generate.ts index a99d3e77ef..8fc251d89d 100755 --- a/script/generate.ts +++ b/script/generate.ts @@ -6,4 +6,4 @@ await $`bun ./packages/sdk/js/script/build.ts` await $`bun dev generate > ../sdk/openapi.json`.cwd("packages/opencode") -await $`./script/format.ts ./packages/sdk` +await $`./script/format.ts` From 2f73e73e9d03262fb59d4e942b3e1e073cb76cb9 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Fri, 17 Apr 2026 14:08:23 -0400 Subject: [PATCH 168/201] trace npm fully --- .opencode/opencode.jsonc | 6 +- .../opencode/src/cli/cmd/tui/config/tui.ts | 4 +- packages/opencode/src/cli/cmd/tui/layer.ts | 2 +- packages/opencode/src/config/config.ts | 2 +- packages/opencode/src/effect/app-runtime.ts | 5 +- .../opencode/src/effect/bootstrap-runtime.ts | 2 +- packages/opencode/src/effect/memo-map.ts | 3 + packages/opencode/src/effect/run-service.ts | 3 +- .../opencode/src/{cli => }/effect/runtime.ts | 5 +- packages/opencode/src/npm/effect.ts | 261 ++++++++++++++++++ packages/opencode/src/plugin/shared.ts | 2 +- .../server/routes/instance/httpapi/server.ts | 2 +- packages/opencode/test/config/config.test.ts | 2 +- packages/opencode/test/tool/read.test.ts | 1 - packages/shared/src/npm.ts | 249 ----------------- packages/shared/test/npm.test.ts | 18 -- 16 files changed, 279 insertions(+), 
288 deletions(-) create mode 100644 packages/opencode/src/effect/memo-map.ts rename packages/opencode/src/{cli => }/effect/runtime.ts (90%) create mode 100644 packages/opencode/src/npm/effect.ts delete mode 100644 packages/shared/src/npm.ts delete mode 100644 packages/shared/test/npm.test.ts diff --git a/.opencode/opencode.jsonc b/.opencode/opencode.jsonc index 8380f7f719..82ab6d1b35 100644 --- a/.opencode/opencode.jsonc +++ b/.opencode/opencode.jsonc @@ -1,10 +1,6 @@ { "$schema": "https://opencode.ai/config.json", - "provider": { - "opencode": { - "options": {}, - }, - }, + "provider": {}, "permission": { "edit": { "packages/opencode/migration/*": "deny", diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index a5c9ae0430..abcf11fcef 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -11,14 +11,14 @@ import { Flag } from "@/flag/flag" import { isRecord } from "@/util/record" import { Global } from "@/global" import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Npm } from "@opencode-ai/shared/npm" import { CurrentWorkingDirectory } from "./cwd" import { ConfigPlugin } from "@/config/plugin" import { ConfigKeybinds } from "@/config/keybinds" import { InstallationLocal, InstallationVersion } from "@/installation/version" -import { makeRuntime } from "@/cli/effect/runtime" +import { makeRuntime } from "@/effect/runtime" import { Filesystem, Log } from "@/util" import { ConfigVariable } from "@/config/variable" +import { Npm } from "@/npm/effect" const log = Log.create({ service: "tui.config" }) diff --git a/packages/opencode/src/cli/cmd/tui/layer.ts b/packages/opencode/src/cli/cmd/tui/layer.ts index 734106f8a6..66497f8b1a 100644 --- a/packages/opencode/src/cli/cmd/tui/layer.ts +++ b/packages/opencode/src/cli/cmd/tui/layer.ts @@ -1,6 +1,6 @@ import { Layer } from "effect" import { TuiConfig } from "./config/tui" -import { 
Npm } from "@opencode-ai/shared/npm" +import { Npm } from "@/npm/effect" import { Observability } from "@/effect/observability" export const CliLayer = Observability.layer.pipe(Layer.merge(TuiConfig.layer), Layer.provide(Npm.defaultLayer)) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index ebd4a41fcb..8980765b79 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -24,7 +24,6 @@ import { InstanceState } from "@/effect" import { Context, Duration, Effect, Exit, Fiber, Layer, Option } from "effect" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" import { InstanceRef } from "@/effect/instance-ref" -import { Npm } from "@opencode-ai/shared/npm" import { ConfigAgent } from "./agent" import { ConfigMCP } from "./mcp" import { ConfigModelID } from "./model-id" @@ -39,6 +38,7 @@ import { ConfigPaths } from "./paths" import { ConfigFormatter } from "./formatter" import { ConfigLSP } from "./lsp" import { ConfigVariable } from "./variable" +import { Npm } from "@/npm/effect" const log = Log.create({ service: "config" }) diff --git a/packages/opencode/src/effect/app-runtime.ts b/packages/opencode/src/effect/app-runtime.ts index eae52d6366..262d85e7ea 100644 --- a/packages/opencode/src/effect/app-runtime.ts +++ b/packages/opencode/src/effect/app-runtime.ts @@ -1,5 +1,5 @@ import { Layer, ManagedRuntime } from "effect" -import { attach, memoMap } from "./run-service" +import { attach } from "./run-service" import * as Observability from "./observability" import { AppFileSystem } from "@opencode-ai/shared/filesystem" @@ -46,7 +46,8 @@ import { Pty } from "@/pty" import { Installation } from "@/installation" import { ShareNext } from "@/share" import { SessionShare } from "@/share" -import { Npm } from "@opencode-ai/shared/npm" +import { Npm } from "@/npm/effect" +import { memoMap } from "./memo-map" export const AppLayer = Layer.mergeAll( Npm.defaultLayer, diff 
--git a/packages/opencode/src/effect/bootstrap-runtime.ts b/packages/opencode/src/effect/bootstrap-runtime.ts index 62b71e58b1..37698c43a5 100644 --- a/packages/opencode/src/effect/bootstrap-runtime.ts +++ b/packages/opencode/src/effect/bootstrap-runtime.ts @@ -1,5 +1,4 @@ import { Layer, ManagedRuntime } from "effect" -import { memoMap } from "./run-service" import { Plugin } from "@/plugin" import { LSP } from "@/lsp" @@ -12,6 +11,7 @@ import { Snapshot } from "@/snapshot" import { Bus } from "@/bus" import { Config } from "@/config" import * as Observability from "./observability" +import { memoMap } from "./memo-map" export const BootstrapLayer = Layer.mergeAll( Config.defaultLayer, diff --git a/packages/opencode/src/effect/memo-map.ts b/packages/opencode/src/effect/memo-map.ts new file mode 100644 index 0000000000..c797dbf42e --- /dev/null +++ b/packages/opencode/src/effect/memo-map.ts @@ -0,0 +1,3 @@ +import { Layer } from "effect" + +export const memoMap = Layer.makeMemoMapUnsafe() diff --git a/packages/opencode/src/effect/run-service.ts b/packages/opencode/src/effect/run-service.ts index 28265f9b27..98ff83ea59 100644 --- a/packages/opencode/src/effect/run-service.ts +++ b/packages/opencode/src/effect/run-service.ts @@ -6,8 +6,7 @@ import { InstanceRef, WorkspaceRef } from "./instance-ref" import * as Observability from "./observability" import { WorkspaceContext } from "@/control-plane/workspace-context" import type { InstanceContext } from "@/project/instance" - -export const memoMap = Layer.makeMemoMapUnsafe() +import { memoMap } from "./memo-map" type Refs = { instance?: InstanceContext diff --git a/packages/opencode/src/cli/effect/runtime.ts b/packages/opencode/src/effect/runtime.ts similarity index 90% rename from packages/opencode/src/cli/effect/runtime.ts rename to packages/opencode/src/effect/runtime.ts index 57b9f8ede9..ad7872f0b5 100644 --- a/packages/opencode/src/cli/effect/runtime.ts +++ b/packages/opencode/src/effect/runtime.ts @@ -1,7 +1,6 @@ 
-import { Observability } from "@/effect/observability" +import { Observability } from "./observability" import { Layer, type Context, ManagedRuntime, type Effect } from "effect" - -export const memoMap = Layer.makeMemoMapUnsafe() +import { memoMap } from "./memo-map" export function makeRuntime(service: Context.Service, layer: Layer.Layer) { let rt: ManagedRuntime.ManagedRuntime | undefined diff --git a/packages/opencode/src/npm/effect.ts b/packages/opencode/src/npm/effect.ts new file mode 100644 index 0000000000..10b5ff179c --- /dev/null +++ b/packages/opencode/src/npm/effect.ts @@ -0,0 +1,261 @@ +export * as Npm from "./effect" + +import path from "path" +import semver from "semver" +import { Effect, Schema, Context, Layer, Option, FileSystem } from "effect" +import { NodeFileSystem } from "@effect/platform-node" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { Global } from "@opencode-ai/shared/global" +import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" + +import { makeRuntime } from "../effect/runtime" + +export class InstallFailedError extends Schema.TaggedErrorClass()("NpmInstallFailedError", { + add: Schema.Array(Schema.String).pipe(Schema.optional), + dir: Schema.String, + cause: Schema.optional(Schema.Defect), +}) {} + +export interface EntryPoint { + readonly directory: string + readonly entrypoint: Option.Option +} + +export interface Interface { + readonly add: (pkg: string) => Effect.Effect + readonly install: ( + dir: string, + input?: { add: string[] }, + ) => Effect.Effect + readonly outdated: (pkg: string, cachedVersion: string) => Effect.Effect + readonly which: (pkg: string) => Effect.Effect> +} + +export class Service extends Context.Service()("@opencode/Npm") {} + +const illegal = process.platform === "win32" ? 
new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined + +export function sanitize(pkg: string) { + if (!illegal) return pkg + return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? "_" : char)).join("") +} + +const resolveEntryPoint = (name: string, dir: string): EntryPoint => { + let entrypoint: Option.Option + try { + const resolved = typeof Bun !== "undefined" ? import.meta.resolve(name, dir) : import.meta.resolve(dir) + entrypoint = Option.some(resolved) + } catch { + entrypoint = Option.none() + } + return { + directory: dir, + entrypoint, + } +} + +interface ArboristNode { + name: string + path: string +} + +interface ArboristTree { + edgesOut: Map +} + +const reify = (input: { dir: string; add?: string[] }) => + Effect.gen(function* () { + const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) + const arborist = new Arborist({ + path: input.dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + return yield* Effect.tryPromise({ + try: () => + arborist.reify({ + add: input?.add || [], + save: true, + saveType: "prod", + }), + catch: (cause) => + new InstallFailedError({ + cause, + add: input?.add, + dir: input.dir, + }), + }) as Effect.Effect + }).pipe( + Effect.withSpan("Npm.reify", { + attributes: input, + }), + ) + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const afs = yield* AppFileSystem.Service + const global = yield* Global.Service + const fs = yield* FileSystem.FileSystem + const flock = yield* EffectFlock.Service + const directory = (pkg: string) => path.join(global.cache, "packages", sanitize(pkg)) + + const outdated = Effect.fn("Npm.outdated")(function* (pkg: string, cachedVersion: string) { + const response = yield* Effect.tryPromise({ + try: () => fetch(`https://registry.npmjs.org/${pkg}`), + catch: () => undefined, + }).pipe(Effect.orElseSucceed(() => undefined)) + + if (!response || !response.ok) { + return false + } + + const 
data = yield* Effect.tryPromise({ + try: () => response.json() as Promise<{ "dist-tags"?: { latest?: string } }>, + catch: () => undefined, + }).pipe(Effect.orElseSucceed(() => undefined)) + + const latestVersion = data?.["dist-tags"]?.latest + if (!latestVersion) { + return false + } + + const range = /[\s^~*xX<>|=]/.test(cachedVersion) + if (range) return !semver.satisfies(latestVersion, cachedVersion) + + return semver.lt(cachedVersion, latestVersion) + }) + + const add = Effect.fn("Npm.add")(function* (pkg: string) { + const dir = directory(pkg) + yield* flock.acquire(`npm-install:${dir}`) + + const tree = yield* reify({ dir, add: [pkg] }) + const first = tree.edgesOut.values().next().value?.to + if (!first) return yield* new InstallFailedError({ add: [pkg], dir }) + return resolveEntryPoint(first.name, first.path) + }, Effect.scoped) + + const install = Effect.fn("Npm.install")(function* (dir: string, input?: { add: string[] }) { + const canWrite = yield* afs.access(dir, { writable: true }).pipe( + Effect.as(true), + Effect.orElseSucceed(() => false), + ) + if (!canWrite) return + + yield* flock.acquire(`npm-install:${dir}`) + + yield* Effect.gen(function* () { + const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) + if (!nodeModulesExists) { + yield* reify({ add: input?.add, dir }) + return + } + }).pipe(Effect.withSpan("Npm.checkNodeModules")) + + yield* Effect.gen(function* () { + const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) + const lock = yield* afs.readJson(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) + + const pkgAny = pkg as any + const lockAny = lock as any + const declared = new Set([ + ...Object.keys(pkgAny?.dependencies || {}), + ...Object.keys(pkgAny?.devDependencies || {}), + ...Object.keys(pkgAny?.peerDependencies || {}), + ...Object.keys(pkgAny?.optionalDependencies || {}), + ...(input?.add || []), + ]) + + const root = 
lockAny?.packages?.[""] || {} + const locked = new Set([ + ...Object.keys(root?.dependencies || {}), + ...Object.keys(root?.devDependencies || {}), + ...Object.keys(root?.peerDependencies || {}), + ...Object.keys(root?.optionalDependencies || {}), + ]) + + for (const name of declared) { + if (!locked.has(name)) { + yield* reify({ dir, add: input?.add }) + return + } + } + }).pipe(Effect.withSpan("Npm.checkDirty")) + + return + }, Effect.scoped) + + const which = Effect.fn("Npm.which")(function* (pkg: string) { + const dir = directory(pkg) + const binDir = path.join(dir, "node_modules", ".bin") + + const pick = Effect.fnUntraced(function* () { + const files = yield* fs.readDirectory(binDir).pipe(Effect.catch(() => Effect.succeed([] as string[]))) + + if (files.length === 0) return Option.none() + if (files.length === 1) return Option.some(files[0]) + + const pkgJson = yield* afs.readJson(path.join(dir, "node_modules", pkg, "package.json")).pipe(Effect.option) + + if (Option.isSome(pkgJson)) { + const parsed = pkgJson.value as { bin?: string | Record } + if (parsed?.bin) { + const unscoped = pkg.startsWith("@") ? pkg.split("/")[1] : pkg + const bin = parsed.bin + if (typeof bin === "string") return Option.some(unscoped) + const keys = Object.keys(bin) + if (keys.length === 1) return Option.some(keys[0]) + return bin[unscoped] ? 
Option.some(unscoped) : Option.some(keys[0]) + } + } + + return Option.some(files[0]) + }) + + return yield* Effect.gen(function* () { + const bin = yield* pick() + if (Option.isSome(bin)) { + return Option.some(path.join(binDir, bin.value)) + } + + yield* fs.remove(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => {})) + + yield* add(pkg) + + const resolved = yield* pick() + if (Option.isNone(resolved)) return Option.none() + return Option.some(path.join(binDir, resolved.value)) + }).pipe( + Effect.scoped, + Effect.orElseSucceed(() => Option.none()), + ) + }) + + return Service.of({ + add, + install, + outdated, + which, + }) + }), +) + +export const defaultLayer = layer.pipe( + Layer.provide(EffectFlock.layer), + Layer.provide(AppFileSystem.layer), + Layer.provide(Global.layer), + Layer.provide(NodeFileSystem.layer), +) + +const { runPromise } = makeRuntime(Service, defaultLayer) + +export async function install(...args: Parameters) { + return runPromise((svc) => svc.install(...args)) +} + +export async function add(...args: Parameters) { + return runPromise((svc) => svc.add(...args)) +} diff --git a/packages/opencode/src/plugin/shared.ts b/packages/opencode/src/plugin/shared.ts index 11f36c41ae..f431204fc4 100644 --- a/packages/opencode/src/plugin/shared.ts +++ b/packages/opencode/src/plugin/shared.ts @@ -2,9 +2,9 @@ import path from "path" import { fileURLToPath, pathToFileURL } from "url" import npa from "npm-package-arg" import semver from "semver" -import { Npm } from "../npm" import { Filesystem } from "@/util" import { isRecord } from "@/util/record" +import { Npm } from "@/npm/effect" // Old npm package names for plugins that are now built-in export const DEPRECATED_PLUGIN_PACKAGES = ["opencode-openai-codex-auth", "opencode-copilot-auth"] diff --git a/packages/opencode/src/server/routes/instance/httpapi/server.ts b/packages/opencode/src/server/routes/instance/httpapi/server.ts index b4442d6400..d012e2c166 100644 --- 
a/packages/opencode/src/server/routes/instance/httpapi/server.ts +++ b/packages/opencode/src/server/routes/instance/httpapi/server.ts @@ -4,7 +4,6 @@ import { HttpRouter, HttpServer, HttpServerRequest } from "effect/unstable/http" import { AppRuntime } from "@/effect/app-runtime" import { InstanceRef, WorkspaceRef } from "@/effect/instance-ref" import { Observability } from "@/effect" -import { memoMap } from "@/effect/run-service" import { Flag } from "@/flag/flag" import { InstanceBootstrap } from "@/project/bootstrap" import { Instance } from "@/project/instance" @@ -15,6 +14,7 @@ import { PermissionApi, permissionHandlers } from "./permission" import { ProjectApi, projectHandlers } from "./project" import { ProviderApi, providerHandlers } from "./provider" import { QuestionApi, questionHandlers } from "./question" +import { memoMap } from "@/effect/memo-map" const Query = Schema.Struct({ directory: Schema.optional(Schema.String), diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index a321b558cf..7b01ee626a 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -27,7 +27,7 @@ import { Global } from "../../src/global" import { ProjectID } from "../../src/project/schema" import { Filesystem } from "../../src/util" import { ConfigPlugin } from "@/config/plugin" -import { Npm } from "@opencode-ai/shared/npm" +import { Npm } from "@/npm/effect" const emptyAccount = Layer.mock(Account.Service)({ active: () => Effect.succeed(Option.none()), diff --git a/packages/opencode/test/tool/read.test.ts b/packages/opencode/test/tool/read.test.ts index c3d7074bfb..7456990ad0 100644 --- a/packages/opencode/test/tool/read.test.ts +++ b/packages/opencode/test/tool/read.test.ts @@ -15,7 +15,6 @@ import { Tool } from "../../src/tool" import { Filesystem } from "../../src/util" import { provideInstance, tmpdirScoped } from "../fixture/fixture" import { testEffect } from 
"../lib/effect" -import { Npm } from "@opencode-ai/shared/npm" const FIXTURES_DIR = path.join(import.meta.dir, "fixtures") diff --git a/packages/shared/src/npm.ts b/packages/shared/src/npm.ts deleted file mode 100644 index 865e827b31..0000000000 --- a/packages/shared/src/npm.ts +++ /dev/null @@ -1,249 +0,0 @@ -import path from "path" -import semver from "semver" -import { Effect, Schema, Context, Layer, Option, FileSystem } from "effect" -import { NodeFileSystem } from "@effect/platform-node" -import { AppFileSystem } from "./filesystem" -import { Global } from "./global" -import { EffectFlock } from "./util/effect-flock" - -export namespace Npm { - export class InstallFailedError extends Schema.TaggedErrorClass()("NpmInstallFailedError", { - add: Schema.Array(Schema.String).pipe(Schema.optional), - dir: Schema.String, - cause: Schema.optional(Schema.Defect), - }) {} - - export interface EntryPoint { - readonly directory: string - readonly entrypoint: Option.Option - } - - export interface Interface { - readonly add: (pkg: string) => Effect.Effect - readonly install: ( - dir: string, - input?: { add: string[] }, - ) => Effect.Effect - readonly outdated: (pkg: string, cachedVersion: string) => Effect.Effect - readonly which: (pkg: string) => Effect.Effect> - } - - export class Service extends Context.Service()("@opencode/Npm") {} - - const illegal = process.platform === "win32" ? new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined - - export function sanitize(pkg: string) { - if (!illegal) return pkg - return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? "_" : char)).join("") - } - - const resolveEntryPoint = (name: string, dir: string): EntryPoint => { - let entrypoint: Option.Option - try { - const resolved = typeof Bun !== "undefined" ? 
import.meta.resolve(name, dir) : import.meta.resolve(dir) - entrypoint = Option.some(resolved) - } catch { - entrypoint = Option.none() - } - return { - directory: dir, - entrypoint, - } - } - - interface ArboristNode { - name: string - path: string - } - - interface ArboristTree { - edgesOut: Map - } - - const reify = (input: { dir: string; add?: string[] }) => - Effect.gen(function* () { - const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) - const arborist = new Arborist({ - path: input.dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - return yield* Effect.tryPromise({ - try: () => - arborist.reify({ - add: input?.add || [], - save: true, - saveType: "prod", - }), - catch: (cause) => - new InstallFailedError({ - cause, - add: input?.add, - dir: input.dir, - }), - }) as Effect.Effect - }).pipe( - Effect.withSpan("Npm.reify", { - attributes: input, - }), - ) - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const afs = yield* AppFileSystem.Service - const global = yield* Global.Service - const fs = yield* FileSystem.FileSystem - const flock = yield* EffectFlock.Service - const directory = (pkg: string) => path.join(global.cache, "packages", sanitize(pkg)) - - const outdated = Effect.fn("Npm.outdated")(function* (pkg: string, cachedVersion: string) { - const response = yield* Effect.tryPromise({ - try: () => fetch(`https://registry.npmjs.org/${pkg}`), - catch: () => undefined, - }).pipe(Effect.orElseSucceed(() => undefined)) - - if (!response || !response.ok) { - return false - } - - const data = yield* Effect.tryPromise({ - try: () => response.json() as Promise<{ "dist-tags"?: { latest?: string } }>, - catch: () => undefined, - }).pipe(Effect.orElseSucceed(() => undefined)) - - const latestVersion = data?.["dist-tags"]?.latest - if (!latestVersion) { - return false - } - - const range = /[\s^~*xX<>|=]/.test(cachedVersion) - if (range) return 
!semver.satisfies(latestVersion, cachedVersion) - - return semver.lt(cachedVersion, latestVersion) - }) - - const add = Effect.fn("Npm.add")(function* (pkg: string) { - const dir = directory(pkg) - yield* flock.acquire(`npm-install:${dir}`) - - const tree = yield* reify({ dir, add: [pkg] }) - const first = tree.edgesOut.values().next().value?.to - if (!first) return yield* new InstallFailedError({ add: [pkg], dir }) - return resolveEntryPoint(first.name, first.path) - }, Effect.scoped) - - const install = Effect.fn("Npm.install")(function* (dir: string, input?: { add: string[] }) { - const canWrite = yield* afs.access(dir, { writable: true }).pipe( - Effect.as(true), - Effect.orElseSucceed(() => false), - ) - if (!canWrite) return - - yield* flock.acquire(`npm-install:${dir}`) - - yield* Effect.gen(function* () { - const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) - if (!nodeModulesExists) { - yield* reify({ add: input?.add, dir }) - return - } - }).pipe(Effect.withSpan("Npm.checkNodeModules")) - - yield* Effect.gen(function* () { - const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) - const lock = yield* afs.readJson(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) - - const pkgAny = pkg as any - const lockAny = lock as any - const declared = new Set([ - ...Object.keys(pkgAny?.dependencies || {}), - ...Object.keys(pkgAny?.devDependencies || {}), - ...Object.keys(pkgAny?.peerDependencies || {}), - ...Object.keys(pkgAny?.optionalDependencies || {}), - ...(input?.add || []), - ]) - - const root = lockAny?.packages?.[""] || {} - const locked = new Set([ - ...Object.keys(root?.dependencies || {}), - ...Object.keys(root?.devDependencies || {}), - ...Object.keys(root?.peerDependencies || {}), - ...Object.keys(root?.optionalDependencies || {}), - ]) - - for (const name of declared) { - if (!locked.has(name)) { - yield* reify({ dir, add: input?.add }) - return - } - 
} - }).pipe(Effect.withSpan("Npm.checkDirty")) - - return - }, Effect.scoped) - - const which = Effect.fn("Npm.which")(function* (pkg: string) { - const dir = directory(pkg) - const binDir = path.join(dir, "node_modules", ".bin") - - const pick = Effect.fnUntraced(function* () { - const files = yield* fs.readDirectory(binDir).pipe(Effect.catch(() => Effect.succeed([] as string[]))) - - if (files.length === 0) return Option.none() - if (files.length === 1) return Option.some(files[0]) - - const pkgJson = yield* afs.readJson(path.join(dir, "node_modules", pkg, "package.json")).pipe(Effect.option) - - if (Option.isSome(pkgJson)) { - const parsed = pkgJson.value as { bin?: string | Record } - if (parsed?.bin) { - const unscoped = pkg.startsWith("@") ? pkg.split("/")[1] : pkg - const bin = parsed.bin - if (typeof bin === "string") return Option.some(unscoped) - const keys = Object.keys(bin) - if (keys.length === 1) return Option.some(keys[0]) - return bin[unscoped] ? Option.some(unscoped) : Option.some(keys[0]) - } - } - - return Option.some(files[0]) - }) - - return yield* Effect.gen(function* () { - const bin = yield* pick() - if (Option.isSome(bin)) { - return Option.some(path.join(binDir, bin.value)) - } - - yield* fs.remove(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => {})) - - yield* add(pkg) - - const resolved = yield* pick() - if (Option.isNone(resolved)) return Option.none() - return Option.some(path.join(binDir, resolved.value)) - }).pipe( - Effect.scoped, - Effect.orElseSucceed(() => Option.none()), - ) - }) - - return Service.of({ - add, - install, - outdated, - which, - }) - }), - ) - - export const defaultLayer = layer.pipe( - Layer.provide(EffectFlock.layer), - Layer.provide(AppFileSystem.layer), - Layer.provide(Global.layer), - Layer.provide(NodeFileSystem.layer), - ) -} diff --git a/packages/shared/test/npm.test.ts b/packages/shared/test/npm.test.ts deleted file mode 100644 index 4443d2985c..0000000000 --- 
a/packages/shared/test/npm.test.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { describe, expect, test } from "bun:test" -import { Npm } from "@opencode-ai/shared/npm" - -const win = process.platform === "win32" - -describe("Npm.sanitize", () => { - test("keeps normal scoped package specs unchanged", () => { - expect(Npm.sanitize("@opencode/acme")).toBe("@opencode/acme") - expect(Npm.sanitize("@opencode/acme@1.0.0")).toBe("@opencode/acme@1.0.0") - expect(Npm.sanitize("prettier")).toBe("prettier") - }) - - test("handles git https specs", () => { - const spec = "acme@git+https://github.com/opencode/acme.git" - const expected = win ? "acme@git+https_//github.com/opencode/acme.git" : spec - expect(Npm.sanitize(spec)).toBe(expected) - }) -}) From 992435aaf8371dc784f9f3489e998e5c93451d18 Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Fri, 17 Apr 2026 14:18:48 -0400 Subject: [PATCH 169/201] do not flock until reify --- packages/opencode/src/npm/effect.ts | 63 ++++++++++++++--------------- 1 file changed, 30 insertions(+), 33 deletions(-) diff --git a/packages/opencode/src/npm/effect.ts b/packages/opencode/src/npm/effect.ts index 10b5ff179c..5968f14519 100644 --- a/packages/opencode/src/npm/effect.ts +++ b/packages/opencode/src/npm/effect.ts @@ -63,36 +63,6 @@ interface ArboristTree { edgesOut: Map } -const reify = (input: { dir: string; add?: string[] }) => - Effect.gen(function* () { - const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) - const arborist = new Arborist({ - path: input.dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - return yield* Effect.tryPromise({ - try: () => - arborist.reify({ - add: input?.add || [], - save: true, - saveType: "prod", - }), - catch: (cause) => - new InstallFailedError({ - cause, - add: input?.add, - dir: input.dir, - }), - }) as Effect.Effect - }).pipe( - Effect.withSpan("Npm.reify", { - attributes: input, - }), - ) - export const layer = Layer.effect( Service, 
Effect.gen(function* () { @@ -101,6 +71,36 @@ export const layer = Layer.effect( const fs = yield* FileSystem.FileSystem const flock = yield* EffectFlock.Service const directory = (pkg: string) => path.join(global.cache, "packages", sanitize(pkg)) + const reify = (input: { dir: string; add?: string[] }) => + Effect.gen(function* () { + yield* flock.acquire(`npm-install:${input.dir}`) + const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) + const arborist = new Arborist({ + path: input.dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + return yield* Effect.tryPromise({ + try: () => + arborist.reify({ + add: input?.add || [], + save: true, + saveType: "prod", + }), + catch: (cause) => + new InstallFailedError({ + cause, + add: input?.add, + dir: input.dir, + }), + }) as Effect.Effect + }).pipe( + Effect.withSpan("Npm.reify", { + attributes: input, + }), + ) const outdated = Effect.fn("Npm.outdated")(function* (pkg: string, cachedVersion: string) { const response = yield* Effect.tryPromise({ @@ -130,7 +130,6 @@ export const layer = Layer.effect( const add = Effect.fn("Npm.add")(function* (pkg: string) { const dir = directory(pkg) - yield* flock.acquire(`npm-install:${dir}`) const tree = yield* reify({ dir, add: [pkg] }) const first = tree.edgesOut.values().next().value?.to @@ -145,8 +144,6 @@ export const layer = Layer.effect( ) if (!canWrite) return - yield* flock.acquire(`npm-install:${dir}`) - yield* Effect.gen(function* () { const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) if (!nodeModulesExists) { From b1f076558cf75c9ae5322f6195e68c3c380e3f9c Mon Sep 17 00:00:00 2001 From: Dax Raad Date: Fri, 17 Apr 2026 14:33:02 -0400 Subject: [PATCH 170/201] test: align plugin loader npm mocks - switch plugin loader tests to the effect npm module - return Option.none() for mocked npm entrypoints - keep test fixtures aligned with the current Npm.add contract --- 
.../cli/tui/plugin-loader-entrypoint.test.ts | 15 ++++++------ .../test/plugin/loader-shared.test.ts | 24 +++++++++---------- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/packages/opencode/test/cli/tui/plugin-loader-entrypoint.test.ts b/packages/opencode/test/cli/tui/plugin-loader-entrypoint.test.ts index 395e8ce429..c6c25fcc11 100644 --- a/packages/opencode/test/cli/tui/plugin-loader-entrypoint.test.ts +++ b/packages/opencode/test/cli/tui/plugin-loader-entrypoint.test.ts @@ -1,3 +1,4 @@ +import { Option } from "effect" import { expect, spyOn, test } from "bun:test" import fs from "fs/promises" import path from "path" @@ -5,7 +6,7 @@ import { pathToFileURL } from "url" import { tmpdir } from "../../fixture/fixture" import { createTuiPluginApi } from "../../fixture/tui-plugin" import { TuiConfig } from "../../../src/cli/cmd/tui/config/tui" -import { Npm } from "../../../src/npm" +import { Npm } from "../../../src/npm/effect" const { TuiPluginRuntime } = await import("../../../src/cli/cmd/tui/plugin/runtime") @@ -56,7 +57,7 @@ test("loads npm tui plugin from package ./tui export", async () => { } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) @@ -117,7 +118,7 @@ test("does not use npm package exports dot for tui entry", async () => { } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, 
entrypoint: Option.none() }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) @@ -179,7 +180,7 @@ test("rejects npm tui export that resolves outside plugin directory", async () = } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) @@ -241,7 +242,7 @@ test("rejects npm tui plugin that exports server and tui together", async () => } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) @@ -299,7 +300,7 @@ test("does not use npm package main for tui entry", async () => { } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) const warn = spyOn(console, "warn").mockImplementation(() => {}) const error = spyOn(console, "error").mockImplementation(() => {}) @@ -468,7 +469,7 @@ test("uses npm package name when tui plugin id is omitted", async () => { } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => 
tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) diff --git a/packages/opencode/test/plugin/loader-shared.test.ts b/packages/opencode/test/plugin/loader-shared.test.ts index 5072c1e748..8e3ad5ea0b 100644 --- a/packages/opencode/test/plugin/loader-shared.test.ts +++ b/packages/opencode/test/plugin/loader-shared.test.ts @@ -1,5 +1,5 @@ import { afterAll, afterEach, describe, expect, spyOn, test } from "bun:test" -import { Effect } from "effect" +import { Effect, Option } from "effect" import fs from "fs/promises" import path from "path" import { pathToFileURL } from "url" @@ -13,7 +13,7 @@ const { Plugin } = await import("../../src/plugin/index") const { PluginLoader } = await import("../../src/plugin/loader") const { readPackageThemes } = await import("../../src/plugin/shared") const { Instance } = await import("../../src/project/instance") -const { Npm } = await import("../../src/npm") +const { Npm } = await import("../../src/npm/effect") afterAll(() => { if (disableDefault === undefined) { @@ -239,8 +239,8 @@ describe("plugin.loader.shared", () => { }) const add = spyOn(Npm, "add").mockImplementation(async (pkg) => { - if (pkg === "acme-plugin") return { directory: tmp.extra.acme, entrypoint: tmp.extra.acme } - return { directory: tmp.extra.scope, entrypoint: tmp.extra.scope } + if (pkg === "acme-plugin") return { directory: tmp.extra.acme, entrypoint: Option.none() } + return { directory: tmp.extra.scope, entrypoint: Option.none() } }) try { @@ -301,7 +301,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: 
Option.none() }) try { await load(tmp.path) @@ -358,7 +358,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) try { await load(tmp.path) @@ -410,7 +410,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) try { await load(tmp.path) @@ -455,7 +455,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) try { await load(tmp.path) @@ -518,7 +518,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) try { await load(tmp.path) @@ -548,7 +548,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: "", entrypoint: "" }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: "", entrypoint: Option.none() }) try { await load(tmp.path) @@ -927,7 +927,7 @@ export default { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) const missing: string[] = [] try { @@ -996,7 +996,7 @@ export default { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ 
directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) try { const loaded = await PluginLoader.loadExternal({ From bbb422d1250da1400c9c228d363bebb336e238ca Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Fri, 17 Apr 2026 13:47:22 -0500 Subject: [PATCH 171/201] chore: bump ai to 6.0.168 and @ai-sdk/gateway to 3.0.104 (#23145) --- bun.lock | 12 +++++------- package.json | 2 +- packages/opencode/package.json | 2 +- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/bun.lock b/bun.lock index c78c9ae48d..8d83d8ab47 100644 --- a/bun.lock +++ b/bun.lock @@ -328,7 +328,7 @@ "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", - "@ai-sdk/gateway": "3.0.102", + "@ai-sdk/gateway": "3.0.104", "@ai-sdk/google": "3.0.63", "@ai-sdk/google-vertex": "4.0.112", "@ai-sdk/groq": "3.0.31", @@ -692,7 +692,7 @@ "@types/node": "22.13.9", "@types/semver": "7.7.1", "@typescript/native-preview": "7.0.0-dev.20251207.1", - "ai": "6.0.158", + "ai": "6.0.168", "cross-spawn": "7.0.6", "diff": "8.0.2", "dompurify": "3.3.1", @@ -760,7 +760,7 @@ "@ai-sdk/fireworks": ["@ai-sdk/fireworks@2.0.46", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XRKR0zgRyegdmtK5CDUEjlyRp0Fo+XVCdoG+301U1SGtgRIAYG3ObVtgzVJBVpJdHFSLHuYeLTnNiQoUxD7+FQ=="], - "@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.102", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-GrwDpaYJiVafrsA1MTbZtXPcQUI67g5AXiJo7Y1F8b+w+SiYHLk3ZIn1YmpQVoVAh2bjvxjj+Vo0AvfskuGH4g=="], + "@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.104", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", 
"@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.2.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZKX5n74io8VIRlhIMSLWVlvT3sXC8Z7cZ9GHuWBWZDVi96+62AIsWuLGvMfcBA1STYuSoDrp6rIziZmvrTq0TA=="], "@ai-sdk/google": ["@ai-sdk/google@3.0.63", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-RfOZWVMYSPu2sPRfGajrauWAZ9BSaRopSn+AszkKWQ1MFj8nhaXvCqRHB5pBQUaHTfZKagvOmMpNfa/s3gPLgQ=="], @@ -2456,7 +2456,7 @@ "@valibot/to-json-schema": ["@valibot/to-json-schema@1.6.0", "", { "peerDependencies": { "valibot": "^1.3.0" } }, "sha512-d6rYyK5KVa2XdqamWgZ4/Nr+cXhxjy7lmpe6Iajw15J/jmU+gyxl2IEd1Otg1d7Rl3gOQL5reulnSypzBtYy1A=="], - "@vercel/oidc": ["@vercel/oidc@3.1.0", "", {}, "sha512-Fw28YZpRnA3cAHHDlkt7xQHiJ0fcL+NRcIqsocZQUSmbzeIKRpwttJjik5ZGanXP+vlA4SbTg+AbA3bP363l+w=="], + "@vercel/oidc": ["@vercel/oidc@3.2.0", "", {}, "sha512-UycprH3T6n3jH0k44NHMa7pnFHGu/N05MjojYr+Mc6I7obkoLIJujSWwin1pCvdy/eOxrI/l3uDLQsmcrOb4ug=="], "@vitejs/plugin-react": ["@vitejs/plugin-react@4.7.0", "", { "dependencies": { "@babel/core": "^7.28.0", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", "@rolldown/pluginutils": "1.0.0-beta.27", "@types/babel__core": "^7.20.5", "react-refresh": "^0.17.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA=="], @@ -2516,7 +2516,7 @@ "agentkeepalive": ["agentkeepalive@4.6.0", "", { "dependencies": { "humanize-ms": "^1.2.1" } }, "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ=="], - "ai": ["ai@6.0.158", "", { "dependencies": { "@ai-sdk/gateway": "3.0.95", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-gLTp1UXFtMqKUi3XHs33K7UFglbvojkxF/aq337TxnLGOhHIW9+GyP2jwW4hYX87f1es+wId3VQoPRRu9zEStQ=="], + "ai": ["ai@6.0.168", "", { "dependencies": { "@ai-sdk/gateway": "3.0.104", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-2HqCJuO+1V2aV7vfYs5LFEUfxbkGX+5oa54q/gCCTL7KLTdbxcCu5D7TdLA5kwsrs3Szgjah9q6D9tpjHM3hUQ=="], "ai-gateway-provider": ["ai-gateway-provider@3.1.2", "", { "optionalDependencies": { "@ai-sdk/amazon-bedrock": "^4.0.62", "@ai-sdk/anthropic": "^3.0.46", "@ai-sdk/azure": "^3.0.31", "@ai-sdk/cerebras": "^2.0.34", "@ai-sdk/cohere": "^3.0.21", "@ai-sdk/deepgram": "^2.0.20", "@ai-sdk/deepseek": "^2.0.20", "@ai-sdk/elevenlabs": "^2.0.20", "@ai-sdk/fireworks": "^2.0.34", "@ai-sdk/google": "^3.0.30", "@ai-sdk/google-vertex": "^4.0.61", "@ai-sdk/groq": "^3.0.24", "@ai-sdk/mistral": "^3.0.20", "@ai-sdk/openai": "^3.0.30", "@ai-sdk/perplexity": "^3.0.19", "@ai-sdk/xai": "^3.0.57", "@openrouter/ai-sdk-provider": "^2.2.3" }, "peerDependencies": { "@ai-sdk/openai-compatible": "^2.0.0", "@ai-sdk/provider": "^3.0.0", "@ai-sdk/provider-utils": "^4.0.0", "ai": "^6.0.0" } }, "sha512-krGNnJSoO/gJ7Hbe5nQDlsBpDUGIBGtMQTRUaW7s1MylsfvLduba0TLWzQaGtOmNRkP0pGhtGlwsnS6FNQMlyw=="], @@ -5702,8 +5702,6 @@ "accepts/negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], - "ai/@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.95", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZmUNNbZl3V42xwQzPaNUi+s8eqR2lnrxf0bvB6YbLXpLjHYv0k2Y78t12cNOfY0bxGeuVVTLyk856uLuQIuXEQ=="], - "ai-gateway-provider/@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.93", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", 
"@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hcXDU8QDwpAzLVTuY932TQVlIij9+iaVTxc5mPGY6yb//JMAAC5hMVhg93IrxlrxWLvMgjezNgoZGwquR+SGnw=="], "ai-gateway-provider/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="], diff --git a/package.json b/package.json index f1ad03afc8..ddd711adaf 100644 --- a/package.json +++ b/package.json @@ -53,7 +53,7 @@ "drizzle-kit": "1.0.0-beta.19-d95b7a4", "drizzle-orm": "1.0.0-beta.19-d95b7a4", "effect": "4.0.0-beta.48", - "ai": "6.0.158", + "ai": "6.0.168", "cross-spawn": "7.0.6", "hono": "4.10.7", "hono-openapi": "1.1.2", diff --git a/packages/opencode/package.json b/packages/opencode/package.json index cadb1fa38d..2f56b74775 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -85,7 +85,7 @@ "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", - "@ai-sdk/gateway": "3.0.102", + "@ai-sdk/gateway": "3.0.104", "@ai-sdk/google": "3.0.63", "@ai-sdk/google-vertex": "4.0.112", "@ai-sdk/groq": "3.0.31", From 467be08e679d82c20164870f067eb759abe5f6ec Mon Sep 17 00:00:00 2001 From: Dax Date: Fri, 17 Apr 2026 14:58:37 -0400 Subject: [PATCH 172/201] refactor: consolidate npm exports and trace flock acquisition (#23151) --- .../opencode/src/cli/cmd/tui/config/tui.ts | 2 +- packages/opencode/src/cli/cmd/tui/layer.ts | 2 +- packages/opencode/src/config/config.ts | 2 +- packages/opencode/src/effect/app-runtime.ts | 2 +- packages/opencode/src/npm/effect.ts | 258 ----------- packages/opencode/src/npm/index.ts | 405 +++++++++++------- packages/opencode/src/plugin/shared.ts | 2 +- .../cli/tui/plugin-loader-entrypoint.test.ts | 15 +- 
packages/opencode/test/config/config.test.ts | 2 +- .../test/plugin/loader-shared.test.ts | 24 +- packages/shared/src/util/effect-flock.ts | 75 ++-- 11 files changed, 304 insertions(+), 485 deletions(-) delete mode 100644 packages/opencode/src/npm/effect.ts diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index abcf11fcef..179046e026 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -18,7 +18,7 @@ import { InstallationLocal, InstallationVersion } from "@/installation/version" import { makeRuntime } from "@/effect/runtime" import { Filesystem, Log } from "@/util" import { ConfigVariable } from "@/config/variable" -import { Npm } from "@/npm/effect" +import { Npm } from "@/npm" const log = Log.create({ service: "tui.config" }) diff --git a/packages/opencode/src/cli/cmd/tui/layer.ts b/packages/opencode/src/cli/cmd/tui/layer.ts index 66497f8b1a..64cba08e82 100644 --- a/packages/opencode/src/cli/cmd/tui/layer.ts +++ b/packages/opencode/src/cli/cmd/tui/layer.ts @@ -1,6 +1,6 @@ import { Layer } from "effect" import { TuiConfig } from "./config/tui" -import { Npm } from "@/npm/effect" +import { Npm } from "@/npm" import { Observability } from "@/effect/observability" export const CliLayer = Observability.layer.pipe(Layer.merge(TuiConfig.layer), Layer.provide(Npm.defaultLayer)) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 8980765b79..459f76961a 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -38,7 +38,7 @@ import { ConfigPaths } from "./paths" import { ConfigFormatter } from "./formatter" import { ConfigLSP } from "./lsp" import { ConfigVariable } from "./variable" -import { Npm } from "@/npm/effect" +import { Npm } from "@/npm" const log = Log.create({ service: "config" }) diff --git a/packages/opencode/src/effect/app-runtime.ts 
b/packages/opencode/src/effect/app-runtime.ts index 262d85e7ea..d68e00a323 100644 --- a/packages/opencode/src/effect/app-runtime.ts +++ b/packages/opencode/src/effect/app-runtime.ts @@ -46,7 +46,7 @@ import { Pty } from "@/pty" import { Installation } from "@/installation" import { ShareNext } from "@/share" import { SessionShare } from "@/share" -import { Npm } from "@/npm/effect" +import { Npm } from "@/npm" import { memoMap } from "./memo-map" export const AppLayer = Layer.mergeAll( diff --git a/packages/opencode/src/npm/effect.ts b/packages/opencode/src/npm/effect.ts deleted file mode 100644 index 5968f14519..0000000000 --- a/packages/opencode/src/npm/effect.ts +++ /dev/null @@ -1,258 +0,0 @@ -export * as Npm from "./effect" - -import path from "path" -import semver from "semver" -import { Effect, Schema, Context, Layer, Option, FileSystem } from "effect" -import { NodeFileSystem } from "@effect/platform-node" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Global } from "@opencode-ai/shared/global" -import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" - -import { makeRuntime } from "../effect/runtime" - -export class InstallFailedError extends Schema.TaggedErrorClass()("NpmInstallFailedError", { - add: Schema.Array(Schema.String).pipe(Schema.optional), - dir: Schema.String, - cause: Schema.optional(Schema.Defect), -}) {} - -export interface EntryPoint { - readonly directory: string - readonly entrypoint: Option.Option -} - -export interface Interface { - readonly add: (pkg: string) => Effect.Effect - readonly install: ( - dir: string, - input?: { add: string[] }, - ) => Effect.Effect - readonly outdated: (pkg: string, cachedVersion: string) => Effect.Effect - readonly which: (pkg: string) => Effect.Effect> -} - -export class Service extends Context.Service()("@opencode/Npm") {} - -const illegal = process.platform === "win32" ? 
new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined - -export function sanitize(pkg: string) { - if (!illegal) return pkg - return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? "_" : char)).join("") -} - -const resolveEntryPoint = (name: string, dir: string): EntryPoint => { - let entrypoint: Option.Option - try { - const resolved = typeof Bun !== "undefined" ? import.meta.resolve(name, dir) : import.meta.resolve(dir) - entrypoint = Option.some(resolved) - } catch { - entrypoint = Option.none() - } - return { - directory: dir, - entrypoint, - } -} - -interface ArboristNode { - name: string - path: string -} - -interface ArboristTree { - edgesOut: Map -} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const afs = yield* AppFileSystem.Service - const global = yield* Global.Service - const fs = yield* FileSystem.FileSystem - const flock = yield* EffectFlock.Service - const directory = (pkg: string) => path.join(global.cache, "packages", sanitize(pkg)) - const reify = (input: { dir: string; add?: string[] }) => - Effect.gen(function* () { - yield* flock.acquire(`npm-install:${input.dir}`) - const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) - const arborist = new Arborist({ - path: input.dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - return yield* Effect.tryPromise({ - try: () => - arborist.reify({ - add: input?.add || [], - save: true, - saveType: "prod", - }), - catch: (cause) => - new InstallFailedError({ - cause, - add: input?.add, - dir: input.dir, - }), - }) as Effect.Effect - }).pipe( - Effect.withSpan("Npm.reify", { - attributes: input, - }), - ) - - const outdated = Effect.fn("Npm.outdated")(function* (pkg: string, cachedVersion: string) { - const response = yield* Effect.tryPromise({ - try: () => fetch(`https://registry.npmjs.org/${pkg}`), - catch: () => undefined, - }).pipe(Effect.orElseSucceed(() => undefined)) - - if 
(!response || !response.ok) { - return false - } - - const data = yield* Effect.tryPromise({ - try: () => response.json() as Promise<{ "dist-tags"?: { latest?: string } }>, - catch: () => undefined, - }).pipe(Effect.orElseSucceed(() => undefined)) - - const latestVersion = data?.["dist-tags"]?.latest - if (!latestVersion) { - return false - } - - const range = /[\s^~*xX<>|=]/.test(cachedVersion) - if (range) return !semver.satisfies(latestVersion, cachedVersion) - - return semver.lt(cachedVersion, latestVersion) - }) - - const add = Effect.fn("Npm.add")(function* (pkg: string) { - const dir = directory(pkg) - - const tree = yield* reify({ dir, add: [pkg] }) - const first = tree.edgesOut.values().next().value?.to - if (!first) return yield* new InstallFailedError({ add: [pkg], dir }) - return resolveEntryPoint(first.name, first.path) - }, Effect.scoped) - - const install = Effect.fn("Npm.install")(function* (dir: string, input?: { add: string[] }) { - const canWrite = yield* afs.access(dir, { writable: true }).pipe( - Effect.as(true), - Effect.orElseSucceed(() => false), - ) - if (!canWrite) return - - yield* Effect.gen(function* () { - const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) - if (!nodeModulesExists) { - yield* reify({ add: input?.add, dir }) - return - } - }).pipe(Effect.withSpan("Npm.checkNodeModules")) - - yield* Effect.gen(function* () { - const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) - const lock = yield* afs.readJson(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) - - const pkgAny = pkg as any - const lockAny = lock as any - const declared = new Set([ - ...Object.keys(pkgAny?.dependencies || {}), - ...Object.keys(pkgAny?.devDependencies || {}), - ...Object.keys(pkgAny?.peerDependencies || {}), - ...Object.keys(pkgAny?.optionalDependencies || {}), - ...(input?.add || []), - ]) - - const root = lockAny?.packages?.[""] || {} - const locked 
= new Set([ - ...Object.keys(root?.dependencies || {}), - ...Object.keys(root?.devDependencies || {}), - ...Object.keys(root?.peerDependencies || {}), - ...Object.keys(root?.optionalDependencies || {}), - ]) - - for (const name of declared) { - if (!locked.has(name)) { - yield* reify({ dir, add: input?.add }) - return - } - } - }).pipe(Effect.withSpan("Npm.checkDirty")) - - return - }, Effect.scoped) - - const which = Effect.fn("Npm.which")(function* (pkg: string) { - const dir = directory(pkg) - const binDir = path.join(dir, "node_modules", ".bin") - - const pick = Effect.fnUntraced(function* () { - const files = yield* fs.readDirectory(binDir).pipe(Effect.catch(() => Effect.succeed([] as string[]))) - - if (files.length === 0) return Option.none() - if (files.length === 1) return Option.some(files[0]) - - const pkgJson = yield* afs.readJson(path.join(dir, "node_modules", pkg, "package.json")).pipe(Effect.option) - - if (Option.isSome(pkgJson)) { - const parsed = pkgJson.value as { bin?: string | Record } - if (parsed?.bin) { - const unscoped = pkg.startsWith("@") ? pkg.split("/")[1] : pkg - const bin = parsed.bin - if (typeof bin === "string") return Option.some(unscoped) - const keys = Object.keys(bin) - if (keys.length === 1) return Option.some(keys[0]) - return bin[unscoped] ? 
Option.some(unscoped) : Option.some(keys[0]) - } - } - - return Option.some(files[0]) - }) - - return yield* Effect.gen(function* () { - const bin = yield* pick() - if (Option.isSome(bin)) { - return Option.some(path.join(binDir, bin.value)) - } - - yield* fs.remove(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => {})) - - yield* add(pkg) - - const resolved = yield* pick() - if (Option.isNone(resolved)) return Option.none() - return Option.some(path.join(binDir, resolved.value)) - }).pipe( - Effect.scoped, - Effect.orElseSucceed(() => Option.none()), - ) - }) - - return Service.of({ - add, - install, - outdated, - which, - }) - }), -) - -export const defaultLayer = layer.pipe( - Layer.provide(EffectFlock.layer), - Layer.provide(AppFileSystem.layer), - Layer.provide(Global.layer), - Layer.provide(NodeFileSystem.layer), -) - -const { runPromise } = makeRuntime(Service, defaultLayer) - -export async function install(...args: Parameters) { - return runPromise((svc) => svc.install(...args)) -} - -export async function add(...args: Parameters) { - return runPromise((svc) => svc.add(...args)) -} diff --git a/packages/opencode/src/npm/index.ts b/packages/opencode/src/npm/index.ts index 425b27f420..f242598192 100644 --- a/packages/opencode/src/npm/index.ts +++ b/packages/opencode/src/npm/index.ts @@ -1,198 +1,271 @@ -import semver from "semver" -import z from "zod" -import { NamedError } from "@opencode-ai/shared/util/error" -import { Global } from "../global" -import { Log } from "../util" +export * as Npm from "." 
+ import path from "path" -import { readdir, rm } from "fs/promises" -import { Filesystem } from "@/util" -import { Flock } from "@opencode-ai/shared/util/flock" +import semver from "semver" +import { Effect, Schema, Context, Layer, Option, FileSystem } from "effect" +import { NodeFileSystem } from "@effect/platform-node" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { Global } from "@opencode-ai/shared/global" +import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" + +import { makeRuntime } from "../effect/runtime" + +export class InstallFailedError extends Schema.TaggedErrorClass()("NpmInstallFailedError", { + add: Schema.Array(Schema.String).pipe(Schema.optional), + dir: Schema.String, + cause: Schema.optional(Schema.Defect), +}) {} + +export interface EntryPoint { + readonly directory: string + readonly entrypoint: Option.Option +} + +export interface Interface { + readonly add: (pkg: string) => Effect.Effect + readonly install: ( + dir: string, + input?: { add: string[] }, + ) => Effect.Effect + readonly outdated: (pkg: string, cachedVersion: string) => Effect.Effect + readonly which: (pkg: string) => Effect.Effect> +} + +export class Service extends Context.Service()("@opencode/Npm") {} -const log = Log.create({ service: "npm" }) const illegal = process.platform === "win32" ? new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined -export const InstallFailedError = NamedError.create( - "NpmInstallFailedError", - z.object({ - pkg: z.string(), - }), -) - export function sanitize(pkg: string) { if (!illegal) return pkg return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? 
"_" : char)).join("") } -function directory(pkg: string) { - return path.join(Global.Path.cache, "packages", sanitize(pkg)) -} - -function resolveEntryPoint(name: string, dir: string) { - let entrypoint: string | undefined +const resolveEntryPoint = (name: string, dir: string): EntryPoint => { + let entrypoint: Option.Option try { - entrypoint = typeof Bun !== "undefined" ? import.meta.resolve(name, dir) : import.meta.resolve(dir) - } catch {} - const result = { + const resolved = typeof Bun !== "undefined" ? import.meta.resolve(name, dir) : import.meta.resolve(dir) + entrypoint = Option.some(resolved) + } catch { + entrypoint = Option.none() + } + return { directory: dir, entrypoint, } - return result } -export async function outdated(pkg: string, cachedVersion: string): Promise { - const response = await fetch(`https://registry.npmjs.org/${pkg}`) - if (!response.ok) { - log.warn("Failed to resolve latest version, using cached", { pkg, cachedVersion }) - return false - } - - const data = (await response.json()) as { "dist-tags"?: { latest?: string } } - const latestVersion = data?.["dist-tags"]?.latest - if (!latestVersion) { - log.warn("No latest version found, using cached", { pkg, cachedVersion }) - return false - } - - const range = /[\s^~*xX<>|=]/.test(cachedVersion) - if (range) return !semver.satisfies(latestVersion, cachedVersion) - - return semver.lt(cachedVersion, latestVersion) +interface ArboristNode { + name: string + path: string } -export async function add(pkg: string) { - const { Arborist } = await import("@npmcli/arborist") - const dir = directory(pkg) - await using _ = await Flock.acquire(`npm-install:${Filesystem.resolve(dir)}`) - log.info("installing package", { - pkg, - }) +interface ArboristTree { + edgesOut: Map +} - const arborist = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - const tree = await arborist.loadVirtual().catch(() => {}) - if (tree) { - const first = 
tree.edgesOut.values().next().value?.to - if (first) { - return resolveEntryPoint(first.name, first.path) - } - } +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const afs = yield* AppFileSystem.Service + const global = yield* Global.Service + const fs = yield* FileSystem.FileSystem + const flock = yield* EffectFlock.Service + const directory = (pkg: string) => path.join(global.cache, "packages", sanitize(pkg)) + const reify = (input: { dir: string; add?: string[] }) => + Effect.gen(function* () { + yield* flock.acquire(`npm-install:${input.dir}`) + const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) + const arborist = new Arborist({ + path: input.dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + return yield* Effect.tryPromise({ + try: () => + arborist.reify({ + add: input?.add || [], + save: true, + saveType: "prod", + }), + catch: (cause) => + new InstallFailedError({ + cause, + add: input?.add, + dir: input.dir, + }), + }) as Effect.Effect + }).pipe( + Effect.withSpan("Npm.reify", { + attributes: input, + }), + ) - const result = await arborist - .reify({ - add: [pkg], - save: true, - saveType: "prod", + const outdated = Effect.fn("Npm.outdated")(function* (pkg: string, cachedVersion: string) { + const response = yield* Effect.tryPromise({ + try: () => fetch(`https://registry.npmjs.org/${pkg}`), + catch: () => undefined, + }).pipe(Effect.orElseSucceed(() => undefined)) + + if (!response || !response.ok) { + return false + } + + const data = yield* Effect.tryPromise({ + try: () => response.json() as Promise<{ "dist-tags"?: { latest?: string } }>, + catch: () => undefined, + }).pipe(Effect.orElseSucceed(() => undefined)) + + const latestVersion = data?.["dist-tags"]?.latest + if (!latestVersion) { + return false + } + + const range = /[\s^~*xX<>|=]/.test(cachedVersion) + if (range) return !semver.satisfies(latestVersion, cachedVersion) + + return semver.lt(cachedVersion, 
latestVersion) }) - .catch((cause) => { - throw new InstallFailedError( - { pkg }, - { - cause, - }, + + const add = Effect.fn("Npm.add")(function* (pkg: string) { + const dir = directory(pkg) + + const tree = yield* reify({ dir, add: [pkg] }) + const first = tree.edgesOut.values().next().value?.to + if (!first) return yield* new InstallFailedError({ add: [pkg], dir }) + return resolveEntryPoint(first.name, first.path) + }, Effect.scoped) + + const install = Effect.fn("Npm.install")(function* (dir: string, input?: { add: string[] }) { + const canWrite = yield* afs.access(dir, { writable: true }).pipe( + Effect.as(true), + Effect.orElseSucceed(() => false), + ) + if (!canWrite) return + + yield* Effect.gen(function* () { + const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) + if (!nodeModulesExists) { + yield* reify({ add: input?.add, dir }) + return + } + }).pipe(Effect.withSpan("Npm.checkNodeModules")) + + yield* Effect.gen(function* () { + const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) + const lock = yield* afs.readJson(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) + + const pkgAny = pkg as any + const lockAny = lock as any + const declared = new Set([ + ...Object.keys(pkgAny?.dependencies || {}), + ...Object.keys(pkgAny?.devDependencies || {}), + ...Object.keys(pkgAny?.peerDependencies || {}), + ...Object.keys(pkgAny?.optionalDependencies || {}), + ...(input?.add || []), + ]) + + const root = lockAny?.packages?.[""] || {} + const locked = new Set([ + ...Object.keys(root?.dependencies || {}), + ...Object.keys(root?.devDependencies || {}), + ...Object.keys(root?.peerDependencies || {}), + ...Object.keys(root?.optionalDependencies || {}), + ]) + + for (const name of declared) { + if (!locked.has(name)) { + yield* reify({ dir, add: input?.add }) + return + } + } + }).pipe(Effect.withSpan("Npm.checkDirty")) + + return + }, Effect.scoped) + + const which = 
Effect.fn("Npm.which")(function* (pkg: string) { + const dir = directory(pkg) + const binDir = path.join(dir, "node_modules", ".bin") + + const pick = Effect.fnUntraced(function* () { + const files = yield* fs.readDirectory(binDir).pipe(Effect.catch(() => Effect.succeed([] as string[]))) + + if (files.length === 0) return Option.none() + if (files.length === 1) return Option.some(files[0]) + + const pkgJson = yield* afs.readJson(path.join(dir, "node_modules", pkg, "package.json")).pipe(Effect.option) + + if (Option.isSome(pkgJson)) { + const parsed = pkgJson.value as { bin?: string | Record } + if (parsed?.bin) { + const unscoped = pkg.startsWith("@") ? pkg.split("/")[1] : pkg + const bin = parsed.bin + if (typeof bin === "string") return Option.some(unscoped) + const keys = Object.keys(bin) + if (keys.length === 1) return Option.some(keys[0]) + return bin[unscoped] ? Option.some(unscoped) : Option.some(keys[0]) + } + } + + return Option.some(files[0]) + }) + + return yield* Effect.gen(function* () { + const bin = yield* pick() + if (Option.isSome(bin)) { + return Option.some(path.join(binDir, bin.value)) + } + + yield* fs.remove(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => {})) + + yield* add(pkg) + + const resolved = yield* pick() + if (Option.isNone(resolved)) return Option.none() + return Option.some(path.join(binDir, resolved.value)) + }).pipe( + Effect.scoped, + Effect.orElseSucceed(() => Option.none()), ) }) - const first = result.edgesOut.values().next().value?.to - if (!first) throw new InstallFailedError({ pkg }) - return resolveEntryPoint(first.name, first.path) -} - -export async function install(dir: string) { - await using _ = await Flock.acquire(`npm-install:${dir}`) - log.info("checking dependencies", { dir }) - - const reify = async () => { - const { Arborist } = await import("@npmcli/arborist") - const arb = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, + return 
Service.of({ + add, + install, + outdated, + which, }) - await arb.reify().catch(() => {}) - } + }), +) - if (!(await Filesystem.exists(path.join(dir, "node_modules")))) { - log.info("node_modules missing, reifying") - await reify() - return - } +export const defaultLayer = layer.pipe( + Layer.provide(EffectFlock.layer), + Layer.provide(AppFileSystem.layer), + Layer.provide(Global.layer), + Layer.provide(NodeFileSystem.layer), +) - type PackageDeps = Record - type PackageJson = { - dependencies?: PackageDeps - devDependencies?: PackageDeps - peerDependencies?: PackageDeps - optionalDependencies?: PackageDeps - } - const pkg: PackageJson = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) - const lock: { packages?: Record } = await Filesystem.readJson<{ - packages?: Record - }>(path.join(dir, "package-lock.json")).catch(() => ({})) +const { runPromise } = makeRuntime(Service, defaultLayer) - const declared = new Set([ - ...Object.keys(pkg.dependencies || {}), - ...Object.keys(pkg.devDependencies || {}), - ...Object.keys(pkg.peerDependencies || {}), - ...Object.keys(pkg.optionalDependencies || {}), - ]) - - const root = lock.packages?.[""] || {} - const locked = new Set([ - ...Object.keys(root.dependencies || {}), - ...Object.keys(root.devDependencies || {}), - ...Object.keys(root.peerDependencies || {}), - ...Object.keys(root.optionalDependencies || {}), - ]) - - for (const name of declared) { - if (!locked.has(name)) { - log.info("dependency not in lock file, reifying", { name }) - await reify() - return - } - } - - log.info("dependencies in sync") +export async function install(...args: Parameters) { + return runPromise((svc) => svc.install(...args)) } -export async function which(pkg: string) { - const dir = directory(pkg) - const binDir = path.join(dir, "node_modules", ".bin") - - const pick = async () => { - const files = await readdir(binDir).catch(() => []) - if (files.length === 0) return undefined - if (files.length === 1) return 
files[0] - // Multiple binaries — resolve from package.json bin field like npx does - const pkgJson = await Filesystem.readJson<{ bin?: string | Record }>( - path.join(dir, "node_modules", pkg, "package.json"), - ).catch(() => undefined) - if (pkgJson?.bin) { - const unscoped = pkg.startsWith("@") ? pkg.split("/")[1] : pkg - const bin = pkgJson.bin - if (typeof bin === "string") return unscoped - const keys = Object.keys(bin) - if (keys.length === 1) return keys[0] - return bin[unscoped] ? unscoped : keys[0] - } - return files[0] +export async function add(...args: Parameters) { + const entry = await runPromise((svc) => svc.add(...args)) + return { + directory: entry.directory, + entrypoint: Option.getOrUndefined(entry.entrypoint), } - - const bin = await pick() - if (bin) return path.join(binDir, bin) - - await rm(path.join(dir, "package-lock.json"), { force: true }) - await add(pkg) - const resolved = await pick() - if (!resolved) return - return path.join(binDir, resolved) } -export * as Npm from "." 
+export async function outdated(...args: Parameters) { + return runPromise((svc) => svc.outdated(...args)) +} + +export async function which(...args: Parameters) { + const resolved = await runPromise((svc) => svc.which(...args)) + return Option.getOrUndefined(resolved) +} diff --git a/packages/opencode/src/plugin/shared.ts b/packages/opencode/src/plugin/shared.ts index f431204fc4..ca821216d4 100644 --- a/packages/opencode/src/plugin/shared.ts +++ b/packages/opencode/src/plugin/shared.ts @@ -4,7 +4,7 @@ import npa from "npm-package-arg" import semver from "semver" import { Filesystem } from "@/util" import { isRecord } from "@/util/record" -import { Npm } from "@/npm/effect" +import { Npm } from "@/npm" // Old npm package names for plugins that are now built-in export const DEPRECATED_PLUGIN_PACKAGES = ["opencode-openai-codex-auth", "opencode-copilot-auth"] diff --git a/packages/opencode/test/cli/tui/plugin-loader-entrypoint.test.ts b/packages/opencode/test/cli/tui/plugin-loader-entrypoint.test.ts index c6c25fcc11..74236afae8 100644 --- a/packages/opencode/test/cli/tui/plugin-loader-entrypoint.test.ts +++ b/packages/opencode/test/cli/tui/plugin-loader-entrypoint.test.ts @@ -1,4 +1,3 @@ -import { Option } from "effect" import { expect, spyOn, test } from "bun:test" import fs from "fs/promises" import path from "path" @@ -6,7 +5,7 @@ import { pathToFileURL } from "url" import { tmpdir } from "../../fixture/fixture" import { createTuiPluginApi } from "../../fixture/tui-plugin" import { TuiConfig } from "../../../src/cli/cmd/tui/config/tui" -import { Npm } from "../../../src/npm/effect" +import { Npm } from "../../../src/npm" const { TuiPluginRuntime } = await import("../../../src/cli/cmd/tui/plugin/runtime") @@ -57,7 +56,7 @@ test("loads npm tui plugin from package ./tui export", async () => { } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, 
"add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) @@ -118,7 +117,7 @@ test("does not use npm package exports dot for tui entry", async () => { } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) @@ -180,7 +179,7 @@ test("rejects npm tui export that resolves outside plugin directory", async () = } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) @@ -242,7 +241,7 @@ test("rejects npm tui plugin that exports server and tui together", async () => } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) @@ -300,7 +299,7 @@ test("does not use npm package main for tui entry", async () => { } const wait = spyOn(TuiConfig, 
"waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) const warn = spyOn(console, "warn").mockImplementation(() => {}) const error = spyOn(console, "error").mockImplementation(() => {}) @@ -469,7 +468,7 @@ test("uses npm package name when tui plugin id is omitted", async () => { } const wait = spyOn(TuiConfig, "waitForDependencies").mockResolvedValue() const cwd = spyOn(process, "cwd").mockImplementation(() => tmp.path) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await TuiPluginRuntime.init({ api: createTuiPluginApi(), config }) diff --git a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index 7b01ee626a..9f2bf9db9a 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -27,7 +27,7 @@ import { Global } from "../../src/global" import { ProjectID } from "../../src/project/schema" import { Filesystem } from "../../src/util" import { ConfigPlugin } from "@/config/plugin" -import { Npm } from "@/npm/effect" +import { Npm } from "@/npm" const emptyAccount = Layer.mock(Account.Service)({ active: () => Effect.succeed(Option.none()), diff --git a/packages/opencode/test/plugin/loader-shared.test.ts b/packages/opencode/test/plugin/loader-shared.test.ts index 8e3ad5ea0b..83e9d71b4f 100644 --- a/packages/opencode/test/plugin/loader-shared.test.ts +++ b/packages/opencode/test/plugin/loader-shared.test.ts @@ -1,5 +1,5 @@ import { afterAll, afterEach, describe, expect, spyOn, test } from "bun:test" -import { Effect, Option } from 
"effect" +import { Effect } from "effect" import fs from "fs/promises" import path from "path" import { pathToFileURL } from "url" @@ -13,7 +13,7 @@ const { Plugin } = await import("../../src/plugin/index") const { PluginLoader } = await import("../../src/plugin/loader") const { readPackageThemes } = await import("../../src/plugin/shared") const { Instance } = await import("../../src/project/instance") -const { Npm } = await import("../../src/npm/effect") +const { Npm } = await import("../../src/npm") afterAll(() => { if (disableDefault === undefined) { @@ -239,8 +239,8 @@ describe("plugin.loader.shared", () => { }) const add = spyOn(Npm, "add").mockImplementation(async (pkg) => { - if (pkg === "acme-plugin") return { directory: tmp.extra.acme, entrypoint: Option.none() } - return { directory: tmp.extra.scope, entrypoint: Option.none() } + if (pkg === "acme-plugin") return { directory: tmp.extra.acme, entrypoint: undefined } + return { directory: tmp.extra.scope, entrypoint: undefined } }) try { @@ -301,7 +301,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -358,7 +358,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -410,7 +410,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -455,7 +455,7 @@ describe("plugin.loader.shared", 
() => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -518,7 +518,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -548,7 +548,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: "", entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: "", entrypoint: undefined }) try { await load(tmp.path) @@ -927,7 +927,7 @@ export default { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) const missing: string[] = [] try { @@ -996,7 +996,7 @@ export default { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: Option.none() }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { const loaded = await PluginLoader.loadExternal({ diff --git a/packages/shared/src/util/effect-flock.ts b/packages/shared/src/util/effect-flock.ts index 3e00afc9e4..16bcf091b4 100644 --- a/packages/shared/src/util/effect-flock.ts +++ b/packages/shared/src/util/effect-flock.ts @@ -165,55 +165,60 @@ export namespace EffectFlock { type Handle = { token: string; metaPath: string; heartbeatPath: string; lockDir: string } - const tryAcquireLockDir = Effect.fn("EffectFlock.tryAcquire")(function* (lockDir: string) { - const token = randomUUID() - const metaPath = 
path.join(lockDir, "meta.json") - const heartbeatPath = path.join(lockDir, "heartbeat") + const tryAcquireLockDir = (lockDir: string, key: string) => + Effect.gen(function* () { + const token = randomUUID() + const metaPath = path.join(lockDir, "meta.json") + const heartbeatPath = path.join(lockDir, "heartbeat") - // Atomic mkdir — the POSIX lock primitive - const created = yield* atomicMkdir(lockDir) + // Atomic mkdir — the POSIX lock primitive + const created = yield* atomicMkdir(lockDir) - if (!created) { - if (!(yield* isStale(lockDir, heartbeatPath, metaPath))) return yield* new NotAcquired() + if (!created) { + if (!(yield* isStale(lockDir, heartbeatPath, metaPath))) return yield* new NotAcquired() - // Stale — race for breaker ownership - const breakerPath = lockDir + ".breaker" + // Stale — race for breaker ownership + const breakerPath = lockDir + ".breaker" - const claimed = yield* fs.makeDirectory(breakerPath, { mode: 0o700 }).pipe( - Effect.as(true), - Effect.catchIf( - (e) => e.reason._tag === "AlreadyExists", - () => cleanStaleBreaker(breakerPath), - ), - Effect.catchIf(isPathGone, () => Effect.succeed(false)), - Effect.orDie, - ) + const claimed = yield* fs.makeDirectory(breakerPath, { mode: 0o700 }).pipe( + Effect.as(true), + Effect.catchIf( + (e) => e.reason._tag === "AlreadyExists", + () => cleanStaleBreaker(breakerPath), + ), + Effect.catchIf(isPathGone, () => Effect.succeed(false)), + Effect.orDie, + ) - if (!claimed) return yield* new NotAcquired() + if (!claimed) return yield* new NotAcquired() - // We own the breaker — double-check staleness, nuke, recreate - const recreated = yield* Effect.gen(function* () { - if (!(yield* isStale(lockDir, heartbeatPath, metaPath))) return false - yield* forceRemove(lockDir) - return yield* atomicMkdir(lockDir) - }).pipe(Effect.ensuring(forceRemove(breakerPath))) + // We own the breaker — double-check staleness, nuke, recreate + const recreated = yield* Effect.gen(function* () { + if (!(yield* 
isStale(lockDir, heartbeatPath, metaPath))) return false + yield* forceRemove(lockDir) + return yield* atomicMkdir(lockDir) + }).pipe(Effect.ensuring(forceRemove(breakerPath))) - if (!recreated) return yield* new NotAcquired() - } + if (!recreated) return yield* new NotAcquired() + } - // We own the lock dir — write heartbeat + meta with exclusive create - yield* exclusiveWrite(heartbeatPath, "", lockDir, "heartbeat already existed") + // We own the lock dir — write heartbeat + meta with exclusive create + yield* exclusiveWrite(heartbeatPath, "", lockDir, "heartbeat already existed") - const metaJson = encodeMeta({ token, pid: process.pid, hostname, createdAt: new Date().toISOString() }) - yield* exclusiveWrite(metaPath, metaJson, lockDir, "meta.json already existed") + const metaJson = encodeMeta({ token, pid: process.pid, hostname, createdAt: new Date().toISOString() }) + yield* exclusiveWrite(metaPath, metaJson, lockDir, "meta.json already existed") - return { token, metaPath, heartbeatPath, lockDir } satisfies Handle - }) + return { token, metaPath, heartbeatPath, lockDir } satisfies Handle + }).pipe( + Effect.withSpan("EffectFlock.tryAcquire", { + attributes: { key }, + }), + ) // -- retry wrapper (preserves Handle type) -- const acquireHandle = (lockfile: string, key: string): Effect.Effect => - tryAcquireLockDir(lockfile).pipe( + tryAcquireLockDir(lockfile, key).pipe( Effect.retry({ while: (err) => err._tag === "NotAcquired", schedule: retrySchedule, From b275b8580d7bd3b884cf535e28c1b826759eb14b Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 15:14:05 -0400 Subject: [PATCH 173/201] feat(tui): minor UX improvements for workspaces (#23146) --- .../cmd/tui/component/dialog-session-list.tsx | 9 +- .../tui/component/dialog-workspace-create.tsx | 11 ++- .../dialog-workspace-unavailable.tsx | 83 +++++++++++++++++++ .../cli/cmd/tui/component/prompt/index.tsx | 53 ++++++++++-- .../cli/cmd/tui/routes/session/sidebar.tsx | 20 +++++ 5 files changed, 159 
insertions(+), 17 deletions(-) create mode 100644 packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx index 60ef6087ba..32342e7724 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx @@ -139,15 +139,10 @@ export function DialogSessionList() { {desc}{" "} - ■ + ● ) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx index 7ea513edee..6dcdabe0b9 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx @@ -139,7 +139,16 @@ export async function restoreWorkspaceSession(input: { total: result.data.total, }) - await Promise.all([input.project.workspace.sync(), input.sync.session.refresh()]).catch((err) => { + input.project.workspace.set(input.workspaceID) + + try { + await input.sync.bootstrap({ fatal: false }) + } catch (e) {} + + await Promise.all([ + input.project.workspace.sync(), + input.sync.session.sync(input.sessionID), + ]).catch((err) => { log.error("session restore refresh failed", { workspaceID: input.workspaceID, sessionID: input.sessionID, diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx new file mode 100644 index 0000000000..0c2dd3e2f3 --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx @@ -0,0 +1,83 @@ +import { TextAttributes } from "@opentui/core" +import { useKeyboard } from "@opentui/solid" +import { createStore } from "solid-js/store" +import { For } from "solid-js" +import { useTheme } from 
"../context/theme" +import { useDialog } from "../ui/dialog" + +export function DialogWorkspaceUnavailable(props: { + onRestore?: () => boolean | void | Promise +}) { + const dialog = useDialog() + const { theme } = useTheme() + const [store, setStore] = createStore({ + active: "restore" as "cancel" | "restore", + }) + + const options = ["cancel", "restore"] as const + + async function confirm() { + if (store.active === "cancel") { + dialog.clear() + return + } + const result = await props.onRestore?.() + if (result === false) return + } + + useKeyboard((evt) => { + if (evt.name === "return") { + evt.preventDefault() + evt.stopPropagation() + void confirm() + return + } + if (evt.name === "left") { + evt.preventDefault() + evt.stopPropagation() + setStore("active", "cancel") + return + } + if (evt.name === "right") { + evt.preventDefault() + evt.stopPropagation() + setStore("active", "restore") + } + }) + + return ( + + + + Workspace Unavailable + + dialog.clear()}> + esc + + + + This session is attached to a workspace that is no longer available. + + + Would you like to restore this session into a new workspace? 
+ + + + {(item) => ( + { + setStore("active", item) + void confirm() + }} + > + {item} + + )} + + + + ) +} diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index cf26ec1950..2e08e66a4a 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -9,6 +9,7 @@ import { tint, useTheme } from "@tui/context/theme" import { EmptyBorder, SplitBorder } from "@tui/component/border" import { useSDK } from "@tui/context/sdk" import { useRoute } from "@tui/context/route" +import { useProject } from "@tui/context/project" import { useSync } from "@tui/context/sync" import { useEvent } from "@tui/context/event" import { MessageID, PartID } from "@/session/schema" @@ -38,6 +39,8 @@ import { useKV } from "../../context/kv" import { createFadeIn } from "../../util/signal" import { useTextareaKeybindings } from "../textarea-keybindings" import { DialogSkill } from "../dialog-skill" +import { DialogWorkspaceCreate, restoreWorkspaceSession } from "../dialog-workspace-create" +import { DialogWorkspaceUnavailable } from "../dialog-workspace-unavailable" import { useArgs } from "@tui/context/args" export type PromptProps = { @@ -92,6 +95,7 @@ export function Prompt(props: PromptProps) { const args = useArgs() const sdk = useSDK() const route = useRoute() + const project = useProject() const sync = useSync() const dialog = useDialog() const toast = useToast() @@ -241,9 +245,11 @@ export function Prompt(props: PromptProps) { keybind: "input_submit", category: "Prompt", hidden: true, - onSelect: (dialog) => { + onSelect: async (dialog) => { if (!input.focused) return - void submit() + const handled = await submit() + if (!handled) return + dialog.clear() }, }, @@ -628,20 +634,48 @@ export function Prompt(props: PromptProps) { setStore("prompt", "input", input.plainText) syncExtmarksWithPromptParts() } - if (props.disabled) 
return - if (autocomplete?.visible) return - if (!store.prompt.input) return + if (props.disabled) return false + if (autocomplete?.visible) return false + if (!store.prompt.input) return false const agent = local.agent.current() - if (!agent) return + if (!agent) return false const trimmed = store.prompt.input.trim() if (trimmed === "exit" || trimmed === "quit" || trimmed === ":q") { void exit() - return + return true } const selectedModel = local.model.current() if (!selectedModel) { void promptModelWarning() - return + return false + } + + const workspaceSession = props.sessionID ? sync.session.get(props.sessionID) : undefined + const workspaceID = workspaceSession?.workspaceID + const workspaceStatus = workspaceID ? (project.workspace.status(workspaceID) ?? "error") : undefined + if (props.sessionID && workspaceID && workspaceStatus !== "connected") { + dialog.replace(() => ( + { + dialog.replace(() => ( + + restoreWorkspaceSession({ + dialog, + sdk, + sync, + project, + toast, + workspaceID: nextWorkspaceID, + sessionID: props.sessionID!, + }) + } + /> + )) + }} + /> + )) + return false } let sessionID = props.sessionID @@ -656,7 +690,7 @@ export function Prompt(props: PromptProps) { variant: "error", }) - return + return true } sessionID = res.data.id @@ -770,6 +804,7 @@ export function Prompt(props: PromptProps) { }) }, 50) input.clear() + return true } const exit = useExit() diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/sidebar.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/sidebar.tsx index 06bc270644..4a7b711a03 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/sidebar.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/sidebar.tsx @@ -1,3 +1,4 @@ +import { useProject } from "@tui/context/project" import { useSync } from "@tui/context/sync" import { createMemo, Show } from "solid-js" import { useTheme } from "../../context/theme" @@ -8,10 +9,23 @@ import { TuiPluginRuntime } from "../../plugin" import { 
getScrollAcceleration } from "../../util/scroll" export function Sidebar(props: { sessionID: string; overlay?: boolean }) { + const project = useProject() const sync = useSync() const { theme } = useTheme() const tuiConfig = useTuiConfig() const session = createMemo(() => sync.session.get(props.sessionID)) + const workspaceStatus = () => { + const workspaceID = session()?.workspaceID + if (!workspaceID) return "error" + return project.workspace.status(workspaceID) ?? "error" + } + const workspaceLabel = () => { + const workspaceID = session()?.workspaceID + if (!workspaceID) return "unknown" + const info = project.workspace.get(workspaceID) + if (!info) return "unknown" + return `${info.type}: ${info.name}` + } const scrollAcceleration = createMemo(() => getScrollAcceleration(tuiConfig)) return ( @@ -48,6 +62,12 @@ export function Sidebar(props: { sessionID: string; overlay?: boolean }) { {session()!.title} + + + {" "} + {workspaceLabel()} + + {session()!.share!.url} From d6e1362fee569e9d9fa9bc0c6c982f64b2027b30 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 19:15:07 +0000 Subject: [PATCH 174/201] chore: generate --- .../src/cli/cmd/tui/component/dialog-workspace-create.tsx | 5 +---- .../cli/cmd/tui/component/dialog-workspace-unavailable.tsx | 4 +--- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx index 6dcdabe0b9..a16c98a9f4 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx @@ -145,10 +145,7 @@ export async function restoreWorkspaceSession(input: { await input.sync.bootstrap({ fatal: false }) } catch (e) {} - await Promise.all([ - input.project.workspace.sync(), - input.sync.session.sync(input.sessionID), - ]).catch((err) => { + await 
Promise.all([input.project.workspace.sync(), input.sync.session.sync(input.sessionID)]).catch((err) => { log.error("session restore refresh failed", { workspaceID: input.workspaceID, sessionID: input.sessionID, diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx index 0c2dd3e2f3..7a21798534 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx @@ -5,9 +5,7 @@ import { For } from "solid-js" import { useTheme } from "../context/theme" import { useDialog } from "../ui/dialog" -export function DialogWorkspaceUnavailable(props: { - onRestore?: () => boolean | void | Promise -}) { +export function DialogWorkspaceUnavailable(props: { onRestore?: () => boolean | void | Promise }) { const dialog = useDialog() const { theme } = useTheme() const [store, setStore] = createStore({ From 88582566bf2bfd2d26000f0c25735bf48ddeca00 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 19:18:55 +0000 Subject: [PATCH 175/201] chore: update nix node_modules hashes --- nix/hashes.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nix/hashes.json b/nix/hashes.json index 5fe1189579..01366c82dc 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,8 +1,8 @@ { "nodeModules": { - "x86_64-linux": "sha256-60KBy/mySuIKbBD5aCS4ZAQqnwZ4PjLMAqZH7gpKCFY=", - "aarch64-linux": "sha256-ROd9OfdCBQZntoAr32O3YVl7ljRgYvJma25U+jHwcts=", - "aarch64-darwin": "sha256-MjMfR73ZZLXtIXfuzqpjvD5RxmIRi9HA1jWXPvagU6w=", - "x86_64-darwin": "sha256-1BADHsSdMxJUbQ4DR/Ww4ZTt18H365lETJs7Fy7fsLc=" + "x86_64-linux": "sha256-GjpBQhvGLTM6NWX29b/mS+KjrQPl0w9VjQHH5jaK9SM=", + "aarch64-linux": "sha256-F5h9p+iZ8CASdUYaYR7O22NwBRa/iT+ZinUxO8lbPTc=", + "aarch64-darwin": "sha256-jWo5yvCtjVKRf9i5XUcTTaLtj2+G6+T1Td2llO/cT5I=", + "x86_64-darwin": 
"sha256-LzV+5/8P2mkiFHmt+a8zDeJjRbU8z9nssSA4tzv1HxA=" } } From 5621373bc2d5ca106e9fbb2d2fbc3bc547dd744c Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 15:20:11 -0400 Subject: [PATCH 176/201] fix(core): move instance middleware after control plane routes (#23150) --- .../src/server/routes/instance/index.ts | 6 ++- .../src/server/routes/instance/middleware.ts | 35 +++++++++++++++ packages/opencode/src/server/server.ts | 44 +++---------------- 3 files changed, 46 insertions(+), 39 deletions(-) create mode 100644 packages/opencode/src/server/routes/instance/middleware.ts diff --git a/packages/opencode/src/server/routes/instance/index.ts b/packages/opencode/src/server/routes/instance/index.ts index 017541b8fc..c0339fded7 100644 --- a/packages/opencode/src/server/routes/instance/index.ts +++ b/packages/opencode/src/server/routes/instance/index.ts @@ -15,6 +15,7 @@ import { Command } from "@/command" import { QuestionRoutes } from "./question" import { PermissionRoutes } from "./permission" import { Flag } from "@/flag/flag" +import { WorkspaceID } from "@/control-plane/schema" import { ExperimentalHttpApiServer } from "./httpapi/server" import { ProjectRoutes } from "./project" import { SessionRoutes } from "./session" @@ -27,9 +28,10 @@ import { ProviderRoutes } from "./provider" import { EventRoutes } from "./event" import { SyncRoutes } from "./sync" import { AppRuntime } from "@/effect/app-runtime" +import { InstanceMiddleware } from "./middleware" -export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { - const app = new Hono() +export const InstanceRoutes = (upgrade: UpgradeWebSocket, workspaceID?: WorkspaceID): Hono => { + const app = new Hono().use(InstanceMiddleware(workspaceID)) if (Flag.OPENCODE_EXPERIMENTAL_HTTPAPI) { const handler = ExperimentalHttpApiServer.webHandler().handler diff --git a/packages/opencode/src/server/routes/instance/middleware.ts b/packages/opencode/src/server/routes/instance/middleware.ts new file mode 
100644 index 0000000000..b963268d64 --- /dev/null +++ b/packages/opencode/src/server/routes/instance/middleware.ts @@ -0,0 +1,35 @@ +import type { MiddlewareHandler } from "hono" +import { Instance } from "@/project/instance" +import { InstanceBootstrap } from "@/project/bootstrap" +import { AppRuntime } from "@/effect/app-runtime" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { WorkspaceContext } from "@/control-plane/workspace-context" +import { WorkspaceID } from "@/control-plane/schema" + +export function InstanceMiddleware(workspaceID?: WorkspaceID): MiddlewareHandler { + return async (c, next) => { + const raw = c.req.query("directory") || c.req.header("x-opencode-directory") || process.cwd() + const directory = AppFileSystem.resolve( + (() => { + try { + return decodeURIComponent(raw) + } catch { + return raw + } + })(), + ) + + return WorkspaceContext.provide({ + workspaceID, + async fn() { + return Instance.provide({ + directory, + init: () => AppRuntime.runPromise(InstanceBootstrap), + async fn() { + return next() + }, + }) + }, + }) + } +} diff --git a/packages/opencode/src/server/server.ts b/packages/opencode/src/server/server.ts index 2201c75b4c..f608c2b732 100644 --- a/packages/opencode/src/server/server.ts +++ b/packages/opencode/src/server/server.ts @@ -1,16 +1,10 @@ import { generateSpecs } from "hono-openapi" import { Hono } from "hono" -import type { MiddlewareHandler } from "hono" import { adapter } from "#hono" import { lazy } from "@/util/lazy" import { Log } from "@/util" import { Flag } from "@/flag/flag" -import { Instance } from "@/project/instance" -import { InstanceBootstrap } from "@/project/bootstrap" -import { AppRuntime } from "@/effect/app-runtime" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { WorkspaceID } from "@/control-plane/schema" -import { WorkspaceContext } from "@/control-plane/workspace-context" import { MDNS } from "./mdns" import { AuthMiddleware, 
CompressionMiddleware, CorsMiddleware, ErrorMiddleware, LoggerMiddleware } from "./middleware" import { FenceMiddleware } from "./fence" @@ -48,47 +42,23 @@ function create(opts: { cors?: string[] }) { const runtime = adapter.create(app) - function InstanceMiddleware(workspaceID?: WorkspaceID): MiddlewareHandler { - return async (c, next) => { - const raw = c.req.query("directory") || c.req.header("x-opencode-directory") || process.cwd() - const directory = AppFileSystem.resolve( - (() => { - try { - return decodeURIComponent(raw) - } catch { - return raw - } - })(), - ) - - return WorkspaceContext.provide({ - workspaceID, - async fn() { - return Instance.provide({ - directory, - init: () => AppRuntime.runPromise(InstanceBootstrap), - async fn() { - return next() - }, - }) - }, - }) - } - } - if (Flag.OPENCODE_WORKSPACE_ID) { return { app: app - .use(InstanceMiddleware(Flag.OPENCODE_WORKSPACE_ID ? WorkspaceID.make(Flag.OPENCODE_WORKSPACE_ID) : undefined)) .use(FenceMiddleware) - .route("/", InstanceRoutes(runtime.upgradeWebSocket)), + .route( + "/", + InstanceRoutes( + runtime.upgradeWebSocket, + Flag.OPENCODE_WORKSPACE_ID ? 
WorkspaceID.make(Flag.OPENCODE_WORKSPACE_ID) : undefined, + ), + ), runtime, } } return { app: app - .use(InstanceMiddleware()) .route("/", ControlPlaneRoutes()) .use(WorkspaceRouterMiddleware(runtime.upgradeWebSocket)) .route("/", InstanceRoutes(runtime.upgradeWebSocket)) From 68834cfcc37ce51c1b0b4895b9563a86f1611c9a Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 15:22:08 -0400 Subject: [PATCH 177/201] fix(opencode): normalize provider metadata and tag otel runs (#23140) --- packages/opencode/src/cli/cmd/tui/thread.ts | 9 ++-- packages/opencode/src/cli/cmd/tui/worker.ts | 3 ++ packages/opencode/src/effect/observability.ts | 50 +++++++++++-------- packages/opencode/src/file/ripgrep.ts | 5 +- packages/opencode/src/file/ripgrep.worker.ts | 5 +- packages/opencode/src/index.ts | 5 ++ packages/opencode/src/provider/provider.ts | 14 +++--- .../opencode/src/util/opencode-process.ts | 24 +++++++++ .../opencode/test/provider/provider.test.ts | 34 ++++++++++++- 9 files changed, 112 insertions(+), 37 deletions(-) create mode 100644 packages/opencode/src/util/opencode-process.ts diff --git a/packages/opencode/src/cli/cmd/tui/thread.ts b/packages/opencode/src/cli/cmd/tui/thread.ts index 96ceb905c5..e3e9eb8117 100644 --- a/packages/opencode/src/cli/cmd/tui/thread.ts +++ b/packages/opencode/src/cli/cmd/tui/thread.ts @@ -15,6 +15,7 @@ import type { EventSource } from "./context/sdk" import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32" import { writeHeapSnapshot } from "v8" import { TuiConfig } from "./config/tui" +import { OPENCODE_PROCESS_ROLE, OPENCODE_RUN_ID, ensureRunID, sanitizedProcessEnv } from "@/util/opencode-process" declare global { const OPENCODE_WORKER_PATH: string @@ -129,11 +130,13 @@ export const TuiThreadCommand = cmd({ return } const cwd = Filesystem.resolve(process.cwd()) + const env = sanitizedProcessEnv({ + [OPENCODE_PROCESS_ROLE]: "worker", + [OPENCODE_RUN_ID]: ensureRunID(), + }) const worker = new Worker(file, { - 
env: Object.fromEntries( - Object.entries(process.env).filter((entry): entry is [string, string] => entry[1] !== undefined), - ), + env, }) worker.onerror = (e) => { Log.Default.error("thread error", { diff --git a/packages/opencode/src/cli/cmd/tui/worker.ts b/packages/opencode/src/cli/cmd/tui/worker.ts index 393a407eb0..8cec99c615 100644 --- a/packages/opencode/src/cli/cmd/tui/worker.ts +++ b/packages/opencode/src/cli/cmd/tui/worker.ts @@ -11,6 +11,9 @@ import { Flag } from "@/flag/flag" import { writeHeapSnapshot } from "node:v8" import { Heap } from "@/cli/heap" import { AppRuntime } from "@/effect/app-runtime" +import { ensureProcessMetadata } from "@/util/opencode-process" + +ensureProcessMetadata("worker") await Log.init({ print: process.argv.includes("--print-logs"), diff --git a/packages/opencode/src/effect/observability.ts b/packages/opencode/src/effect/observability.ts index efd16ffc09..1c385d60ae 100644 --- a/packages/opencode/src/effect/observability.ts +++ b/packages/opencode/src/effect/observability.ts @@ -4,9 +4,11 @@ import { OtlpLogger, OtlpSerialization } from "effect/unstable/observability" import * as EffectLogger from "./logger" import { Flag } from "@/flag/flag" import { InstallationChannel, InstallationVersion } from "@/installation/version" +import { ensureProcessMetadata } from "@/util/opencode-process" const base = Flag.OTEL_EXPORTER_OTLP_ENDPOINT export const enabled = !!base +const processID = crypto.randomUUID() const headers = Flag.OTEL_EXPORTER_OTLP_HEADERS ? 
Flag.OTEL_EXPORTER_OTLP_HEADERS.split(",").reduce( @@ -19,26 +21,34 @@ const headers = Flag.OTEL_EXPORTER_OTLP_HEADERS ) : undefined -const resource = { - serviceName: "opencode", - serviceVersion: InstallationVersion, - attributes: { - "deployment.environment.name": InstallationChannel, - "opencode.client": Flag.OPENCODE_CLIENT, - }, +function resource() { + const processMetadata = ensureProcessMetadata("main") + return { + serviceName: "opencode", + serviceVersion: InstallationVersion, + attributes: { + "deployment.environment.name": InstallationChannel, + "opencode.client": Flag.OPENCODE_CLIENT, + "opencode.process_role": processMetadata.processRole, + "opencode.run_id": processMetadata.runID, + "service.instance.id": processID, + }, + } } -const logs = Logger.layer( - [ - EffectLogger.logger, - OtlpLogger.make({ - url: `${base}/v1/logs`, - resource, - headers, - }), - ], - { mergeWithExisting: false }, -).pipe(Layer.provide(OtlpSerialization.layerJson), Layer.provide(FetchHttpClient.layer)) +function logs() { + return Logger.layer( + [ + EffectLogger.logger, + OtlpLogger.make({ + url: `${base}/v1/logs`, + resource: resource(), + headers, + }), + ], + { mergeWithExisting: false }, + ).pipe(Layer.provide(OtlpSerialization.layerJson), Layer.provide(FetchHttpClient.layer)) +} const traces = async () => { const NodeSdk = await import("@effect/opentelemetry/NodeSdk") @@ -58,7 +68,7 @@ const traces = async () => { context.setGlobalContextManager(mgr) return NodeSdk.layer(() => ({ - resource, + resource: resource(), spanProcessor: new SdkBase.BatchSpanProcessor( new OTLP.OTLPTraceExporter({ url: `${base}/v1/traces`, @@ -73,7 +83,7 @@ export const layer = !base : Layer.unwrap( Effect.gen(function* () { const trace = yield* Effect.promise(traces) - return Layer.mergeAll(trace, logs) + return Layer.mergeAll(trace, logs()) }), ) diff --git a/packages/opencode/src/file/ripgrep.ts b/packages/opencode/src/file/ripgrep.ts index ac450108e1..3f16f6c501 100644 --- 
a/packages/opencode/src/file/ripgrep.ts +++ b/packages/opencode/src/file/ripgrep.ts @@ -7,6 +7,7 @@ import { ripgrep } from "ripgrep" import { Filesystem } from "@/util" import { Log } from "@/util" +import { sanitizedProcessEnv } from "@/util/opencode-process" const log = Log.create({ service: "ripgrep" }) @@ -157,9 +158,7 @@ type WorkerError = { } function env() { - const env = Object.fromEntries( - Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined), - ) + const env = sanitizedProcessEnv() delete env.RIPGREP_CONFIG_PATH return env } diff --git a/packages/opencode/src/file/ripgrep.worker.ts b/packages/opencode/src/file/ripgrep.worker.ts index 62094c7acc..21a3aef5cc 100644 --- a/packages/opencode/src/file/ripgrep.worker.ts +++ b/packages/opencode/src/file/ripgrep.worker.ts @@ -1,9 +1,8 @@ import { ripgrep } from "ripgrep" +import { sanitizedProcessEnv } from "@/util/opencode-process" function env() { - const env = Object.fromEntries( - Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined), - ) + const env = sanitizedProcessEnv() delete env.RIPGREP_CONFIG_PATH return env } diff --git a/packages/opencode/src/index.ts b/packages/opencode/src/index.ts index 67de87c2aa..0a3a927b46 100644 --- a/packages/opencode/src/index.ts +++ b/packages/opencode/src/index.ts @@ -38,6 +38,9 @@ import { errorMessage } from "./util/error" import { PluginCommand } from "./cli/cmd/plug" import { Heap } from "./cli/heap" import { drizzle } from "drizzle-orm/bun-sqlite" +import { ensureProcessMetadata } from "./util/opencode-process" + +const processMetadata = ensureProcessMetadata("main") process.on("unhandledRejection", (e) => { Log.Default.error("rejection", { @@ -108,6 +111,8 @@ const cli = yargs(args) Log.Default.info("opencode", { version: InstallationVersion, args: process.argv.slice(2), + process_role: processMetadata.processRole, + run_id: processMetadata.runID, }) const marker = 
path.join(Global.Path.data, "opencode.db") diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 558bcb75a5..51aca21c77 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -968,7 +968,7 @@ function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model family: model.family, api: { id: model.id, - url: model.provider?.api ?? provider.api!, + url: model.provider?.api ?? provider.api ?? "", npm: model.provider?.npm ?? provider.npm ?? "@ai-sdk/openai-compatible", }, status: model.status ?? "active", @@ -981,10 +981,10 @@ function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model output: model.limit.output, }, capabilities: { - temperature: model.temperature, - reasoning: model.reasoning, - attachment: model.attachment, - toolcall: model.tool_call, + temperature: model.temperature ?? false, + reasoning: model.reasoning ?? false, + attachment: model.attachment ?? false, + toolcall: model.tool_call ?? true, input: { text: model.modalities?.input?.includes("text") ?? false, audio: model.modalities?.input?.includes("audio") ?? false, @@ -1001,7 +1001,7 @@ function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model }, interleaved: model.interleaved ?? false, }, - release_date: model.release_date, + release_date: model.release_date ?? "", variants: {}, } @@ -1143,7 +1143,7 @@ const layer: Layer.Layer< existingModel?.api.npm ?? modelsDev[providerID]?.npm ?? "@ai-sdk/openai-compatible", - url: model.provider?.api ?? provider?.api ?? existingModel?.api.url ?? modelsDev[providerID]?.api, + url: model.provider?.api ?? provider?.api ?? existingModel?.api.url ?? modelsDev[providerID]?.api ?? "", }, status: model.status ?? existingModel?.status ?? 
"active", name, diff --git a/packages/opencode/src/util/opencode-process.ts b/packages/opencode/src/util/opencode-process.ts new file mode 100644 index 0000000000..f59270ad2d --- /dev/null +++ b/packages/opencode/src/util/opencode-process.ts @@ -0,0 +1,24 @@ +export const OPENCODE_RUN_ID = "OPENCODE_RUN_ID" +export const OPENCODE_PROCESS_ROLE = "OPENCODE_PROCESS_ROLE" + +export function ensureRunID() { + return (process.env[OPENCODE_RUN_ID] ??= crypto.randomUUID()) +} + +export function ensureProcessRole(fallback: "main" | "worker") { + return (process.env[OPENCODE_PROCESS_ROLE] ??= fallback) +} + +export function ensureProcessMetadata(fallback: "main" | "worker") { + return { + runID: ensureRunID(), + processRole: ensureProcessRole(fallback), + } +} + +export function sanitizedProcessEnv(overrides?: Record) { + const env = Object.fromEntries( + Object.entries(process.env).filter((entry): entry is [string, string] => entry[1] !== undefined), + ) + return overrides ? Object.assign(env, overrides) : env +} diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts index df8fc4e966..8993020820 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -1916,7 +1916,7 @@ test("mode cost preserves over-200k pricing from base model", () => { }, }, }, - } as ModelsDev.Provider + } as unknown as ModelsDev.Provider const model = Provider.fromModelsDevProvider(provider).models["gpt-5.4-fast"] expect(model.cost.input).toEqual(5) @@ -1934,6 +1934,38 @@ test("mode cost preserves over-200k pricing from base model", () => { }) }) +test("models.dev normalization fills required response fields", () => { + const provider = { + id: "gateway", + name: "Gateway", + env: [], + models: { + "gpt-5.4": { + id: "gpt-5.4", + name: "GPT-5.4", + family: "gpt", + cost: { + input: 2.5, + output: 15, + }, + limit: { + context: 1_050_000, + input: 922_000, + output: 128_000, + }, + }, 
+ }, + } as unknown as ModelsDev.Provider + + const model = Provider.fromModelsDevProvider(provider).models["gpt-5.4"] + expect(model.api.url).toBe("") + expect(model.capabilities.temperature).toBe(false) + expect(model.capabilities.reasoning).toBe(false) + expect(model.capabilities.attachment).toBe(false) + expect(model.capabilities.toolcall).toBe(true) + expect(model.release_date).toBe("") +}) + test("model variants are generated for reasoning models", async () => { await using tmp = await tmpdir({ init: async (dir) => { From aa05b9abe5ff3a4b29b72fb878be969567eda5bb Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 15:25:58 -0400 Subject: [PATCH 178/201] fix(core): pass OTEL config to workspace env (#23154) --- packages/opencode/src/control-plane/types.ts | 2 +- packages/opencode/src/control-plane/workspace.ts | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/control-plane/types.ts b/packages/opencode/src/control-plane/types.ts index 3961cd0e2a..07acd5ce58 100644 --- a/packages/opencode/src/control-plane/types.ts +++ b/packages/opencode/src/control-plane/types.ts @@ -28,7 +28,7 @@ export type WorkspaceAdaptor = { name: string description: string configure(info: WorkspaceInfo): WorkspaceInfo | Promise - create(info: WorkspaceInfo, env: Record, from?: WorkspaceInfo): Promise + create(info: WorkspaceInfo, env: Record, from?: WorkspaceInfo): Promise remove(info: WorkspaceInfo): Promise target(info: WorkspaceInfo): Target | Promise } diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index d678ad7526..e94d6c2c93 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -115,6 +115,8 @@ export const create = fn(CreateInput, async (input) => { OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), OPENCODE_WORKSPACE_ID: config.id, 
OPENCODE_EXPERIMENTAL_WORKSPACES: "true", + OTEL_EXPORTER_OTLP_HEADERS: process.env.OTEL_EXPORTER_OTLP_HEADERS, + OTEL_EXPORTER_OTLP_ENDPOINT: process.env.OTEL_EXPORTER_OTLP_ENDPOINT, } await adaptor.create(config, env) From f83cecaaf6ee29da70109575595901884cdbc312 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 15:29:32 -0400 Subject: [PATCH 179/201] fix(opencode): untrace streaming event hot paths (#23156) --- packages/opencode/src/session/processor.ts | 2 +- packages/opencode/src/session/session.ts | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/opencode/src/session/processor.ts b/packages/opencode/src/session/processor.ts index 820c61aa91..900579f036 100644 --- a/packages/opencode/src/session/processor.ts +++ b/packages/opencode/src/session/processor.ts @@ -213,7 +213,7 @@ export const layer: Layer.Layer< return true }) - const handleEvent = Effect.fn("SessionProcessor.handleEvent")(function* (value: StreamEvent) { + const handleEvent = Effect.fnUntraced(function* (value: StreamEvent) { switch (value.type) { case "start": yield* status.set(ctx.sessionID, { type: "busy" }) diff --git a/packages/opencode/src/session/session.ts b/packages/opencode/src/session/session.ts index 077cc43097..1941a6704e 100644 --- a/packages/opencode/src/session/session.ts +++ b/packages/opencode/src/session/session.ts @@ -649,10 +649,10 @@ export const layer: Layer.Layer = return input.partID }) - const updatePartDelta = Effect.fn("Session.updatePartDelta")(function* (input: { - sessionID: SessionID - messageID: MessageID - partID: PartID + const updatePartDelta = Effect.fnUntraced(function* (input: { + sessionID: SessionID + messageID: MessageID + partID: PartID field: string delta: string }) { From 9b0659d4f9b64cb2416bb340c503037d0778349e Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 19:30:28 +0000 Subject: [PATCH 180/201] chore: generate --- packages/opencode/src/session/processor.ts | 2 +- 
packages/opencode/src/session/session.ts | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/opencode/src/session/processor.ts b/packages/opencode/src/session/processor.ts index 900579f036..9ab74ca341 100644 --- a/packages/opencode/src/session/processor.ts +++ b/packages/opencode/src/session/processor.ts @@ -213,7 +213,7 @@ export const layer: Layer.Layer< return true }) - const handleEvent = Effect.fnUntraced(function* (value: StreamEvent) { + const handleEvent = Effect.fnUntraced(function* (value: StreamEvent) { switch (value.type) { case "start": yield* status.set(ctx.sessionID, { type: "busy" }) diff --git a/packages/opencode/src/session/session.ts b/packages/opencode/src/session/session.ts index 1941a6704e..5168b80b56 100644 --- a/packages/opencode/src/session/session.ts +++ b/packages/opencode/src/session/session.ts @@ -649,10 +649,10 @@ export const layer: Layer.Layer = return input.partID }) - const updatePartDelta = Effect.fnUntraced(function* (input: { - sessionID: SessionID - messageID: MessageID - partID: PartID + const updatePartDelta = Effect.fnUntraced(function* (input: { + sessionID: SessionID + messageID: MessageID + partID: PartID field: string delta: string }) { From 9b6c3971712baaecacb1d09eb21c4d6f77955622 Mon Sep 17 00:00:00 2001 From: opencode Date: Fri, 17 Apr 2026 20:13:25 +0000 Subject: [PATCH 181/201] release: v1.4.10 --- bun.lock | 32 +++++++++++++------------- packages/app/package.json | 2 +- packages/console/app/package.json | 2 +- packages/console/core/package.json | 2 +- packages/console/function/package.json | 2 +- packages/console/mail/package.json | 2 +- packages/desktop-electron/package.json | 2 +- packages/desktop/package.json | 2 +- packages/enterprise/package.json | 2 +- packages/extensions/zed/extension.toml | 12 +++++----- packages/function/package.json | 2 +- packages/opencode/package.json | 2 +- packages/plugin/package.json | 2 +- packages/sdk/js/package.json | 2 +- 
packages/shared/package.json | 2 +- packages/slack/package.json | 2 +- packages/ui/package.json | 2 +- packages/web/package.json | 2 +- sdks/vscode/package.json | 2 +- 19 files changed, 39 insertions(+), 39 deletions(-) diff --git a/bun.lock b/bun.lock index 8d83d8ab47..6e8845c4f2 100644 --- a/bun.lock +++ b/bun.lock @@ -29,7 +29,7 @@ }, "packages/app": { "name": "@opencode-ai/app", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -83,7 +83,7 @@ }, "packages/console/app": { "name": "@opencode-ai/console-app", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@cloudflare/vite-plugin": "1.15.2", "@ibm/plex": "6.4.1", @@ -117,7 +117,7 @@ }, "packages/console/core": { "name": "@opencode-ai/console-core", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@aws-sdk/client-sts": "3.782.0", "@jsx-email/render": "1.1.1", @@ -144,7 +144,7 @@ }, "packages/console/function": { "name": "@opencode-ai/console-function", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@ai-sdk/anthropic": "3.0.64", "@ai-sdk/openai": "3.0.48", @@ -168,7 +168,7 @@ }, "packages/console/mail": { "name": "@opencode-ai/console-mail", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", @@ -192,7 +192,7 @@ }, "packages/desktop": { "name": "@opencode-ai/desktop", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@opencode-ai/app": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -225,7 +225,7 @@ }, "packages/desktop-electron": { "name": "@opencode-ai/desktop-electron", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "effect": "catalog:", "electron-context-menu": "4.1.2", @@ -268,7 +268,7 @@ }, "packages/enterprise": { "name": "@opencode-ai/enterprise", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@opencode-ai/shared": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ 
-297,7 +297,7 @@ }, "packages/function": { "name": "@opencode-ai/function", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@octokit/auth-app": "8.0.1", "@octokit/rest": "catalog:", @@ -313,7 +313,7 @@ }, "packages/opencode": { "name": "opencode", - "version": "1.4.9", + "version": "1.4.10", "bin": { "opencode": "./bin/opencode", }, @@ -458,7 +458,7 @@ }, "packages/plugin": { "name": "@opencode-ai/plugin", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@opencode-ai/sdk": "workspace:*", "effect": "catalog:", @@ -493,7 +493,7 @@ }, "packages/sdk/js": { "name": "@opencode-ai/sdk", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "cross-spawn": "catalog:", }, @@ -508,7 +508,7 @@ }, "packages/shared": { "name": "@opencode-ai/shared", - "version": "1.4.9", + "version": "1.4.10", "bin": { "opencode": "./bin/opencode", }, @@ -532,7 +532,7 @@ }, "packages/slack": { "name": "@opencode-ai/slack", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@opencode-ai/sdk": "workspace:*", "@slack/bolt": "^3.17.1", @@ -567,7 +567,7 @@ }, "packages/ui": { "name": "@opencode-ai/ui", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -616,7 +616,7 @@ }, "packages/web": { "name": "@opencode-ai/web", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@astrojs/cloudflare": "12.6.3", "@astrojs/markdown-remark": "6.3.1", diff --git a/packages/app/package.json b/packages/app/package.json index 0f4ae4228b..7206bd5394 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/app", - "version": "1.4.9", + "version": "1.4.10", "description": "", "type": "module", "exports": { diff --git a/packages/console/app/package.json b/packages/console/app/package.json index 63b6a5c414..2dbd630163 100644 --- a/packages/console/app/package.json +++ b/packages/console/app/package.json @@ -1,6 +1,6 @@ { "name": 
"@opencode-ai/console-app", - "version": "1.4.9", + "version": "1.4.10", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/console/core/package.json b/packages/console/core/package.json index e5351c1a87..323ee013df 100644 --- a/packages/console/core/package.json +++ b/packages/console/core/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/console-core", - "version": "1.4.9", + "version": "1.4.10", "private": true, "type": "module", "license": "MIT", diff --git a/packages/console/function/package.json b/packages/console/function/package.json index ef84eae470..bef6f53e08 100644 --- a/packages/console/function/package.json +++ b/packages/console/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-function", - "version": "1.4.9", + "version": "1.4.10", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": "module", diff --git a/packages/console/mail/package.json b/packages/console/mail/package.json index 439beb15c8..5b56f5678d 100644 --- a/packages/console/mail/package.json +++ b/packages/console/mail/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-mail", - "version": "1.4.9", + "version": "1.4.10", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", diff --git a/packages/desktop-electron/package.json b/packages/desktop-electron/package.json index b698a08896..dbf63e8500 100644 --- a/packages/desktop-electron/package.json +++ b/packages/desktop-electron/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop-electron", "private": true, - "version": "1.4.9", + "version": "1.4.10", "type": "module", "license": "MIT", "homepage": "https://opencode.ai", diff --git a/packages/desktop/package.json b/packages/desktop/package.json index 1d9d24bd32..97c8713a2b 100644 --- a/packages/desktop/package.json +++ b/packages/desktop/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop", "private": true, - "version": 
"1.4.9", + "version": "1.4.10", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/enterprise/package.json b/packages/enterprise/package.json index 02f1ad83ca..914978d7da 100644 --- a/packages/enterprise/package.json +++ b/packages/enterprise/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/enterprise", - "version": "1.4.9", + "version": "1.4.10", "private": true, "type": "module", "license": "MIT", diff --git a/packages/extensions/zed/extension.toml b/packages/extensions/zed/extension.toml index ffcc975d1b..55b3673739 100644 --- a/packages/extensions/zed/extension.toml +++ b/packages/extensions/zed/extension.toml @@ -1,7 +1,7 @@ id = "opencode" name = "OpenCode" description = "The open source coding agent." -version = "1.4.9" +version = "1.4.10" schema_version = 1 authors = ["Anomaly"] repository = "https://github.com/anomalyco/opencode" @@ -11,26 +11,26 @@ name = "OpenCode" icon = "./icons/opencode.svg" [agent_servers.opencode.targets.darwin-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-darwin-arm64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.10/opencode-darwin-arm64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.darwin-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-darwin-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.10/opencode-darwin-x64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-linux-arm64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.10/opencode-linux-arm64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-linux-x64.tar.gz" +archive = 
"https://github.com/anomalyco/opencode/releases/download/v1.4.10/opencode-linux-x64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.windows-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.9/opencode-windows-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.10/opencode-windows-x64.zip" cmd = "./opencode.exe" args = ["acp"] diff --git a/packages/function/package.json b/packages/function/package.json index 2ac49a9228..1dc1f32bb1 100644 --- a/packages/function/package.json +++ b/packages/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/function", - "version": "1.4.9", + "version": "1.4.10", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": "module", diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 2f56b74775..7d24d7ca31 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.9", + "version": "1.4.10", "name": "opencode", "type": "module", "license": "MIT", diff --git a/packages/plugin/package.json b/packages/plugin/package.json index a1aa6470dc..cc1f24b68e 100644 --- a/packages/plugin/package.json +++ b/packages/plugin/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/plugin", - "version": "1.4.9", + "version": "1.4.10", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/sdk/js/package.json b/packages/sdk/js/package.json index 27d9188151..3a6d13680e 100644 --- a/packages/sdk/js/package.json +++ b/packages/sdk/js/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/sdk", - "version": "1.4.9", + "version": "1.4.10", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/shared/package.json b/packages/shared/package.json index 
383b26d6ed..615cd42c00 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.9", + "version": "1.4.10", "name": "@opencode-ai/shared", "type": "module", "license": "MIT", diff --git a/packages/slack/package.json b/packages/slack/package.json index 75974ed39e..eb23c2036f 100644 --- a/packages/slack/package.json +++ b/packages/slack/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/slack", - "version": "1.4.9", + "version": "1.4.10", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/ui/package.json b/packages/ui/package.json index 525cf20935..e756c33fca 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/ui", - "version": "1.4.9", + "version": "1.4.10", "type": "module", "license": "MIT", "exports": { diff --git a/packages/web/package.json b/packages/web/package.json index d3a2a25e4c..0c4a0c6480 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -2,7 +2,7 @@ "name": "@opencode-ai/web", "type": "module", "license": "MIT", - "version": "1.4.9", + "version": "1.4.10", "scripts": { "dev": "astro dev", "dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev", diff --git a/sdks/vscode/package.json b/sdks/vscode/package.json index a3ed76c883..83d00c987a 100644 --- a/sdks/vscode/package.json +++ b/sdks/vscode/package.json @@ -2,7 +2,7 @@ "name": "opencode", "displayName": "opencode", "description": "opencode for VS Code", - "version": "1.4.9", + "version": "1.4.10", "publisher": "sst-dev", "repository": { "type": "git", From b708e8431ec3fcda2a53a3c3e3b8608e5edfb16d Mon Sep 17 00:00:00 2001 From: Dax Date: Fri, 17 Apr 2026 15:56:52 -0400 Subject: [PATCH 182/201] docs(opencode): annotate plugin loader flow (#23160) --- packages/opencode/src/plugin/loader.ts | 46 ++++++++++++++++++++++++-- 1 file changed, 44 insertions(+), 2 deletions(-) diff --git 
a/packages/opencode/src/plugin/loader.ts b/packages/opencode/src/plugin/loader.ts index 0245d311e0..f17673f36e 100644 --- a/packages/opencode/src/plugin/loader.ts +++ b/packages/opencode/src/plugin/loader.ts @@ -12,31 +12,41 @@ import { ConfigPlugin } from "@/config/plugin" import { InstallationVersion } from "@/installation/version" export namespace PluginLoader { + // A normalized plugin declaration derived from config before any filesystem or npm work happens. export type Plan = { spec: string options: ConfigPlugin.Options | undefined deprecated: boolean } + + // A plugin that has been resolved to a concrete target and entrypoint on disk. export type Resolved = Plan & { source: PluginSource target: string entry: string pkg?: PluginPackage } + + // A plugin target we could inspect, but which does not expose the requested kind of entrypoint. export type Missing = Plan & { source: PluginSource target: string pkg?: PluginPackage message: string } + + // A resolved plugin whose module has been imported successfully. export type Loaded = Resolved & { mod: Record } type Candidate = { origin: ConfigPlugin.Origin; plan: Plan } type Report = { + // Called before each attempt so callers can log initial load attempts and retries uniformly. start?: (candidate: Candidate, retry: boolean) => void + // Called when the package exists but does not provide the requested entrypoint. missing?: (candidate: Candidate, retry: boolean, message: string, resolved: Missing) => void + // Called for operational failures such as install, compatibility, or dynamic import errors. error?: ( candidate: Candidate, retry: boolean, @@ -46,19 +56,25 @@ export namespace PluginLoader { ) => void } + // Normalize a config item into the loader's internal representation. 
function plan(item: ConfigPlugin.Spec): Plan { const spec = ConfigPlugin.pluginSpecifier(item) return { spec, options: ConfigPlugin.pluginOptions(item), deprecated: isDeprecatedPlugin(spec) } } + // Resolve a configured plugin into a concrete entrypoint that can later be imported. + // + // The stages here intentionally separate install/target resolution, entrypoint detection, + // and compatibility checks so callers can report the exact reason a plugin was skipped. export async function resolve( plan: Plan, kind: PluginKind, ): Promise< | { ok: true; value: Resolved } - | { ok: false; stage: "missing"; value: Missing } - | { ok: false; stage: "install" | "entry" | "compatibility"; error: unknown } + | { ok: false; stage: "missing"; value: Missing } + | { ok: false; stage: "install" | "entry" | "compatibility"; error: unknown } > { + // First make sure the plugin exists locally, installing npm plugins on demand. let target = "" try { target = await resolvePluginTarget(plan.spec) @@ -67,6 +83,7 @@ export namespace PluginLoader { } if (!target) return { ok: false, stage: "install", error: new Error(`Plugin ${plan.spec} target is empty`) } + // Then inspect the target for the requested server/tui entrypoint. let base try { base = await createPluginEntry(plan.spec, target, kind) @@ -86,6 +103,8 @@ export namespace PluginLoader { }, } + // npm plugins can declare which opencode versions they support; file plugins are treated + // as local development code and skip this compatibility gate. if (base.source === "npm") { try { await checkPluginCompatibility(base.target, InstallationVersion, base.pkg) @@ -96,6 +115,7 @@ export namespace PluginLoader { return { ok: true, value: { ...plan, source: base.source, target: base.target, entry: base.entry, pkg: base.pkg } } } + // Import the resolved module only after all earlier validation has succeeded. 
export async function load(row: Resolved): Promise<{ ok: true; value: Loaded } | { ok: false; error: unknown }> { let mod try { @@ -107,6 +127,8 @@ export namespace PluginLoader { return { ok: true, value: { ...row, mod } } } + // Run one candidate through the full pipeline: resolve, optionally surface a missing entry, + // import the module, and finally let the caller transform the loaded plugin into any result type. async function attempt( candidate: Candidate, kind: PluginKind, @@ -116,11 +138,17 @@ export namespace PluginLoader { report: Report | undefined, ): Promise { const plan = candidate.plan + + // Deprecated plugin packages are silently ignored because they are now built in. if (plan.deprecated) return + report?.start?.(candidate, retry) + const resolved = await resolve(plan, kind) if (!resolved.ok) { if (resolved.stage === "missing") { + // Missing entrypoints are handled separately so callers can still inspect package metadata, + // for example to load theme files from a tui plugin package that has no code entrypoint. if (missing) { const value = await missing(resolved.value, candidate.origin, retry) if (value !== undefined) return value @@ -131,11 +159,15 @@ export namespace PluginLoader { report?.error?.(candidate, retry, resolved.stage, resolved.error) return } + const loaded = await load(resolved.value) if (!loaded.ok) { report?.error?.(candidate, retry, "load", loaded.error, resolved.value) return } + + // The default behavior is to return the successfully loaded plugin as-is, but callers can + // provide a finisher to adapt the result into a more specific runtime shape. if (!finish) return loaded.value as R return finish(loaded.value, candidate.origin, retry) } @@ -149,6 +181,11 @@ export namespace PluginLoader { report?: Report } + // Resolve and load all configured plugins in parallel. + // + // If `wait` is provided, file-based plugins that initially failed are retried once after the + // caller finishes preparing dependencies. 
This supports local plugins that depend on an install + // step happening elsewhere before their entrypoint becomes loadable. export async function loadExternal(input: Input): Promise { const candidates = input.items.map((origin) => ({ origin, plan: plan(origin.spec) })) const list: Array> = [] @@ -160,6 +197,9 @@ export namespace PluginLoader { let deps: Promise | undefined for (let i = 0; i < candidates.length; i++) { if (out[i] !== undefined) continue + + // Only local file plugins are retried. npm plugins already attempted installation during + // the first pass, while file plugins may need the caller's dependency preparation to finish. const candidate = candidates[i] if (!candidate || pluginSource(candidate.plan.spec) !== "file") continue deps ??= input.wait() @@ -167,6 +207,8 @@ export namespace PluginLoader { out[i] = await attempt(candidate, input.kind, true, input.finish, input.missing, input.report) } } + + // Drop skipped/failed entries while preserving the successful result order. 
const ready: R[] = [] for (const item of out) if (item !== undefined) ready.push(item) return ready From c2061c6bbfa83147792ab9aec251cf193ccb6b0d Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 19:58:18 +0000 Subject: [PATCH 183/201] chore: generate --- packages/opencode/src/plugin/loader.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/opencode/src/plugin/loader.ts b/packages/opencode/src/plugin/loader.ts index f17673f36e..e61612561b 100644 --- a/packages/opencode/src/plugin/loader.ts +++ b/packages/opencode/src/plugin/loader.ts @@ -71,8 +71,8 @@ export namespace PluginLoader { kind: PluginKind, ): Promise< | { ok: true; value: Resolved } - | { ok: false; stage: "missing"; value: Missing } - | { ok: false; stage: "install" | "entry" | "compatibility"; error: unknown } + | { ok: false; stage: "missing"; value: Missing } + | { ok: false; stage: "install" | "entry" | "compatibility"; error: unknown } > { // First make sure the plugin exists locally, installing npm plugins on demand. 
let target = "" From 984f5ed6eb0cbf3048d8c1dbd8f643f5a01d2501 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 16:15:24 -0400 Subject: [PATCH 184/201] fix(opencode): skip share sync for unshared sessions (#23159) --- packages/opencode/src/share/share-next.ts | 41 ++++++++++++++++++----- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/packages/opencode/src/share/share-next.ts b/packages/opencode/src/share/share-next.ts index 3484d5da76..2622f4f7f0 100644 --- a/packages/opencode/src/share/share-next.ts +++ b/packages/opencode/src/share/share-next.ts @@ -38,8 +38,9 @@ const ShareSchema = Schema.Struct({ export type Share = typeof ShareSchema.Type type State = { - queue: Map }> + queue: Map> scope: Scope.Closeable + shared: Map } type Data = @@ -118,17 +119,20 @@ export const layer = Layer.effect( function sync(sessionID: SessionID, data: Data[]): Effect.Effect { return Effect.gen(function* () { if (disabled) return + const share = yield* getCached(sessionID) + if (!share) return + const s = yield* InstanceState.get(state) const existing = s.queue.get(sessionID) if (existing) { for (const item of data) { - existing.data.set(key(item), item) + existing.set(key(item), item) } return } const next = new Map(data.map((item) => [key(item), item])) - s.queue.set(sessionID, { data: next }) + s.queue.set(sessionID, next) yield* flush(sessionID).pipe( Effect.delay(1000), Effect.catchCause((cause) => @@ -143,13 +147,14 @@ export const layer = Layer.effect( const state: InstanceState.InstanceState = yield* InstanceState.make( Effect.fn("ShareNext.state")(function* (_ctx) { - const cache: State = { queue: new Map(), scope: yield* Scope.make() } + const cache: State = { queue: new Map(), scope: yield* Scope.make(), shared: new Map() } yield* Effect.addFinalizer(() => Scope.close(cache.scope, Exit.void).pipe( Effect.andThen( Effect.sync(() => { cache.queue.clear() + cache.shared.clear() }), ), ), @@ -227,6 +232,18 @@ export const layer = Layer.effect( 
return { id: row.id, secret: row.secret, url: row.url } satisfies Share }) + const getCached = Effect.fnUntraced(function* (sessionID: SessionID) { + const s = yield* InstanceState.get(state) + if (s.shared.has(sessionID)) { + const cached = s.shared.get(sessionID) + return cached === null ? undefined : cached + } + + const share = yield* get(sessionID) + s.shared.set(sessionID, share ?? null) + return share + }) + const flush = Effect.fn("ShareNext.flush")(function* (sessionID: SessionID) { if (disabled) return const s = yield* InstanceState.get(state) @@ -235,13 +252,13 @@ export const layer = Layer.effect( s.queue.delete(sessionID) - const share = yield* get(sessionID) + const share = yield* getCached(sessionID) if (!share) return const req = yield* request() const res = yield* HttpClientRequest.post(`${req.baseUrl}${req.api.sync(share.id)}`).pipe( HttpClientRequest.setHeaders(req.headers), - HttpClientRequest.bodyJson({ secret: share.secret, data: Array.from(queued.data.values()) }), + HttpClientRequest.bodyJson({ secret: share.secret, data: Array.from(queued.values()) }), Effect.flatMap((r) => http.execute(r)), ) @@ -307,6 +324,7 @@ export const layer = Layer.effect( .run(), ) const s = yield* InstanceState.get(state) + s.shared.set(sessionID, result) yield* full(sessionID).pipe( Effect.catchCause((cause) => Effect.sync(() => { @@ -321,8 +339,13 @@ export const layer = Layer.effect( const remove = Effect.fn("ShareNext.remove")(function* (sessionID: SessionID) { if (disabled) return log.info("removing share", { sessionID }) - const share = yield* get(sessionID) - if (!share) return + const s = yield* InstanceState.get(state) + const share = yield* getCached(sessionID) + if (!share) { + s.shared.delete(sessionID) + s.queue.delete(sessionID) + return + } const req = yield* request() yield* HttpClientRequest.delete(`${req.baseUrl}${req.api.remove(share.id)}`).pipe( @@ -332,6 +355,8 @@ export const layer = Layer.effect( ) yield* db((db) => 
db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run()) + s.shared.delete(sessionID) + s.queue.delete(sessionID) }) return Service.of({ init, url, request, create, remove }) From 6af8ab0df201e245fac503aa8822750662ebf6b9 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 16:20:57 -0400 Subject: [PATCH 185/201] docs(http-api): refresh bridge inventory and clarify Schema.Class vs Struct (#23164) --- packages/opencode/specs/effect/http-api.md | 77 ++++++++++++++++------ 1 file changed, 58 insertions(+), 19 deletions(-) diff --git a/packages/opencode/specs/effect/http-api.md b/packages/opencode/specs/effect/http-api.md index 71b50250ed..09cb86a000 100644 --- a/packages/opencode/specs/effect/http-api.md +++ b/packages/opencode/specs/effect/http-api.md @@ -189,10 +189,46 @@ Ordering for a route-group migration: SDK shape rule: -- every schema migration must preserve the generated SDK output byte-for-byte -- `Schema.Class` emits a named `$ref` in OpenAPI via its identifier — use it only for types that already had `.meta({ ref })` in the old Zod schema -- inner / nested types that were anonymous in the old Zod schema should stay as `Schema.Struct` (not `Schema.Class`) to avoid introducing new named components in the OpenAPI spec -- if a diff appears in `packages/sdk/js/src/v2/gen/types.gen.ts`, the migration introduced an unintended API surface change — fix it before merging +- every schema migration must preserve the generated SDK output byte-for-byte **unless the new ref is intentional** (see Schema.Class vs Schema.Struct below) +- if an unintended diff appears in `packages/sdk/js/src/v2/gen/types.gen.ts`, the migration introduced an unintended API surface change — fix it before merging + +### Schema.Class vs Schema.Struct + +The pattern choice determines whether a schema becomes a **named** export in the SDK or stays **anonymous inline**. 
+ +**Schema.Class** emits a named `$ref` in OpenAPI via its identifier → produces a named `export type Foo = ...` in `types.gen.ts`: + +```ts +export class Info extends Schema.Class("FooConfig")({ ... }) { + static readonly zod = zod(this) +} +``` + +**Schema.Struct** stays anonymous and is inlined everywhere it is referenced: + +```ts +export const Info = Schema.Struct({ ... }).pipe( + withStatics((s) => ({ zod: zod(s) })), +) +export type Info = Schema.Schema.Type +``` + +When to use each: + +- Use **Schema.Class** when: + - the original Zod had `.meta({ ref: ... })` (preserve the existing named SDK type byte-for-byte) + - the schema is a top-level endpoint request or response (SDK consumers benefit from a stable importable name) +- Use **Schema.Struct** when: + - the type is only used as a nested field inside another named schema + - the original Zod was anonymous and promoting it would bloat SDK types with no import value + +Promoting a previously-anonymous schema to Schema.Class is acceptable when it is top-level or endpoint-facing, but call it out in the PR — it is an additive SDK change (`export type Foo = ...` newly appears) even if it preserves the JSON shape. + +Schemas that are **not** pure objects (enums, unions, records, tuples) cannot use Schema.Class. 
For those, add `.annotate({ identifier: "FooName" })` to get the same named-ref behavior: + +```ts +export const Action = Schema.Literals(["ask", "allow", "deny"]).annotate({ identifier: "PermissionActionConfig" }) +``` Temporary exception: @@ -365,17 +401,16 @@ Current instance route inventory: endpoints: `GET /question`, `POST /question/:requestID/reply`, `POST /question/:requestID/reject` - `permission` - `bridged` endpoints: `GET /permission`, `POST /permission/:requestID/reply` -- `provider` - `bridged` (partial) - bridged endpoint: `GET /provider/auth` - not yet ported: `GET /provider`, OAuth mutations -- `config` - `next` - best next endpoint: `GET /config/providers` +- `provider` - `bridged` + endpoints: `GET /provider`, `GET /provider/auth`, `POST /provider/:providerID/oauth/authorize`, `POST /provider/:providerID/oauth/callback` +- `config` - `bridged` (partial) + bridged endpoint: `GET /config/providers` later endpoint: `GET /config` defer `PATCH /config` for now -- `project` - `later` - best small reads: `GET /project`, `GET /project/current` +- `project` - `bridged` (partial) + bridged endpoints: `GET /project`, `GET /project/current` defer git-init mutation first -- `workspace` - `later` +- `workspace` - `next` best small reads: `GET /experimental/workspace/adaptor`, `GET /experimental/workspace`, `GET /experimental/workspace/status` defer create/remove mutations first - `file` - `later` @@ -393,12 +428,12 @@ Current instance route inventory: - `tui` - `defer` queue-style UI bridge, weak early `HttpApi` fit -Recommended near-term sequence after the first spike: +Recommended near-term sequence: -1. `provider` auth read endpoint -2. `config` providers read endpoint -3. `project` read endpoints -4. `workspace` read endpoints +1. `workspace` read endpoints (`GET /experimental/workspace/adaptor`, `GET /experimental/workspace`, `GET /experimental/workspace/status`) +2. `config` full read endpoint (`GET /config`) +3. `file` JSON read endpoints +4. 
`mcp` JSON read endpoints ## Checklist @@ -411,8 +446,12 @@ Recommended near-term sequence after the first spike: - [x] gate behind `OPENCODE_EXPERIMENTAL_HTTPAPI` flag - [x] verify OTEL spans and HTTP logs flow to motel - [x] bridge question, permission, and provider auth routes -- [ ] port remaining provider endpoints (`GET /provider`, OAuth mutations) -- [ ] port `config` read endpoints +- [x] port remaining provider endpoints (`GET /provider`, OAuth mutations) +- [x] port `config` providers read endpoint +- [x] port `project` read endpoints (`GET /project`, `GET /project/current`) +- [ ] port `workspace` read endpoints +- [ ] port `GET /config` full read endpoint +- [ ] port `file` JSON read endpoints - [ ] decide when to remove the flag and make Effect routes the default ## Rule of thumb From 11fa257549a1094b43d25f014e54557ed6aab660 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 16:25:49 -0400 Subject: [PATCH 186/201] refactor(config): migrate mcp schemas to Effect Schema.Class (#23163) --- packages/opencode/src/config/config.ts | 2 +- packages/opencode/src/config/mcp.ts | 118 +++++++++--------- .../src/server/routes/instance/mcp.ts | 2 +- 3 files changed, 58 insertions(+), 64 deletions(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 459f76961a..258500d7bd 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -178,7 +178,7 @@ export const Info = z .record( z.string(), z.union([ - ConfigMCP.Info, + ConfigMCP.Info.zod, z .object({ enabled: z.boolean(), diff --git a/packages/opencode/src/config/mcp.ts b/packages/opencode/src/config/mcp.ts index 5036cd6e4f..8b77bc4c28 100644 --- a/packages/opencode/src/config/mcp.ts +++ b/packages/opencode/src/config/mcp.ts @@ -1,68 +1,62 @@ -import z from "zod" +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" -export const Local = z - .object({ 
- type: z.literal("local").describe("Type of MCP server connection"), - command: z.string().array().describe("Command and arguments to run the MCP server"), - environment: z - .record(z.string(), z.string()) - .optional() - .describe("Environment variables to set when running the MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpLocalConfig", - }) +export class Local extends Schema.Class("McpLocalConfig")({ + type: Schema.Literal("local").annotate({ description: "Type of MCP server connection" }), + command: Schema.mutable(Schema.Array(Schema.String)).annotate({ + description: "Command and arguments to run the MCP server", + }), + environment: Schema.optional(Schema.Record(Schema.String, Schema.String)).annotate({ + description: "Environment variables to set when running the MCP server", + }), + enabled: Schema.optional(Schema.Boolean).annotate({ + description: "Enable or disable the MCP server on startup", + }), + timeout: Schema.optional(Schema.Number).annotate({ + description: "Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified.", + }), +}) { + static readonly zod = zod(this) +} -export const OAuth = z - .object({ - clientId: z - .string() - .optional() - .describe("OAuth client ID. 
If not provided, dynamic client registration (RFC 7591) will be attempted."), - clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), - scope: z.string().optional().describe("OAuth scopes to request during authorization"), - redirectUri: z - .string() - .optional() - .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), - }) - .strict() - .meta({ - ref: "McpOAuthConfig", - }) -export type OAuth = z.infer +export class OAuth extends Schema.Class("McpOAuthConfig")({ + clientId: Schema.optional(Schema.String).annotate({ + description: "OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted.", + }), + clientSecret: Schema.optional(Schema.String).annotate({ + description: "OAuth client secret (if required by the authorization server)", + }), + scope: Schema.optional(Schema.String).annotate({ description: "OAuth scopes to request during authorization" }), + redirectUri: Schema.optional(Schema.String).annotate({ + description: "OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback).", + }), +}) { + static readonly zod = zod(this) +} -export const Remote = z - .object({ - type: z.literal("remote").describe("Type of MCP server connection"), - url: z.string().describe("URL of the remote MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), - oauth: z - .union([OAuth, z.literal(false)]) - .optional() - .describe("OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection."), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpRemoteConfig", - }) +export class Remote extends Schema.Class("McpRemoteConfig")({ + type: Schema.Literal("remote").annotate({ description: "Type of MCP server connection" }), + url: Schema.String.annotate({ description: "URL of the remote MCP server" }), + enabled: Schema.optional(Schema.Boolean).annotate({ + description: "Enable or disable the MCP server on startup", + }), + headers: Schema.optional(Schema.Record(Schema.String, Schema.String)).annotate({ + description: "Headers to send with the request", + }), + oauth: Schema.optional(Schema.Union([OAuth, Schema.Literal(false)])).annotate({ + description: "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", + }), + timeout: Schema.optional(Schema.Number).annotate({ + description: "Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified.", + }), +}) { + static readonly zod = zod(this) +} -export const Info = z.discriminatedUnion("type", [Local, Remote]) -export type Info = z.infer +export const Info = Schema.Union([Local, Remote]) + .annotate({ discriminator: "type" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Info = Schema.Schema.Type export * as ConfigMCP from "./mcp" diff --git a/packages/opencode/src/server/routes/instance/mcp.ts b/packages/opencode/src/server/routes/instance/mcp.ts index 197185bde0..b42cfb5314 100644 --- a/packages/opencode/src/server/routes/instance/mcp.ts +++ b/packages/opencode/src/server/routes/instance/mcp.ts @@ -54,7 +54,7 @@ export const McpRoutes = lazy(() => "json", z.object({ name: z.string(), - config: ConfigMCP.Info, + config: ConfigMCP.Info.zod, }), ), async (c) => { From 54435325b6adf292d720885622f1282eaed98c92 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 20:26:43 +0000 Subject: [PATCH 187/201] chore: generate --- packages/sdk/openapi.json | 17 +++++------------ 1 
file changed, 5 insertions(+), 12 deletions(-) diff --git a/packages/sdk/openapi.json b/packages/sdk/openapi.json index 3b811f2fa9..71e78307cd 100644 --- a/packages/sdk/openapi.json +++ b/packages/sdk/openapi.json @@ -11428,13 +11428,10 @@ }, "timeout": { "description": "Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified.", - "type": "integer", - "exclusiveMinimum": 0, - "maximum": 9007199254740991 + "type": "number" } }, - "required": ["type", "command"], - "additionalProperties": false + "required": ["type", "command"] }, "McpOAuthConfig": { "type": "object", @@ -11455,8 +11452,7 @@ "description": "OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback).", "type": "string" } - }, - "additionalProperties": false + } }, "McpRemoteConfig": { "type": "object", @@ -11498,13 +11494,10 @@ }, "timeout": { "description": "Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified.", - "type": "integer", - "exclusiveMinimum": 0, - "maximum": 9007199254740991 + "type": "number" } }, - "required": ["type", "url"], - "additionalProperties": false + "required": ["type", "url"] }, "LayoutConfig": { "description": "@deprecated Always uses stretch layout.", From 650a13a6908e23c30e07329f9f3ec816d7a6f2a7 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 16:34:47 -0400 Subject: [PATCH 188/201] refactor(config): migrate lsp schemas to Effect Schema (#23167) --- packages/opencode/src/config/config.ts | 2 +- packages/opencode/src/config/lsp.ts | 58 ++++++++++++++------------ 2 files changed, 33 insertions(+), 27 deletions(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 258500d7bd..039b0598da 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -189,7 +189,7 @@ export const Info = z .optional() .describe("MCP (Model Context Protocol) server configurations"), formatter: 
ConfigFormatter.Info.optional(), - lsp: ConfigLSP.Info.optional(), + lsp: ConfigLSP.Info.zod.optional(), instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"), layout: Layout.optional().describe("@deprecated Always uses stretch layout."), permission: ConfigPermission.Info.optional(), diff --git a/packages/opencode/src/config/lsp.ts b/packages/opencode/src/config/lsp.ts index 5530a5be56..94a6d4bc56 100644 --- a/packages/opencode/src/config/lsp.ts +++ b/packages/opencode/src/config/lsp.ts @@ -1,37 +1,43 @@ export * as ConfigLSP from "./lsp" -import z from "zod" +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" import * as LSPServer from "../lsp/server" -export const Disabled = z.object({ - disabled: z.literal(true), -}) +export const Disabled = Schema.Struct({ + disabled: Schema.Literal(true), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) -export const Entry = z.union([ +export const Entry = Schema.Union([ Disabled, - z.object({ - command: z.array(z.string()), - extensions: z.array(z.string()).optional(), - disabled: z.boolean().optional(), - env: z.record(z.string(), z.string()).optional(), - initialization: z.record(z.string(), z.any()).optional(), + Schema.Struct({ + command: Schema.mutable(Schema.Array(Schema.String)), + extensions: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + disabled: Schema.optional(Schema.Boolean), + env: Schema.optional(Schema.Record(Schema.String, Schema.String)), + initialization: Schema.optional(Schema.Record(Schema.String, Schema.Unknown)), }), -]) +]).pipe(withStatics((s) => ({ zod: zod(s) }))) -export const Info = z.union([z.boolean(), z.record(z.string(), Entry)]).refine( - (data) => { - if (typeof data === "boolean") return true - const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) +export const Info = Schema.Union([Schema.Boolean, Schema.Record(Schema.String, 
Entry)]).pipe( + withStatics((s) => ({ + zod: zod(s).refine( + (data) => { + if (typeof data === "boolean") return true + const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) - return Object.entries(data).every(([id, config]) => { - if (config.disabled) return true - if (serverIds.has(id)) return true - return Boolean(config.extensions) - }) - }, - { - error: "For custom LSP servers, 'extensions' array is required.", - }, + return Object.entries(data).every(([id, config]) => { + if (config.disabled) return true + if (serverIds.has(id)) return true + return Boolean(config.extensions) + }) + }, + { + error: "For custom LSP servers, 'extensions' array is required.", + }, + ), + })), ) -export type Info = z.infer +export type Info = Schema.Schema.Type From d11268ece76d132108feda4bb91f0caacac51719 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 16:35:42 -0400 Subject: [PATCH 189/201] refactor(config): migrate permission Action/Object/Rule leaves to Effect Schema (#23168) --- packages/opencode/src/config/permission.ts | 63 +++++++++++----------- 1 file changed, 33 insertions(+), 30 deletions(-) diff --git a/packages/opencode/src/config/permission.ts b/packages/opencode/src/config/permission.ts index af01f6f2a3..7cfbaec01f 100644 --- a/packages/opencode/src/config/permission.ts +++ b/packages/opencode/src/config/permission.ts @@ -1,5 +1,8 @@ export * as ConfigPermission from "./permission" +import { Schema } from "effect" import z from "zod" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" const permissionPreprocess = (val: unknown) => { if (typeof val === "object" && val !== null && !Array.isArray(val)) { @@ -8,20 +11,20 @@ const permissionPreprocess = (val: unknown) => { return val } -export const Action = z.enum(["ask", "allow", "deny"]).meta({ - ref: "PermissionActionConfig", -}) -export type Action = z.infer +export const Action = Schema.Literals(["ask", "allow", "deny"]) + .annotate({ 
identifier: "PermissionActionConfig" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Action = Schema.Schema.Type -export const Object = z.record(z.string(), Action).meta({ - ref: "PermissionObjectConfig", -}) -export type Object = z.infer +export const Object = Schema.Record(Schema.String, Action) + .annotate({ identifier: "PermissionObjectConfig" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Object = Schema.Schema.Type -export const Rule = z.union([Action, Object]).meta({ - ref: "PermissionRuleConfig", -}) -export type Rule = z.infer +export const Rule = Schema.Union([Action, Object]) + .annotate({ identifier: "PermissionRuleConfig" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Rule = Schema.Schema.Type const transform = (x: unknown): Record => { if (typeof x === "string") return { "*": x as Action } @@ -41,25 +44,25 @@ export const Info = z z .object({ __originalKeys: z.string().array().optional(), - read: Rule.optional(), - edit: Rule.optional(), - glob: Rule.optional(), - grep: Rule.optional(), - list: Rule.optional(), - bash: Rule.optional(), - task: Rule.optional(), - external_directory: Rule.optional(), - todowrite: Action.optional(), - question: Action.optional(), - webfetch: Action.optional(), - websearch: Action.optional(), - codesearch: Action.optional(), - lsp: Rule.optional(), - doom_loop: Action.optional(), - skill: Rule.optional(), + read: Rule.zod.optional(), + edit: Rule.zod.optional(), + glob: Rule.zod.optional(), + grep: Rule.zod.optional(), + list: Rule.zod.optional(), + bash: Rule.zod.optional(), + task: Rule.zod.optional(), + external_directory: Rule.zod.optional(), + todowrite: Action.zod.optional(), + question: Action.zod.optional(), + webfetch: Action.zod.optional(), + websearch: Action.zod.optional(), + codesearch: Action.zod.optional(), + lsp: Rule.zod.optional(), + doom_loop: Action.zod.optional(), + skill: Rule.zod.optional(), }) - .catchall(Rule) - .or(Action), + .catchall(Rule.zod) + 
.or(Action.zod), ) .transform(transform) .meta({ From 47f553f9ba9d98056fa5a20fd1a4e6d4d63df016 Mon Sep 17 00:00:00 2001 From: James Long Date: Fri, 17 Apr 2026 16:39:34 -0400 Subject: [PATCH 190/201] fix(core): more explicit routing to fix workspace instance issue (#23171) --- .../src/server/routes/control/index.ts | 2 -- .../src/server/routes/instance/index.ts | 5 ++--- packages/opencode/src/server/server.ts | 19 +++++++++++-------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/opencode/src/server/routes/control/index.ts b/packages/opencode/src/server/routes/control/index.ts index 3fd60636ff..60883274a5 100644 --- a/packages/opencode/src/server/routes/control/index.ts +++ b/packages/opencode/src/server/routes/control/index.ts @@ -7,7 +7,6 @@ import { Hono } from "hono" import { describeRoute, resolver, validator, openAPIRouteHandler } from "hono-openapi" import z from "zod" import { errors } from "../../error" -import { WorkspaceRoutes } from "./workspace" export function ControlPlaneRoutes(): Hono { const app = new Hono() @@ -158,5 +157,4 @@ export function ControlPlaneRoutes(): Hono { return c.json(true) }, ) - .route("/experimental/workspace", WorkspaceRoutes()) } diff --git a/packages/opencode/src/server/routes/instance/index.ts b/packages/opencode/src/server/routes/instance/index.ts index c0339fded7..ddb7e335ad 100644 --- a/packages/opencode/src/server/routes/instance/index.ts +++ b/packages/opencode/src/server/routes/instance/index.ts @@ -15,7 +15,6 @@ import { Command } from "@/command" import { QuestionRoutes } from "./question" import { PermissionRoutes } from "./permission" import { Flag } from "@/flag/flag" -import { WorkspaceID } from "@/control-plane/schema" import { ExperimentalHttpApiServer } from "./httpapi/server" import { ProjectRoutes } from "./project" import { SessionRoutes } from "./session" @@ -30,8 +29,8 @@ import { SyncRoutes } from "./sync" import { AppRuntime } from "@/effect/app-runtime" import { 
InstanceMiddleware } from "./middleware" -export const InstanceRoutes = (upgrade: UpgradeWebSocket, workspaceID?: WorkspaceID): Hono => { - const app = new Hono().use(InstanceMiddleware(workspaceID)) +export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { + const app = new Hono() if (Flag.OPENCODE_EXPERIMENTAL_HTTPAPI) { const handler = ExperimentalHttpApiServer.webHandler().handler diff --git a/packages/opencode/src/server/server.ts b/packages/opencode/src/server/server.ts index f608c2b732..8b1f1aee10 100644 --- a/packages/opencode/src/server/server.ts +++ b/packages/opencode/src/server/server.ts @@ -14,6 +14,8 @@ import { ControlPlaneRoutes } from "./routes/control" import { UIRoutes } from "./routes/ui" import { GlobalRoutes } from "./routes/global" import { WorkspaceRouterMiddleware } from "./workspace" +import { InstanceMiddleware } from "./routes/instance/middleware" +import { WorkspaceRoutes } from "./routes/control/workspace" // @ts-ignore This global is needed to prevent ai-sdk from logging warnings to stdout https://github.com/vercel/ai/blob/2dc67e0ef538307f21368db32d5a12345d98831b/packages/ai/src/logger/log-warnings.ts#L85 globalThis.AI_SDK_LOG_WARNINGS = false @@ -45,14 +47,9 @@ function create(opts: { cors?: string[] }) { if (Flag.OPENCODE_WORKSPACE_ID) { return { app: app + .use(InstanceMiddleware(Flag.OPENCODE_WORKSPACE_ID ? WorkspaceID.make(Flag.OPENCODE_WORKSPACE_ID) : undefined)) .use(FenceMiddleware) - .route( - "/", - InstanceRoutes( - runtime.upgradeWebSocket, - Flag.OPENCODE_WORKSPACE_ID ? 
WorkspaceID.make(Flag.OPENCODE_WORKSPACE_ID) : undefined, - ), - ), + .route("/", InstanceRoutes(runtime.upgradeWebSocket)), runtime, } } @@ -60,7 +57,13 @@ function create(opts: { cors?: string[] }) { return { app: app .route("/", ControlPlaneRoutes()) - .use(WorkspaceRouterMiddleware(runtime.upgradeWebSocket)) + .route( + "/", + new Hono() + .use(InstanceMiddleware()) + .route("/experimental/workspace", WorkspaceRoutes()) + .use(WorkspaceRouterMiddleware(runtime.upgradeWebSocket)), + ) .route("/", InstanceRoutes(runtime.upgradeWebSocket)) .route("/", UIRoutes()), runtime, From e7686dbd643241f7e4ebf4a5e37e5ef33ad7797f Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 16:46:05 -0400 Subject: [PATCH 191/201] feat(effect-zod): translate Schema.check filters into zod .superRefine + promote LSP refinement to Effect layer (#23173) --- packages/opencode/src/config/lsp.ts | 38 ++++---- packages/opencode/src/util/effect-zod.ts | 23 ++++- packages/opencode/test/config/lsp.test.ts | 87 +++++++++++++++++++ .../opencode/test/util/effect-zod.test.ts | 61 +++++++++++++ 4 files changed, 190 insertions(+), 19 deletions(-) create mode 100644 packages/opencode/test/config/lsp.test.ts diff --git a/packages/opencode/src/config/lsp.ts b/packages/opencode/src/config/lsp.ts index 94a6d4bc56..6d61c6b8e3 100644 --- a/packages/opencode/src/config/lsp.ts +++ b/packages/opencode/src/config/lsp.ts @@ -20,24 +20,26 @@ export const Entry = Schema.Union([ }), ]).pipe(withStatics((s) => ({ zod: zod(s) }))) -export const Info = Schema.Union([Schema.Boolean, Schema.Record(Schema.String, Entry)]).pipe( - withStatics((s) => ({ - zod: zod(s).refine( - (data) => { - if (typeof data === "boolean") return true - const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) - - return Object.entries(data).every(([id, config]) => { - if (config.disabled) return true - if (serverIds.has(id)) return true - return Boolean(config.extensions) - }) - }, - { - error: "For custom 
LSP servers, 'extensions' array is required.", - }, - ), - })), +/** + * For custom (non-builtin) LSP server entries, `extensions` is required so the + * client knows which files the server should attach to. Builtin server IDs and + * explicitly disabled entries are exempt. + */ +export const requiresExtensionsForCustomServers = Schema.makeFilter>>( + (data) => { + if (typeof data === "boolean") return undefined + const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) + const ok = Object.entries(data).every(([id, config]) => { + if ("disabled" in config && config.disabled) return true + if (serverIds.has(id)) return true + return "extensions" in config && Boolean(config.extensions) + }) + return ok ? undefined : "For custom LSP servers, 'extensions' array is required." + }, ) +export const Info = Schema.Union([Schema.Boolean, Schema.Record(Schema.String, Entry)]) + .check(requiresExtensionsForCustomServers) + .pipe(withStatics((s) => ({ zod: zod(s) }))) + export type Info = Schema.Schema.Type diff --git a/packages/opencode/src/util/effect-zod.ts b/packages/opencode/src/util/effect-zod.ts index 6e99fd4688..c3240deaac 100644 --- a/packages/opencode/src/util/effect-zod.ts +++ b/packages/opencode/src/util/effect-zod.ts @@ -16,13 +16,34 @@ function walk(ast: SchemaAST.AST): z.ZodTypeAny { const override = (ast.annotations as any)?.[ZodOverride] as z.ZodTypeAny | undefined if (override) return override - const out = body(ast) + let out = body(ast) + for (const check of ast.checks ?? []) { + out = applyCheck(out, check, ast) + } const desc = SchemaAST.resolveDescription(ast) const ref = SchemaAST.resolveIdentifier(ast) const next = desc ? out.describe(desc) : out return ref ? 
next.meta({ ref }) : next } +function applyCheck(out: z.ZodTypeAny, check: SchemaAST.Check, ast: SchemaAST.AST): z.ZodTypeAny { + if (check._tag === "FilterGroup") { + return check.checks.reduce((acc, sub) => applyCheck(acc, sub, ast), out) + } + return out.superRefine((value, ctx) => { + const issue = check.run(value, ast, {} as any) + if (!issue) return + const message = issueMessage(issue) ?? (check.annotations as any)?.message ?? "Validation failed" + ctx.addIssue({ code: "custom", message }) + }) +} + +function issueMessage(issue: any): string | undefined { + if (typeof issue?.annotations?.message === "string") return issue.annotations.message + if (typeof issue?.message === "string") return issue.message + return undefined +} + function body(ast: SchemaAST.AST): z.ZodTypeAny { if (SchemaAST.isOptional(ast)) return opt(ast) diff --git a/packages/opencode/test/config/lsp.test.ts b/packages/opencode/test/config/lsp.test.ts new file mode 100644 index 0000000000..1d24fe124d --- /dev/null +++ b/packages/opencode/test/config/lsp.test.ts @@ -0,0 +1,87 @@ +import { describe, expect, test } from "bun:test" +import { Schema } from "effect" +import { ConfigLSP } from "../../src/config/lsp" + +// The LSP config refinement enforces: any custom (non-builtin) LSP server +// entry must declare an `extensions` array so the client knows which files +// the server should attach to. Builtin server IDs and explicitly disabled +// entries are exempt. +// +// Both validation paths must honor this rule: +// - `Schema.decodeUnknownSync(ConfigLSP.Info)` (Effect layer) +// - `ConfigLSP.Info.zod.parse(...)` (derived Zod) +// +// `typescript` is a builtin server id (see src/lsp/server.ts). 
+describe("ConfigLSP.Info refinement", () => { + const decodeEffect = Schema.decodeUnknownSync(ConfigLSP.Info) + + describe("accepted inputs", () => { + test("true and false pass (top-level toggle)", () => { + expect(decodeEffect(true)).toBe(true) + expect(decodeEffect(false)).toBe(false) + expect(ConfigLSP.Info.zod.parse(true)).toBe(true) + expect(ConfigLSP.Info.zod.parse(false)).toBe(false) + }) + + test("builtin server with no extensions passes", () => { + const input = { typescript: { command: ["typescript-language-server", "--stdio"] } } + expect(decodeEffect(input)).toEqual(input) + expect(ConfigLSP.Info.zod.parse(input)).toEqual(input) + }) + + test("custom server WITH extensions passes", () => { + const input = { + "my-lsp": { command: ["my-lsp-bin"], extensions: [".ml"] }, + } + expect(decodeEffect(input)).toEqual(input) + expect(ConfigLSP.Info.zod.parse(input)).toEqual(input) + }) + + test("disabled custom server passes (no extensions needed)", () => { + const input = { "my-lsp": { disabled: true as const } } + expect(decodeEffect(input)).toEqual(input) + expect(ConfigLSP.Info.zod.parse(input)).toEqual(input) + }) + + test("mix of builtin and custom with extensions passes", () => { + const input = { + typescript: { command: ["typescript-language-server", "--stdio"] }, + "my-lsp": { command: ["my-lsp-bin"], extensions: [".ml"] }, + } + expect(decodeEffect(input)).toEqual(input) + expect(ConfigLSP.Info.zod.parse(input)).toEqual(input) + }) + }) + + describe("rejected inputs", () => { + const expectedMessage = "For custom LSP servers, 'extensions' array is required." 
+ + test("custom server WITHOUT extensions fails via Effect decode", () => { + expect(() => decodeEffect({ "my-lsp": { command: ["my-lsp-bin"] } })).toThrow(expectedMessage) + }) + + test("custom server WITHOUT extensions fails via derived Zod", () => { + const result = ConfigLSP.Info.zod.safeParse({ "my-lsp": { command: ["my-lsp-bin"] } }) + expect(result.success).toBe(false) + expect(result.error!.issues.some((i) => i.message === expectedMessage)).toBe(true) + }) + + test("custom server with empty extensions array fails (extensions must be non-empty-truthy)", () => { + // Boolean(['']) is true, so a non-empty array of strings is fine. + // Boolean([]) is also true in JS, so empty arrays are accepted by the + // refinement. This test documents current behavior. + const input = { "my-lsp": { command: ["my-lsp-bin"], extensions: [] } } + expect(decodeEffect(input)).toEqual(input) + expect(ConfigLSP.Info.zod.parse(input)).toEqual(input) + }) + + test("custom server without extensions mixed with a valid builtin still fails", () => { + const input = { + typescript: { command: ["typescript-language-server", "--stdio"] }, + "my-lsp": { command: ["my-lsp-bin"] }, + } + expect(() => decodeEffect(input)).toThrow(expectedMessage) + expect(ConfigLSP.Info.zod.safeParse(input).success).toBe(false) + }) + }) +}) diff --git a/packages/opencode/test/util/effect-zod.test.ts b/packages/opencode/test/util/effect-zod.test.ts index 7f7249514d..2a5cc45f8d 100644 --- a/packages/opencode/test/util/effect-zod.test.ts +++ b/packages/opencode/test/util/effect-zod.test.ts @@ -186,4 +186,65 @@ describe("util.effect-zod", () => { const schema = json(zod(Parent)) as any expect(schema.properties.sessionID).toEqual({ type: "string", pattern: "^ses.*" }) }) + + describe("Schema.check translation", () => { + test("filter returning string triggers refinement with that message", () => { + const isEven = Schema.makeFilter((n: number) => + n % 2 === 0 ? 
undefined : "expected an even number", + ) + const schema = zod(Schema.Number.check(isEven)) + + expect(schema.parse(4)).toBe(4) + const result = schema.safeParse(3) + expect(result.success).toBe(false) + expect(result.error!.issues[0].message).toBe("expected an even number") + }) + + test("filter returning false triggers refinement with fallback message", () => { + const nonEmpty = Schema.makeFilter((s: string) => s.length > 0) + const schema = zod(Schema.String.check(nonEmpty)) + + expect(schema.parse("hi")).toBe("hi") + const result = schema.safeParse("") + expect(result.success).toBe(false) + expect(result.error!.issues[0].message).toMatch(/./) + }) + + test("filter returning undefined passes validation", () => { + const alwaysOk = Schema.makeFilter(() => undefined) + const schema = zod(Schema.Number.check(alwaysOk)) + + expect(schema.parse(42)).toBe(42) + }) + + test("annotations.message on the filter is used when filter returns false", () => { + const positive = Schema.makeFilter( + (n: number) => n > 0, + { message: "must be positive" }, + ) + const schema = zod(Schema.Number.check(positive)) + + const result = schema.safeParse(-1) + expect(result.success).toBe(false) + expect(result.error!.issues[0].message).toBe("must be positive") + }) + + test("cross-field check on a record flags missing key", () => { + const hasKey = Schema.makeFilter( + (data: Record) => + "required" in data ? 
undefined : "missing 'required' key", + ) + const schema = zod( + Schema.Record(Schema.String, Schema.Struct({ enabled: Schema.Boolean })).check(hasKey), + ) + + expect(schema.parse({ required: { enabled: true } })).toEqual({ + required: { enabled: true }, + }) + + const result = schema.safeParse({ other: { enabled: true } }) + expect(result.success).toBe(false) + expect(result.error!.issues[0].message).toBe("missing 'required' key") + }) + }) }) From dc16013b4f0aa5e4d3eb433046965e1274da3990 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 20:47:05 +0000 Subject: [PATCH 192/201] chore: generate --- packages/opencode/src/config/lsp.ts | 24 +++++++++---------- .../opencode/test/util/effect-zod.test.ts | 18 ++++---------- 2 files changed, 17 insertions(+), 25 deletions(-) diff --git a/packages/opencode/src/config/lsp.ts b/packages/opencode/src/config/lsp.ts index 6d61c6b8e3..1cf93177e4 100644 --- a/packages/opencode/src/config/lsp.ts +++ b/packages/opencode/src/config/lsp.ts @@ -25,18 +25,18 @@ export const Entry = Schema.Union([ * client knows which files the server should attach to. Builtin server IDs and * explicitly disabled entries are exempt. */ -export const requiresExtensionsForCustomServers = Schema.makeFilter>>( - (data) => { - if (typeof data === "boolean") return undefined - const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) - const ok = Object.entries(data).every(([id, config]) => { - if ("disabled" in config && config.disabled) return true - if (serverIds.has(id)) return true - return "extensions" in config && Boolean(config.extensions) - }) - return ok ? undefined : "For custom LSP servers, 'extensions' array is required." 
- }, -) +export const requiresExtensionsForCustomServers = Schema.makeFilter< + boolean | Record> +>((data) => { + if (typeof data === "boolean") return undefined + const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) + const ok = Object.entries(data).every(([id, config]) => { + if ("disabled" in config && config.disabled) return true + if (serverIds.has(id)) return true + return "extensions" in config && Boolean(config.extensions) + }) + return ok ? undefined : "For custom LSP servers, 'extensions' array is required." +}) export const Info = Schema.Union([Schema.Boolean, Schema.Record(Schema.String, Entry)]) .check(requiresExtensionsForCustomServers) diff --git a/packages/opencode/test/util/effect-zod.test.ts b/packages/opencode/test/util/effect-zod.test.ts index 2a5cc45f8d..1c84c96f3a 100644 --- a/packages/opencode/test/util/effect-zod.test.ts +++ b/packages/opencode/test/util/effect-zod.test.ts @@ -189,9 +189,7 @@ describe("util.effect-zod", () => { describe("Schema.check translation", () => { test("filter returning string triggers refinement with that message", () => { - const isEven = Schema.makeFilter((n: number) => - n % 2 === 0 ? undefined : "expected an even number", - ) + const isEven = Schema.makeFilter((n: number) => (n % 2 === 0 ? 
undefined : "expected an even number")) const schema = zod(Schema.Number.check(isEven)) expect(schema.parse(4)).toBe(4) @@ -218,10 +216,7 @@ describe("util.effect-zod", () => { }) test("annotations.message on the filter is used when filter returns false", () => { - const positive = Schema.makeFilter( - (n: number) => n > 0, - { message: "must be positive" }, - ) + const positive = Schema.makeFilter((n: number) => n > 0, { message: "must be positive" }) const schema = zod(Schema.Number.check(positive)) const result = schema.safeParse(-1) @@ -230,13 +225,10 @@ describe("util.effect-zod", () => { }) test("cross-field check on a record flags missing key", () => { - const hasKey = Schema.makeFilter( - (data: Record) => - "required" in data ? undefined : "missing 'required' key", - ) - const schema = zod( - Schema.Record(Schema.String, Schema.Struct({ enabled: Schema.Boolean })).check(hasKey), + const hasKey = Schema.makeFilter((data: Record) => + "required" in data ? undefined : "missing 'required' key", ) + const schema = zod(Schema.Record(Schema.String, Schema.Struct({ enabled: Schema.Boolean })).check(hasKey)) expect(schema.parse({ required: { enabled: true } })).toEqual({ required: { enabled: true }, From b1307d5c2a41c12d6696b31fe46bea7882993753 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 16:49:36 -0400 Subject: [PATCH 193/201] refactor(config): migrate skills, formatter, console-state to Effect Schema (#23162) --- packages/opencode/src/config/config.ts | 4 ++-- packages/opencode/src/config/console-state.ts | 21 +++++++++--------- packages/opencode/src/config/formatter.ts | 22 +++++++++++-------- packages/opencode/src/config/skills.ts | 21 ++++++++++-------- .../server/routes/instance/experimental.ts | 2 +- packages/sdk/js/src/v2/gen/types.gen.ts | 12 +++++----- 6 files changed, 46 insertions(+), 36 deletions(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 039b0598da..261c5acab4 100644 --- 
a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -100,7 +100,7 @@ export const Info = z .record(z.string(), ConfigCommand.Info) .optional() .describe("Command configuration, see https://opencode.ai/docs/commands"), - skills: ConfigSkills.Info.optional().describe("Additional skill folder paths"), + skills: ConfigSkills.Info.zod.optional().describe("Additional skill folder paths"), watcher: z .object({ ignore: z.array(z.string()).optional(), @@ -188,7 +188,7 @@ export const Info = z ) .optional() .describe("MCP (Model Context Protocol) server configurations"), - formatter: ConfigFormatter.Info.optional(), + formatter: ConfigFormatter.Info.zod.optional(), lsp: ConfigLSP.Info.zod.optional(), instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"), layout: Layout.optional().describe("@deprecated Always uses stretch layout."), diff --git a/packages/opencode/src/config/console-state.ts b/packages/opencode/src/config/console-state.ts index cf96a4e305..08668afe4e 100644 --- a/packages/opencode/src/config/console-state.ts +++ b/packages/opencode/src/config/console-state.ts @@ -1,15 +1,16 @@ -import z from "zod" +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" -export const ConsoleState = z.object({ - consoleManagedProviders: z.array(z.string()), - activeOrgName: z.string().optional(), - switchableOrgCount: z.number().int().nonnegative(), -}) +export class ConsoleState extends Schema.Class("ConsoleState")({ + consoleManagedProviders: Schema.mutable(Schema.Array(Schema.String)), + activeOrgName: Schema.optional(Schema.String), + switchableOrgCount: Schema.Number, +}) { + static readonly zod = zod(this) +} -export type ConsoleState = z.infer - -export const emptyConsoleState: ConsoleState = { +export const emptyConsoleState: ConsoleState = ConsoleState.make({ consoleManagedProviders: [], activeOrgName: undefined, switchableOrgCount: 0, -} +}) diff --git 
a/packages/opencode/src/config/formatter.ts b/packages/opencode/src/config/formatter.ts index 93b87f0281..8c1f09a247 100644 --- a/packages/opencode/src/config/formatter.ts +++ b/packages/opencode/src/config/formatter.ts @@ -1,13 +1,17 @@ export * as ConfigFormatter from "./formatter" -import z from "zod" +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" -export const Entry = z.object({ - disabled: z.boolean().optional(), - command: z.array(z.string()).optional(), - environment: z.record(z.string(), z.string()).optional(), - extensions: z.array(z.string()).optional(), -}) +export const Entry = Schema.Struct({ + disabled: Schema.optional(Schema.Boolean), + command: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + environment: Schema.optional(Schema.Record(Schema.String, Schema.String)), + extensions: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) -export const Info = z.union([z.boolean(), z.record(z.string(), Entry)]) -export type Info = z.infer +export const Info = Schema.Union([Schema.Boolean, Schema.Record(Schema.String, Entry)]).pipe( + withStatics((s) => ({ zod: zod(s) })), +) +export type Info = Schema.Schema.Type diff --git a/packages/opencode/src/config/skills.ts b/packages/opencode/src/config/skills.ts index 38cbf99e7d..f29d854f50 100644 --- a/packages/opencode/src/config/skills.ts +++ b/packages/opencode/src/config/skills.ts @@ -1,13 +1,16 @@ -import z from "zod" +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" -export const Info = z.object({ - paths: z.array(z.string()).optional().describe("Additional paths to skill folders"), - urls: z - .array(z.string()) - .optional() - .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), -}) +export const Info = Schema.Struct({ + paths: 
Schema.optional(Schema.Array(Schema.String)).annotate({ + description: "Additional paths to skill folders", + }), + urls: Schema.optional(Schema.Array(Schema.String)).annotate({ + description: "URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)", + }), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Info = z.infer +export type Info = Schema.Schema.Type export * as ConfigSkills from "./skills" diff --git a/packages/opencode/src/server/routes/instance/experimental.ts b/packages/opencode/src/server/routes/instance/experimental.ts index f7ecc8255b..d5659346ef 100644 --- a/packages/opencode/src/server/routes/instance/experimental.ts +++ b/packages/opencode/src/server/routes/instance/experimental.ts @@ -49,7 +49,7 @@ export const ExperimentalRoutes = lazy(() => description: "Active Console provider metadata", content: { "application/json": { - schema: resolver(ConsoleState), + schema: resolver(ConsoleState.zod), }, }, }, diff --git a/packages/sdk/js/src/v2/gen/types.gen.ts b/packages/sdk/js/src/v2/gen/types.gen.ts index 5698cba54f..72a383a608 100644 --- a/packages/sdk/js/src/v2/gen/types.gen.ts +++ b/packages/sdk/js/src/v2/gen/types.gen.ts @@ -1807,6 +1807,12 @@ export type Provider = { } } +export type ConsoleState = { + consoleManagedProviders: Array + activeOrgName?: string + switchableOrgCount: number +} + export type ToolIds = Array export type ToolListItem = { @@ -2933,11 +2939,7 @@ export type ExperimentalConsoleGetResponses = { /** * Active Console provider metadata */ - 200: { - consoleManagedProviders: Array - activeOrgName?: string - switchableOrgCount: number - } + 200: ConsoleState } export type ExperimentalConsoleGetResponse = ExperimentalConsoleGetResponses[keyof ExperimentalConsoleGetResponses] From ed0f0225025336ad84c04210850fcd7bfb38f221 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 20:50:37 +0000 Subject: [PATCH 194/201] chore: generate --- packages/sdk/openapi.json | 37 
+++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/packages/sdk/openapi.json b/packages/sdk/openapi.json index 71e78307cd..c3fd00356c 100644 --- a/packages/sdk/openapi.json +++ b/packages/sdk/openapi.json @@ -1607,24 +1607,7 @@ "content": { "application/json": { "schema": { - "type": "object", - "properties": { - "consoleManagedProviders": { - "type": "array", - "items": { - "type": "string" - } - }, - "activeOrgName": { - "type": "string" - }, - "switchableOrgCount": { - "type": "integer", - "minimum": 0, - "maximum": 9007199254740991 - } - }, - "required": ["consoleManagedProviders", "switchableOrgCount"] + "$ref": "#/components/schemas/ConsoleState" } } } @@ -12359,6 +12342,24 @@ }, "required": ["id", "name", "source", "env", "options", "models"] }, + "ConsoleState": { + "type": "object", + "properties": { + "consoleManagedProviders": { + "type": "array", + "items": { + "type": "string" + } + }, + "activeOrgName": { + "type": "string" + }, + "switchableOrgCount": { + "type": "number" + } + }, + "required": ["consoleManagedProviders", "switchableOrgCount"] + }, "ToolIDs": { "type": "array", "items": { From 999d8651aab7f9531539f686f685375fcbb19437 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 16:52:40 -0400 Subject: [PATCH 195/201] feat(server): wrap remaining route handlers in request spans (#23169) --- .../src/server/routes/instance/config.ts | 13 +- .../server/routes/instance/experimental.ts | 170 ++++---- .../src/server/routes/instance/file.ts | 88 ++-- .../src/server/routes/instance/index.ts | 87 ++-- .../src/server/routes/instance/mcp.ts | 80 ++-- .../src/server/routes/instance/permission.ts | 37 +- .../src/server/routes/instance/project.ts | 18 +- .../src/server/routes/instance/provider.ts | 117 +++-- .../src/server/routes/instance/pty.ts | 69 ++- .../src/server/routes/instance/question.ts | 47 +- .../src/server/routes/instance/session.ts | 411 +++++++++--------- 
.../src/server/routes/instance/tui.ts | 8 +- packages/opencode/src/server/workspace.ts | 5 +- 13 files changed, 550 insertions(+), 600 deletions(-) diff --git a/packages/opencode/src/server/routes/instance/config.ts b/packages/opencode/src/server/routes/instance/config.ts index 235f5682e2..7f368cd31c 100644 --- a/packages/opencode/src/server/routes/instance/config.ts +++ b/packages/opencode/src/server/routes/instance/config.ts @@ -5,7 +5,6 @@ import { Config } from "@/config" import { Provider } from "@/provider" import { errors } from "../../error" import { lazy } from "@/util/lazy" -import { AppRuntime } from "@/effect/app-runtime" import { jsonRequest } from "./trace" export const ConfigRoutes = lazy(() => @@ -52,11 +51,13 @@ export const ConfigRoutes = lazy(() => }, }), validator("json", Config.Info), - async (c) => { - const config = c.req.valid("json") - await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.update(config))) - return c.json(config) - }, + async (c) => + jsonRequest("ConfigRoutes.update", c, function* () { + const config = c.req.valid("json") + const cfg = yield* Config.Service + yield* cfg.update(config) + return config + }), ) .get( "/providers", diff --git a/packages/opencode/src/server/routes/instance/experimental.ts b/packages/opencode/src/server/routes/instance/experimental.ts index d5659346ef..9c86494987 100644 --- a/packages/opencode/src/server/routes/instance/experimental.ts +++ b/packages/opencode/src/server/routes/instance/experimental.ts @@ -12,11 +12,11 @@ import { Config } from "@/config" import { ConsoleState } from "@/config/console-state" import { Account } from "@/account/account" import { AccountID, OrgID } from "@/account/schema" -import { AppRuntime } from "@/effect/app-runtime" import { errors } from "../../error" import { lazy } from "@/util/lazy" import { Effect, Option } from "effect" import { Agent } from "@/agent/agent" +import { jsonRequest, runRequest } from "./trace" const ConsoleOrgOption = z.object({ 
accountID: z.string(), @@ -55,22 +55,18 @@ export const ExperimentalRoutes = lazy(() => }, }, }), - async (c) => { - const result = await AppRuntime.runPromise( - Effect.gen(function* () { - const config = yield* Config.Service - const account = yield* Account.Service - const [state, groups] = yield* Effect.all([config.getConsoleState(), account.orgsByAccount()], { - concurrency: "unbounded", - }) - return { - ...state, - switchableOrgCount: groups.reduce((count, group) => count + group.orgs.length, 0), - } - }), - ) - return c.json(result) - }, + async (c) => + jsonRequest("ExperimentalRoutes.console.get", c, function* () { + const config = yield* Config.Service + const account = yield* Account.Service + const [state, groups] = yield* Effect.all([config.getConsoleState(), account.orgsByAccount()], { + concurrency: "unbounded", + }) + return { + ...state, + switchableOrgCount: groups.reduce((count, group) => count + group.orgs.length, 0), + } + }), ) .get( "/console/orgs", @@ -89,28 +85,25 @@ export const ExperimentalRoutes = lazy(() => }, }, }), - async (c) => { - const orgs = await AppRuntime.runPromise( - Effect.gen(function* () { - const account = yield* Account.Service - const [groups, active] = yield* Effect.all([account.orgsByAccount(), account.active()], { - concurrency: "unbounded", - }) - const info = Option.getOrUndefined(active) - return groups.flatMap((group) => - group.orgs.map((org) => ({ - accountID: group.account.id, - accountEmail: group.account.email, - accountUrl: group.account.url, - orgID: org.id, - orgName: org.name, - active: !!info && info.id === group.account.id && info.active_org_id === org.id, - })), - ) - }), - ) - return c.json({ orgs }) - }, + async (c) => + jsonRequest("ExperimentalRoutes.console.listOrgs", c, function* () { + const account = yield* Account.Service + const [groups, active] = yield* Effect.all([account.orgsByAccount(), account.active()], { + concurrency: "unbounded", + }) + const info = Option.getOrUndefined(active) + 
const orgs = groups.flatMap((group) => + group.orgs.map((org) => ({ + accountID: group.account.id, + accountEmail: group.account.email, + accountUrl: group.account.url, + orgID: org.id, + orgName: org.name, + active: !!info && info.id === group.account.id && info.active_org_id === org.id, + })), + ) + return { orgs } + }), ) .post( "/console/switch", @@ -130,16 +123,13 @@ export const ExperimentalRoutes = lazy(() => }, }), validator("json", ConsoleSwitchBody), - async (c) => { - const body = c.req.valid("json") - await AppRuntime.runPromise( - Effect.gen(function* () { - const account = yield* Account.Service - yield* account.use(AccountID.make(body.accountID), Option.some(OrgID.make(body.orgID))) - }), - ) - return c.json(true) - }, + async (c) => + jsonRequest("ExperimentalRoutes.console.switchOrg", c, function* () { + const body = c.req.valid("json") + const account = yield* Account.Service + yield* account.use(AccountID.make(body.accountID), Option.some(OrgID.make(body.orgID))) + return true + }), ) .get( "/tool/ids", @@ -160,15 +150,11 @@ export const ExperimentalRoutes = lazy(() => ...errors(400), }, }), - async (c) => { - const ids = await AppRuntime.runPromise( - Effect.gen(function* () { - const registry = yield* ToolRegistry.Service - return yield* registry.ids() - }), - ) - return c.json(ids) - }, + async (c) => + jsonRequest("ExperimentalRoutes.tool.ids", c, function* () { + const registry = yield* ToolRegistry.Service + return yield* registry.ids() + }), ) .get( "/tool", @@ -210,7 +196,9 @@ export const ExperimentalRoutes = lazy(() => ), async (c) => { const { provider, model } = c.req.valid("query") - const tools = await AppRuntime.runPromise( + const tools = await runRequest( + "ExperimentalRoutes.tool.list", + c, Effect.gen(function* () { const agents = yield* Agent.Service const registry = yield* ToolRegistry.Service @@ -249,11 +237,12 @@ export const ExperimentalRoutes = lazy(() => }, }), validator("json", Worktree.CreateInput.optional()), - async 
(c) => { - const body = c.req.valid("json") - const worktree = await AppRuntime.runPromise(Worktree.Service.use((svc) => svc.create(body))) - return c.json(worktree) - }, + async (c) => + jsonRequest("ExperimentalRoutes.worktree.create", c, function* () { + const body = c.req.valid("json") + const svc = yield* Worktree.Service + return yield* svc.create(body) + }), ) .get( "/worktree", @@ -272,10 +261,11 @@ export const ExperimentalRoutes = lazy(() => }, }, }), - async (c) => { - const sandboxes = await AppRuntime.runPromise(Project.Service.use((svc) => svc.sandboxes(Instance.project.id))) - return c.json(sandboxes) - }, + async (c) => + jsonRequest("ExperimentalRoutes.worktree.list", c, function* () { + const svc = yield* Project.Service + return yield* svc.sandboxes(Instance.project.id) + }), ) .delete( "/worktree", @@ -296,14 +286,15 @@ export const ExperimentalRoutes = lazy(() => }, }), validator("json", Worktree.RemoveInput), - async (c) => { - const body = c.req.valid("json") - await AppRuntime.runPromise(Worktree.Service.use((svc) => svc.remove(body))) - await AppRuntime.runPromise( - Project.Service.use((svc) => svc.removeSandbox(Instance.project.id, body.directory)), - ) - return c.json(true) - }, + async (c) => + jsonRequest("ExperimentalRoutes.worktree.remove", c, function* () { + const body = c.req.valid("json") + const worktree = yield* Worktree.Service + const project = yield* Project.Service + yield* worktree.remove(body) + yield* project.removeSandbox(Instance.project.id, body.directory) + return true + }), ) .post( "/worktree/reset", @@ -324,11 +315,13 @@ export const ExperimentalRoutes = lazy(() => }, }), validator("json", Worktree.ResetInput), - async (c) => { - const body = c.req.valid("json") - await AppRuntime.runPromise(Worktree.Service.use((svc) => svc.reset(body))) - return c.json(true) - }, + async (c) => + jsonRequest("ExperimentalRoutes.worktree.reset", c, function* () { + const body = c.req.valid("json") + const svc = yield* 
Worktree.Service + yield* svc.reset(body) + return true + }), ) .get( "/session", @@ -406,15 +399,10 @@ export const ExperimentalRoutes = lazy(() => }, }, }), - async (c) => { - return c.json( - await AppRuntime.runPromise( - Effect.gen(function* () { - const mcp = yield* MCP.Service - return yield* mcp.resources() - }), - ), - ) - }, + async (c) => + jsonRequest("ExperimentalRoutes.resource.list", c, function* () { + const mcp = yield* MCP.Service + return yield* mcp.resources() + }), ), ) diff --git a/packages/opencode/src/server/routes/instance/file.ts b/packages/opencode/src/server/routes/instance/file.ts index a82e5687d8..bbef679a85 100644 --- a/packages/opencode/src/server/routes/instance/file.ts +++ b/packages/opencode/src/server/routes/instance/file.ts @@ -1,13 +1,12 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" -import { Effect } from "effect" import z from "zod" -import { AppRuntime } from "@/effect/app-runtime" import { File } from "@/file" import { Ripgrep } from "@/file/ripgrep" import { LSP } from "@/lsp" import { Instance } from "@/project/instance" import { lazy } from "@/util/lazy" +import { jsonRequest } from "./trace" export const FileRoutes = lazy(() => new Hono() @@ -34,13 +33,13 @@ export const FileRoutes = lazy(() => pattern: z.string(), }), ), - async (c) => { - const pattern = c.req.valid("query").pattern - const result = await AppRuntime.runPromise( - Ripgrep.Service.use((svc) => svc.search({ cwd: Instance.directory, pattern, limit: 10 })), - ) - return c.json(result.items) - }, + async (c) => + jsonRequest("FileRoutes.findText", c, function* () { + const pattern = c.req.valid("query").pattern + const svc = yield* Ripgrep.Service + const result = yield* svc.search({ cwd: Instance.directory, pattern, limit: 10 }) + return result.items + }), ) .get( "/find/file", @@ -68,25 +67,17 @@ export const FileRoutes = lazy(() => limit: z.coerce.number().int().min(1).max(200).optional(), }), ), - async 
(c) => { - const query = c.req.valid("query").query - const dirs = c.req.valid("query").dirs - const type = c.req.valid("query").type - const limit = c.req.valid("query").limit - const results = await AppRuntime.runPromise( - Effect.gen(function* () { - return yield* File.Service.use((svc) => - svc.search({ - query, - limit: limit ?? 10, - dirs: dirs !== "false", - type, - }), - ) - }), - ) - return c.json(results) - }, + async (c) => + jsonRequest("FileRoutes.findFile", c, function* () { + const query = c.req.valid("query") + const svc = yield* File.Service + return yield* svc.search({ + query: query.query, + limit: query.limit ?? 10, + dirs: query.dirs !== "false", + type: query.type, + }) + }), ) .get( "/find/symbol", @@ -138,15 +129,11 @@ export const FileRoutes = lazy(() => path: z.string(), }), ), - async (c) => { - const path = c.req.valid("query").path - const content = await AppRuntime.runPromise( - Effect.gen(function* () { - return yield* File.Service.use((svc) => svc.list(path)) - }), - ) - return c.json(content) - }, + async (c) => + jsonRequest("FileRoutes.list", c, function* () { + const svc = yield* File.Service + return yield* svc.list(c.req.valid("query").path) + }), ) .get( "/file/content", @@ -171,15 +158,11 @@ export const FileRoutes = lazy(() => path: z.string(), }), ), - async (c) => { - const path = c.req.valid("query").path - const content = await AppRuntime.runPromise( - Effect.gen(function* () { - return yield* File.Service.use((svc) => svc.read(path)) - }), - ) - return c.json(content) - }, + async (c) => + jsonRequest("FileRoutes.read", c, function* () { + const svc = yield* File.Service + return yield* svc.read(c.req.valid("query").path) + }), ) .get( "/file/status", @@ -198,13 +181,10 @@ export const FileRoutes = lazy(() => }, }, }), - async (c) => { - const content = await AppRuntime.runPromise( - Effect.gen(function* () { - return yield* File.Service.use((svc) => svc.status()) - }), - ) - return c.json(content) - }, + async (c) => + 
jsonRequest("FileRoutes.status", c, function* () { + const svc = yield* File.Service + return yield* svc.status() + }), ), ) diff --git a/packages/opencode/src/server/routes/instance/index.ts b/packages/opencode/src/server/routes/instance/index.ts index ddb7e335ad..5cc51d27ab 100644 --- a/packages/opencode/src/server/routes/instance/index.ts +++ b/packages/opencode/src/server/routes/instance/index.ts @@ -26,8 +26,8 @@ import { ExperimentalRoutes } from "./experimental" import { ProviderRoutes } from "./provider" import { EventRoutes } from "./event" import { SyncRoutes } from "./sync" -import { AppRuntime } from "@/effect/app-runtime" import { InstanceMiddleware } from "./middleware" +import { jsonRequest } from "./trace" export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { const app = new Hono() @@ -141,19 +141,14 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { }, }, }), - async (c) => { - return c.json( - await AppRuntime.runPromise( - Effect.gen(function* () { - const vcs = yield* Vcs.Service - const [branch, default_branch] = yield* Effect.all([vcs.branch(), vcs.defaultBranch()], { - concurrency: 2, - }) - return { branch, default_branch } - }), - ), - ) - }, + async (c) => + jsonRequest("InstanceRoutes.vcs.get", c, function* () { + const vcs = yield* Vcs.Service + const [branch, default_branch] = yield* Effect.all([vcs.branch(), vcs.defaultBranch()], { + concurrency: 2, + }) + return { branch, default_branch } + }), ) .get( "/vcs/diff", @@ -178,16 +173,11 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { mode: Vcs.Mode, }), ), - async (c) => { - return c.json( - await AppRuntime.runPromise( - Effect.gen(function* () { - const vcs = yield* Vcs.Service - return yield* vcs.diff(c.req.valid("query").mode) - }), - ), - ) - }, + async (c) => + jsonRequest("InstanceRoutes.vcs.diff", c, function* () { + const vcs = yield* Vcs.Service + return yield* vcs.diff(c.req.valid("query").mode) + }), ) .get( 
"/command", @@ -206,10 +196,11 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { }, }, }), - async (c) => { - const commands = await AppRuntime.runPromise(Command.Service.use((svc) => svc.list())) - return c.json(commands) - }, + async (c) => + jsonRequest("InstanceRoutes.command.list", c, function* () { + const svc = yield* Command.Service + return yield* svc.list() + }), ) .get( "/agent", @@ -228,10 +219,11 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { }, }, }), - async (c) => { - const modes = await AppRuntime.runPromise(Agent.Service.use((svc) => svc.list())) - return c.json(modes) - }, + async (c) => + jsonRequest("InstanceRoutes.agent.list", c, function* () { + const svc = yield* Agent.Service + return yield* svc.list() + }), ) .get( "/skill", @@ -250,15 +242,11 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { }, }, }), - async (c) => { - const skills = await AppRuntime.runPromise( - Effect.gen(function* () { - const skill = yield* Skill.Service - return yield* skill.all() - }), - ) - return c.json(skills) - }, + async (c) => + jsonRequest("InstanceRoutes.skill.list", c, function* () { + const skill = yield* Skill.Service + return yield* skill.all() + }), ) .get( "/lsp", @@ -277,10 +265,11 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { }, }, }), - async (c) => { - const items = await AppRuntime.runPromise(LSP.Service.use((lsp) => lsp.status())) - return c.json(items) - }, + async (c) => + jsonRequest("InstanceRoutes.lsp.status", c, function* () { + const lsp = yield* LSP.Service + return yield* lsp.status() + }), ) .get( "/formatter", @@ -299,8 +288,10 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => { }, }, }), - async (c) => { - return c.json(await AppRuntime.runPromise(Format.Service.use((svc) => svc.status()))) - }, + async (c) => + jsonRequest("InstanceRoutes.formatter.status", c, function* () { + const svc = yield* Format.Service + return 
yield* svc.status() + }), ) } diff --git a/packages/opencode/src/server/routes/instance/mcp.ts b/packages/opencode/src/server/routes/instance/mcp.ts index b42cfb5314..ce4722933b 100644 --- a/packages/opencode/src/server/routes/instance/mcp.ts +++ b/packages/opencode/src/server/routes/instance/mcp.ts @@ -2,12 +2,11 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" import { MCP } from "@/mcp" -import { Config } from "@/config" import { ConfigMCP } from "@/config/mcp" -import { AppRuntime } from "@/effect/app-runtime" import { errors } from "../../error" import { lazy } from "@/util/lazy" import { Effect } from "effect" +import { jsonRequest, runRequest } from "./trace" export const McpRoutes = lazy(() => new Hono() @@ -28,9 +27,11 @@ export const McpRoutes = lazy(() => }, }, }), - async (c) => { - return c.json(await AppRuntime.runPromise(MCP.Service.use((mcp) => mcp.status()))) - }, + async (c) => + jsonRequest("McpRoutes.status", c, function* () { + const mcp = yield* MCP.Service + return yield* mcp.status() + }), ) .post( "/", @@ -57,11 +58,13 @@ export const McpRoutes = lazy(() => config: ConfigMCP.Info.zod, }), ), - async (c) => { - const { name, config } = c.req.valid("json") - const result = await AppRuntime.runPromise(MCP.Service.use((mcp) => mcp.add(name, config))) - return c.json(result.status) - }, + async (c) => + jsonRequest("McpRoutes.add", c, function* () { + const { name, config } = c.req.valid("json") + const mcp = yield* MCP.Service + const result = yield* mcp.add(name, config) + return result.status + }), ) .post( "/:name/auth", @@ -87,7 +90,9 @@ export const McpRoutes = lazy(() => }), async (c) => { const name = c.req.param("name") - const result = await AppRuntime.runPromise( + const result = await runRequest( + "McpRoutes.auth.start", + c, Effect.gen(function* () { const mcp = yield* MCP.Service const supports = yield* mcp.supportsOAuth(name) @@ -129,12 +134,13 @@ export const 
McpRoutes = lazy(() => code: z.string().describe("Authorization code from OAuth callback"), }), ), - async (c) => { - const name = c.req.param("name") - const { code } = c.req.valid("json") - const status = await AppRuntime.runPromise(MCP.Service.use((mcp) => mcp.finishAuth(name, code))) - return c.json(status) - }, + async (c) => + jsonRequest("McpRoutes.auth.callback", c, function* () { + const name = c.req.param("name") + const { code } = c.req.valid("json") + const mcp = yield* MCP.Service + return yield* mcp.finishAuth(name, code) + }), ) .post( "/:name/auth/authenticate", @@ -156,7 +162,9 @@ export const McpRoutes = lazy(() => }), async (c) => { const name = c.req.param("name") - const result = await AppRuntime.runPromise( + const result = await runRequest( + "McpRoutes.auth.authenticate", + c, Effect.gen(function* () { const mcp = yield* MCP.Service const supports = yield* mcp.supportsOAuth(name) @@ -191,11 +199,13 @@ export const McpRoutes = lazy(() => ...errors(404), }, }), - async (c) => { - const name = c.req.param("name") - await AppRuntime.runPromise(MCP.Service.use((mcp) => mcp.removeAuth(name))) - return c.json({ success: true as const }) - }, + async (c) => + jsonRequest("McpRoutes.auth.remove", c, function* () { + const name = c.req.param("name") + const mcp = yield* MCP.Service + yield* mcp.removeAuth(name) + return { success: true as const } + }), ) .post( "/:name/connect", @@ -214,11 +224,13 @@ export const McpRoutes = lazy(() => }, }), validator("param", z.object({ name: z.string() })), - async (c) => { - const { name } = c.req.valid("param") - await AppRuntime.runPromise(MCP.Service.use((mcp) => mcp.connect(name))) - return c.json(true) - }, + async (c) => + jsonRequest("McpRoutes.connect", c, function* () { + const { name } = c.req.valid("param") + const mcp = yield* MCP.Service + yield* mcp.connect(name) + return true + }), ) .post( "/:name/disconnect", @@ -237,10 +249,12 @@ export const McpRoutes = lazy(() => }, }), validator("param", 
z.object({ name: z.string() })), - async (c) => { - const { name } = c.req.valid("param") - await AppRuntime.runPromise(MCP.Service.use((mcp) => mcp.disconnect(name))) - return c.json(true) - }, + async (c) => + jsonRequest("McpRoutes.disconnect", c, function* () { + const { name } = c.req.valid("param") + const mcp = yield* MCP.Service + yield* mcp.disconnect(name) + return true + }), ), ) diff --git a/packages/opencode/src/server/routes/instance/permission.ts b/packages/opencode/src/server/routes/instance/permission.ts index c3f9c82011..c18f4734b4 100644 --- a/packages/opencode/src/server/routes/instance/permission.ts +++ b/packages/opencode/src/server/routes/instance/permission.ts @@ -1,11 +1,11 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" -import { AppRuntime } from "@/effect/app-runtime" import { Permission } from "@/permission" import { PermissionID } from "@/permission/schema" import { errors } from "../../error" import { lazy } from "@/util/lazy" +import { jsonRequest } from "./trace" export const PermissionRoutes = lazy(() => new Hono() @@ -34,20 +34,18 @@ export const PermissionRoutes = lazy(() => }), ), validator("json", z.object({ reply: Permission.Reply.zod, message: z.string().optional() })), - async (c) => { - const params = c.req.valid("param") - const json = c.req.valid("json") - await AppRuntime.runPromise( - Permission.Service.use((svc) => - svc.reply({ - requestID: params.requestID, - reply: json.reply, - message: json.message, - }), - ), - ) - return c.json(true) - }, + async (c) => + jsonRequest("PermissionRoutes.reply", c, function* () { + const params = c.req.valid("param") + const json = c.req.valid("json") + const svc = yield* Permission.Service + yield* svc.reply({ + requestID: params.requestID, + reply: json.reply, + message: json.message, + }) + return true + }), ) .get( "/", @@ -66,9 +64,10 @@ export const PermissionRoutes = lazy(() => }, }, }), - async (c) => { - 
const permissions = await AppRuntime.runPromise(Permission.Service.use((svc) => svc.list())) - return c.json(permissions) - }, + async (c) => + jsonRequest("PermissionRoutes.list", c, function* () { + const svc = yield* Permission.Service + return yield* svc.list() + }), ), ) diff --git a/packages/opencode/src/server/routes/instance/project.ts b/packages/opencode/src/server/routes/instance/project.ts index 060542c4b4..5acef6d788 100644 --- a/packages/opencode/src/server/routes/instance/project.ts +++ b/packages/opencode/src/server/routes/instance/project.ts @@ -9,6 +9,7 @@ import { errors } from "../../error" import { lazy } from "@/util/lazy" import { InstanceBootstrap } from "@/project/bootstrap" import { AppRuntime } from "@/effect/app-runtime" +import { jsonRequest, runRequest } from "./trace" export const ProjectRoutes = lazy(() => new Hono() @@ -75,7 +76,9 @@ export const ProjectRoutes = lazy(() => async (c) => { const dir = Instance.directory const prev = Instance.project - const next = await AppRuntime.runPromise( + const next = await runRequest( + "ProjectRoutes.initGit", + c, Project.Service.use((svc) => svc.initGit({ directory: dir, project: prev })), ) if (next.id === prev.id && next.vcs === prev.vcs && next.worktree === prev.worktree) return c.json(next) @@ -108,11 +111,12 @@ export const ProjectRoutes = lazy(() => }), validator("param", z.object({ projectID: ProjectID.zod })), validator("json", Project.UpdateInput.omit({ projectID: true })), - async (c) => { - const projectID = c.req.valid("param").projectID - const body = c.req.valid("json") - const project = await AppRuntime.runPromise(Project.Service.use((svc) => svc.update({ ...body, projectID }))) - return c.json(project) - }, + async (c) => + jsonRequest("ProjectRoutes.update", c, function* () { + const projectID = c.req.valid("param").projectID + const body = c.req.valid("json") + const svc = yield* Project.Service + return yield* svc.update({ ...body, projectID }) + }), ), ) diff --git 
a/packages/opencode/src/server/routes/instance/provider.ts b/packages/opencode/src/server/routes/instance/provider.ts index 57aa895e3d..617980e39c 100644 --- a/packages/opencode/src/server/routes/instance/provider.ts +++ b/packages/opencode/src/server/routes/instance/provider.ts @@ -6,11 +6,11 @@ import { Provider } from "@/provider" import { ModelsDev } from "@/provider" import { ProviderAuth } from "@/provider" import { ProviderID } from "@/provider/schema" -import { AppRuntime } from "@/effect/app-runtime" import { mapValues } from "remeda" import { errors } from "../../error" import { lazy } from "@/util/lazy" import { Effect } from "effect" +import { jsonRequest } from "./trace" export const ProviderRoutes = lazy(() => new Hono() @@ -31,39 +31,31 @@ export const ProviderRoutes = lazy(() => }, }, }), - async (c) => { - const result = await AppRuntime.runPromise( - Effect.gen(function* () { - const svc = yield* Provider.Service - const cfg = yield* Config.Service - const config = yield* cfg.get() - const all = yield* Effect.promise(() => ModelsDev.get()) - const disabled = new Set(config.disabled_providers ?? []) - const enabled = config.enabled_providers ? new Set(config.enabled_providers) : undefined - const filtered: Record = {} - for (const [key, value] of Object.entries(all)) { - if ((enabled ? enabled.has(key) : true) && !disabled.has(key)) { - filtered[key] = value - } + async (c) => + jsonRequest("ProviderRoutes.list", c, function* () { + const svc = yield* Provider.Service + const cfg = yield* Config.Service + const config = yield* cfg.get() + const all = yield* Effect.promise(() => ModelsDev.get()) + const disabled = new Set(config.disabled_providers ?? []) + const enabled = config.enabled_providers ? new Set(config.enabled_providers) : undefined + const filtered: Record = {} + for (const [key, value] of Object.entries(all)) { + if ((enabled ? 
enabled.has(key) : true) && !disabled.has(key)) { + filtered[key] = value } - const connected = yield* svc.list() - const providers = Object.assign( - mapValues(filtered, (x) => Provider.fromModelsDevProvider(x)), - connected, - ) - return { - all: Object.values(providers), - default: Provider.defaultModelIDs(providers), - connected: Object.keys(connected), - } - }), - ) - return c.json({ - all: result.all, - default: result.default, - connected: result.connected, - }) - }, + } + const connected = yield* svc.list() + const providers = Object.assign( + mapValues(filtered, (x) => Provider.fromModelsDevProvider(x)), + connected, + ) + return { + all: Object.values(providers), + default: Provider.defaultModelIDs(providers), + connected: Object.keys(connected), + } + }), ) .get( "/auth", @@ -82,9 +74,11 @@ export const ProviderRoutes = lazy(() => }, }, }), - async (c) => { - return c.json(await AppRuntime.runPromise(ProviderAuth.Service.use((svc) => svc.methods()))) - }, + async (c) => + jsonRequest("ProviderRoutes.auth", c, function* () { + const svc = yield* ProviderAuth.Service + return yield* svc.methods() + }), ) .post( "/:providerID/oauth/authorize", @@ -111,20 +105,17 @@ export const ProviderRoutes = lazy(() => }), ), validator("json", ProviderAuth.AuthorizeInput.zod), - async (c) => { - const providerID = c.req.valid("param").providerID - const { method, inputs } = c.req.valid("json") - const result = await AppRuntime.runPromise( - ProviderAuth.Service.use((svc) => - svc.authorize({ - providerID, - method, - inputs, - }), - ), - ) - return c.json(result) - }, + async (c) => + jsonRequest("ProviderRoutes.oauth.authorize", c, function* () { + const providerID = c.req.valid("param").providerID + const { method, inputs } = c.req.valid("json") + const svc = yield* ProviderAuth.Service + return yield* svc.authorize({ + providerID, + method, + inputs, + }) + }), ) .post( "/:providerID/oauth/callback", @@ -151,19 +142,17 @@ export const ProviderRoutes = lazy(() => }), 
), validator("json", ProviderAuth.CallbackInput.zod), - async (c) => { - const providerID = c.req.valid("param").providerID - const { method, code } = c.req.valid("json") - await AppRuntime.runPromise( - ProviderAuth.Service.use((svc) => - svc.callback({ - providerID, - method, - code, - }), - ), - ) - return c.json(true) - }, + async (c) => + jsonRequest("ProviderRoutes.oauth.callback", c, function* () { + const providerID = c.req.valid("param").providerID + const { method, code } = c.req.valid("json") + const svc = yield* ProviderAuth.Service + yield* svc.callback({ + providerID, + method, + code, + }) + return true + }), ), ) diff --git a/packages/opencode/src/server/routes/instance/pty.ts b/packages/opencode/src/server/routes/instance/pty.ts index b3f71c235c..a25b66e9ff 100644 --- a/packages/opencode/src/server/routes/instance/pty.ts +++ b/packages/opencode/src/server/routes/instance/pty.ts @@ -8,6 +8,7 @@ import { Pty } from "@/pty" import { PtyID } from "@/pty/schema" import { NotFoundError } from "@/storage" import { errors } from "../../error" +import { jsonRequest, runRequest } from "./trace" export function PtyRoutes(upgradeWebSocket: UpgradeWebSocket) { return new Hono() @@ -28,16 +29,11 @@ export function PtyRoutes(upgradeWebSocket: UpgradeWebSocket) { }, }, }), - async (c) => { - return c.json( - await AppRuntime.runPromise( - Effect.gen(function* () { - const pty = yield* Pty.Service - return yield* pty.list() - }), - ), - ) - }, + async (c) => + jsonRequest("PtyRoutes.list", c, function* () { + const pty = yield* Pty.Service + return yield* pty.list() + }), ) .post( "/", @@ -58,15 +54,11 @@ export function PtyRoutes(upgradeWebSocket: UpgradeWebSocket) { }, }), validator("json", Pty.CreateInput), - async (c) => { - const info = await AppRuntime.runPromise( - Effect.gen(function* () { - const pty = yield* Pty.Service - return yield* pty.create(c.req.valid("json")) - }), - ) - return c.json(info) - }, + async (c) => + jsonRequest("PtyRoutes.create", c, 
function* () { + const pty = yield* Pty.Service + return yield* pty.create(c.req.valid("json")) + }), ) .get( "/:ptyID", @@ -88,7 +80,9 @@ export function PtyRoutes(upgradeWebSocket: UpgradeWebSocket) { }), validator("param", z.object({ ptyID: PtyID.zod })), async (c) => { - const info = await AppRuntime.runPromise( + const info = await runRequest( + "PtyRoutes.get", + c, Effect.gen(function* () { const pty = yield* Pty.Service return yield* pty.get(c.req.valid("param").ptyID) @@ -120,15 +114,11 @@ export function PtyRoutes(upgradeWebSocket: UpgradeWebSocket) { }), validator("param", z.object({ ptyID: PtyID.zod })), validator("json", Pty.UpdateInput), - async (c) => { - const info = await AppRuntime.runPromise( - Effect.gen(function* () { - const pty = yield* Pty.Service - return yield* pty.update(c.req.valid("param").ptyID, c.req.valid("json")) - }), - ) - return c.json(info) - }, + async (c) => + jsonRequest("PtyRoutes.update", c, function* () { + const pty = yield* Pty.Service + return yield* pty.update(c.req.valid("param").ptyID, c.req.valid("json")) + }), ) .delete( "/:ptyID", @@ -149,15 +139,12 @@ export function PtyRoutes(upgradeWebSocket: UpgradeWebSocket) { }, }), validator("param", z.object({ ptyID: PtyID.zod })), - async (c) => { - await AppRuntime.runPromise( - Effect.gen(function* () { - const pty = yield* Pty.Service - yield* pty.remove(c.req.valid("param").ptyID) - }), - ) - return c.json(true) - }, + async (c) => + jsonRequest("PtyRoutes.remove", c, function* () { + const pty = yield* Pty.Service + yield* pty.remove(c.req.valid("param").ptyID) + return true + }), ) .get( "/:ptyID/connect", @@ -194,7 +181,9 @@ export function PtyRoutes(upgradeWebSocket: UpgradeWebSocket) { })() let handler: Handler | undefined if ( - !(await AppRuntime.runPromise( + !(await runRequest( + "PtyRoutes.connect", + c, Effect.gen(function* () { const pty = yield* Pty.Service return yield* pty.get(id) @@ -232,7 +221,7 @@ export function PtyRoutes(upgradeWebSocket: 
UpgradeWebSocket) { Effect.gen(function* () { const pty = yield* Pty.Service return yield* pty.connect(id, socket, cursor) - }), + }).pipe(Effect.withSpan("PtyRoutes.connect.open")), ) ready = true for (const msg of pending) handler?.onMessage(msg) diff --git a/packages/opencode/src/server/routes/instance/question.ts b/packages/opencode/src/server/routes/instance/question.ts index 9b8f461e39..51ecb48ccd 100644 --- a/packages/opencode/src/server/routes/instance/question.ts +++ b/packages/opencode/src/server/routes/instance/question.ts @@ -3,10 +3,10 @@ import { describeRoute, validator } from "hono-openapi" import { resolver } from "hono-openapi" import { QuestionID } from "@/question/schema" import { Question } from "@/question" -import { AppRuntime } from "@/effect/app-runtime" import z from "zod" import { errors } from "../../error" import { lazy } from "@/util/lazy" +import { jsonRequest } from "./trace" const Reply = z.object({ answers: Question.Answer.zod @@ -33,10 +33,11 @@ export const QuestionRoutes = lazy(() => }, }, }), - async (c) => { - const questions = await AppRuntime.runPromise(Question.Service.use((svc) => svc.list())) - return c.json(questions) - }, + async (c) => + jsonRequest("QuestionRoutes.list", c, function* () { + const svc = yield* Question.Service + return yield* svc.list() + }), ) .post( "/:requestID/reply", @@ -63,19 +64,17 @@ export const QuestionRoutes = lazy(() => }), ), validator("json", Reply), - async (c) => { - const params = c.req.valid("param") - const json = c.req.valid("json") - await AppRuntime.runPromise( - Question.Service.use((svc) => - svc.reply({ - requestID: params.requestID, - answers: json.answers, - }), - ), - ) - return c.json(true) - }, + async (c) => + jsonRequest("QuestionRoutes.reply", c, function* () { + const params = c.req.valid("param") + const json = c.req.valid("json") + const svc = yield* Question.Service + yield* svc.reply({ + requestID: params.requestID, + answers: json.answers, + }) + return true + }), 
) .post( "/:requestID/reject", @@ -101,10 +100,12 @@ export const QuestionRoutes = lazy(() => requestID: QuestionID.zod, }), ), - async (c) => { - const params = c.req.valid("param") - await AppRuntime.runPromise(Question.Service.use((svc) => svc.reject(params.requestID))) - return c.json(true) - }, + async (c) => + jsonRequest("QuestionRoutes.reject", c, function* () { + const params = c.req.valid("param") + const svc = yield* Question.Service + yield* svc.reject(params.requestID) + return true + }), ), ) diff --git a/packages/opencode/src/server/routes/instance/session.ts b/packages/opencode/src/server/routes/instance/session.ts index ae6185abb8..bf713935b0 100644 --- a/packages/opencode/src/server/routes/instance/session.ts +++ b/packages/opencode/src/server/routes/instance/session.ts @@ -14,7 +14,6 @@ import { SessionStatus } from "@/session/status" import { SessionSummary } from "@/session/summary" import { Todo } from "@/session/todo" import { Effect } from "effect" -import { AppRuntime } from "@/effect/app-runtime" import { Agent } from "@/agent/agent" import { Snapshot } from "@/snapshot" import { Command } from "@/command" @@ -26,7 +25,7 @@ import { errors } from "../../error" import { lazy } from "@/util/lazy" import { Bus } from "@/bus" import { NamedError } from "@opencode-ai/shared/util/error" -import { jsonRequest } from "./trace" +import { jsonRequest, runRequest } from "./trace" const log = Log.create({ service: "server" }) @@ -218,11 +217,12 @@ export const SessionRoutes = lazy(() => }, }), validator("json", Session.CreateInput), - async (c) => { - const body = c.req.valid("json") ?? {} - const session = await AppRuntime.runPromise(SessionShare.Service.use((svc) => svc.create(body))) - return c.json(session) - }, + async (c) => + jsonRequest("SessionRoutes.create", c, function* () { + const body = c.req.valid("json") ?? 
{} + const svc = yield* SessionShare.Service + return yield* svc.create(body) + }), ) .delete( "/:sessionID", @@ -248,11 +248,13 @@ export const SessionRoutes = lazy(() => sessionID: Session.RemoveInput, }), ), - async (c) => { - const sessionID = c.req.valid("param").sessionID - await AppRuntime.runPromise(Session.Service.use((svc) => svc.remove(sessionID))) - return c.json(true) - }, + async (c) => + jsonRequest("SessionRoutes.delete", c, function* () { + const sessionID = c.req.valid("param").sessionID + const svc = yield* Session.Service + yield* svc.remove(sessionID) + return true + }), ) .patch( "/:sessionID", @@ -290,32 +292,28 @@ export const SessionRoutes = lazy(() => .optional(), }), ), - async (c) => { - const sessionID = c.req.valid("param").sessionID - const updates = c.req.valid("json") - const session = await AppRuntime.runPromise( - Effect.gen(function* () { - const session = yield* Session.Service - const current = yield* session.get(sessionID) + async (c) => + jsonRequest("SessionRoutes.update", c, function* () { + const sessionID = c.req.valid("param").sessionID + const updates = c.req.valid("json") + const session = yield* Session.Service + const current = yield* session.get(sessionID) - if (updates.title !== undefined) { - yield* session.setTitle({ sessionID, title: updates.title }) - } - if (updates.permission !== undefined) { - yield* session.setPermission({ - sessionID, - permission: Permission.merge(current.permission ?? [], updates.permission), - }) - } - if (updates.time?.archived !== undefined) { - yield* session.setArchived({ sessionID, time: updates.time.archived }) - } + if (updates.title !== undefined) { + yield* session.setTitle({ sessionID, title: updates.title }) + } + if (updates.permission !== undefined) { + yield* session.setPermission({ + sessionID, + permission: Permission.merge(current.permission ?? 
[], updates.permission), + }) + } + if (updates.time?.archived !== undefined) { + yield* session.setArchived({ sessionID, time: updates.time.archived }) + } - return yield* session.get(sessionID) - }), - ) - return c.json(session) - }, + return yield* session.get(sessionID) + }), ) // TODO(v2): remove this dedicated route and rely on the normal `/init` command flow. .post( @@ -351,22 +349,20 @@ export const SessionRoutes = lazy(() => messageID: MessageID.zod, }), ), - async (c) => { - const sessionID = c.req.valid("param").sessionID - const body = c.req.valid("json") - await AppRuntime.runPromise( - SessionPrompt.Service.use((svc) => - svc.command({ - sessionID, - messageID: body.messageID, - model: body.providerID + "/" + body.modelID, - command: Command.Default.INIT, - arguments: "", - }), - ), - ) - return c.json(true) - }, + async (c) => + jsonRequest("SessionRoutes.init", c, function* () { + const sessionID = c.req.valid("param").sessionID + const body = c.req.valid("json") + const svc = yield* SessionPrompt.Service + yield* svc.command({ + sessionID, + messageID: body.messageID, + model: body.providerID + "/" + body.modelID, + command: Command.Default.INIT, + arguments: "", + }) + return true + }), ) .post( "/:sessionID/fork", @@ -392,12 +388,13 @@ export const SessionRoutes = lazy(() => }), ), validator("json", Session.ForkInput.omit({ sessionID: true })), - async (c) => { - const sessionID = c.req.valid("param").sessionID - const body = c.req.valid("json") - const result = await AppRuntime.runPromise(Session.Service.use((svc) => svc.fork({ ...body, sessionID }))) - return c.json(result) - }, + async (c) => + jsonRequest("SessionRoutes.fork", c, function* () { + const sessionID = c.req.valid("param").sessionID + const body = c.req.valid("json") + const svc = yield* Session.Service + return yield* svc.fork({ ...body, sessionID }) + }), ) .post( "/:sessionID/abort", @@ -423,10 +420,12 @@ export const SessionRoutes = lazy(() => sessionID: SessionID.zod, }), ), 
- async (c) => { - await AppRuntime.runPromise(SessionPrompt.Service.use((svc) => svc.cancel(c.req.valid("param").sessionID))) - return c.json(true) - }, + async (c) => + jsonRequest("SessionRoutes.abort", c, function* () { + const svc = yield* SessionPrompt.Service + yield* svc.cancel(c.req.valid("param").sessionID) + return true + }), ) .post( "/:sessionID/share", @@ -452,18 +451,14 @@ export const SessionRoutes = lazy(() => sessionID: SessionID.zod, }), ), - async (c) => { - const sessionID = c.req.valid("param").sessionID - const session = await AppRuntime.runPromise( - Effect.gen(function* () { - const share = yield* SessionShare.Service - const session = yield* Session.Service - yield* share.share(sessionID) - return yield* session.get(sessionID) - }), - ) - return c.json(session) - }, + async (c) => + jsonRequest("SessionRoutes.share", c, function* () { + const sessionID = c.req.valid("param").sessionID + const share = yield* SessionShare.Service + const session = yield* Session.Service + yield* share.share(sessionID) + return yield* session.get(sessionID) + }), ) .get( "/:sessionID/diff", @@ -494,19 +489,16 @@ export const SessionRoutes = lazy(() => messageID: SessionSummary.DiffInput.shape.messageID, }), ), - async (c) => { - const query = c.req.valid("query") - const params = c.req.valid("param") - const result = await AppRuntime.runPromise( - SessionSummary.Service.use((summary) => - summary.diff({ - sessionID: params.sessionID, - messageID: query.messageID, - }), - ), - ) - return c.json(result) - }, + async (c) => + jsonRequest("SessionRoutes.diff", c, function* () { + const query = c.req.valid("query") + const params = c.req.valid("param") + const summary = yield* SessionSummary.Service + return yield* summary.diff({ + sessionID: params.sessionID, + messageID: query.messageID, + }) + }), ) .delete( "/:sessionID/share", @@ -532,18 +524,14 @@ export const SessionRoutes = lazy(() => sessionID: SessionID.zod, }), ), - async (c) => { - const sessionID = 
c.req.valid("param").sessionID - const session = await AppRuntime.runPromise( - Effect.gen(function* () { - const share = yield* SessionShare.Service - const session = yield* Session.Service - yield* share.unshare(sessionID) - return yield* session.get(sessionID) - }), - ) - return c.json(session) - }, + async (c) => + jsonRequest("SessionRoutes.unshare", c, function* () { + const sessionID = c.req.valid("param").sessionID + const share = yield* SessionShare.Service + const session = yield* Session.Service + yield* share.unshare(sessionID) + return yield* session.get(sessionID) + }), ) .post( "/:sessionID/summarize", @@ -577,43 +565,40 @@ export const SessionRoutes = lazy(() => auto: z.boolean().optional().default(false), }), ), - async (c) => { - const sessionID = c.req.valid("param").sessionID - const body = c.req.valid("json") - await AppRuntime.runPromise( - Effect.gen(function* () { - const session = yield* Session.Service - const revert = yield* SessionRevert.Service - const compact = yield* SessionCompaction.Service - const prompt = yield* SessionPrompt.Service - const agent = yield* Agent.Service + async (c) => + jsonRequest("SessionRoutes.summarize", c, function* () { + const sessionID = c.req.valid("param").sessionID + const body = c.req.valid("json") + const session = yield* Session.Service + const revert = yield* SessionRevert.Service + const compact = yield* SessionCompaction.Service + const prompt = yield* SessionPrompt.Service + const agent = yield* Agent.Service - yield* revert.cleanup(yield* session.get(sessionID)) - const msgs = yield* session.messages({ sessionID }) - const defaultAgent = yield* agent.defaultAgent() - let currentAgent = defaultAgent - for (let i = msgs.length - 1; i >= 0; i--) { - const info = msgs[i].info - if (info.role === "user") { - currentAgent = info.agent || defaultAgent - break - } + yield* revert.cleanup(yield* session.get(sessionID)) + const msgs = yield* session.messages({ sessionID }) + const defaultAgent = yield* 
agent.defaultAgent() + let currentAgent = defaultAgent + for (let i = msgs.length - 1; i >= 0; i--) { + const info = msgs[i].info + if (info.role === "user") { + currentAgent = info.agent || defaultAgent + break } + } - yield* compact.create({ - sessionID, - agent: currentAgent, - model: { - providerID: body.providerID, - modelID: body.modelID, - }, - auto: body.auto, - }) - yield* prompt.loop({ sessionID }) - }), - ) - return c.json(true) - }, + yield* compact.create({ + sessionID, + agent: currentAgent, + model: { + providerID: body.providerID, + modelID: body.modelID, + }, + auto: body.auto, + }) + yield* prompt.loop({ sessionID }) + return true + }), ) .get( "/:sessionID/message", @@ -675,7 +660,9 @@ export const SessionRoutes = lazy(() => const query = c.req.valid("query") const sessionID = c.req.valid("param").sessionID if (query.limit === undefined || query.limit === 0) { - const messages = await AppRuntime.runPromise( + const messages = await runRequest( + "SessionRoutes.messages", + c, Effect.gen(function* () { const session = yield* Session.Service yield* session.get(sessionID) @@ -766,21 +753,18 @@ export const SessionRoutes = lazy(() => messageID: MessageID.zod, }), ), - async (c) => { - const params = c.req.valid("param") - await AppRuntime.runPromise( - Effect.gen(function* () { - const state = yield* SessionRunState.Service - const session = yield* Session.Service - yield* state.assertNotBusy(params.sessionID) - yield* session.removeMessage({ - sessionID: params.sessionID, - messageID: params.messageID, - }) - }), - ) - return c.json(true) - }, + async (c) => + jsonRequest("SessionRoutes.deleteMessage", c, function* () { + const params = c.req.valid("param") + const state = yield* SessionRunState.Service + const session = yield* Session.Service + yield* state.assertNotBusy(params.sessionID) + yield* session.removeMessage({ + sessionID: params.sessionID, + messageID: params.messageID, + }) + return true + }), ) .delete( 
"/:sessionID/message/:messageID/part/:partID", @@ -807,19 +791,17 @@ export const SessionRoutes = lazy(() => partID: PartID.zod, }), ), - async (c) => { - const params = c.req.valid("param") - await AppRuntime.runPromise( - Session.Service.use((svc) => - svc.removePart({ - sessionID: params.sessionID, - messageID: params.messageID, - partID: params.partID, - }), - ), - ) - return c.json(true) - }, + async (c) => + jsonRequest("SessionRoutes.deletePart", c, function* () { + const params = c.req.valid("param") + const svc = yield* Session.Service + yield* svc.removePart({ + sessionID: params.sessionID, + messageID: params.messageID, + partID: params.partID, + }) + return true + }), ) .patch( "/:sessionID/message/:messageID/part/:partID", @@ -855,8 +837,10 @@ export const SessionRoutes = lazy(() => `Part mismatch: body.id='${body.id}' vs partID='${params.partID}', body.messageID='${body.messageID}' vs messageID='${params.messageID}', body.sessionID='${body.sessionID}' vs sessionID='${params.sessionID}'`, ) } - const part = await AppRuntime.runPromise(Session.Service.use((svc) => svc.updatePart(body))) - return c.json(part) + return jsonRequest("SessionRoutes.updatePart", c, function* () { + const svc = yield* Session.Service + return yield* svc.updatePart(body) + }) }, ) .post( @@ -895,7 +879,9 @@ export const SessionRoutes = lazy(() => return stream(c, async (stream) => { const sessionID = c.req.valid("param").sessionID const body = c.req.valid("json") - const msg = await AppRuntime.runPromise( + const msg = await runRequest( + "SessionRoutes.prompt", + c, SessionPrompt.Service.use((svc) => svc.prompt({ ...body, sessionID })), ) void stream.write(JSON.stringify(msg)) @@ -926,15 +912,17 @@ export const SessionRoutes = lazy(() => async (c) => { const sessionID = c.req.valid("param").sessionID const body = c.req.valid("json") - void AppRuntime.runPromise(SessionPrompt.Service.use((svc) => svc.prompt({ ...body, sessionID }))).catch( - (err) => { - log.error("prompt_async 
failed", { sessionID, error: err }) - void Bus.publish(Session.Event.Error, { - sessionID, - error: new NamedError.Unknown({ message: err instanceof Error ? err.message : String(err) }).toObject(), - }) - }, - ) + void runRequest( + "SessionRoutes.prompt_async", + c, + SessionPrompt.Service.use((svc) => svc.prompt({ ...body, sessionID })), + ).catch((err) => { + log.error("prompt_async failed", { sessionID, error: err }) + void Bus.publish(Session.Event.Error, { + sessionID, + error: new NamedError.Unknown({ message: err instanceof Error ? err.message : String(err) }).toObject(), + }) + }) return c.body(null, 204) }, @@ -969,12 +957,13 @@ export const SessionRoutes = lazy(() => }), ), validator("json", SessionPrompt.CommandInput.omit({ sessionID: true })), - async (c) => { - const sessionID = c.req.valid("param").sessionID - const body = c.req.valid("json") - const msg = await AppRuntime.runPromise(SessionPrompt.Service.use((svc) => svc.command({ ...body, sessionID }))) - return c.json(msg) - }, + async (c) => + jsonRequest("SessionRoutes.command", c, function* () { + const sessionID = c.req.valid("param").sessionID + const body = c.req.valid("json") + const svc = yield* SessionPrompt.Service + return yield* svc.command({ ...body, sessionID }) + }), ) .post( "/:sessionID/shell", @@ -1001,12 +990,13 @@ export const SessionRoutes = lazy(() => }), ), validator("json", SessionPrompt.ShellInput.omit({ sessionID: true })), - async (c) => { - const sessionID = c.req.valid("param").sessionID - const body = c.req.valid("json") - const msg = await AppRuntime.runPromise(SessionPrompt.Service.use((svc) => svc.shell({ ...body, sessionID }))) - return c.json(msg) - }, + async (c) => + jsonRequest("SessionRoutes.shell", c, function* () { + const sessionID = c.req.valid("param").sessionID + const body = c.req.valid("json") + const svc = yield* SessionPrompt.Service + return yield* svc.shell({ ...body, sessionID }) + }), ) .post( "/:sessionID/revert", @@ -1036,15 +1026,13 @@ export 
const SessionRoutes = lazy(() => async (c) => { const sessionID = c.req.valid("param").sessionID log.info("revert", c.req.valid("json")) - const session = await AppRuntime.runPromise( - SessionRevert.Service.use((svc) => - svc.revert({ - sessionID, - ...c.req.valid("json"), - }), - ), - ) - return c.json(session) + return jsonRequest("SessionRoutes.revert", c, function* () { + const svc = yield* SessionRevert.Service + return yield* svc.revert({ + sessionID, + ...c.req.valid("json"), + }) + }) }, ) .post( @@ -1071,11 +1059,12 @@ export const SessionRoutes = lazy(() => sessionID: SessionID.zod, }), ), - async (c) => { - const sessionID = c.req.valid("param").sessionID - const session = await AppRuntime.runPromise(SessionRevert.Service.use((svc) => svc.unrevert({ sessionID }))) - return c.json(session) - }, + async (c) => + jsonRequest("SessionRoutes.unrevert", c, function* () { + const sessionID = c.req.valid("param").sessionID + const svc = yield* SessionRevert.Service + return yield* svc.unrevert({ sessionID }) + }), ) .post( "/:sessionID/permissions/:permissionID", @@ -1104,17 +1093,15 @@ export const SessionRoutes = lazy(() => }), ), validator("json", z.object({ response: Permission.Reply.zod })), - async (c) => { - const params = c.req.valid("param") - await AppRuntime.runPromise( - Permission.Service.use((svc) => - svc.reply({ - requestID: params.permissionID, - reply: c.req.valid("json").response, - }), - ), - ) - return c.json(true) - }, + async (c) => + jsonRequest("SessionRoutes.permissionRespond", c, function* () { + const params = c.req.valid("param") + const svc = yield* Permission.Service + yield* svc.reply({ + requestID: params.permissionID, + reply: c.req.valid("json").response, + }) + return true + }), ), ) diff --git a/packages/opencode/src/server/routes/instance/tui.ts b/packages/opencode/src/server/routes/instance/tui.ts index 2f856c3488..d6add67b97 100644 --- a/packages/opencode/src/server/routes/instance/tui.ts +++ 
b/packages/opencode/src/server/routes/instance/tui.ts @@ -4,10 +4,10 @@ import z from "zod" import { Bus } from "@/bus" import { Session } from "@/session" import { TuiEvent } from "@/cli/cmd/tui/event" -import { AppRuntime } from "@/effect/app-runtime" import { AsyncQueue } from "@/util/queue" import { errors } from "../../error" import { lazy } from "@/util/lazy" +import { runRequest } from "./trace" const TuiRequest = z.object({ path: z.string(), @@ -371,7 +371,11 @@ export const TuiRoutes = lazy(() => validator("json", TuiEvent.SessionSelect.properties), async (c) => { const { sessionID } = c.req.valid("json") - await AppRuntime.runPromise(Session.Service.use((svc) => svc.get(sessionID))) + await runRequest( + "TuiRoutes.sessionSelect", + c, + Session.Service.use((svc) => svc.get(sessionID)), + ) await Bus.publish(TuiEvent.SessionSelect, { sessionID }) return c.json(true) }, diff --git a/packages/opencode/src/server/workspace.ts b/packages/opencode/src/server/workspace.ts index c141d10956..d30a117d6a 100644 --- a/packages/opencode/src/server/workspace.ts +++ b/packages/opencode/src/server/workspace.ts @@ -10,6 +10,7 @@ import { Instance } from "@/project/instance" import { Session } from "@/session" import { SessionID } from "@/session/schema" import { AppRuntime } from "@/effect/app-runtime" +import { Effect } from "effect" import { Log } from "@/util" import { ServerProxy } from "./proxy" @@ -42,7 +43,9 @@ async function getSessionWorkspace(url: URL) { const id = getSessionID(url) if (!id) return null - const session = await AppRuntime.runPromise(Session.Service.use((svc) => svc.get(id))).catch(() => undefined) + const session = await AppRuntime.runPromise( + Session.Service.use((svc) => svc.get(id)).pipe(Effect.withSpan("WorkspaceRouter.lookup")), + ).catch(() => undefined) return session?.workspaceID } From ce69bd97b90200a4d9794bd0409d3b4bb7996e54 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 16:59:24 -0400 Subject: [PATCH 196/201] 
refactor(config): migrate model-id and command to Effect Schema (#23175) --- packages/opencode/src/config/agent.ts | 2 +- packages/opencode/src/config/command.ts | 22 ++++++++++++---------- packages/opencode/src/config/config.ts | 8 +++++--- packages/opencode/src/config/model-id.ts | 13 ++++++++++++- 4 files changed, 30 insertions(+), 15 deletions(-) diff --git a/packages/opencode/src/config/agent.ts b/packages/opencode/src/config/agent.ts index f754f009d4..9053b19fc1 100644 --- a/packages/opencode/src/config/agent.ts +++ b/packages/opencode/src/config/agent.ts @@ -15,7 +15,7 @@ const log = Log.create({ service: "config" }) export const Info = z .object({ - model: ConfigModelID.optional(), + model: ConfigModelID.zod.optional(), variant: z .string() .optional() diff --git a/packages/opencode/src/config/command.ts b/packages/opencode/src/config/command.ts index 9799250567..3e0adccc30 100644 --- a/packages/opencode/src/config/command.ts +++ b/packages/opencode/src/config/command.ts @@ -1,10 +1,12 @@ export * as ConfigCommand from "./command" import { Log } from "../util" -import z from "zod" +import { Schema } from "effect" import { NamedError } from "@opencode-ai/shared/util/error" import { Glob } from "@opencode-ai/shared/util/glob" import { Bus } from "@/bus" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" import { configEntryNameFromPath } from "./entry-name" import { InvalidError } from "./error" import * as ConfigMarkdown from "./markdown" @@ -12,15 +14,15 @@ import { ConfigModelID } from "./model-id" const log = Log.create({ service: "config" }) -export const Info = z.object({ - template: z.string(), - description: z.string().optional(), - agent: z.string().optional(), - model: ConfigModelID.optional(), - subtask: z.boolean().optional(), -}) +export const Info = Schema.Struct({ + template: Schema.String, + description: Schema.optional(Schema.String), + agent: Schema.optional(Schema.String), + model: 
Schema.optional(ConfigModelID), + subtask: Schema.optional(Schema.Boolean), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Info = z.infer +export type Info = Schema.Schema.Type export async function load(dir: string) { const result: Record = {} @@ -49,7 +51,7 @@ export async function load(dir: string) { ...md.data, template: md.content.trim(), } - const parsed = Info.safeParse(config) + const parsed = Info.zod.safeParse(config) if (parsed.success) { result[config.name] = parsed.data continue diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 261c5acab4..684fdf63ab 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -97,7 +97,7 @@ export const Info = z logLevel: Log.Level.optional().describe("Log level"), server: Server.optional().describe("Server configuration for opencode serve and web commands"), command: z - .record(z.string(), ConfigCommand.Info) + .record(z.string(), ConfigCommand.Info.zod) .optional() .describe("Command configuration, see https://opencode.ai/docs/commands"), skills: ConfigSkills.Info.zod.optional().describe("Additional skill folder paths"), @@ -135,8 +135,10 @@ export const Info = z .array(z.string()) .optional() .describe("When set, ONLY these providers will be enabled. 
All other providers will be ignored"), - model: ConfigModelID.describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(), - small_model: ConfigModelID.describe( + model: ConfigModelID.zod + .describe("Model to use in the format of provider/model, eg anthropic/claude-2") + .optional(), + small_model: ConfigModelID.zod.describe( "Small model to use for tasks like title generation in the format of provider/model", ).optional(), default_agent: z diff --git a/packages/opencode/src/config/model-id.ts b/packages/opencode/src/config/model-id.ts index 909e9aa929..3ad9e035ce 100644 --- a/packages/opencode/src/config/model-id.ts +++ b/packages/opencode/src/config/model-id.ts @@ -1,3 +1,14 @@ +import { Schema } from "effect" import z from "zod" +import { zod, ZodOverride } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" -export const ConfigModelID = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) +// The original Zod schema carried an external $ref pointing at the models.dev +// JSON schema. That external reference is not a named SDK component — it is a +// literal pointer to an outside schema — so the walker cannot re-derive it +// from AST metadata. Preserve the exact original Zod via ZodOverride. 
+export const ConfigModelID = Schema.String.annotate({ + [ZodOverride]: z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) + +export type ConfigModelID = Schema.Schema.Type From 89029a20ef1548f6637c15f63f39f281e4a6dae7 Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 21:00:20 +0000 Subject: [PATCH 197/201] chore: generate --- packages/opencode/src/config/config.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 684fdf63ab..f228878d04 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -135,12 +135,10 @@ export const Info = z .array(z.string()) .optional() .describe("When set, ONLY these providers will be enabled. All other providers will be ignored"), - model: ConfigModelID.zod - .describe("Model to use in the format of provider/model, eg anthropic/claude-2") + model: ConfigModelID.zod.describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(), + small_model: ConfigModelID.zod + .describe("Small model to use for tasks like title generation in the format of provider/model") .optional(), - small_model: ConfigModelID.zod.describe( - "Small model to use for tasks like title generation in the format of provider/model", - ).optional(), default_agent: z .string() .optional() From 5980b0a5eeb8f7a8dc31433f86e458dbe3358269 Mon Sep 17 00:00:00 2001 From: Kit Langton Date: Fri, 17 Apr 2026 17:06:55 -0400 Subject: [PATCH 198/201] feat(effect-zod): add tuple support; migrate config/plugin to Effect Schema (#23178) --- .../src/cli/cmd/tui/config/tui-schema.ts | 2 +- packages/opencode/src/config/config.ts | 2 +- packages/opencode/src/config/plugin.ts | 15 ++++++---- packages/opencode/src/util/effect-zod.ts | 13 +++++++-- .../opencode/test/util/effect-zod.test.ts | 28 +++++++++++++++++-- 5 
files changed, 48 insertions(+), 12 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/config/tui-schema.ts b/packages/opencode/src/cli/cmd/tui/config/tui-schema.ts index 66569efea5..ed79e8e524 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui-schema.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui-schema.ts @@ -31,7 +31,7 @@ export const TuiInfo = z $schema: z.string().optional(), theme: z.string().optional(), keybinds: KeybindOverride.optional(), - plugin: ConfigPlugin.Spec.array().optional(), + plugin: ConfigPlugin.Spec.zod.array().optional(), plugin_enabled: z.record(z.string(), z.boolean()).optional(), }) .extend(TuiOptions.shape) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index f228878d04..0f6d71f447 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -113,7 +113,7 @@ export const Info = z "Enable or disable snapshot tracking. When false, filesystem snapshots are not recorded and undoing or reverting will not undo/redo file changes. Defaults to true.", ), // User-facing plugin config is stored as Specs; provenance gets attached later while configs are merged. 
- plugin: ConfigPlugin.Spec.array().optional(), + plugin: ConfigPlugin.Spec.zod.array().optional(), share: z .enum(["manual", "auto", "disabled"]) .optional() diff --git a/packages/opencode/src/config/plugin.ts b/packages/opencode/src/config/plugin.ts index 7d335bcc53..ebc3e2230d 100644 --- a/packages/opencode/src/config/plugin.ts +++ b/packages/opencode/src/config/plugin.ts @@ -1,16 +1,21 @@ import { Glob } from "@opencode-ai/shared/util/glob" -import z from "zod" +import { Schema } from "effect" import { pathToFileURL } from "url" import { isPathPluginSpec, parsePluginSpecifier, resolvePathPluginTarget } from "@/plugin/shared" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" import path from "path" -const Options = z.record(z.string(), z.unknown()) -export type Options = z.infer +export const Options = Schema.Record(Schema.String, Schema.Unknown).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Options = Schema.Schema.Type // Spec is the user-config value: either just a plugin identifier, or the identifier plus inline options. // It answers "what should we load?" but says nothing about where that value came from. 
-export const Spec = z.union([z.string(), z.tuple([z.string(), Options])]) -export type Spec = z.infer +export const Spec = Schema.Union([ + Schema.String, + Schema.mutable(Schema.Tuple([Schema.String, Options])), +]).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Spec = Schema.Schema.Type export type Scope = "global" | "local" diff --git a/packages/opencode/src/util/effect-zod.ts b/packages/opencode/src/util/effect-zod.ts index c3240deaac..771795ba68 100644 --- a/packages/opencode/src/util/effect-zod.ts +++ b/packages/opencode/src/util/effect-zod.ts @@ -119,9 +119,16 @@ function object(ast: SchemaAST.Objects): z.ZodTypeAny { } function array(ast: SchemaAST.Arrays): z.ZodTypeAny { - if (ast.elements.length > 0) return fail(ast) - if (ast.rest.length !== 1) return fail(ast) - return z.array(walk(ast.rest[0])) + // Pure variadic arrays: { elements: [], rest: [item] } + if (ast.elements.length === 0) { + if (ast.rest.length !== 1) return fail(ast) + return z.array(walk(ast.rest[0])) + } + // Fixed-length tuples: { elements: [a, b, ...], rest: [] } + // Tuples with a variadic tail (...rest) are not yet supported. 
+ if (ast.rest.length > 0) return fail(ast) + const items = ast.elements.map(walk) + return z.tuple(items as [z.ZodTypeAny, ...Array]) } function decl(ast: SchemaAST.Declaration): z.ZodTypeAny { diff --git a/packages/opencode/test/util/effect-zod.test.ts b/packages/opencode/test/util/effect-zod.test.ts index 1c84c96f3a..ba67a60e6d 100644 --- a/packages/opencode/test/util/effect-zod.test.ts +++ b/packages/opencode/test/util/effect-zod.test.ts @@ -61,8 +61,32 @@ describe("util.effect-zod", () => { }) }) - test("throws for unsupported tuple schemas", () => { - expect(() => zod(Schema.Tuple([Schema.String, Schema.Number]))).toThrow("unsupported effect schema") + describe("Tuples", () => { + test("fixed-length tuple parses matching array", () => { + const out = zod(Schema.Tuple([Schema.String, Schema.Number])) + expect(out.parse(["a", 1])).toEqual(["a", 1]) + expect(out.safeParse(["a"]).success).toBe(false) + expect(out.safeParse(["a", "b"]).success).toBe(false) + }) + + test("single-element tuple parses a one-element array", () => { + const out = zod(Schema.Tuple([Schema.Boolean])) + expect(out.parse([true])).toEqual([true]) + expect(out.safeParse([true, false]).success).toBe(false) + }) + + test("tuple inside a union picks the right branch", () => { + const out = zod(Schema.Union([Schema.String, Schema.Tuple([Schema.String, Schema.Number])])) + expect(out.parse("hello")).toBe("hello") + expect(out.parse(["foo", 42])).toEqual(["foo", 42]) + expect(out.safeParse(["foo"]).success).toBe(false) + }) + + test("plain arrays still work (no element positions)", () => { + const out = zod(Schema.Array(Schema.String)) + expect(out.parse(["a", "b", "c"])).toEqual(["a", "b", "c"]) + expect(out.parse([])).toEqual([]) + }) }) test("string literal unions produce z.enum with enum in JSON Schema", () => { From 89e8994fd15c4ac1235930a07e0b6e37254df22b Mon Sep 17 00:00:00 2001 From: "opencode-agent[bot]" Date: Fri, 17 Apr 2026 21:08:00 +0000 Subject: [PATCH 199/201] chore: generate --- 
packages/opencode/src/config/plugin.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/opencode/src/config/plugin.ts b/packages/opencode/src/config/plugin.ts index ebc3e2230d..4277c1cd6d 100644 --- a/packages/opencode/src/config/plugin.ts +++ b/packages/opencode/src/config/plugin.ts @@ -11,10 +11,9 @@ export type Options = Schema.Schema.Type // Spec is the user-config value: either just a plugin identifier, or the identifier plus inline options. // It answers "what should we load?" but says nothing about where that value came from. -export const Spec = Schema.Union([ - Schema.String, - Schema.mutable(Schema.Tuple([Schema.String, Options])), -]).pipe(withStatics((s) => ({ zod: zod(s) }))) +export const Spec = Schema.Union([Schema.String, Schema.mutable(Schema.Tuple([Schema.String, Options]))]).pipe( + withStatics((s) => ({ zod: zod(s) })), +) export type Spec = Schema.Schema.Type export type Scope = "global" | "local" From 0068ccec35493e2657678a4ab0654a278bd14685 Mon Sep 17 00:00:00 2001 From: Aiden Cline <63023139+rekram1-node@users.noreply.github.com> Date: Fri, 17 Apr 2026 16:27:32 -0500 Subject: [PATCH 200/201] fix: ensure copilot model list filters out disabled models (#23176) --- packages/opencode/src/plugin/github-copilot/copilot.ts | 1 - packages/opencode/src/plugin/github-copilot/models.ts | 9 ++++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/opencode/src/plugin/github-copilot/copilot.ts b/packages/opencode/src/plugin/github-copilot/copilot.ts index c9b7e3c1c7..c018f72bd5 100644 --- a/packages/opencode/src/plugin/github-copilot/copilot.ts +++ b/packages/opencode/src/plugin/github-copilot/copilot.ts @@ -1,6 +1,5 @@ import type { Hooks, PluginInput } from "@opencode-ai/plugin" import type { Model } from "@opencode-ai/sdk/v2" -import { Installation } from "@/installation" import { InstallationVersion } from "@/installation/version" import { iife } from "@/util/iife" import { Log } from 
"../../util" diff --git a/packages/opencode/src/plugin/github-copilot/models.ts b/packages/opencode/src/plugin/github-copilot/models.ts index 71d21afbe4..0aac0d3f5e 100644 --- a/packages/opencode/src/plugin/github-copilot/models.ts +++ b/packages/opencode/src/plugin/github-copilot/models.ts @@ -10,6 +10,11 @@ export const schema = z.object({ // every version looks like: `{model.id}-YYYY-MM-DD` version: z.string(), supported_endpoints: z.array(z.string()).optional(), + policy: z + .object({ + state: z.string().optional(), + }) + .optional(), capabilities: z.object({ family: z.string(), limits: z.object({ @@ -122,7 +127,9 @@ export async function get( }) const result = { ...existing } - const remote = new Map(data.data.filter((m) => m.model_picker_enabled).map((m) => [m.id, m] as const)) + const remote = new Map( + data.data.filter((m) => m.model_picker_enabled && m.policy?.state !== "disabled").map((m) => [m.id, m] as const), + ) // prune existing models whose api.id isn't in the endpoint response for (const [key, model] of Object.entries(result)) { From cded68a2e2e233b8026c50408771b25d0f4e9682 Mon Sep 17 00:00:00 2001 From: Dax Date: Fri, 17 Apr 2026 17:30:50 -0400 Subject: [PATCH 201/201] refactor(npm): use object-based package spec for install API (#23181) --- packages/opencode/src/cli/cmd/tui/config/tui.ts | 7 ++++++- packages/opencode/src/config/config.ts | 7 ++++++- packages/opencode/src/npm/index.ts | 16 +++++++++++----- 3 files changed, 23 insertions(+), 7 deletions(-) diff --git a/packages/opencode/src/cli/cmd/tui/config/tui.ts b/packages/opencode/src/cli/cmd/tui/config/tui.ts index 179046e026..9d5cd65bfd 100644 --- a/packages/opencode/src/cli/cmd/tui/config/tui.ts +++ b/packages/opencode/src/cli/cmd/tui/config/tui.ts @@ -158,7 +158,12 @@ export const layer = Layer.effect( (dir) => npm .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? 
"" : "@" + InstallationVersion)], + add: [ + { + name: "@opencode-ai/plugin", + version: InstallationLocal ? undefined : InstallationVersion, + }, + ], }) .pipe(Effect.forkScoped), { diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 0f6d71f447..a2d62eaa5e 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -518,7 +518,12 @@ export const layer = Layer.effect( const dep = yield* npmSvc .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], + add: [ + { + name: "@opencode-ai/plugin", + version: InstallationLocal ? undefined : InstallationVersion, + }, + ], }) .pipe( Effect.exit, diff --git a/packages/opencode/src/npm/index.ts b/packages/opencode/src/npm/index.ts index f242598192..d92099bc3c 100644 --- a/packages/opencode/src/npm/index.ts +++ b/packages/opencode/src/npm/index.ts @@ -25,7 +25,12 @@ export interface Interface { readonly add: (pkg: string) => Effect.Effect readonly install: ( dir: string, - input?: { add: string[] }, + input?: { + add: { + name: string + version?: string + }[] + }, ) => Effect.Effect readonly outdated: (pkg: string, cachedVersion: string) => Effect.Effect readonly which: (pkg: string) => Effect.Effect> @@ -137,17 +142,18 @@ export const layer = Layer.effect( return resolveEntryPoint(first.name, first.path) }, Effect.scoped) - const install = Effect.fn("Npm.install")(function* (dir: string, input?: { add: string[] }) { + const install: Interface["install"] = Effect.fn("Npm.install")(function* (dir, input) { const canWrite = yield* afs.access(dir, { writable: true }).pipe( Effect.as(true), Effect.orElseSucceed(() => false), ) if (!canWrite) return + const add = input?.add.map((pkg) => [pkg.name, pkg.version].filter(Boolean).join("@")) ?? 
[] yield* Effect.gen(function* () { const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) if (!nodeModulesExists) { - yield* reify({ add: input?.add, dir }) + yield* reify({ add, dir }) return } }).pipe(Effect.withSpan("Npm.checkNodeModules")) @@ -163,7 +169,7 @@ export const layer = Layer.effect( ...Object.keys(pkgAny?.devDependencies || {}), ...Object.keys(pkgAny?.peerDependencies || {}), ...Object.keys(pkgAny?.optionalDependencies || {}), - ...(input?.add || []), + ...(input?.add || []).map((pkg) => pkg.name), ]) const root = lockAny?.packages?.[""] || {} @@ -176,7 +182,7 @@ export const layer = Layer.effect( for (const name of declared) { if (!locked.has(name)) { - yield* reify({ dir, add: input?.add }) + yield* reify({ dir, add }) return } }