diff --git a/.opencode/agent/translator.md b/.opencode/agent/translator.md index a987d01927..8ac7025f17 100644 --- a/.opencode/agent/translator.md +++ b/.opencode/agent/translator.md @@ -594,7 +594,6 @@ OPENCODE_DISABLE_CLAUDE_CODE OPENCODE_DISABLE_CLAUDE_CODE_PROMPT OPENCODE_DISABLE_CLAUDE_CODE_SKILLS OPENCODE_DISABLE_DEFAULT_PLUGINS -OPENCODE_DISABLE_FILETIME_CHECK OPENCODE_DISABLE_LSP_DOWNLOAD OPENCODE_DISABLE_MODELS_FETCH OPENCODE_DISABLE_PRUNE diff --git a/.opencode/opencode.jsonc b/.opencode/opencode.jsonc index 8380f7f719..82ab6d1b35 100644 --- a/.opencode/opencode.jsonc +++ b/.opencode/opencode.jsonc @@ -1,10 +1,6 @@ { "$schema": "https://opencode.ai/config.json", - "provider": { - "opencode": { - "options": {}, - }, - }, + "provider": {}, "permission": { "edit": { "packages/opencode/migration/*": "deny", diff --git a/AGENTS.md b/AGENTS.md index a7895c831f..44d08ae955 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -14,6 +14,7 @@ - Use Bun APIs when possible, like `Bun.file()` - Rely on type inference when possible; avoid explicit type annotations or interfaces unless necessary for exports or clarity - Prefer functional array methods (flatMap, filter, map) over for loops; use type guards on filter to maintain type inference downstream +- In `src/config`, follow the existing self-export pattern at the top of the file (for example `export * as ConfigAgent from "./agent"`) when adding a new config module. Reduce total variable count by inlining when a value is only used once. 
diff --git a/bun.lock b/bun.lock index 63232cb29e..3cb3cbea60 100644 --- a/bun.lock +++ b/bun.lock @@ -29,7 +29,7 @@ }, "packages/app": { "name": "@opencode-ai/app", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -83,7 +83,7 @@ }, "packages/console/app": { "name": "@opencode-ai/console-app", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@cloudflare/vite-plugin": "1.15.2", "@ibm/plex": "6.4.1", @@ -117,7 +117,7 @@ }, "packages/console/core": { "name": "@opencode-ai/console-core", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@aws-sdk/client-sts": "3.782.0", "@jsx-email/render": "1.1.1", @@ -144,7 +144,7 @@ }, "packages/console/function": { "name": "@opencode-ai/console-function", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@ai-sdk/anthropic": "3.0.64", "@ai-sdk/openai": "3.0.48", @@ -168,7 +168,7 @@ }, "packages/console/mail": { "name": "@opencode-ai/console-mail", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", @@ -192,7 +192,7 @@ }, "packages/desktop": { "name": "@opencode-ai/desktop", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@opencode-ai/app": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -225,7 +225,7 @@ }, "packages/desktop-electron": { "name": "@opencode-ai/desktop-electron", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "effect": "catalog:", "electron-context-menu": "4.1.2", @@ -268,7 +268,7 @@ }, "packages/enterprise": { "name": "@opencode-ai/enterprise", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@opencode-ai/shared": "workspace:*", "@opencode-ai/ui": "workspace:*", @@ -297,7 +297,7 @@ }, "packages/function": { "name": "@opencode-ai/function", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@octokit/auth-app": "8.0.1", "@octokit/rest": "catalog:", @@ -313,7 +313,7 @@ }, 
"packages/opencode": { "name": "opencode", - "version": "1.4.7", + "version": "1.4.11", "bin": { "opencode": "./bin/opencode", }, @@ -322,15 +322,15 @@ "@actions/github": "6.0.1", "@agentclientprotocol/sdk": "0.16.1", "@ai-sdk/alibaba": "1.0.17", - "@ai-sdk/amazon-bedrock": "4.0.94", - "@ai-sdk/anthropic": "3.0.70", + "@ai-sdk/amazon-bedrock": "4.0.95", + "@ai-sdk/anthropic": "3.0.71", "@ai-sdk/azure": "3.0.49", "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", - "@ai-sdk/gateway": "3.0.102", + "@ai-sdk/gateway": "3.0.104", "@ai-sdk/google": "3.0.63", - "@ai-sdk/google-vertex": "4.0.111", + "@ai-sdk/google-vertex": "4.0.112", "@ai-sdk/groq": "3.0.31", "@ai-sdk/mistral": "3.0.27", "@ai-sdk/openai": "3.0.53", @@ -365,8 +365,8 @@ "@opentelemetry/exporter-trace-otlp-http": "0.214.0", "@opentelemetry/sdk-trace-base": "2.6.1", "@opentelemetry/sdk-trace-node": "2.6.1", - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "catalog:", + "@opentui/solid": "catalog:", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", @@ -386,7 +386,7 @@ "drizzle-orm": "catalog:", "effect": "catalog:", "fuzzysort": "3.1.0", - "gitlab-ai-provider": "6.4.2", + "gitlab-ai-provider": "6.6.0", "glob": "13.0.5", "google-auth-library": "10.5.0", "gray-matter": "4.0.3", @@ -458,23 +458,23 @@ }, "packages/plugin": { "name": "@opencode-ai/plugin", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@opencode-ai/sdk": "workspace:*", "effect": "catalog:", "zod": "catalog:", }, "devDependencies": { - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "catalog:", + "@opentui/solid": "catalog:", "@tsconfig/node22": "catalog:", "@types/node": "catalog:", "@typescript/native-preview": "catalog:", "typescript": "catalog:", }, "peerDependencies": { - "@opentui/core": ">=0.1.99", - "@opentui/solid": ">=0.1.99", + "@opentui/core": ">=0.1.100", + "@opentui/solid": 
">=0.1.100", }, "optionalPeers": [ "@opentui/core", @@ -493,7 +493,7 @@ }, "packages/sdk/js": { "name": "@opencode-ai/sdk", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "cross-spawn": "catalog:", }, @@ -508,7 +508,7 @@ }, "packages/shared": { "name": "@opencode-ai/shared", - "version": "1.4.7", + "version": "1.4.11", "bin": { "opencode": "./bin/opencode", }, @@ -532,7 +532,7 @@ }, "packages/slack": { "name": "@opencode-ai/slack", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@opencode-ai/sdk": "workspace:*", "@slack/bolt": "^3.17.1", @@ -567,7 +567,7 @@ }, "packages/ui": { "name": "@opencode-ai/ui", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@kobalte/core": "catalog:", "@opencode-ai/sdk": "workspace:*", @@ -616,7 +616,7 @@ }, "packages/web": { "name": "@opencode-ai/web", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@astrojs/cloudflare": "12.6.3", "@astrojs/markdown-remark": "6.3.1", @@ -675,6 +675,8 @@ "@npmcli/arborist": "9.4.0", "@octokit/rest": "22.0.0", "@openauthjs/openauth": "0.0.0-20250322224806", + "@opentui/core": "0.1.99", + "@opentui/solid": "0.1.99", "@pierre/diffs": "1.1.0-beta.18", "@playwright/test": "1.59.1", "@solid-primitives/storage": "4.3.3", @@ -690,7 +692,7 @@ "@types/node": "22.13.9", "@types/semver": "7.7.1", "@typescript/native-preview": "7.0.0-dev.20251207.1", - "ai": "6.0.158", + "ai": "6.0.168", "cross-spawn": "7.0.6", "diff": "8.0.2", "dompurify": "3.3.1", @@ -738,7 +740,7 @@ "@ai-sdk/alibaba": ["@ai-sdk/alibaba@1.0.17", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZbE+U5bWz2JBc5DERLowx5+TKbjGBE93LqKZAWvuEn7HOSQMraxFMZuc0ST335QZJAyfBOzh7m1mPQ+y7EaaoA=="], - "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.94", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/provider": "3.0.8", 
"@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XKE7wAjXejsIfNQvn3onvGUByhGHVM6W+xlL+1DAQLmjEb+ue4sOJIRehJ96rEvTXVVHRVyA6bSXx7ayxXfn5A=="], + "@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.95", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.71", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-qJKWEy+cNx3bLSJi/XpIVhv0P8KO0JFB1SvEroNWN8gKm820SIglBmXS10DTeXJdM5PPbQX4i/wJj5BHEk2LRQ=="], "@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-rwLi/Rsuj2pYniQXIrvClHvXDzgM4UQHHnvHTWEF14efnlKclG/1ghpNC+adsRujAbCTr6gRsSbDE2vEqriV7g=="], @@ -758,11 +760,11 @@ "@ai-sdk/fireworks": ["@ai-sdk/fireworks@2.0.46", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XRKR0zgRyegdmtK5CDUEjlyRp0Fo+XVCdoG+301U1SGtgRIAYG3ObVtgzVJBVpJdHFSLHuYeLTnNiQoUxD7+FQ=="], - "@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.102", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-GrwDpaYJiVafrsA1MTbZtXPcQUI67g5AXiJo7Y1F8b+w+SiYHLk3ZIn1YmpQVoVAh2bjvxjj+Vo0AvfskuGH4g=="], + "@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.104", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.2.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-ZKX5n74io8VIRlhIMSLWVlvT3sXC8Z7cZ9GHuWBWZDVi96+62AIsWuLGvMfcBA1STYuSoDrp6rIziZmvrTq0TA=="], "@ai-sdk/google": ["@ai-sdk/google@3.0.63", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-RfOZWVMYSPu2sPRfGajrauWAZ9BSaRopSn+AszkKWQ1MFj8nhaXvCqRHB5pBQUaHTfZKagvOmMpNfa/s3gPLgQ=="], - "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.111", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/google": "3.0.64", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-5gILpAWWI5idfal/MfoH3tlQeSnOJ9jfL8JB8m2fdc3ue/9xoXkYDpXpDL/nyJImFjMCi6eR0Fpvlo/IKEWDIg=="], + "@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.112", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.71", "@ai-sdk/google": "3.0.64", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-cSfHCkM+9ZrFtQWIN1WlV93JPD+isGSdFxKj7u1L9m2aLVZajlXdcE41GL9hMt7ld7bZYE4NnZ+4VLxBAHE+Eg=="], "@ai-sdk/groq": ["@ai-sdk/groq@3.0.31", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XbbugpnFmXGu2TlXiq8KUJskP6/VVbuFcnFIGDzDIB/Chg6XHsNnqrTF80Zxkh0Pd3+NvbM+2Uqrtsndk6bDAg=="], @@ -1586,7 +1588,7 @@ "@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.214.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.214.0", "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1", "@opentelemetry/sdk-logs": "0.214.0", "@opentelemetry/sdk-metrics": "2.6.1", "@opentelemetry/sdk-trace-base": "2.6.1", "protobufjs": "^7.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, 
"sha512-DSaYcuBRh6uozfsWN3R8HsN0yDhCuWP7tOFdkUOVaWD1KVJg8m4qiLUsg/tNhTLS9HUYUcwNpwL2eroLtsZZ/w=="], - "@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + "@opentelemetry/resources": ["@opentelemetry/resources@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A=="], "@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.214.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.214.0", "@opentelemetry/core": "2.6.1", "@opentelemetry/resources": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-zf6acnScjhsaBUU22zXZ/sLWim1dfhUAbGXdMmHmNG3LfBnQ3DKsOCITb2IZwoUsNNMTogqFKBnlIPPftUgGwA=="], @@ -2454,7 +2456,7 @@ "@valibot/to-json-schema": ["@valibot/to-json-schema@1.6.0", "", { "peerDependencies": { "valibot": "^1.3.0" } }, "sha512-d6rYyK5KVa2XdqamWgZ4/Nr+cXhxjy7lmpe6Iajw15J/jmU+gyxl2IEd1Otg1d7Rl3gOQL5reulnSypzBtYy1A=="], - "@vercel/oidc": ["@vercel/oidc@3.1.0", "", {}, "sha512-Fw28YZpRnA3cAHHDlkt7xQHiJ0fcL+NRcIqsocZQUSmbzeIKRpwttJjik5ZGanXP+vlA4SbTg+AbA3bP363l+w=="], + "@vercel/oidc": ["@vercel/oidc@3.2.0", "", {}, "sha512-UycprH3T6n3jH0k44NHMa7pnFHGu/N05MjojYr+Mc6I7obkoLIJujSWwin1pCvdy/eOxrI/l3uDLQsmcrOb4ug=="], "@vitejs/plugin-react": ["@vitejs/plugin-react@4.7.0", "", { "dependencies": { "@babel/core": "^7.28.0", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", "@rolldown/pluginutils": "1.0.0-beta.27", 
"@types/babel__core": "^7.20.5", "react-refresh": "^0.17.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA=="], @@ -2514,7 +2516,7 @@ "agentkeepalive": ["agentkeepalive@4.6.0", "", { "dependencies": { "humanize-ms": "^1.2.1" } }, "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ=="], - "ai": ["ai@6.0.158", "", { "dependencies": { "@ai-sdk/gateway": "3.0.95", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-gLTp1UXFtMqKUi3XHs33K7UFglbvojkxF/aq337TxnLGOhHIW9+GyP2jwW4hYX87f1es+wId3VQoPRRu9zEStQ=="], + "ai": ["ai@6.0.168", "", { "dependencies": { "@ai-sdk/gateway": "3.0.104", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-2HqCJuO+1V2aV7vfYs5LFEUfxbkGX+5oa54q/gCCTL7KLTdbxcCu5D7TdLA5kwsrs3Szgjah9q6D9tpjHM3hUQ=="], "ai-gateway-provider": ["ai-gateway-provider@3.1.2", "", { "optionalDependencies": { "@ai-sdk/amazon-bedrock": "^4.0.62", "@ai-sdk/anthropic": "^3.0.46", "@ai-sdk/azure": "^3.0.31", "@ai-sdk/cerebras": "^2.0.34", "@ai-sdk/cohere": "^3.0.21", "@ai-sdk/deepgram": "^2.0.20", "@ai-sdk/deepseek": "^2.0.20", "@ai-sdk/elevenlabs": "^2.0.20", "@ai-sdk/fireworks": "^2.0.34", "@ai-sdk/google": "^3.0.30", "@ai-sdk/google-vertex": "^4.0.61", "@ai-sdk/groq": "^3.0.24", "@ai-sdk/mistral": "^3.0.20", "@ai-sdk/openai": "^3.0.30", "@ai-sdk/perplexity": "^3.0.19", "@ai-sdk/xai": "^3.0.57", "@openrouter/ai-sdk-provider": "^2.2.3" }, "peerDependencies": { "@ai-sdk/openai-compatible": "^2.0.0", "@ai-sdk/provider": "^3.0.0", "@ai-sdk/provider-utils": "^4.0.0", "ai": "^6.0.0" } }, "sha512-krGNnJSoO/gJ7Hbe5nQDlsBpDUGIBGtMQTRUaW7s1MylsfvLduba0TLWzQaGtOmNRkP0pGhtGlwsnS6FNQMlyw=="], @@ -3312,7 
+3314,7 @@ "github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="], - "gitlab-ai-provider": ["gitlab-ai-provider@6.4.2", "", { "dependencies": { "@anthropic-ai/sdk": "^0.71.0", "@anycable/core": "^0.9.2", "graphql-request": "^6.1.0", "isomorphic-ws": "^5.0.0", "openai": "^6.16.0", "socket.io-client": "^4.8.1", "vscode-jsonrpc": "^8.2.1", "zod": "^3.25.76" }, "peerDependencies": { "@ai-sdk/provider": ">=3.0.0", "@ai-sdk/provider-utils": ">=4.0.0" } }, "sha512-Wyw6uslCuipBOr/NYwAtpgXEUJj68iJY5aekad2DjePN99JetKVQBqkLgAy9PZp2EA4OuscfRQu9qKIBN/evNw=="], + "gitlab-ai-provider": ["gitlab-ai-provider@6.6.0", "", { "dependencies": { "@anthropic-ai/sdk": "^0.71.0", "@anycable/core": "^0.9.2", "graphql-request": "^6.1.0", "isomorphic-ws": "^5.0.0", "openai": "^6.16.0", "socket.io-client": "^4.8.1", "vscode-jsonrpc": "^8.2.1", "zod": "^3.25.76" }, "peerDependencies": { "@ai-sdk/provider": ">=3.0.0", "@ai-sdk/provider-utils": ">=4.0.0" } }, "sha512-jUxYnKA4XQaPc3wxACDZ8bPDXO0Mzx7cZaBDxbT2uGgLqtGZmSi+9tVNIg7louSS+s/ioVra3SoUz3iOFVhKPA=="], "glob": ["glob@13.0.5", "", { "dependencies": { "minimatch": "^10.2.1", "minipass": "^7.1.2", "path-scurry": "^2.0.0" } }, "sha512-BzXxZg24Ibra1pbQ/zE7Kys4Ua1ks7Bn6pKLkVPZ9FZe4JQS6/Q7ef3LG1H+k7lUf5l4T3PLSyYyYJVYUvfgTw=="], @@ -5152,7 +5154,7 @@ "@ai-sdk/alibaba/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], - "@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.71", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-bUWOzrzR0gJKJO/PLGMR4uH2dqEgqGhrsCV+sSpk4KtOEnUQlfjZI/F7BFlqSvVpFbjdgYRRLysAeEZpJ6S1lg=="], "@ai-sdk/amazon-bedrock/@smithy/eventstream-codec": ["@smithy/eventstream-codec@4.2.13", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@smithy/types": "^4.14.0", "@smithy/util-hex-encoding": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-vYahwBAtRaAcFbOmE9aLr12z7RiHYDSLcnogSdxfm7kKfsNa3wH+NU5r7vTeB5rKvLsWyPjVX8iH94brP7umiQ=="], @@ -5170,7 +5172,7 @@ "@ai-sdk/fireworks/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="], - "@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.71", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-bUWOzrzR0gJKJO/PLGMR4uH2dqEgqGhrsCV+sSpk4KtOEnUQlfjZI/F7BFlqSvVpFbjdgYRRLysAeEZpJ6S1lg=="], "@ai-sdk/google-vertex/@ai-sdk/google": ["@ai-sdk/google@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-CbR82EgGPNrj/6q0HtclwuCqe0/pDShyv3nWDP/A9DroujzWXnLMlUJVrgPOsg4b40zQCwwVs2XSKCxvt/4QaA=="], @@ -5582,6 +5584,18 @@ "@opencode-ai/web/@shikijs/transformers": ["@shikijs/transformers@3.20.0", "", { "dependencies": { "@shikijs/core": "3.20.0", "@shikijs/types": "3.20.0" } }, "sha512-PrHHMRr3Q5W1qB/42kJW6laqFyWdhrPF2hNR9qjOm1xcSiAO3hAHo7HaVyHE6pMyevmy3i51O8kuGGXC78uK3g=="], + "@opentelemetry/exporter-trace-otlp-http/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + + "@opentelemetry/otlp-transformer/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + + "@opentelemetry/resources/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/sdk-logs/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + + "@opentelemetry/sdk-metrics/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", 
"@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + + "@opentelemetry/sdk-trace-base/@opentelemetry/resources": ["@opentelemetry/resources@2.6.1", "", { "dependencies": { "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA=="], + "@opentui/solid/@babel/core": ["@babel/core@7.28.0", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.0", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.6", "@babel/parser": "^7.28.0", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.0", "@babel/types": "^7.28.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ=="], "@opentui/solid/babel-preset-solid": ["babel-preset-solid@1.9.10", "", { "dependencies": { "babel-plugin-jsx-dom-expressions": "^0.40.3" }, "peerDependencies": { "@babel/core": "^7.0.0", "solid-js": "^1.9.10" }, "optionalPeers": ["solid-js"] }, "sha512-HCelrgua/Y+kqO8RyL04JBWS/cVdrtUv/h45GntgQY+cJl4eBcKkCDV3TdMjtKx1nXwRaR9QXslM/Npm1dxdZQ=="], @@ -5688,8 +5702,6 @@ "accepts/negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], - "ai/@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.95", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, 
"sha512-ZmUNNbZl3V42xwQzPaNUi+s8eqR2lnrxf0bvB6YbLXpLjHYv0k2Y78t12cNOfY0bxGeuVVTLyk856uLuQIuXEQ=="], - "ai-gateway-provider/@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.93", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hcXDU8QDwpAzLVTuY932TQVlIij9+iaVTxc5mPGY6yb//JMAAC5hMVhg93IrxlrxWLvMgjezNgoZGwquR+SGnw=="], "ai-gateway-provider/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="], @@ -5908,7 +5920,7 @@ "nypm/tinyexec": ["tinyexec@1.1.1", "", {}, "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg=="], - "opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="], + "opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.71", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-bUWOzrzR0gJKJO/PLGMR4uH2dqEgqGhrsCV+sSpk4KtOEnUQlfjZI/F7BFlqSvVpFbjdgYRRLysAeEZpJ6S1lg=="], "opencode/@ai-sdk/openai": ["@ai-sdk/openai@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-Wld+Rbc05KaUn08uBt06eEuwcgalcIFtIl32Yp+GxuZXUQwOb6YeAuq+C6da4ch6BurFoqEaLemJVwjBb7x+PQ=="], diff --git a/nix/hashes.json b/nix/hashes.json 
index 239b72fd70..01366c82dc 100644 --- a/nix/hashes.json +++ b/nix/hashes.json @@ -1,8 +1,8 @@ { "nodeModules": { - "x86_64-linux": "sha256-tYAb5Mo39UW1VEejYuo0jW0jzH2OyY/HrqgiZL3rmjY=", - "aarch64-linux": "sha256-3zGKV5UwokXpmY0nT1mry3IhNf2EQYLKT7ac+/trmQA=", - "aarch64-darwin": "sha256-oKXAut7eu/eW5a43OT8+aFuH1F1tuIldTs+7PUXSCv4=", - "x86_64-darwin": "sha256-Az+9X1scOEhw3aOO8laKJoZjiuz3qlLTIk1bx25P/z4=" + "x86_64-linux": "sha256-GjpBQhvGLTM6NWX29b/mS+KjrQPl0w9VjQHH5jaK9SM=", + "aarch64-linux": "sha256-F5h9p+iZ8CASdUYaYR7O22NwBRa/iT+ZinUxO8lbPTc=", + "aarch64-darwin": "sha256-jWo5yvCtjVKRf9i5XUcTTaLtj2+G6+T1Td2llO/cT5I=", + "x86_64-darwin": "sha256-LzV+5/8P2mkiFHmt+a8zDeJjRbU8z9nssSA4tzv1HxA=" } } diff --git a/package.json b/package.json index 5fecc09922..ddd711adaf 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,8 @@ "@types/cross-spawn": "6.0.6", "@octokit/rest": "22.0.0", "@hono/zod-validator": "0.4.2", + "@opentui/core": "0.1.99", + "@opentui/solid": "0.1.99", "ulid": "3.0.1", "@kobalte/core": "0.13.11", "@types/luxon": "3.7.1", @@ -51,7 +53,7 @@ "drizzle-kit": "1.0.0-beta.19-d95b7a4", "drizzle-orm": "1.0.0-beta.19-d95b7a4", "effect": "4.0.0-beta.48", - "ai": "6.0.158", + "ai": "6.0.168", "cross-spawn": "7.0.6", "hono": "4.10.7", "hono-openapi": "1.1.2", diff --git a/packages/app/package.json b/packages/app/package.json index 2941637d08..5a1a4504ea 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/app", - "version": "1.4.7", + "version": "1.4.11", "description": "", "type": "module", "exports": { diff --git a/packages/app/src/components/session/session-header.tsx b/packages/app/src/components/session/session-header.tsx index 7acfdfc374..021e5be67e 100644 --- a/packages/app/src/components/session/session-header.tsx +++ b/packages/app/src/components/session/session-header.tsx @@ -8,7 +8,7 @@ import { Spinner } from "@opencode-ai/ui/spinner" import { showToast } from "@opencode-ai/ui/toast" 
import { Tooltip, TooltipKeybind } from "@opencode-ai/ui/tooltip" import { getFilename } from "@opencode-ai/shared/util/path" -import { createEffect, createMemo, For, Show } from "solid-js" +import { createEffect, createMemo, createSignal, For, onMount, Show } from "solid-js" import { createStore } from "solid-js/store" import { Portal } from "solid-js/web" import { useCommand } from "@/context/command" @@ -16,6 +16,7 @@ import { useLanguage } from "@/context/language" import { useLayout } from "@/context/layout" import { usePlatform } from "@/context/platform" import { useServer } from "@/context/server" +import { useSettings } from "@/context/settings" import { useSync } from "@/context/sync" import { useTerminal } from "@/context/terminal" import { focusTerminalById } from "@/pages/session/helpers" @@ -134,6 +135,7 @@ export function SessionHeader() { const server = useServer() const platform = usePlatform() const language = useLanguage() + const settings = useSettings() const sync = useSync() const terminal = useTerminal() const { params, view } = useSessionLayout() @@ -151,6 +153,11 @@ export function SessionHeader() { }) const hotkey = createMemo(() => command.keybind("file.open")) const os = createMemo(() => detectOS(platform)) + const isDesktopBeta = platform.platform === "desktop" && import.meta.env.VITE_OPENCODE_CHANNEL === "beta" + const search = createMemo(() => !isDesktopBeta || settings.general.showSearch()) + const tree = createMemo(() => !isDesktopBeta || settings.general.showFileTree()) + const term = createMemo(() => !isDesktopBeta || settings.general.showTerminal()) + const status = createMemo(() => !isDesktopBeta || settings.general.showStatus()) const [exists, setExists] = createStore>>({ finder: true, @@ -262,12 +269,16 @@ export function SessionHeader() { .catch((err: unknown) => showRequestError(language, err)) } - const centerMount = createMemo(() => document.getElementById("opencode-titlebar-center")) - const rightMount = createMemo(() => 
document.getElementById("opencode-titlebar-right")) + const [centerMount, setCenterMount] = createSignal(null) + const [rightMount, setRightMount] = createSignal(null) + onMount(() => { + setCenterMount(document.getElementById("opencode-titlebar-center")) + setRightMount(document.getElementById("opencode-titlebar-right")) + }) return ( <> - + {(mount) => ( - + + + diff --git a/packages/app/src/components/settings-general.tsx b/packages/app/src/components/settings-general.tsx index b4ac061df4..c380fb69b3 100644 --- a/packages/app/src/components/settings-general.tsx +++ b/packages/app/src/components/settings-general.tsx @@ -106,6 +106,7 @@ export const SettingsGeneral: Component = () => { permission.disableAutoAccept(params.id, value) } + const desktop = createMemo(() => platform.platform === "desktop") const check = () => { if (!platform.checkUpdate) return @@ -279,6 +280,74 @@ export const SettingsGeneral: Component = () => { ) + const AdvancedSection = () => ( +
+

{language.t("settings.general.section.advanced")}

+ + + +
+ settings.general.setShowFileTree(checked)} + /> +
+
+ + +
+ settings.general.setShowNavigation(checked)} + /> +
+
+ + +
+ settings.general.setShowSearch(checked)} + /> +
+
+ + +
+ settings.general.setShowTerminal(checked)} + /> +
+
+ + +
+ settings.general.setShowStatus(checked)} + /> +
+
+
+
+ ) + const AppearanceSection = () => (

{language.t("settings.general.section.appearance")}

@@ -527,6 +596,7 @@ export const SettingsGeneral: Component = () => {
) + console.log(import.meta.env) return (
@@ -609,6 +679,10 @@ export const SettingsGeneral: Component = () => { ) }} + + + +
) diff --git a/packages/app/src/components/titlebar.tsx b/packages/app/src/components/titlebar.tsx index b7edb85ede..409fcbeff6 100644 --- a/packages/app/src/components/titlebar.tsx +++ b/packages/app/src/components/titlebar.tsx @@ -11,6 +11,7 @@ import { useLayout } from "@/context/layout" import { usePlatform } from "@/context/platform" import { useCommand } from "@/context/command" import { useLanguage } from "@/context/language" +import { useSettings } from "@/context/settings" import { applyPath, backPath, forwardPath } from "./titlebar-history" type TauriDesktopWindow = { @@ -40,6 +41,7 @@ export function Titlebar() { const platform = usePlatform() const command = useCommand() const language = useLanguage() + const settings = useSettings() const theme = useTheme() const navigate = useNavigate() const location = useLocation() @@ -78,6 +80,7 @@ export function Titlebar() { const canBack = createMemo(() => history.index > 0) const canForward = createMemo(() => history.index < history.stack.length - 1) const hasProjects = createMemo(() => layout.projects.list().length > 0) + const nav = createMemo(() => import.meta.env.VITE_OPENCODE_CHANNEL !== "beta" || settings.general.showNavigation()) const back = () => { const next = backPath(history) @@ -255,13 +258,12 @@ export function Titlebar() {
- +
+
+
+ +
+ + } + > <>
-
- +
+ + + +
{ + if (!panelProps.mobile) scrollContainerRef = el + }} + class="size-full flex flex-col py-2 gap-4 overflow-y-auto no-scrollbar [overflow-anchor:none]" + > + + + {(directory) => ( + + )} + + +
+ + store.activeWorkspace} + workspaceLabel={workspaceLabel} + /> + +
- } - > - <> -
- -
-
- - - -
{ - if (!panelProps.mobile) scrollContainerRef = el - }} - class="size-full flex flex-col py-2 gap-4 overflow-y-auto no-scrollbar [overflow-anchor:none]" - > - - - {(directory) => ( - - )} - - -
- - store.activeWorkspace} - workspaceLabel={workspaceLabel} - /> - -
-
- - -
- +
+
+ + )}
) - const [loading] = createResource( - () => route()?.store?.[0]?.bootstrapPromise, - (p) => p, - ) - return (
- {(autoselecting(), loading()) ?? ""} + {autoselecting() ?? ""}
diff --git a/packages/app/src/pages/layout/sidebar-workspace.tsx b/packages/app/src/pages/layout/sidebar-workspace.tsx index c1836fa8a4..0202cfc3be 100644 --- a/packages/app/src/pages/layout/sidebar-workspace.tsx +++ b/packages/app/src/pages/layout/sidebar-workspace.tsx @@ -317,12 +317,11 @@ export const SortableWorkspace = (props: { }) const open = createMemo(() => props.ctx.workspaceExpanded(props.directory, local())) const boot = createMemo(() => open() || active()) - const booted = createMemo((prev) => prev || workspaceStore.status === "complete", false) const count = createMemo(() => sessions()?.length ?? 0) const hasMore = createMemo(() => workspaceStore.sessionTotal > count()) + const query = useQuery(() => ({ ...loadSessionsQuery(props.project.worktree) })) const busy = createMemo(() => props.ctx.isBusy(props.directory)) - const wasBusy = createMemo((prev) => prev || busy(), false) - const loading = createMemo(() => open() && !booted() && count() === 0 && !wasBusy()) + const loading = () => query.isLoading const touch = createMediaQuery("(hover: none)") const showNew = createMemo(() => !loading() && (touch() || count() === 0 || (active() && !params.id))) const loadMore = async () => { @@ -427,7 +426,7 @@ export const SortableWorkspace = (props: { mobile={props.mobile} ctx={props.ctx} showNew={showNew} - loading={loading} + loading={() => query.isLoading && count() === 0} sessions={sessions} hasMore={hasMore} loadMore={loadMore} @@ -453,11 +452,10 @@ export const LocalWorkspace = (props: { }) const slug = createMemo(() => base64Encode(props.project.worktree)) const sessions = createMemo(() => sortedRootSessions(workspace().store, props.sortNow())) - const booted = createMemo((prev) => prev || workspace().store.status === "complete", false) const count = createMemo(() => sessions()?.length ?? 
0) const query = useQuery(() => ({ ...loadSessionsQuery(props.project.worktree) })) - const loading = createMemo(() => query.isPending && count() === 0) const hasMore = createMemo(() => workspace().store.sessionTotal > count()) + const loading = () => query.isLoading && count() === 0 const loadMore = async () => { workspace().setStore("limit", (limit) => (limit ?? 0) + 5) await globalSync.project.loadSessions(props.project.worktree) @@ -473,7 +471,7 @@ export const LocalWorkspace = (props: { mobile={props.mobile} ctx={props.ctx} showNew={() => false} - loading={() => query.isLoading} + loading={loading} sessions={sessions} hasMore={hasMore} loadMore={loadMore} diff --git a/packages/app/src/pages/session.tsx b/packages/app/src/pages/session.tsx index c4d642bf8d..4ae973b858 100644 --- a/packages/app/src/pages/session.tsx +++ b/packages/app/src/pages/session.tsx @@ -1,6 +1,6 @@ -import type { Project, UserMessage, VcsFileDiff } from "@opencode-ai/sdk/v2" +import type { Project, UserMessage } from "@opencode-ai/sdk/v2" import { useDialog } from "@opencode-ai/ui/context/dialog" -import { useMutation } from "@tanstack/solid-query" +import { createQuery, skipToken, useMutation, useQueryClient } from "@tanstack/solid-query" import { batch, onCleanup, @@ -324,6 +324,7 @@ export default function Page() { const local = useLocal() const file = useFile() const sync = useSync() + const queryClient = useQueryClient() const dialog = useDialog() const language = useLanguage() const sdk = useSDK() @@ -518,26 +519,6 @@ export default function Page() { deferRender: false, }) - const [vcs, setVcs] = createStore<{ - diff: { - git: VcsFileDiff[] - branch: VcsFileDiff[] - } - ready: { - git: boolean - branch: boolean - } - }>({ - diff: { - git: [] as VcsFileDiff[], - branch: [] as VcsFileDiff[], - }, - ready: { - git: false, - branch: false, - }, - }) - const [followup, setFollowup] = persisted( Persist.workspace(sdk.directory, "followup", ["followup.v1"]), createStore<{ @@ -571,68 +552,6 
@@ export default function Page() { let todoTimer: number | undefined let diffFrame: number | undefined let diffTimer: number | undefined - const vcsTask = new Map>() - const vcsRun = new Map() - - const bumpVcs = (mode: VcsMode) => { - const next = (vcsRun.get(mode) ?? 0) + 1 - vcsRun.set(mode, next) - return next - } - - const resetVcs = (mode?: VcsMode) => { - const list = mode ? [mode] : (["git", "branch"] as const) - list.forEach((item) => { - bumpVcs(item) - vcsTask.delete(item) - setVcs("diff", item, []) - setVcs("ready", item, false) - }) - } - - const loadVcs = (mode: VcsMode, force = false) => { - if (sync.project?.vcs !== "git") return Promise.resolve() - if (!force && vcs.ready[mode]) return Promise.resolve() - - if (force) { - if (vcsTask.has(mode)) bumpVcs(mode) - vcsTask.delete(mode) - setVcs("ready", mode, false) - } - - const current = vcsTask.get(mode) - if (current) return current - - const run = bumpVcs(mode) - - const task = sdk.client.vcs - .diff({ mode }) - .then((result) => { - if (vcsRun.get(mode) !== run) return - setVcs("diff", mode, list(result.data)) - setVcs("ready", mode, true) - }) - .catch((error) => { - if (vcsRun.get(mode) !== run) return - console.debug("[session-review] failed to load vcs diff", { mode, error }) - setVcs("diff", mode, []) - setVcs("ready", mode, true) - }) - .finally(() => { - if (vcsTask.get(mode) === task) vcsTask.delete(mode) - }) - - vcsTask.set(mode, task) - return task - } - - const refreshVcs = () => { - resetVcs() - const mode = untrack(vcsMode) - if (!mode) return - if (!untrack(wantsReview)) return - void loadVcs(mode, true) - } createComputed((prev) => { const open = desktopReviewOpen() @@ -663,21 +582,52 @@ export default function Page() { list.push("turn") return list }) + const mobileChanges = createMemo(() => !isDesktop() && store.mobileTab === "changes") + const wantsReview = createMemo(() => + isDesktop() + ? 
desktopFileTreeOpen() || (desktopReviewOpen() && activeTab() === "review") + : store.mobileTab === "changes", + ) const vcsMode = createMemo(() => { if (store.changes === "git" || store.changes === "branch") return store.changes }) - const reviewDiffs = createMemo(() => { - if (store.changes === "git") return list(vcs.diff.git) - if (store.changes === "branch") return list(vcs.diff.branch) + const vcsKey = createMemo( + () => ["session-vcs", sdk.directory, sync.data.vcs?.branch ?? "", sync.data.vcs?.default_branch ?? ""] as const, + ) + const vcsQuery = createQuery(() => { + const mode = vcsMode() + const enabled = wantsReview() && sync.project?.vcs === "git" + + return { + queryKey: [...vcsKey(), mode] as const, + enabled, + staleTime: Number.POSITIVE_INFINITY, + gcTime: 60 * 1000, + queryFn: mode + ? () => + sdk.client.vcs + .diff({ mode }) + .then((result) => list(result.data)) + .catch((error) => { + console.debug("[session-review] failed to load vcs diff", { mode, error }) + return [] + }) + : skipToken, + } + }) + const refreshVcs = () => void queryClient.invalidateQueries({ queryKey: vcsKey() }) + const reviewDiffs = () => { + if (store.changes === "git" || store.changes === "branch") + // avoids suspense + return vcsQuery.isFetched ? (vcsQuery.data ?? 
[]) : [] return turnDiffs() - }) - const reviewCount = createMemo(() => reviewDiffs().length) - const hasReview = createMemo(() => reviewCount() > 0) - const reviewReady = createMemo(() => { - if (store.changes === "git") return vcs.ready.git - if (store.changes === "branch") return vcs.ready.branch + } + const reviewCount = () => reviewDiffs().length + const hasReview = () => reviewCount() > 0 + const reviewReady = () => { + if (store.changes === "git" || store.changes === "branch") return !vcsQuery.isPending return true - }) + } const newSessionWorktree = createMemo(() => { if (store.newSessionWorktree === "create") return "create" @@ -897,27 +847,6 @@ export default function Page() { ), ) - createEffect( - on( - () => sdk.directory, - () => { - resetVcs() - }, - { defer: true }, - ), - ) - - createEffect( - on( - () => [sync.data.vcs?.branch, sync.data.vcs?.default_branch] as const, - (next, prev) => { - if (prev === undefined || same(next, prev)) return - refreshVcs() - }, - { defer: true }, - ), - ) - const stopVcs = sdk.event.listen((evt) => { if (evt.details.type !== "file.watcher.updated") return const props = @@ -1051,13 +980,6 @@ export default function Page() { } } - const mobileChanges = createMemo(() => !isDesktop() && store.mobileTab === "changes") - const wantsReview = createMemo(() => - isDesktop() - ? desktopFileTreeOpen() || (desktopReviewOpen() && activeTab() === "review") - : store.mobileTab === "changes", - ) - createEffect(() => { const list = changesOptions() if (list.includes(store.changes)) return @@ -1066,22 +988,12 @@ export default function Page() { setStore("changes", next) }) - createEffect(() => { - const mode = vcsMode() - if (!mode) return - if (!wantsReview()) return - void loadVcs(mode) - }) - createEffect( on( () => sync.data.session_status[params.id ?? 
""]?.type, (next, prev) => { - const mode = vcsMode() - if (!mode) return - if (!wantsReview()) return if (next !== "idle" || prev === undefined || prev === "idle") return - void loadVcs(mode, true) + refreshVcs() }, { defer: true }, ), diff --git a/packages/app/src/pages/session/session-side-panel.tsx b/packages/app/src/pages/session/session-side-panel.tsx index cddbea84d6..99197f0a70 100644 --- a/packages/app/src/pages/session/session-side-panel.tsx +++ b/packages/app/src/pages/session/session-side-panel.tsx @@ -19,6 +19,9 @@ import { useCommand } from "@/context/command" import { useFile, type SelectedLineRange } from "@/context/file" import { useLanguage } from "@/context/language" import { useLayout } from "@/context/layout" +import { usePlatform } from "@/context/platform" +import { useSettings } from "@/context/settings" +import { useSync } from "@/context/sync" import { createFileTabListSync } from "@/pages/session/file-tab-scroll" import { FileTabContent } from "@/pages/session/file-tabs" import { createOpenSessionFileTab, createSessionTabs, getTabReorderIndex, type Sizing } from "@/pages/session/helpers" @@ -39,6 +42,9 @@ export function SessionSidePanel(props: { size: Sizing }) { const layout = useLayout() + const platform = usePlatform() + const settings = useSettings() + const sync = useSync() const file = useFile() const language = useLanguage() const command = useCommand() @@ -46,9 +52,15 @@ export function SessionSidePanel(props: { const { sessionKey, tabs, view } = useSessionLayout() const isDesktop = createMediaQuery("(min-width: 768px)") + const shown = createMemo( + () => + platform.platform !== "desktop" || + import.meta.env.VITE_OPENCODE_CHANNEL !== "beta" || + settings.general.showFileTree(), + ) const reviewOpen = createMemo(() => isDesktop() && view().reviewPanel.opened()) - const fileOpen = createMemo(() => isDesktop() && layout.fileTree.opened()) + const fileOpen = createMemo(() => isDesktop() && shown() && layout.fileTree.opened()) const 
open = createMemo(() => reviewOpen() || fileOpen()) const reviewTab = createMemo(() => isDesktop()) const panelWidth = createMemo(() => { @@ -341,98 +353,99 @@ export function SessionSidePanel(props: {
-
+
- - - - {props.reviewCount()}{" "} - {language.t( - props.reviewCount() === 1 ? "session.review.change.one" : "session.review.change.other", - )} - - - {language.t("session.files.all")} - - - - - - - {language.t("common.loading")} - {language.t("common.loading.ellipsis")} -
- } - > + + + + {props.reviewCount()}{" "} + {language.t( + props.reviewCount() === 1 ? "session.review.change.one" : "session.review.change.other", + )} + + + {language.t("session.files.all")} + + + + + + + {language.t("common.loading")} + {language.t("common.loading.ellipsis")} +
+ } + > + props.focusReviewDiff(node.path)} + /> + + + + + + + {empty(language.t("session.files.empty"))} + props.focusReviewDiff(node.path)} + onFileClick={(node) => openTab(file.tab(node.path))} /> - - - {empty(props.empty())} - - - - - {empty(language.t("session.files.empty"))} - - openTab(file.tab(node.path))} - /> - - - - -
- -
props.size.start()}> - { - props.size.touch() - layout.fileTree.resize(width) - }} - /> + + + +
-
-
+ +
props.size.start()}> + { + props.size.touch() + layout.fileTree.resize(width) + }} + /> +
+
+ + diff --git a/packages/app/src/pages/session/use-session-commands.tsx b/packages/app/src/pages/session/use-session-commands.tsx index b5d2544636..d649aeb0cb 100644 --- a/packages/app/src/pages/session/use-session-commands.tsx +++ b/packages/app/src/pages/session/use-session-commands.tsx @@ -7,8 +7,10 @@ import { useLanguage } from "@/context/language" import { useLayout } from "@/context/layout" import { useLocal } from "@/context/local" import { usePermission } from "@/context/permission" +import { usePlatform } from "@/context/platform" import { usePrompt } from "@/context/prompt" import { useSDK } from "@/context/sdk" +import { useSettings } from "@/context/settings" import { useSync } from "@/context/sync" import { useTerminal } from "@/context/terminal" import { showToast } from "@opencode-ai/ui/toast" @@ -39,8 +41,10 @@ export const useSessionCommands = (actions: SessionCommandContext) => { const language = useLanguage() const local = useLocal() const permission = usePermission() + const platform = usePlatform() const prompt = usePrompt() const sdk = useSDK() + const settings = useSettings() const sync = useSync() const terminal = useTerminal() const layout = useLayout() @@ -66,6 +70,10 @@ export const useSessionCommands = (actions: SessionCommandContext) => { }) const activeFileTab = tabState.activeFileTab const closableTab = tabState.closableTab + const shown = () => + platform.platform !== "desktop" || + import.meta.env.VITE_OPENCODE_CHANNEL !== "beta" || + settings.general.showFileTree() const idle = { type: "idle" as const } const status = () => sync.data.session_status[params.id ?? ""] ?? idle @@ -457,12 +465,16 @@ export const useSessionCommands = (actions: SessionCommandContext) => { keybind: "mod+shift+r", onSelect: () => view().reviewPanel.toggle(), }), - viewCommand({ - id: "fileTree.toggle", - title: language.t("command.fileTree.toggle"), - keybind: "mod+\\", - onSelect: () => layout.fileTree.toggle(), - }), + ...(shown() + ? 
[ + viewCommand({ + id: "fileTree.toggle", + title: language.t("command.fileTree.toggle"), + keybind: "mod+\\", + onSelect: () => layout.fileTree.toggle(), + }), + ] + : []), viewCommand({ id: "input.focus", title: language.t("command.input.focus"), diff --git a/packages/app/src/utils/persist.ts b/packages/app/src/utils/persist.ts index dce0e94c3b..0cac30cb1e 100644 --- a/packages/app/src/utils/persist.ts +++ b/packages/app/src/utils/persist.ts @@ -469,7 +469,7 @@ export function persisted( state, setState, init, - Object.assign(() => ready() === true, { + Object.assign(() => (ready.loading ? false : ready.latest === true), { promise: init instanceof Promise ? init : undefined, }), ] diff --git a/packages/console/app/package.json b/packages/console/app/package.json index 8783f3fd05..200a5e30e3 100644 --- a/packages/console/app/package.json +++ b/packages/console/app/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-app", - "version": "1.4.7", + "version": "1.4.11", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/console/app/src/routes/zen/util/handler.ts b/packages/console/app/src/routes/zen/util/handler.ts index d1c5985a81..2e576eaf68 100644 --- a/packages/console/app/src/routes/zen/util/handler.ts +++ b/packages/console/app/src/routes/zen/util/handler.ts @@ -45,6 +45,7 @@ import { LiteData } from "@opencode-ai/console-core/lite.js" import { Resource } from "@opencode-ai/console-resource" import { i18n, type Key } from "~/i18n" import { localeFromRequest } from "~/lib/language" +import { createModelTpmLimiter } from "./modelTpmLimiter" type ZenData = Awaited> type RetryOptions = { @@ -121,6 +122,8 @@ export async function handler( const authInfo = await authenticate(modelInfo, zenApiKey) const billingSource = validateBilling(authInfo, modelInfo) logger.metric({ source: billingSource }) + const modelTpmLimiter = createModelTpmLimiter(modelInfo.providers) + const modelTpmLimits = await modelTpmLimiter?.check() const retriableRequest 
= async (retry: RetryOptions = { excludeProviders: [], retryCount: 0 }) => { const providerInfo = selectProvider( @@ -133,6 +136,7 @@ export async function handler( trialProviders, retry, stickyProvider, + modelTpmLimits, ) validateModelSettings(billingSource, authInfo) updateProviderKey(authInfo, providerInfo) @@ -229,6 +233,7 @@ export async function handler( const usageInfo = providerInfo.normalizeUsage(json.usage) const costInfo = calculateCost(modelInfo, usageInfo) await trialLimiter?.track(usageInfo) + await modelTpmLimiter?.track(providerInfo.id, providerInfo.model, usageInfo) await trackUsage(sessionId, billingSource, authInfo, modelInfo, providerInfo, usageInfo, costInfo) await reload(billingSource, authInfo, costInfo) json.cost = calculateOccurredCost(billingSource, costInfo) @@ -278,6 +283,7 @@ export async function handler( const usageInfo = providerInfo.normalizeUsage(usage) const costInfo = calculateCost(modelInfo, usageInfo) await trialLimiter?.track(usageInfo) + await modelTpmLimiter?.track(providerInfo.id, providerInfo.model, usageInfo) await trackUsage(sessionId, billingSource, authInfo, modelInfo, providerInfo, usageInfo, costInfo) await reload(billingSource, authInfo, costInfo) const cost = calculateOccurredCost(billingSource, costInfo) @@ -433,12 +439,16 @@ export async function handler( trialProviders: string[] | undefined, retry: RetryOptions, stickyProvider: string | undefined, + modelTpmLimits: Record | undefined, ) { const modelProvider = (() => { + // Byok is top priority b/c if user set their own API key, we should use it + // instead of using the sticky provider for the same session if (authInfo?.provider?.credentials) { return modelInfo.providers.find((provider) => provider.id === modelInfo.byokProvider) } + // Always use the same provider for the same session if (stickyProvider) { const provider = modelInfo.providers.find((provider) => provider.id === stickyProvider) if (provider) return provider @@ -451,10 +461,20 @@ export async 
function handler( } if (retry.retryCount !== MAX_FAILOVER_RETRIES) { - const providers = modelInfo.providers + const allProviders = modelInfo.providers .filter((provider) => !provider.disabled) + .filter((provider) => provider.weight !== 0) .filter((provider) => !retry.excludeProviders.includes(provider.id)) - .flatMap((provider) => Array(provider.weight ?? 1).fill(provider)) + .filter((provider) => { + if (!provider.tpmLimit) return true + const usage = modelTpmLimits?.[`${provider.id}/${provider.model}`] ?? 0 + return usage < provider.tpmLimit * 1_000_000 + }) + + const topPriority = Math.min(...allProviders.map((p) => p.priority)) + const providers = allProviders + .filter((p) => p.priority <= topPriority) + .flatMap((provider) => Array(provider.weight).fill(provider)) // Use the last 4 characters of session ID to select a provider const identifier = sessionId.length ? sessionId : ip diff --git a/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts b/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts new file mode 100644 index 0000000000..9a834a1a5b --- /dev/null +++ b/packages/console/app/src/routes/zen/util/modelTpmLimiter.ts @@ -0,0 +1,51 @@ +import { and, Database, eq, inArray, sql } from "@opencode-ai/console-core/drizzle/index.js" +import { ModelRateLimitTable } from "@opencode-ai/console-core/schema/ip.sql.js" +import { UsageInfo } from "./provider/provider" + +export function createModelTpmLimiter(providers: { id: string; model: string; tpmLimit?: number }[]) { + const keys = providers.filter((p) => p.tpmLimit).map((p) => `${p.id}/${p.model}`) + if (keys.length === 0) return + + const yyyyMMddHHmm = new Date(Date.now()) + .toISOString() + .replace(/[^0-9]/g, "") + .substring(0, 12) + + return { + check: async () => { + const data = await Database.use((tx) => + tx + .select() + .from(ModelRateLimitTable) + .where(and(inArray(ModelRateLimitTable.key, keys), eq(ModelRateLimitTable.interval, yyyyMMddHHmm))), + ) + + // convert to map of model 
to count + return data.reduce( + (acc, curr) => { + acc[curr.key] = curr.count + return acc + }, + {} as Record, + ) + }, + track: async (id: string, model: string, usageInfo: UsageInfo) => { + const key = `${id}/${model}` + if (!keys.includes(key)) return + const usage = + usageInfo.inputTokens + + usageInfo.outputTokens + + (usageInfo.reasoningTokens ?? 0) + + (usageInfo.cacheReadTokens ?? 0) + + (usageInfo.cacheWrite5mTokens ?? 0) + + (usageInfo.cacheWrite1hTokens ?? 0) + if (usage <= 0) return + await Database.use((tx) => + tx + .insert(ModelRateLimitTable) + .values({ key, interval: yyyyMMddHHmm, count: usage }) + .onDuplicateKeyUpdate({ set: { count: sql`${ModelRateLimitTable.count} + ${usage}` } }), + ) + }, + } +} diff --git a/packages/console/core/migrations/20260417071612_tidy_diamondback/migration.sql b/packages/console/core/migrations/20260417071612_tidy_diamondback/migration.sql new file mode 100644 index 0000000000..41a4efe68e --- /dev/null +++ b/packages/console/core/migrations/20260417071612_tidy_diamondback/migration.sql @@ -0,0 +1,6 @@ +CREATE TABLE `model_rate_limit` ( + `key` varchar(255) NOT NULL, + `interval` varchar(40) NOT NULL, + `count` int NOT NULL, + CONSTRAINT PRIMARY KEY(`key`,`interval`) +); diff --git a/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json b/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json new file mode 100644 index 0000000000..2152bfa76f --- /dev/null +++ b/packages/console/core/migrations/20260417071612_tidy_diamondback/snapshot.json @@ -0,0 +1,2567 @@ +{ + "version": "6", + "dialect": "mysql", + "id": "93c492af-c95b-4213-9fc2-38c3dd10374d", + "prevIds": ["a09a925d-6cdd-4e7c-b8b1-11c259928b4c"], + "ddl": [ + { + "name": "account", + "entityType": "tables" + }, + { + "name": "auth", + "entityType": "tables" + }, + { + "name": "benchmark", + "entityType": "tables" + }, + { + "name": "billing", + "entityType": "tables" + }, + { + "name": "lite", + "entityType": 
"tables" + }, + { + "name": "payment", + "entityType": "tables" + }, + { + "name": "subscription", + "entityType": "tables" + }, + { + "name": "usage", + "entityType": "tables" + }, + { + "name": "ip_rate_limit", + "entityType": "tables" + }, + { + "name": "ip", + "entityType": "tables" + }, + { + "name": "key_rate_limit", + "entityType": "tables" + }, + { + "name": "model_rate_limit", + "entityType": "tables" + }, + { + "name": "key", + "entityType": "tables" + }, + { + "name": "model", + "entityType": "tables" + }, + { + "name": "provider", + "entityType": "tables" + }, + { + "name": "user", + "entityType": "tables" + }, + { + "name": "workspace", + "entityType": "tables" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "account" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "account" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "account" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "account" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + 
"onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "auth" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "auth" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "auth" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "auth" + }, + { + "type": "enum('email','github','google')", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "provider", + "entityType": "columns", + "table": "auth" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "subject", + "entityType": "columns", + "table": "auth" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "account_id", + "entityType": "columns", + "table": "auth" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + 
"onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "varchar(64)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "model", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "varchar(64)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "agent", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "mediumtext", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "result", + "entityType": "columns", + "table": "benchmark" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": 
false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "customer_id", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "payment_method_id", + "entityType": "columns", + "table": "billing" + }, + { + "type": 
"varchar(32)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "payment_method_type", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(4)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "payment_method_last4", + "entityType": "columns", + "table": "billing" + }, + { + "type": "bigint", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "balance", + "entityType": "columns", + "table": "billing" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_limit", + "entityType": "columns", + "table": "billing" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_usage", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_monthly_usage_updated", + "entityType": "columns", + "table": "billing" + }, + { + "type": "boolean", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "reload", + "entityType": "columns", + "table": "billing" + }, + { + "type": "int", + 
"notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "reload_trigger", + "entityType": "columns", + "table": "billing" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "reload_amount", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "reload_error", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_reload_error", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_reload_locked_till", + "entityType": "columns", + "table": "billing" + }, + { + "type": "json", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "subscription", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(28)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "subscription_id", + "entityType": "columns", + "table": "billing" + }, + { + "type": 
"enum('20','100','200')", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "subscription_plan", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_subscription_booked", + "entityType": "columns", + "table": "billing" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_subscription_selected", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(28)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "lite_subscription_id", + "entityType": "columns", + "table": "billing" + }, + { + "type": "json", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "lite", + "entityType": "columns", + "table": "billing" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "lite" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "lite" + }, + { + 
"type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "lite" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "user_id", + "entityType": "columns", + "table": "lite" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "rolling_usage", + "entityType": "columns", + "table": "lite" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "weekly_usage", + "entityType": "columns", + "table": "lite" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_usage", + "entityType": "columns", + "table": "lite" + }, 
+ { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_rolling_updated", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_weekly_updated", + "entityType": "columns", + "table": "lite" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_monthly_updated", + "entityType": "columns", + "table": "lite" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "payment" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + 
"entityType": "columns", + "table": "payment" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "payment" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "customer_id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "invoice_id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "payment_id", + "entityType": "columns", + "table": "payment" + }, + { + "type": "bigint", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "amount", + "entityType": "columns", + "table": "payment" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_refunded", + "entityType": "columns", + "table": "payment" + }, + { + "type": "json", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "enrichment", + "entityType": "columns", + 
"table": "payment" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "user_id", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + 
"name": "rolling_usage", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "fixed_usage", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_rolling_updated", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_fixed_updated", + "entityType": "columns", + "table": "subscription" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "usage" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "usage" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + 
"charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "usage" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "model", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "provider", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "input_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "output_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "reasoning_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + 
"name": "cache_read_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "cache_write_5m_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "cache_write_1h_tokens", + "entityType": "columns", + "table": "usage" + }, + { + "type": "bigint", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "cost", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(30)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "key_id", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(30)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "usage" + }, + { + "type": "json", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "enrichment", + "entityType": "columns", + "table": "usage" + }, + { + "type": "varchar(45)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "ip", + "entityType": "columns", + 
"table": "ip_rate_limit" + }, + { + "type": "varchar(10)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "interval", + "entityType": "columns", + "table": "ip_rate_limit" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "count", + "entityType": "columns", + "table": "ip_rate_limit" + }, + { + "type": "varchar(45)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "ip", + "entityType": "columns", + "table": "ip" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "ip" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "ip" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "ip" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "usage", + "entityType": "columns", + 
"table": "ip" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "key", + "entityType": "columns", + "table": "key_rate_limit" + }, + { + "type": "varchar(40)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "interval", + "entityType": "columns", + "table": "key_rate_limit" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "count", + "entityType": "columns", + "table": "key_rate_limit" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "key", + "entityType": "columns", + "table": "model_rate_limit" + }, + { + "type": "varchar(40)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "interval", + "entityType": "columns", + "table": "model_rate_limit" + }, + { + "type": "int", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "count", + "entityType": "columns", + "table": "model_rate_limit" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "key" + }, + { + "type": 
"varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "key" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "key" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "key" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "key" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "key" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "key", + "entityType": "columns", + "table": "key" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "user_id", + "entityType": "columns", + "table": "key" + }, + { + "type": 
"timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_used", + "entityType": "columns", + "table": "key" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "model" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "model" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "model" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "model" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "model" + }, + { + "type": "varchar(64)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "model", + "entityType": "columns", + "table": "model" + }, + { + 
"type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "provider" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "provider" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "provider" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "provider" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "provider" + }, + { + "type": "varchar(64)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "provider", + "entityType": "columns", + "table": "provider" + }, + { + "type": "text", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "credentials", + "entityType": "columns", + 
"table": "provider" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "user" + }, + { + "type": "varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "user" + }, + { + "type": "varchar(30)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "account_id", + "entityType": "columns", + "table": "user" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "email", + "entityType": 
"columns", + "table": "user" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_seen", + "entityType": "columns", + "table": "user" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "color", + "entityType": "columns", + "table": "user" + }, + { + "type": "enum('admin','member')", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "role", + "entityType": "columns", + "table": "user" + }, + { + "type": "int", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_limit", + "entityType": "columns", + "table": "user" + }, + { + "type": "bigint", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "monthly_usage", + "entityType": "columns", + "table": "user" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_monthly_usage_updated", + "entityType": "columns", + "table": "user" + }, + { + "type": 
"varchar(30)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "varchar(255)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "slug", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "varchar(255)", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(now())", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "timestamp(3)", + "notNull": true, + "autoIncrement": false, + "default": "(CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3))", + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "timestamp(3)", + "notNull": false, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "time_deleted", + "entityType": "columns", + "table": "workspace" + }, + { + "columns": ["id"], + "name": "PRIMARY", + "table": "account", + "entityType": "pks" + }, + { + "columns": ["id"], + "name": "PRIMARY", + "table": "auth", + "entityType": "pks" + }, + { + "columns": ["id"], + "name": "PRIMARY", + "table": "benchmark", + 
"entityType": "pks" + }, + { + "columns": ["workspace_id", "id"], + "name": "PRIMARY", + "table": "billing", + "entityType": "pks" + }, + { + "columns": ["workspace_id", "id"], + "name": "PRIMARY", + "table": "lite", + "entityType": "pks" + }, + { + "columns": ["workspace_id", "id"], + "name": "PRIMARY", + "table": "payment", + "entityType": "pks" + }, + { + "columns": ["workspace_id", "id"], + "name": "PRIMARY", + "table": "subscription", + "entityType": "pks" + }, + { + "columns": ["workspace_id", "id"], + "name": "PRIMARY", + "table": "usage", + "entityType": "pks" + }, + { + "columns": ["ip", "interval"], + "name": "PRIMARY", + "table": "ip_rate_limit", + "entityType": "pks" + }, + { + "columns": ["ip"], + "name": "PRIMARY", + "table": "ip", + "entityType": "pks" + }, + { + "columns": ["key", "interval"], + "name": "PRIMARY", + "table": "key_rate_limit", + "entityType": "pks" + }, + { + "columns": ["key", "interval"], + "name": "PRIMARY", + "table": "model_rate_limit", + "entityType": "pks" + }, + { + "columns": ["workspace_id", "id"], + "name": "PRIMARY", + "table": "key", + "entityType": "pks" + }, + { + "columns": ["workspace_id", "id"], + "name": "PRIMARY", + "table": "model", + "entityType": "pks" + }, + { + "columns": ["workspace_id", "id"], + "name": "PRIMARY", + "table": "provider", + "entityType": "pks" + }, + { + "columns": ["workspace_id", "id"], + "name": "PRIMARY", + "table": "user", + "entityType": "pks" + }, + { + "columns": ["id"], + "name": "PRIMARY", + "table": "workspace", + "entityType": "pks" + }, + { + "columns": [ + { + "value": "provider", + "isExpression": false + }, + { + "value": "subject", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "provider", + "entityType": "indexes", + "table": "auth" + }, + { + "columns": [ + { + "value": "account_id", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + 
"lock": null, + "nameExplicit": true, + "name": "account_id", + "entityType": "indexes", + "table": "auth" + }, + { + "columns": [ + { + "value": "time_created", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "time_created", + "entityType": "indexes", + "table": "benchmark" + }, + { + "columns": [ + { + "value": "customer_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_customer_id", + "entityType": "indexes", + "table": "billing" + }, + { + "columns": [ + { + "value": "subscription_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_subscription_id", + "entityType": "indexes", + "table": "billing" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "user_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "workspace_user_id", + "entityType": "indexes", + "table": "lite" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "user_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "workspace_user_id", + "entityType": "indexes", + "table": "subscription" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "time_created", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "usage_time_created", + "entityType": "indexes", + "table": "usage" + }, + { + "columns": [ + { + "value": "key", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + 
"algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_key", + "entityType": "indexes", + "table": "key" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "model", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "model_workspace_model", + "entityType": "indexes", + "table": "model" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "provider", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "workspace_provider", + "entityType": "indexes", + "table": "provider" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "account_id", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "user_account_id", + "entityType": "indexes", + "table": "user" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + }, + { + "value": "email", + "isExpression": false + } + ], + "isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "user_email", + "entityType": "indexes", + "table": "user" + }, + { + "columns": [ + { + "value": "account_id", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_account_id", + "entityType": "indexes", + "table": "user" + }, + { + "columns": [ + { + "value": "email", + "isExpression": false + } + ], + "isUnique": false, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "global_email", + "entityType": "indexes", + "table": "user" + }, + { + "columns": [ + { + "value": "slug", + "isExpression": false + } + ], + 
"isUnique": true, + "using": null, + "algorithm": null, + "lock": null, + "nameExplicit": true, + "name": "slug", + "entityType": "indexes", + "table": "workspace" + } + ], + "renames": [] +} diff --git a/packages/console/core/package.json b/packages/console/core/package.json index cdefd0e609..f233726e69 100644 --- a/packages/console/core/package.json +++ b/packages/console/core/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/console-core", - "version": "1.4.7", + "version": "1.4.11", "private": true, "type": "module", "license": "MIT", diff --git a/packages/console/core/src/model.ts b/packages/console/core/src/model.ts index 3d614d3034..6281382d65 100644 --- a/packages/console/core/src/model.ts +++ b/packages/console/core/src/model.ts @@ -34,6 +34,8 @@ export namespace ZenData { z.object({ id: z.string(), model: z.string(), + priority: z.number().optional(), + tpmLimit: z.number().optional(), weight: z.number().optional(), disabled: z.boolean().optional(), storeModel: z.string().optional(), @@ -123,10 +125,16 @@ export namespace ZenData { ), models: (() => { const normalize = (model: z.infer) => { - const composite = model.providers.find((p) => compositeProviders[p.id].length > 1) + const providers = model.providers.map((p) => ({ + ...p, + priority: p.priority ?? Infinity, + weight: p.weight ?? 1, + })) + const composite = providers.find((p) => compositeProviders[p.id].length > 1) if (!composite) return { trialProvider: model.trialProvider ? [model.trialProvider] : undefined, + providers, } const weightMulti = compositeProviders[composite.id].length @@ -137,17 +145,16 @@ export namespace ZenData { if (model.trialProvider === composite.id) return compositeProviders[composite.id].map((p) => p.id) return [model.trialProvider] })(), - providers: model.providers.flatMap((p) => + providers: providers.flatMap((p) => p.id === composite.id ? 
compositeProviders[p.id].map((sub) => ({ ...p, id: sub.id, - weight: p.weight ?? 1, })) : [ { ...p, - weight: (p.weight ?? 1) * weightMulti, + weight: p.weight * weightMulti, }, ], ), diff --git a/packages/console/core/src/schema/ip.sql.ts b/packages/console/core/src/schema/ip.sql.ts index a840a78c19..830842e64d 100644 --- a/packages/console/core/src/schema/ip.sql.ts +++ b/packages/console/core/src/schema/ip.sql.ts @@ -30,3 +30,13 @@ export const KeyRateLimitTable = mysqlTable( }, (table) => [primaryKey({ columns: [table.key, table.interval] })], ) + +export const ModelRateLimitTable = mysqlTable( + "model_rate_limit", + { + key: varchar("key", { length: 255 }).notNull(), + interval: varchar("interval", { length: 40 }).notNull(), + count: int("count").notNull(), + }, + (table) => [primaryKey({ columns: [table.key, table.interval] })], +) diff --git a/packages/console/function/package.json b/packages/console/function/package.json index 898c540bac..1142230bb7 100644 --- a/packages/console/function/package.json +++ b/packages/console/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-function", - "version": "1.4.7", + "version": "1.4.11", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": "module", diff --git a/packages/console/mail/package.json b/packages/console/mail/package.json index 46ff28b7d1..860150aa28 100644 --- a/packages/console/mail/package.json +++ b/packages/console/mail/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/console-mail", - "version": "1.4.7", + "version": "1.4.11", "dependencies": { "@jsx-email/all": "2.2.3", "@jsx-email/cli": "1.4.3", diff --git a/packages/desktop-electron/package.json b/packages/desktop-electron/package.json index e1f69b5b20..8142b12ada 100644 --- a/packages/desktop-electron/package.json +++ b/packages/desktop-electron/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop-electron", "private": true, - "version": "1.4.7", + "version": "1.4.11", "type": "module", 
"license": "MIT", "homepage": "https://opencode.ai", diff --git a/packages/desktop/package.json b/packages/desktop/package.json index d8eea4ea36..a23342bdec 100644 --- a/packages/desktop/package.json +++ b/packages/desktop/package.json @@ -1,7 +1,7 @@ { "name": "@opencode-ai/desktop", "private": true, - "version": "1.4.7", + "version": "1.4.11", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/enterprise/package.json b/packages/enterprise/package.json index 12a72e647f..f565159628 100644 --- a/packages/enterprise/package.json +++ b/packages/enterprise/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/enterprise", - "version": "1.4.7", + "version": "1.4.11", "private": true, "type": "module", "license": "MIT", diff --git a/packages/extensions/zed/extension.toml b/packages/extensions/zed/extension.toml index d164534cf7..32039c097a 100644 --- a/packages/extensions/zed/extension.toml +++ b/packages/extensions/zed/extension.toml @@ -1,7 +1,7 @@ id = "opencode" name = "OpenCode" description = "The open source coding agent." 
-version = "1.4.7" +version = "1.4.11" schema_version = 1 authors = ["Anomaly"] repository = "https://github.com/anomalyco/opencode" @@ -11,26 +11,26 @@ name = "OpenCode" icon = "./icons/opencode.svg" [agent_servers.opencode.targets.darwin-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-arm64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.11/opencode-darwin-arm64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.darwin-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.11/opencode-darwin-x64.zip" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-aarch64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-arm64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.11/opencode-linux-arm64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.linux-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-x64.tar.gz" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.11/opencode-linux-x64.tar.gz" cmd = "./opencode" args = ["acp"] [agent_servers.opencode.targets.windows-x86_64] -archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-windows-x64.zip" +archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.11/opencode-windows-x64.zip" cmd = "./opencode.exe" args = ["acp"] diff --git a/packages/function/package.json b/packages/function/package.json index 36a9ddc321..5d4229f64f 100644 --- a/packages/function/package.json +++ b/packages/function/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/function", - "version": "1.4.7", + "version": "1.4.11", "$schema": "https://json.schemastore.org/package.json", "private": true, "type": 
"module", diff --git a/packages/opencode/AGENTS.md b/packages/opencode/AGENTS.md index f0f32fdd16..d7fb844f0d 100644 --- a/packages/opencode/AGENTS.md +++ b/packages/opencode/AGENTS.md @@ -9,6 +9,63 @@ - **Output**: creates `migration/_/migration.sql` and `snapshot.json`. - **Tests**: migration tests should read the per-folder layout (no `_journal.json`). +# Module shape + +Do not use `export namespace Foo { ... }` for module organization. It is not +standard ESM, it prevents tree-shaking, and it breaks Node's native TypeScript +runner. Use flat top-level exports combined with a self-reexport at the bottom +of the file: + +```ts +// src/foo/foo.ts +export interface Interface { ... } +export class Service extends Context.Service()("@opencode/Foo") {} +export const layer = Layer.effect(Service, ...) +export const defaultLayer = layer.pipe(...) + +export * as Foo from "./foo" +``` + +Consumers import the namespace projection: + +```ts +import { Foo } from "@/foo/foo" + +yield * Foo.Service +Foo.layer +Foo.defaultLayer +``` + +Namespace-private helpers stay as non-exported top-level declarations in the +same file — they remain inaccessible to consumers (they are not projected by +`export * as`) but are usable by the file's own code. + +## When the file is an `index.ts` + +If the module is `foo/index.ts` (single-namespace directory), use `"."` for +the self-reexport source rather than `"./index"`: + +```ts +// src/foo/index.ts +export const thing = ... + +export * as Foo from "." +``` + +## Multi-sibling directories + +For directories with several independent modules (e.g. `src/session/`, +`src/config/`), keep each sibling as its own file with its own self-reexport, +and do not add a barrel `index.ts`. 
Consumers import the specific sibling: + +```ts +import { SessionRetry } from "@/session/retry" +import { SessionStatus } from "@/session/status" +``` + +Barrels in multi-sibling directories force every import through the barrel to +evaluate every sibling, which defeats tree-shaking and slows module load. + # opencode Effect rules Use these rules when writing or migrating Effect code. @@ -23,6 +80,10 @@ See `specs/effect/migration.md` for the compact pattern reference and examples. - Use `Effect.callback` for callback-based APIs. - Prefer `DateTime.nowAsDate` over `new Date(yield* Clock.currentTimeMillis)` when you need a `Date`. +## Module conventions + +- In `src/config`, follow the existing self-export pattern at the top of the file (for example `export * as ConfigAgent from "./agent"`) when adding a new config module. + ## Schemas and errors - Use `Schema.Class` for multi-field data. diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 1dabd91b8d..2acbc4fe84 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.7", + "version": "1.4.11", "name": "opencode", "type": "module", "license": "MIT", @@ -79,15 +79,15 @@ "@actions/github": "6.0.1", "@agentclientprotocol/sdk": "0.16.1", "@ai-sdk/alibaba": "1.0.17", - "@ai-sdk/amazon-bedrock": "4.0.94", - "@ai-sdk/anthropic": "3.0.70", + "@ai-sdk/amazon-bedrock": "4.0.95", + "@ai-sdk/anthropic": "3.0.71", "@ai-sdk/azure": "3.0.49", "@ai-sdk/cerebras": "2.0.41", "@ai-sdk/cohere": "3.0.27", "@ai-sdk/deepinfra": "2.0.41", - "@ai-sdk/gateway": "3.0.102", + "@ai-sdk/gateway": "3.0.104", "@ai-sdk/google": "3.0.63", - "@ai-sdk/google-vertex": "4.0.111", + "@ai-sdk/google-vertex": "4.0.112", "@ai-sdk/groq": "3.0.31", "@ai-sdk/mistral": "3.0.27", "@ai-sdk/openai": "3.0.53", @@ -122,8 +122,8 @@ "@opentelemetry/exporter-trace-otlp-http": "0.214.0", "@opentelemetry/sdk-trace-base": 
"2.6.1", "@opentelemetry/sdk-trace-node": "2.6.1", - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "catalog:", + "@opentui/solid": "catalog:", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", @@ -143,7 +143,7 @@ "drizzle-orm": "catalog:", "effect": "catalog:", "fuzzysort": "3.1.0", - "gitlab-ai-provider": "6.4.2", + "gitlab-ai-provider": "6.6.0", "glob": "13.0.5", "google-auth-library": "10.5.0", "gray-matter": "4.0.3", diff --git a/packages/opencode/script/batch-unwrap-pr.ts b/packages/opencode/script/batch-unwrap-pr.ts deleted file mode 100644 index 5730501412..0000000000 --- a/packages/opencode/script/batch-unwrap-pr.ts +++ /dev/null @@ -1,230 +0,0 @@ -#!/usr/bin/env bun -/** - * Automate the full per-file namespace→self-reexport migration: - * - * 1. Create a worktree at ../opencode-worktrees/ns- on a new branch - * `kit/ns-` off `origin/dev`. - * 2. Symlink `node_modules` from the main repo into the worktree root so - * builds work without a fresh `bun install`. - * 3. Run `script/unwrap-and-self-reexport.ts` on the target file inside the worktree. - * 4. Verify: - * - `bunx --bun tsgo --noEmit` (pre-existing plugin.ts cross-worktree - * noise ignored — we compare against a pre-change baseline captured - * via `git stash`, so only NEW errors fail). - * - `bun run --conditions=browser ./src/index.ts generate`. - * - Relevant tests under `test/` if that directory exists. - * 5. Commit, push with `--no-verify`, and open a PR titled after the - * namespace. - * - * Usage: - * - * bun script/batch-unwrap-pr.ts src/file/ignore.ts - * bun script/batch-unwrap-pr.ts src/file/ignore.ts src/file/watcher.ts # multiple - * bun script/batch-unwrap-pr.ts --dry-run src/file/ignore.ts # plan only - * - * Repo assumptions: - * - * - Main checkout at /Users/kit/code/open-source/opencode (configurable via - * --repo-root=...). 
- * - Worktree root at /Users/kit/code/open-source/opencode-worktrees - * (configurable via --worktree-root=...). - * - * The script does NOT enable auto-merge; that's a separate manual step if we - * want it. - */ - -import fs from "node:fs" -import path from "node:path" -import { spawnSync, type SpawnSyncReturns } from "node:child_process" - -type Cmd = string[] - -function run( - cwd: string, - cmd: Cmd, - opts: { capture?: boolean; allowFail?: boolean; stdin?: string } = {}, -): SpawnSyncReturns { - const result = spawnSync(cmd[0], cmd.slice(1), { - cwd, - stdio: opts.capture ? ["pipe", "pipe", "pipe"] : ["inherit", "inherit", "inherit"], - encoding: "utf-8", - input: opts.stdin, - }) - if (!opts.allowFail && result.status !== 0) { - const label = `${path.basename(cmd[0])} ${cmd.slice(1).join(" ")}` - console.error(`[fail] ${label} (cwd=${cwd})`) - if (opts.capture) { - if (result.stdout) console.error(result.stdout) - if (result.stderr) console.error(result.stderr) - } - process.exit(result.status ?? 1) - } - return result -} - -function fileSlug(fileArg: string): string { - // src/file/ignore.ts → file-ignore - return fileArg - .replace(/^src\//, "") - .replace(/\.tsx?$/, "") - .replace(/[\/_]/g, "-") -} - -function readNamespace(absFile: string): string { - const content = fs.readFileSync(absFile, "utf-8") - const match = content.match(/^export\s+namespace\s+(\w+)\s*\{/m) - if (!match) { - console.error(`no \`export namespace\` found in ${absFile}`) - process.exit(1) - } - return match[1] -} - -// --------------------------------------------------------------------------- - -const args = process.argv.slice(2) -const dryRun = args.includes("--dry-run") -const repoRoot = ( - args.find((a) => a.startsWith("--repo-root=")) ?? "--repo-root=/Users/kit/code/open-source/opencode" -).split("=")[1] -const worktreeRoot = ( - args.find((a) => a.startsWith("--worktree-root=")) ?? 
"--worktree-root=/Users/kit/code/open-source/opencode-worktrees" -).split("=")[1] -const targets = args.filter((a) => !a.startsWith("--")) - -if (targets.length === 0) { - console.error("Usage: bun script/batch-unwrap-pr.ts [more files...] [--dry-run]") - process.exit(1) -} - -if (!fs.existsSync(worktreeRoot)) fs.mkdirSync(worktreeRoot, { recursive: true }) - -for (const rel of targets) { - const absSrc = path.join(repoRoot, "packages", "opencode", rel) - if (!fs.existsSync(absSrc)) { - console.error(`skip ${rel}: file does not exist under ${repoRoot}/packages/opencode`) - continue - } - const slug = fileSlug(rel) - const branch = `kit/ns-${slug}` - const wt = path.join(worktreeRoot, `ns-${slug}`) - const ns = readNamespace(absSrc) - - console.log(`\n=== ${rel} → ${ns} (branch=${branch} wt=${path.basename(wt)}) ===`) - - if (dryRun) { - console.log(` would create worktree ${wt}`) - console.log(` would run unwrap on packages/opencode/${rel}`) - console.log(` would commit, push, and open PR`) - continue - } - - // Sync dev (fetch only; we branch off origin/dev directly). - run(repoRoot, ["git", "fetch", "origin", "dev", "--quiet"]) - - // Create worktree + branch. - if (fs.existsSync(wt)) { - console.log(` worktree already exists at ${wt}; skipping`) - continue - } - run(repoRoot, ["git", "worktree", "add", "-b", branch, wt, "origin/dev"]) - - // Symlink node_modules so bun/tsgo work without a full install. - // We link both the repo root and packages/opencode, since the opencode - // package has its own local node_modules (including bunfig.toml preload deps - // like @opentui/solid) that aren't hoisted to the root. 
- const wtRootNodeModules = path.join(wt, "node_modules") - if (!fs.existsSync(wtRootNodeModules)) { - fs.symlinkSync(path.join(repoRoot, "node_modules"), wtRootNodeModules) - } - const wtOpencode = path.join(wt, "packages", "opencode") - const wtOpencodeNodeModules = path.join(wtOpencode, "node_modules") - if (!fs.existsSync(wtOpencodeNodeModules)) { - fs.symlinkSync(path.join(repoRoot, "packages", "opencode", "node_modules"), wtOpencodeNodeModules) - } - const wtTarget = path.join(wt, "packages", "opencode", rel) - - // Baseline tsgo output (pre-change). - const baselinePath = path.join(wt, ".ns-baseline.txt") - const baseline = run(wtOpencode, ["bunx", "--bun", "tsgo", "--noEmit"], { capture: true, allowFail: true }) - fs.writeFileSync(baselinePath, (baseline.stdout ?? "") + (baseline.stderr ?? "")) - - // Run the unwrap script from the MAIN repo checkout (where the tooling - // lives) targeting the worktree's file by absolute path. We run from the - // worktree root (not `packages/opencode`) to avoid triggering the - // bunfig.toml preload, which needs `@opentui/solid` that only the TUI - // workspace has installed. - const unwrapScript = path.join(repoRoot, "packages", "opencode", "script", "unwrap-and-self-reexport.ts") - run(wt, ["bun", unwrapScript, wtTarget]) - - // Post-change tsgo. - const after = run(wtOpencode, ["bunx", "--bun", "tsgo", "--noEmit"], { capture: true, allowFail: true }) - const afterText = (after.stdout ?? "") + (after.stderr ?? "") - - // Compare line-sets to detect NEW tsgo errors. - const sanitize = (s: string) => - s - .split("\n") - .map((l) => l.replace(/\s+$/, "")) - .filter(Boolean) - .sort() - .join("\n") - const baselineSorted = sanitize(fs.readFileSync(baselinePath, "utf-8")) - const afterSorted = sanitize(afterText) - if (baselineSorted !== afterSorted) { - console.log(` tsgo output differs from baseline. 
Showing diff:`) - const diffResult = spawnSync("diff", ["-u", baselinePath, "-"], { input: afterText, encoding: "utf-8" }) - if (diffResult.stdout) console.log(diffResult.stdout) - if (diffResult.stderr) console.log(diffResult.stderr) - console.error(` aborting ${rel}; investigate manually in ${wt}`) - process.exit(1) - } - - // SDK build. - run(wtOpencode, ["bun", "run", "--conditions=browser", "./src/index.ts", "generate"], { capture: true }) - - // Run tests for the directory, if a matching test dir exists. - const dirName = path.basename(path.dirname(rel)) - const testDir = path.join(wt, "packages", "opencode", "test", dirName) - if (fs.existsSync(testDir)) { - const testResult = run(wtOpencode, ["bun", "run", "test", `test/${dirName}`], { capture: true, allowFail: true }) - const combined = (testResult.stdout ?? "") + (testResult.stderr ?? "") - if (testResult.status !== 0) { - console.error(combined) - console.error(` tests failed for ${rel}; aborting`) - process.exit(1) - } - // Surface the summary line if present. - const summary = combined - .split("\n") - .filter((l) => /\bpass\b|\bfail\b/.test(l)) - .slice(-3) - .join("\n") - if (summary) console.log(` tests: ${summary.replace(/\n/g, " | ")}`) - } else { - console.log(` tests: no test/${dirName} directory, skipping`) - } - - // Clean up baseline file before committing. - fs.unlinkSync(baselinePath) - - // Commit, push, open PR. 
- const commitMsg = `refactor: unwrap ${ns} namespace + self-reexport` - run(wt, ["git", "add", "-A"]) - run(wt, ["git", "commit", "-m", commitMsg]) - run(wt, ["git", "push", "-u", "origin", branch, "--no-verify"]) - - const prBody = [ - "## Summary", - `- Unwrap the \`${ns}\` namespace in \`packages/opencode/${rel}\` to flat top-level exports.`, - `- Append \`export * as ${ns} from "./${path.basename(rel, ".ts")}"\` so consumers keep the same \`${ns}.x\` import ergonomics.`, - "", - "## Verification (local)", - "- `bunx --bun tsgo --noEmit` — no new errors vs baseline.", - "- `bun run --conditions=browser ./src/index.ts generate` — clean.", - `- \`bun run test test/${dirName}\` — all pass (if applicable).`, - ].join("\n") - run(wt, ["gh", "pr", "create", "--title", commitMsg, "--base", "dev", "--body", prBody]) - - console.log(` PR opened for ${rel}`) -} diff --git a/packages/opencode/script/collapse-barrel.ts b/packages/opencode/script/collapse-barrel.ts deleted file mode 100644 index 05bb11589c..0000000000 --- a/packages/opencode/script/collapse-barrel.ts +++ /dev/null @@ -1,161 +0,0 @@ -#!/usr/bin/env bun -/** - * Collapse a single-namespace barrel directory into a dir/index.ts module. - * - * Given a directory `src/foo/` that contains: - * - * - `index.ts` (exactly `export * as Foo from "./foo"`) - * - `foo.ts` (the real implementation) - * - zero or more sibling files - * - * this script: - * - * 1. Deletes the old `index.ts` barrel. - * 2. `git mv`s `foo.ts` → `index.ts` so the implementation IS the directory entry. - * 3. Appends `export * as Foo from "."` to the new `index.ts`. - * 4. Rewrites any same-directory sibling `*.ts` files that imported - * `./foo` (with or without the namespace name) to import `"."` instead. - * - * Consumer files outside the directory keep importing from the directory - * (`"@/foo"` / `"../foo"` / etc.) and continue to work, because - * `dir/index.ts` now provides the `Foo` named export directly. 
- * - * Usage: - * - * bun script/collapse-barrel.ts src/bus - * bun script/collapse-barrel.ts src/bus --dry-run - * - * Notes: - * - * - Only works on directories whose barrel is a single - * `export * as Name from "./file"` line. Refuses otherwise. - * - Refuses if the implementation file name already conflicts with - * `index.ts`. - * - Safe to run repeatedly: a second run on an already-collapsed dir - * will exit with a clear message. - */ - -import fs from "node:fs" -import path from "node:path" -import { spawnSync } from "node:child_process" - -const args = process.argv.slice(2) -const dryRun = args.includes("--dry-run") -const targetArg = args.find((a) => !a.startsWith("--")) - -if (!targetArg) { - console.error("Usage: bun script/collapse-barrel.ts [--dry-run]") - process.exit(1) -} - -const dir = path.resolve(targetArg) -const indexPath = path.join(dir, "index.ts") - -if (!fs.existsSync(dir) || !fs.statSync(dir).isDirectory()) { - console.error(`Not a directory: ${dir}`) - process.exit(1) -} -if (!fs.existsSync(indexPath)) { - console.error(`No index.ts in ${dir}`) - process.exit(1) -} - -// Validate barrel shape. -const indexContent = fs.readFileSync(indexPath, "utf-8").trim() -const match = indexContent.match(/^export\s+\*\s+as\s+(\w+)\s+from\s+["']\.\/([^"']+)["']\s*;?\s*$/) -if (!match) { - console.error(`Not a simple single-namespace barrel:\n${indexContent}`) - process.exit(1) -} -const namespaceName = match[1] -const implRel = match[2].replace(/\.ts$/, "") -const implPath = path.join(dir, `${implRel}.ts`) - -if (!fs.existsSync(implPath)) { - console.error(`Implementation file not found: ${implPath}`) - process.exit(1) -} - -if (implRel === "index") { - console.error(`Nothing to do — impl file is already index.ts`) - process.exit(0) -} - -console.log(`Collapsing ${path.relative(process.cwd(), dir)}`) -console.log(` namespace: ${namespaceName}`) -console.log(` impl file: ${implRel}.ts → index.ts`) - -// Figure out which sibling files need rewriting. 
-const siblings = fs - .readdirSync(dir) - .filter((f) => f.endsWith(".ts") || f.endsWith(".tsx")) - .filter((f) => f !== "index.ts" && f !== `${implRel}.ts`) - .map((f) => path.join(dir, f)) - -type SiblingEdit = { file: string; content: string } -const siblingEdits: SiblingEdit[] = [] - -for (const sibling of siblings) { - const content = fs.readFileSync(sibling, "utf-8") - // Match any import or re-export referring to "./" inside this directory. - const siblingRegex = new RegExp(`(from\\s*["'])\\.\\/${implRel.replace(/[-\\^$*+?.()|[\]{}]/g, "\\$&")}(["'])`, "g") - if (!siblingRegex.test(content)) continue - const updated = content.replace(siblingRegex, `$1.$2`) - siblingEdits.push({ file: sibling, content: updated }) -} - -if (siblingEdits.length > 0) { - console.log(` sibling rewrites: ${siblingEdits.length}`) - for (const edit of siblingEdits) { - console.log(` ${path.relative(process.cwd(), edit.file)}`) - } -} else { - console.log(` sibling rewrites: none`) -} - -if (dryRun) { - console.log(`\n(dry run) would:`) - console.log(` - delete ${path.relative(process.cwd(), indexPath)}`) - console.log(` - git mv ${path.relative(process.cwd(), implPath)} ${path.relative(process.cwd(), indexPath)}`) - console.log(` - append \`export * as ${namespaceName} from "."\` to the new index.ts`) - for (const edit of siblingEdits) { - console.log(` - rewrite sibling: ${path.relative(process.cwd(), edit.file)}`) - } - process.exit(0) -} - -// Apply: remove the old barrel, git-mv the impl onto it, then rewrite content. -// We can't git-mv on top of an existing tracked file, so we remove the barrel first. -function runGit(...cmd: string[]) { - const res = spawnSync("git", cmd, { stdio: "inherit" }) - if (res.status !== 0) { - console.error(`git ${cmd.join(" ")} failed`) - process.exit(res.status ?? 
1) - } -} - -// Step 1: remove the barrel -runGit("rm", "-f", indexPath) - -// Step 2: rename the impl file into index.ts -runGit("mv", implPath, indexPath) - -// Step 3: append the self-reexport to the new index.ts -const newContent = fs.readFileSync(indexPath, "utf-8") -const trimmed = newContent.endsWith("\n") ? newContent : newContent + "\n" -fs.writeFileSync(indexPath, `${trimmed}\nexport * as ${namespaceName} from "."\n`) -console.log(` appended: export * as ${namespaceName} from "."`) - -// Step 4: rewrite siblings -for (const edit of siblingEdits) { - fs.writeFileSync(edit.file, edit.content) -} -if (siblingEdits.length > 0) { - console.log(` rewrote ${siblingEdits.length} sibling file(s)`) -} - -console.log(`\nDone. Verify with:`) -console.log(` cd packages/opencode`) -console.log(` bunx --bun tsgo --noEmit`) -console.log(` bun run --conditions=browser ./src/index.ts generate`) -console.log(` bun run test`) diff --git a/packages/opencode/script/unwrap-and-self-reexport.ts b/packages/opencode/script/unwrap-and-self-reexport.ts deleted file mode 100644 index 5ae703182e..0000000000 --- a/packages/opencode/script/unwrap-and-self-reexport.ts +++ /dev/null @@ -1,241 +0,0 @@ -#!/usr/bin/env bun -/** - * Unwrap a single `export namespace` in a file into flat top-level exports - * plus a self-reexport at the bottom of the same file. - * - * Usage: - * - * bun script/unwrap-and-self-reexport.ts src/file/ignore.ts - * bun script/unwrap-and-self-reexport.ts src/file/ignore.ts --dry-run - * - * Input file shape: - * - * // imports ... - * - * export namespace FileIgnore { - * export function ...(...) { ... } - * const helper = ... - * } - * - * Output shape: - * - * // imports ... - * - * export function ...(...) { ... } - * const helper = ... - * - * export * as FileIgnore from "./ignore" - * - * What the script does: - * - * 1. Uses ast-grep to locate the single `export namespace Foo { ... }` block. - * 2. 
Removes the `export namespace Foo {` line and the matching closing `}`. - * 3. Dedents the body by one indent level (2 spaces). - * 4. Rewrites `Foo.Bar` self-references inside the file to just `Bar` - * (but only for names that are actually exported from the namespace — - * non-exported members get the same treatment so references remain valid). - * 5. Appends `export * as Foo from "./"` at the end of the file. - * - * What it does NOT do: - * - * - Does not create or modify barrel `index.ts` files. - * - Does not rewrite any consumer imports. Consumers already import from - * the file path itself (e.g. `import { FileIgnore } from "../file/ignore"`); - * the self-reexport keeps that import working unchanged. - * - Does not handle files with more than one `export namespace` declaration. - * The script refuses that case. - * - * Requires: ast-grep (`brew install ast-grep`). - */ - -import fs from "node:fs" -import path from "node:path" - -const args = process.argv.slice(2) -const dryRun = args.includes("--dry-run") -const targetArg = args.find((a) => !a.startsWith("--")) - -if (!targetArg) { - console.error("Usage: bun script/unwrap-and-self-reexport.ts [--dry-run]") - process.exit(1) -} - -const absPath = path.resolve(targetArg) -if (!fs.existsSync(absPath) || !fs.statSync(absPath).isFile()) { - console.error(`Not a file: ${absPath}`) - process.exit(1) -} - -// Locate the namespace block with ast-grep (accurate AST boundaries). 
-const ast = Bun.spawnSync( - ["ast-grep", "run", "--pattern", "export namespace $NAME { $$$BODY }", "--lang", "typescript", "--json", absPath], - { stdout: "pipe", stderr: "pipe" }, -) -if (ast.exitCode !== 0) { - console.error("ast-grep failed:", ast.stderr.toString()) - process.exit(1) -} - -type AstMatch = { - range: { start: { line: number; column: number }; end: { line: number; column: number } } - metaVariables: { single: Record } -} -const matches = JSON.parse(ast.stdout.toString()) as AstMatch[] -if (matches.length === 0) { - console.error(`No \`export namespace\` found in ${path.relative(process.cwd(), absPath)}`) - process.exit(1) -} -if (matches.length > 1) { - console.error(`File has ${matches.length} \`export namespace\` declarations — this script handles one per file.`) - for (const m of matches) console.error(` ${m.metaVariables.single.NAME.text} (line ${m.range.start.line + 1})`) - process.exit(1) -} - -const match = matches[0] -const nsName = match.metaVariables.single.NAME.text -const startLine = match.range.start.line -const endLine = match.range.end.line - -const original = fs.readFileSync(absPath, "utf-8") -const lines = original.split("\n") - -// Split the file into before/body/after. -const before = lines.slice(0, startLine) -const body = lines.slice(startLine + 1, endLine) -const after = lines.slice(endLine + 1) - -// Dedent body by one indent level (2 spaces). -const dedented = body.map((line) => { - if (line === "") return "" - if (line.startsWith(" ")) return line.slice(2) - return line -}) - -// Collect all top-level declared identifiers inside the namespace body so we can -// rewrite `Foo.X` → `X` when X is one of them. We gather BOTH exported and -// non-exported names because the namespace body might reference its own -// non-exported helpers via `Foo.helper` too. 
-const declaredNames = new Set() -const declRe = - /^\s*(?:export\s+)?(?:abstract\s+)?(?:async\s+)?(?:const|let|var|function|class|interface|type|enum)\s+(\w+)/ -for (const line of dedented) { - const m = line.match(declRe) - if (m) declaredNames.add(m[1]) -} -// Also capture `export { X, Y }` re-exports inside the namespace. -const reExportRe = /export\s*\{\s*([^}]+)\}/g -for (const line of dedented) { - for (const reExport of line.matchAll(reExportRe)) { - for (const part of reExport[1].split(",")) { - const name = part - .trim() - .split(/\s+as\s+/) - .pop()! - .trim() - if (name) declaredNames.add(name) - } - } -} - -// Rewrite `Foo.X` → `X` inside the body, avoiding matches in strings, comments, -// templates. We walk the line char-by-char rather than using a regex so we can -// skip over those segments cleanly. -let rewriteCount = 0 -function rewriteLine(line: string): string { - const out: string[] = [] - let i = 0 - let stringQuote: string | null = null - while (i < line.length) { - const ch = line[i] - // String / template literal pass-through. - if (stringQuote) { - out.push(ch) - if (ch === "\\" && i + 1 < line.length) { - out.push(line[i + 1]) - i += 2 - continue - } - if (ch === stringQuote) stringQuote = null - i++ - continue - } - if (ch === '"' || ch === "'" || ch === "`") { - stringQuote = ch - out.push(ch) - i++ - continue - } - // Line comment: emit the rest of the line untouched. - if (ch === "/" && line[i + 1] === "/") { - out.push(line.slice(i)) - i = line.length - continue - } - // Block comment: emit until "*/" if present on same line; else rest of line. - if (ch === "/" && line[i + 1] === "*") { - const end = line.indexOf("*/", i + 2) - if (end === -1) { - out.push(line.slice(i)) - i = line.length - } else { - out.push(line.slice(i, end + 2)) - i = end + 2 - } - continue - } - // Try to match `Foo.` at this position. 
- if (line.startsWith(nsName + ".", i)) { - // Make sure the char before is NOT a word character (otherwise we'd be in the middle of another identifier). - const prev = i === 0 ? "" : line[i - 1] - if (!/\w/.test(prev)) { - const after = line.slice(i + nsName.length + 1) - const nameMatch = after.match(/^([A-Za-z_$][\w$]*)/) - if (nameMatch && declaredNames.has(nameMatch[1])) { - out.push(nameMatch[1]) - i += nsName.length + 1 + nameMatch[1].length - rewriteCount++ - continue - } - } - } - out.push(ch) - i++ - } - return out.join("") -} -const rewrittenBody = dedented.map(rewriteLine) - -// Assemble the new file. Collapse multiple trailing blank lines so the -// self-reexport sits cleanly at the end. -const basename = path.basename(absPath, ".ts") -const assembled = [...before, ...rewrittenBody, ...after].join("\n") -const trimmed = assembled.replace(/\s+$/g, "") -const output = `${trimmed}\n\nexport * as ${nsName} from "./${basename}"\n` - -if (dryRun) { - console.log(`--- dry run: ${path.relative(process.cwd(), absPath)} ---`) - console.log(`namespace: ${nsName}`) - console.log(`body lines: ${body.length}`) - console.log(`declared names: ${Array.from(declaredNames).join(", ") || "(none)"}`) - console.log(`self-refs rewr: ${rewriteCount}`) - console.log(`self-reexport: export * as ${nsName} from "./${basename}"`) - console.log(`output preview (last 10 lines):`) - const outputLines = output.split("\n") - for (const l of outputLines.slice(Math.max(0, outputLines.length - 10))) { - console.log(` ${l}`) - } - process.exit(0) -} - -fs.writeFileSync(absPath, output) -console.log(`unwrapped ${path.relative(process.cwd(), absPath)} → ${nsName}`) -console.log(` body lines: ${body.length}`) -console.log(` self-refs rewr: ${rewriteCount}`) -console.log(` self-reexport: export * as ${nsName} from "./${basename}"`) -console.log("") -console.log("Next: verify with") -console.log(" bunx --bun tsgo --noEmit") -console.log(" bun run --conditions=browser ./src/index.ts generate") 
-console.log( - ` bun run test test/${path.relative(path.join(path.dirname(absPath), "..", ".."), absPath).replace(/\.ts$/, "")}*`, -) diff --git a/packages/opencode/script/unwrap-namespace.ts b/packages/opencode/script/unwrap-namespace.ts deleted file mode 100644 index 45c16f6c73..0000000000 --- a/packages/opencode/script/unwrap-namespace.ts +++ /dev/null @@ -1,305 +0,0 @@ -#!/usr/bin/env bun -/** - * Unwrap a TypeScript `export namespace` into flat exports + barrel. - * - * Usage: - * bun script/unwrap-namespace.ts src/bus/index.ts - * bun script/unwrap-namespace.ts src/bus/index.ts --dry-run - * bun script/unwrap-namespace.ts src/pty/index.ts --name service # avoid collision with pty.ts - * - * What it does: - * 1. Reads the file and finds the `export namespace Foo { ... }` block - * (uses ast-grep for accurate AST-based boundary detection) - * 2. Removes the namespace wrapper and dedents the body - * 3. Fixes self-references (e.g. Config.PermissionAction → PermissionAction) - * 4. If the file is index.ts, renames it to .ts - * 5. Creates/updates index.ts with `export * as Foo from "./"` - * 6. Rewrites import paths across src/, test/, and script/ - * 7. 
Fixes sibling imports within the same directory - * - * Requires: ast-grep (`brew install ast-grep` or `cargo install ast-grep`) - */ - -import path from "path" -import fs from "fs" - -const args = process.argv.slice(2) -const dryRun = args.includes("--dry-run") -const nameFlag = args.find((a, i) => args[i - 1] === "--name") -const filePath = args.find((a) => !a.startsWith("--") && args[args.indexOf(a) - 1] !== "--name") - -if (!filePath) { - console.error("Usage: bun script/unwrap-namespace.ts [--dry-run] [--name ]") - process.exit(1) -} - -const absPath = path.resolve(filePath) -if (!fs.existsSync(absPath)) { - console.error(`File not found: ${absPath}`) - process.exit(1) -} - -const src = fs.readFileSync(absPath, "utf-8") -const lines = src.split("\n") - -// Use ast-grep to find the namespace boundaries accurately. -// This avoids false matches from braces in strings, templates, comments, etc. -const astResult = Bun.spawnSync( - ["ast-grep", "run", "--pattern", "export namespace $NAME { $$$BODY }", "--lang", "typescript", "--json", absPath], - { stdout: "pipe", stderr: "pipe" }, -) - -if (astResult.exitCode !== 0) { - console.error("ast-grep failed:", astResult.stderr.toString()) - process.exit(1) -} - -const matches = JSON.parse(astResult.stdout.toString()) as Array<{ - text: string - range: { start: { line: number; column: number }; end: { line: number; column: number } } - metaVariables: { single: Record; multi: Record> } -}> - -if (matches.length === 0) { - console.error("No `export namespace Foo { ... 
}` found in file") - process.exit(1) -} - -if (matches.length > 1) { - console.error(`Found ${matches.length} namespaces — this script handles one at a time`) - console.error("Namespaces found:") - for (const m of matches) console.error(` ${m.metaVariables.single.NAME.text} (line ${m.range.start.line + 1})`) - process.exit(1) -} - -const match = matches[0] -const nsName = match.metaVariables.single.NAME.text -const nsLine = match.range.start.line // 0-indexed -const closeLine = match.range.end.line // 0-indexed, the line with closing `}` - -console.log(`Found: export namespace ${nsName} { ... }`) -console.log(` Lines ${nsLine + 1}–${closeLine + 1} (${closeLine - nsLine + 1} lines)`) - -// Build the new file content: -// 1. Everything before the namespace declaration (imports, etc.) -// 2. The namespace body, dedented by one level (2 spaces) -// 3. Everything after the closing brace (rare, but possible) -const before = lines.slice(0, nsLine) -const body = lines.slice(nsLine + 1, closeLine) -const after = lines.slice(closeLine + 1) - -// Dedent: remove exactly 2 leading spaces from each line -const dedented = body.map((line) => { - if (line === "") return "" - if (line.startsWith(" ")) return line.slice(2) - return line -}) - -let newContent = [...before, ...dedented, ...after].join("\n") - -// --- Fix self-references --- -// After unwrapping, references like `Config.PermissionAction` inside the same file -// need to become just `PermissionAction`. Only fix code positions, not strings. -const exportedNames = new Set() -const exportRegex = /export\s+(?:const|function|class|interface|type|enum|abstract\s+class)\s+(\w+)/g -for (const line of dedented) { - for (const m of line.matchAll(exportRegex)) exportedNames.add(m[1]) -} -const reExportRegex = /export\s*\{\s*([^}]+)\}/g -for (const line of dedented) { - for (const m of line.matchAll(reExportRegex)) { - for (const name of m[1].split(",")) { - const trimmed = name - .trim() - .split(/\s+as\s+/) - .pop()! 
- .trim() - if (trimmed) exportedNames.add(trimmed) - } - } -} - -let selfRefCount = 0 -if (exportedNames.size > 0) { - const fixedLines = newContent.split("\n").map((line) => { - // Split line into string-literal and code segments to avoid replacing inside strings - const segments: Array<{ text: string; isString: boolean }> = [] - let i = 0 - let current = "" - let inString: string | null = null - - while (i < line.length) { - const ch = line[i] - if (inString) { - current += ch - if (ch === "\\" && i + 1 < line.length) { - current += line[i + 1] - i += 2 - continue - } - if (ch === inString) { - segments.push({ text: current, isString: true }) - current = "" - inString = null - } - i++ - continue - } - if (ch === '"' || ch === "'" || ch === "`") { - if (current) segments.push({ text: current, isString: false }) - current = ch - inString = ch - i++ - continue - } - if (ch === "/" && i + 1 < line.length && line[i + 1] === "/") { - current += line.slice(i) - segments.push({ text: current, isString: true }) - current = "" - i = line.length - continue - } - current += ch - i++ - } - if (current) segments.push({ text: current, isString: !!inString }) - - return segments - .map((seg) => { - if (seg.isString) return seg.text - let result = seg.text - for (const name of exportedNames) { - const pattern = `${nsName}.${name}` - while (result.includes(pattern)) { - const idx = result.indexOf(pattern) - const charBefore = idx > 0 ? result[idx - 1] : " " - const charAfter = idx + pattern.length < result.length ? result[idx + pattern.length] : " " - if (/\w/.test(charBefore) || /\w/.test(charAfter)) break - result = result.slice(0, idx) + name + result.slice(idx + pattern.length) - selfRefCount++ - } - } - return result - }) - .join("") - }) - newContent = fixedLines.join("\n") -} - -// Figure out file naming -const dir = path.dirname(absPath) -const basename = path.basename(absPath, ".ts") -const isIndex = basename === "index" -const implName = nameFlag ?? (isIndex ? 
nsName.replace(/([a-z])([A-Z])/g, "$1-$2").toLowerCase() : basename) -const implFile = path.join(dir, `${implName}.ts`) -const indexFile = path.join(dir, "index.ts") -const barrelLine = `export * as ${nsName} from "./${implName}"\n` - -console.log("") -if (isIndex) { - console.log(`Plan: rename ${basename}.ts → ${implName}.ts, create new index.ts barrel`) -} else { - console.log(`Plan: rewrite ${basename}.ts in place, create index.ts barrel`) -} -if (selfRefCount > 0) console.log(`Fixed ${selfRefCount} self-reference(s) (${nsName}.X → X)`) -console.log("") - -if (dryRun) { - console.log("--- DRY RUN ---") - console.log("") - console.log(`=== ${implName}.ts (first 30 lines) ===`) - newContent - .split("\n") - .slice(0, 30) - .forEach((l, i) => console.log(` ${i + 1}: ${l}`)) - console.log(" ...") - console.log("") - console.log(`=== index.ts ===`) - console.log(` ${barrelLine.trim()}`) - console.log("") - if (!isIndex) { - const relDir = path.relative(path.resolve("src"), dir) - console.log(`=== Import rewrites (would apply) ===`) - console.log(` ${relDir}/${basename}" → ${relDir}" across src/, test/, script/`) - } else { - console.log("No import rewrites needed (was index.ts)") - } -} else { - if (isIndex) { - fs.writeFileSync(implFile, newContent) - fs.writeFileSync(indexFile, barrelLine) - console.log(`Wrote ${implName}.ts (${newContent.split("\n").length} lines)`) - console.log(`Wrote index.ts (barrel)`) - } else { - fs.writeFileSync(absPath, newContent) - if (fs.existsSync(indexFile)) { - const existing = fs.readFileSync(indexFile, "utf-8") - if (!existing.includes(`export * as ${nsName}`)) { - fs.appendFileSync(indexFile, barrelLine) - console.log(`Appended to existing index.ts`) - } else { - console.log(`index.ts already has ${nsName} export`) - } - } else { - fs.writeFileSync(indexFile, barrelLine) - console.log(`Wrote index.ts (barrel)`) - } - console.log(`Rewrote ${basename}.ts (${newContent.split("\n").length} lines)`) - } - - // --- Rewrite import paths 
across src/, test/, script/ --- - const relDir = path.relative(path.resolve("src"), dir) - if (!isIndex) { - const oldTail = `${relDir}/${basename}` - const searchDirs = ["src", "test", "script"].filter((d) => fs.existsSync(d)) - const rgResult = Bun.spawnSync(["rg", "-l", `from.*${oldTail}"`, ...searchDirs], { - stdout: "pipe", - stderr: "pipe", - }) - const filesToRewrite = rgResult.stdout - .toString() - .trim() - .split("\n") - .filter((f) => f.length > 0) - - if (filesToRewrite.length > 0) { - console.log(`\nRewriting imports in ${filesToRewrite.length} file(s)...`) - for (const file of filesToRewrite) { - const content = fs.readFileSync(file, "utf-8") - fs.writeFileSync(file, content.replaceAll(`${oldTail}"`, `${relDir}"`)) - } - console.log(` Done: ${oldTail}" → ${relDir}"`) - } else { - console.log("\nNo import rewrites needed") - } - } else { - console.log("\nNo import rewrites needed (was index.ts)") - } - - // --- Fix sibling imports within the same directory --- - const siblingFiles = fs.readdirSync(dir).filter((f) => { - if (!f.endsWith(".ts")) return false - if (f === "index.ts" || f === `${implName}.ts`) return false - return true - }) - - let siblingFixCount = 0 - for (const sibFile of siblingFiles) { - const sibPath = path.join(dir, sibFile) - const content = fs.readFileSync(sibPath, "utf-8") - const pattern = new RegExp(`from\\s+["']\\./${basename}["']`, "g") - if (pattern.test(content)) { - fs.writeFileSync(sibPath, content.replace(pattern, `from "."`)) - siblingFixCount++ - } - } - if (siblingFixCount > 0) { - console.log(`Fixed ${siblingFixCount} sibling import(s) in ${path.basename(dir)}/ (./${basename} → .)`) - } -} - -console.log("") -console.log("=== Verify ===") -console.log("") -console.log("bunx --bun tsgo --noEmit # typecheck") -console.log("bun run test # run tests") diff --git a/packages/opencode/specs/effect/facades.md b/packages/opencode/specs/effect/facades.md index e2d9d3d8a1..8bf7d97bad 100644 --- 
a/packages/opencode/specs/effect/facades.md +++ b/packages/opencode/specs/effect/facades.md @@ -1,12 +1,13 @@ # Facade removal checklist -Concrete inventory of the remaining `makeRuntime(...)`-backed service facades in `packages/opencode`. +Concrete inventory of the remaining `makeRuntime(...)`-backed facades in `packages/opencode`. -As of 2026-04-13, latest `origin/dev`: +Current status on this branch: -- `src/` still has 15 `makeRuntime(...)` call sites. -- 13 of those are still in scope for facade removal. -- 2 are excluded from this checklist: `bus/index.ts` and `effect/cross-spawn-spawner.ts`. +- `src/` has 5 `makeRuntime(...)` call sites total. +- 2 are intentionally excluded from this checklist: `src/bus/index.ts` and `src/effect/cross-spawn-spawner.ts`. +- 1 is tracked primarily by the instance-context migration rather than facade removal: `src/project/instance.ts`. +- That leaves 2 live runtime-backed service facades still worth tracking here: `src/npm/index.ts` and `src/cli/cmd/tui/config/tui.ts`. Recent progress: @@ -15,8 +16,9 @@ Recent progress: ## Priority hotspots -- `server/instance/session.ts` still depends on `Session`, `SessionPrompt`, `SessionRevert`, `SessionCompaction`, `SessionSummary`, `ShareSession`, `Agent`, and `Permission` facades. -- `src/effect/app-runtime.ts` still references many facade namespaces directly, so it should stay in view during each deletion. +- `src/cli/cmd/tui/config/tui.ts` still exports `makeRuntime(...)` plus async facade helpers for `get()` and `waitForDependencies()`. +- `src/npm/index.ts` still exports `makeRuntime(...)` plus async facade helpers for `install()`, `add()`, `outdated()`, and `which()`. +- `src/project/instance.ts` still uses a dedicated runtime for project boot, but that file is really part of the broader legacy instance-context transition tracked in `instance-context.md`. ## Completed Batches @@ -184,53 +186,34 @@ These were the recurring mistakes and useful corrections from the first two batc 5. 
For CLI readability, extract file-local preload helpers when the handler starts doing config load + service load + batched effect fanout inline. 6. When rebasing a facade branch after nearby merges, prefer the already-cleaned service/test version over older inline facade-era code. -## Next batch +## Remaining work -Recommended next five, in order: +Most of the original facade-removal backlog is already done. The practical remaining work is narrower now: -1. `src/permission/index.ts` -2. `src/agent/agent.ts` -3. `src/session/summary.ts` -4. `src/session/revert.ts` -5. `src/mcp/auth.ts` - -Why this batch: - -- It keeps pushing the session-adjacent cleanup without jumping straight into `session/index.ts` or `session/prompt.ts`. -- `Permission`, `Agent`, `SessionSummary`, and `SessionRevert` all reduce fanout in `server/instance/session.ts`. -- `McpAuth` is small and closely related to the just-landed `MCP` cleanup. - -After that batch, the expected follow-up is the main session cluster: - -1. `src/session/index.ts` -2. `src/session/prompt.ts` -3. `src/session/compaction.ts` +1. remove the `Npm` runtime-backed facade from `src/npm/index.ts` +2. remove the `TuiConfig` runtime-backed facade from `src/cli/cmd/tui/config/tui.ts` +3. 
keep `src/project/instance.ts` in the separate instance-context migration, not this checklist ## Checklist -- [ ] `src/session/index.ts` (`Session`) - facades: `create`, `fork`, `get`, `setTitle`, `setArchived`, `setPermission`, `setRevert`, `messages`, `children`, `remove`, `updateMessage`, `removeMessage`, `removePart`, `updatePart`; main callers: `server/instance/session.ts`, `cli/cmd/session.ts`, `cli/cmd/export.ts`, `cli/cmd/github.ts`; tests: `test/server/session-actions.test.ts`, `test/server/session-list.test.ts`, `test/server/global-session-list.test.ts` -- [ ] `src/session/prompt.ts` (`SessionPrompt`) - facades: `prompt`, `resolvePromptParts`, `cancel`, `loop`, `shell`, `command`; main callers: `server/instance/session.ts`, `cli/cmd/github.ts`; tests: `test/session/prompt.test.ts`, `test/session/prompt-effect.test.ts`, `test/session/structured-output-integration.test.ts` -- [ ] `src/session/revert.ts` (`SessionRevert`) - facades: `revert`, `unrevert`, `cleanup`; main callers: `server/instance/session.ts`; tests: `test/session/revert-compact.test.ts` -- [ ] `src/session/compaction.ts` (`SessionCompaction`) - facades: `isOverflow`, `prune`, `create`; main callers: `server/instance/session.ts`; tests: `test/session/compaction.test.ts` -- [ ] `src/session/summary.ts` (`SessionSummary`) - facades: `summarize`, `diff`; main callers: `session/prompt.ts`, `session/processor.ts`, `server/instance/session.ts`; tests: `test/session/snapshot-tool-race.test.ts` -- [ ] `src/share/session.ts` (`ShareSession`) - facades: `create`, `share`, `unshare`; main callers: `server/instance/session.ts`, `cli/cmd/github.ts` -- [ ] `src/agent/agent.ts` (`Agent`) - facades: `get`, `list`, `defaultAgent`, `generate`; main callers: `cli/cmd/agent.ts`, `server/instance/session.ts`, `server/instance/experimental.ts`; tests: `test/agent/agent.test.ts` -- [ ] `src/permission/index.ts` (`Permission`) - facades: `ask`, `reply`, `list`; main callers: `server/instance/permission.ts`, 
`server/instance/session.ts`, `session/llm.ts`; tests: `test/permission/next.test.ts` -- [x] `src/file/index.ts` (`File`) - facades removed and merged. -- [x] `src/lsp/index.ts` (`LSP`) - facades removed and merged. -- [x] `src/mcp/index.ts` (`MCP`) - facades removed and merged. -- [x] `src/config/config.ts` (`Config`) - facades removed and merged. -- [x] `src/provider/provider.ts` (`Provider`) - facades removed and merged. -- [x] `src/pty/index.ts` (`Pty`) - facades removed and merged. -- [x] `src/skill/index.ts` (`Skill`) - facades removed and merged. -- [x] `src/project/vcs.ts` (`Vcs`) - facades removed and merged. -- [x] `src/tool/registry.ts` (`ToolRegistry`) - facades removed and merged. -- [ ] `src/worktree/index.ts` (`Worktree`) - facades: `makeWorktreeInfo`, `createFromInfo`, `create`, `remove`, `reset`; main callers: `control-plane/adaptors/worktree.ts`, `server/instance/experimental.ts`; tests: `test/project/worktree.test.ts`, `test/project/worktree-remove.test.ts` -- [x] `src/auth/index.ts` (`Auth`) - facades removed and merged. 
-- [ ] `src/mcp/auth.ts` (`McpAuth`) - facades: `get`, `getForUrl`, `all`, `set`, `remove`, `updateTokens`, `updateClientInfo`, `updateCodeVerifier`, `updateOAuthState`; main callers: `mcp/oauth-provider.ts`, `cli/cmd/mcp.ts`; tests: `test/mcp/oauth-auto-connect.test.ts` -- [ ] `src/plugin/index.ts` (`Plugin`) - facades: `trigger`, `list`, `init`; main callers: `agent/agent.ts`, `session/llm.ts`, `project/bootstrap.ts`; tests: `test/plugin/trigger.test.ts`, `test/provider/provider.test.ts` -- [ ] `src/project/project.ts` (`Project`) - facades: `fromDirectory`, `discover`, `initGit`, `update`, `sandboxes`, `addSandbox`, `removeSandbox`; main callers: `project/instance.ts`, `server/instance/project.ts`, `server/instance/experimental.ts`; tests: `test/project/project.test.ts`, `test/project/migrate-global.test.ts` -- [ ] `src/snapshot/index.ts` (`Snapshot`) - facades: `init`, `track`, `patch`, `restore`, `revert`, `diff`, `diffFull`; main callers: `project/bootstrap.ts`, `cli/cmd/debug/snapshot.ts`; tests: `test/snapshot/snapshot.test.ts`, `test/session/revert-compact.test.ts` +- [ ] `src/npm/index.ts` (`Npm`) - still exports runtime-backed async facade helpers on top of `Npm.Service` +- [ ] `src/cli/cmd/tui/config/tui.ts` (`TuiConfig`) - still exports runtime-backed async facade helpers on top of `TuiConfig.Service` +- [x] `src/session/session.ts` / `src/session/prompt.ts` / `src/session/revert.ts` / `src/session/summary.ts` - service-local facades removed +- [x] `src/agent/agent.ts` (`Agent`) - service-local facades removed +- [x] `src/permission/index.ts` (`Permission`) - service-local facades removed +- [x] `src/worktree/index.ts` (`Worktree`) - service-local facades removed +- [x] `src/plugin/index.ts` (`Plugin`) - service-local facades removed +- [x] `src/snapshot/index.ts` (`Snapshot`) - service-local facades removed +- [x] `src/file/index.ts` (`File`) - facades removed and merged +- [x] `src/lsp/index.ts` (`LSP`) - facades removed and merged +- [x] 
`src/mcp/index.ts` (`MCP`) - facades removed and merged +- [x] `src/config/config.ts` (`Config`) - facades removed and merged +- [x] `src/provider/provider.ts` (`Provider`) - facades removed and merged +- [x] `src/pty/index.ts` (`Pty`) - facades removed and merged +- [x] `src/skill/index.ts` (`Skill`) - facades removed and merged +- [x] `src/project/vcs.ts` (`Vcs`) - facades removed and merged +- [x] `src/tool/registry.ts` (`ToolRegistry`) - facades removed and merged +- [x] `src/auth/index.ts` (`Auth`) - facades removed and merged ## Excluded `makeRuntime(...)` sites diff --git a/packages/opencode/specs/effect/http-api.md b/packages/opencode/specs/effect/http-api.md index 71b50250ed..93ef81a325 100644 --- a/packages/opencode/specs/effect/http-api.md +++ b/packages/opencode/specs/effect/http-api.md @@ -76,7 +76,7 @@ Many route boundaries still use Zod-first validators. That does not block all ex ### Mixed handler styles -Many current `server/instance/*.ts` handlers still call async facades directly. Migrating those to composed `Effect.gen(...)` handlers is the low-risk step to do first. +Many current `server/routes/instance/*.ts` handlers still mix composed Effect code with smaller Promise- or ALS-backed seams. Migrating those to consistent `Effect.gen(...)` handlers is the low-risk step to do first. ### Non-JSON routes @@ -90,7 +90,7 @@ The current server composition, middleware, and docs flow are Hono-centered toda ### 1. Finish the prerequisites first -- continue route-handler effectification in `server/instance/*.ts` +- continue route-handler effectification in `server/routes/instance/*.ts` - continue schema migration toward Effect Schema-first DTOs and errors - keep removing service facades @@ -98,9 +98,9 @@ The current server composition, middleware, and docs flow are Hono-centered toda Introduce one small `HttpApi` group for plain JSON endpoints only. 
Good initial candidates are the least stateful endpoints in: -- `server/instance/question.ts` -- `server/instance/provider.ts` -- `server/instance/permission.ts` +- `server/routes/instance/question.ts` +- `server/routes/instance/provider.ts` +- `server/routes/instance/permission.ts` Avoid `session.ts`, SSE, websocket, and TUI-facing routes first. @@ -155,9 +155,9 @@ This gives: As each route group is ported to `HttpApi`: -1. change its `root` path from `/experimental/httpapi/` to `/` -2. add `.all("/", handler)` / `.all("//*", handler)` to the flag block in `instance/index.ts` -3. for partial ports (e.g. only `GET /provider/auth`), bridge only the specific path +1. add `.get(...)` / `.post(...)` bridge entries to the flag block in `server/routes/instance/index.ts` +2. for partial ports (e.g. only `GET /provider/auth`), bridge only the specific path +3. keep the legacy Hono route registered behind it for OpenAPI / SDK generation until the spec pipeline changes 4. verify SDK output is unchanged Leave streaming-style endpoints on Hono until there is a clear reason to move them. 
@@ -189,10 +189,46 @@ Ordering for a route-group migration: SDK shape rule: -- every schema migration must preserve the generated SDK output byte-for-byte -- `Schema.Class` emits a named `$ref` in OpenAPI via its identifier — use it only for types that already had `.meta({ ref })` in the old Zod schema -- inner / nested types that were anonymous in the old Zod schema should stay as `Schema.Struct` (not `Schema.Class`) to avoid introducing new named components in the OpenAPI spec -- if a diff appears in `packages/sdk/js/src/v2/gen/types.gen.ts`, the migration introduced an unintended API surface change — fix it before merging +- every schema migration must preserve the generated SDK output byte-for-byte **unless the new ref is intentional** (see Schema.Class vs Schema.Struct below) +- if an unintended diff appears in `packages/sdk/js/src/v2/gen/types.gen.ts`, the migration introduced an unintended API surface change — fix it before merging + +### Schema.Class vs Schema.Struct + +The pattern choice determines whether a schema becomes a **named** export in the SDK or stays **anonymous inline**. + +**Schema.Class** emits a named `$ref` in OpenAPI via its identifier → produces a named `export type Foo = ...` in `types.gen.ts`: + +```ts +export class Info extends Schema.Class<Info>("FooConfig")({ ... }) { + static readonly zod = zod(this) +} +``` + +**Schema.Struct** stays anonymous and is inlined everywhere it is referenced: + +```ts +export const Info = Schema.Struct({ ... }).pipe( + withStatics((s) => ({ zod: zod(s) })), +) +export type Info = Schema.Schema.Type<typeof Info> +``` + +When to use each: + +- Use **Schema.Class** when: + - the original Zod had `.meta({ ref: ...
})` (preserve the existing named SDK type byte-for-byte) + - the schema is a top-level endpoint request or response (SDK consumers benefit from a stable importable name) +- Use **Schema.Struct** when: + - the type is only used as a nested field inside another named schema + - the original Zod was anonymous and promoting it would bloat SDK types with no import value + +Promoting a previously-anonymous schema to Schema.Class is acceptable when it is top-level or endpoint-facing, but call it out in the PR — it is an additive SDK change (`export type Foo = ...` newly appears) even if it preserves the JSON shape. + +Schemas that are **not** pure objects (enums, unions, records, tuples) cannot use Schema.Class. For those, add `.annotate({ identifier: "FooName" })` to get the same named-ref behavior: + +```ts +export const Action = Schema.Literals(["ask", "allow", "deny"]).annotate({ identifier: "PermissionActionConfig" }) +``` Temporary exception: @@ -231,7 +267,7 @@ Use the same sequence for each route group. 3. Apply the schema migration ordering above so those types are Effect Schema-first. 4. Define the `HttpApi` contract separately from the handlers. 5. Implement handlers by yielding the existing service from context. -6. Mount the new surface in parallel under an experimental prefix. +6. Mount the new surface in parallel behind the `OPENCODE_EXPERIMENTAL_HTTPAPI` bridge. 7. Regenerate the SDK and verify zero diff against `dev` (see SDK shape rule above). 8. Add one end-to-end test and one OpenAPI-focused test. 9. Compare ergonomics before migrating the next endpoint. 
@@ -250,20 +286,20 @@ Placement rule: - keep `HttpApi` code under `src/server`, not `src/effect` - `src/effect` should stay focused on runtimes, layers, instance state, and shared Effect plumbing - place each `HttpApi` slice next to the HTTP boundary it serves -- for instance-scoped routes, prefer `src/server/instance/httpapi/*` -- if control-plane routes ever migrate, prefer `src/server/control/httpapi/*` +- for instance-scoped routes, prefer `src/server/routes/instance/httpapi/*` +- if control-plane routes ever migrate, prefer `src/server/routes/control/httpapi/*` Suggested file layout for a repeatable spike: -- `src/server/instance/httpapi/question.ts` — contract and handler layer for one route group -- `src/server/instance/httpapi/server.ts` — standalone Effect HTTP server that composes all groups -- `test/server/question-httpapi.test.ts` — end-to-end test against the real service +- `src/server/routes/instance/httpapi/question.ts` — contract and handler layer for one route group +- `src/server/routes/instance/httpapi/server.ts` — bridged Effect HTTP layer that composes all groups +- route or OpenAPI verification should live alongside the existing server tests; there is no dedicated `question-httpapi` test file on this branch Suggested responsibilities: - `question.ts` defines the `HttpApi` contract and `HttpApiBuilder.group(...)` handlers -- `server.ts` composes all route groups into one `HttpRouter.serve` layer with shared middleware (auth, instance lookup) -- tests use `ExperimentalHttpApiServer.layerTest` to run against a real in-process HTTP server +- `server.ts` composes all route groups into one `HttpRouter.toWebHandler(...)` bridge with shared middleware (auth, instance lookup) +- tests should verify the bridged routes through the normal server surface ## Example migration shape @@ -283,33 +319,33 @@ Each route-group spike should follow the same shape. - keep handler bodies thin - keep transport mapping at the HTTP boundary only -### 3. 
Standalone server +### 3. Bridged server -- the Effect HTTP server is self-contained in `httpapi/server.ts` -- it is **not** mounted into the Hono app — no bridge, no `toWebHandler` -- route paths use the `/experimental/httpapi` prefix so they match the eventual cutover -- each route group exposes its own OpenAPI doc endpoint +- the Effect HTTP layer is composed in `httpapi/server.ts` +- it is mounted into the Hono app via `HttpRouter.toWebHandler(...)` +- routes keep their normal instance paths and are gated by the `OPENCODE_EXPERIMENTAL_HTTPAPI` flag +- the legacy Hono handlers stay registered after the bridge so current OpenAPI / SDK generation still works ### 4. Verification - seed real state through the existing service -- call the experimental endpoints +- call the bridged endpoints with the flag enabled - assert that the service behavior is unchanged - assert that the generated OpenAPI contains the migrated paths and schemas ## Boundary composition -The standalone Effect server owns its own middleware stack. It does not share middleware with the Hono server. +The Effect `HttpApi` layer owns its own auth and instance middleware, but it is currently mounted inside the existing Hono server. 
### Auth -- the standalone server implements auth as an `HttpApiMiddleware.Service` using `HttpApiSecurity.basic` +- the bridged `HttpApi` layer implements auth as an `HttpApiMiddleware.Service` using `HttpApiSecurity.basic` - each route group's `HttpApi` is wrapped with `.middleware(Authorization)` before being served -- this is independent of the Hono `AuthMiddleware` — when the Effect server eventually replaces Hono, this becomes the only auth layer +- this is independent of the Hono auth layer; the current bridge keeps the responsibility local to the `HttpApi` slice ### Instance and workspace lookup -- the standalone server resolves instance context via an `HttpRouter.middleware` that reads `x-opencode-directory` headers and `directory` query params +- the bridged `HttpApi` layer resolves instance context via an `HttpRouter.middleware` that reads `x-opencode-directory` headers and `directory` query params - this is the Effect equivalent of the Hono `WorkspaceRouterMiddleware` - `HttpApi` handlers yield services from context and assume the correct instance has already been provided @@ -324,7 +360,7 @@ The standalone Effect server owns its own middleware stack. 
It does not share mi The first slice is successful if: -- the standalone Effect server starts and serves the endpoints independently of the Hono server +- the bridged endpoints serve correctly through the existing Hono host when the flag is enabled - the handlers reuse the existing Effect service - request decoding and response shapes are schema-defined from canonical Effect schemas - any remaining Zod boundary usage is derived from `.zod` or clearly temporary @@ -365,17 +401,16 @@ Current instance route inventory: endpoints: `GET /question`, `POST /question/:requestID/reply`, `POST /question/:requestID/reject` - `permission` - `bridged` endpoints: `GET /permission`, `POST /permission/:requestID/reply` -- `provider` - `bridged` (partial) - bridged endpoint: `GET /provider/auth` - not yet ported: `GET /provider`, OAuth mutations -- `config` - `next` - best next endpoint: `GET /config/providers` +- `provider` - `bridged` + endpoints: `GET /provider`, `GET /provider/auth`, `POST /provider/:providerID/oauth/authorize`, `POST /provider/:providerID/oauth/callback` +- `config` - `bridged` (partial) + bridged endpoint: `GET /config/providers` later endpoint: `GET /config` defer `PATCH /config` for now -- `project` - `later` - best small reads: `GET /project`, `GET /project/current` +- `project` - `bridged` (partial) + bridged endpoints: `GET /project`, `GET /project/current` defer git-init mutation first -- `workspace` - `later` +- `workspace` - `next` best small reads: `GET /experimental/workspace/adaptor`, `GET /experimental/workspace`, `GET /experimental/workspace/status` defer create/remove mutations first - `file` - `later` @@ -393,12 +428,12 @@ Current instance route inventory: - `tui` - `defer` queue-style UI bridge, weak early `HttpApi` fit -Recommended near-term sequence after the first spike: +Recommended near-term sequence: -1. `provider` auth read endpoint -2. `config` providers read endpoint -3. `project` read endpoints -4. `workspace` read endpoints +1. 
`workspace` read endpoints (`GET /experimental/workspace/adaptor`, `GET /experimental/workspace`, `GET /experimental/workspace/status`) +2. `config` full read endpoint (`GET /config`) +3. `file` JSON read endpoints +4. `mcp` JSON read endpoints ## Checklist @@ -411,8 +446,12 @@ Recommended near-term sequence after the first spike: - [x] gate behind `OPENCODE_EXPERIMENTAL_HTTPAPI` flag - [x] verify OTEL spans and HTTP logs flow to motel - [x] bridge question, permission, and provider auth routes -- [ ] port remaining provider endpoints (`GET /provider`, OAuth mutations) -- [ ] port `config` read endpoints +- [x] port remaining provider endpoints (`GET /provider`, OAuth mutations) +- [x] port `config` providers read endpoint +- [x] port `project` read endpoints (`GET /project`, `GET /project/current`) +- [ ] port `workspace` read endpoints +- [ ] port `GET /config` full read endpoint +- [ ] port `file` JSON read endpoints - [ ] decide when to remove the flag and make Effect routes the default ## Rule of thumb diff --git a/packages/opencode/specs/effect/instance-context.md b/packages/opencode/specs/effect/instance-context.md index 6c160a9477..7d0d7eb13c 100644 --- a/packages/opencode/specs/effect/instance-context.md +++ b/packages/opencode/specs/effect/instance-context.md @@ -157,7 +157,7 @@ Direct legacy usage means any source file that still calls one of: - `Instance.reload(...)` - `Instance.dispose()` / `Instance.disposeAll()` -Current total: `54` files in `packages/opencode/src`. +Current total: `56` files in `packages/opencode/src`. ### Core bridge and plumbing @@ -177,13 +177,13 @@ Migration rule: These are the current request-entry seams that still create or consume instance context through the legacy helper. 
-- `src/server/instance/middleware.ts` -- `src/server/instance/index.ts` -- `src/server/instance/project.ts` -- `src/server/instance/workspace.ts` -- `src/server/instance/file.ts` -- `src/server/instance/experimental.ts` -- `src/server/instance/global.ts` +- `src/server/routes/instance/middleware.ts` +- `src/server/routes/instance/index.ts` +- `src/server/routes/instance/project.ts` +- `src/server/routes/control/workspace.ts` +- `src/server/routes/instance/file.ts` +- `src/server/routes/instance/experimental.ts` +- `src/server/routes/global.ts` Migration rule: @@ -239,7 +239,7 @@ Migration rule: These modules are already the best near-term migration targets because they are in Effect code but still read sync getters from the legacy helper. - `src/agent/agent.ts` -- `src/config/tui-migrate.ts` +- `src/cli/cmd/tui/config/tui-migrate.ts` - `src/file/index.ts` - `src/file/watcher.ts` - `src/format/formatter.ts` @@ -250,7 +250,7 @@ These modules are already the best near-term migration targets because they are - `src/project/vcs.ts` - `src/provider/provider.ts` - `src/pty/index.ts` -- `src/session/index.ts` +- `src/session/session.ts` - `src/session/instruction.ts` - `src/session/llm.ts` - `src/session/system.ts` diff --git a/packages/opencode/specs/effect/loose-ends.md b/packages/opencode/specs/effect/loose-ends.md index a2fed492b3..4e7ada7ff9 100644 --- a/packages/opencode/specs/effect/loose-ends.md +++ b/packages/opencode/specs/effect/loose-ends.md @@ -4,11 +4,11 @@ Small follow-ups that do not fit neatly into the main facade, route, tool, or sc ## Config / TUI -- [ ] `config/tui.ts` - finish the internal Effect migration after the `Instance.state(...)` removal. +- [ ] `cli/cmd/tui/config/tui.ts` - finish the internal Effect migration. Keep the current precedence and migration semantics intact while converting the remaining internal async helpers (`loadState`, `mergeFile`, `loadFile`, `load`) to `Effect.gen(...)` / `Effect.fn(...)`. 
-- [ ] `config/tui.ts` callers - once the internal service is stable, migrate plain async callers to use `TuiConfig.Service` directly where that actually simplifies the code. +- [ ] `cli/cmd/tui/config/tui.ts` callers - once the internal service is stable, migrate plain async callers to use `TuiConfig.Service` directly where that actually simplifies the code. Likely first callers: `cli/cmd/tui/attach.ts`, `cli/cmd/tui/thread.ts`, `cli/cmd/tui/plugin/runtime.ts`. -- [ ] `env/index.ts` - move the last production `Instance.state(...)` usage onto `InstanceState` (or its replacement) so `Instance.state` can be deleted. +- [x] `env/index.ts` - already uses `InstanceState.make(...)`. ## ConfigPaths @@ -21,14 +21,12 @@ Small follow-ups that do not fit neatly into the main facade, route, tool, or sc - `readFile(...)` - `parseText(...)` - [ ] `config/config.ts` - switch internal config loading from `Effect.promise(() => ConfigPaths.*(...))` to `yield* paths.*(...)` once the service exists. -- [ ] `config/tui.ts` - switch TUI config loading from async `ConfigPaths.*` wrappers to the `ConfigPaths.Service` once that service exists. -- [ ] `config/tui-migrate.ts` - decide whether to leave this as a plain async module using wrapper functions or effectify it fully after `ConfigPaths.Service` lands. +- [ ] `cli/cmd/tui/config/tui.ts` - switch TUI config loading from async `ConfigPaths.*` wrappers to the `ConfigPaths.Service` once that service exists. +- [ ] `cli/cmd/tui/config/tui-migrate.ts` - decide whether to leave this as a plain async module using wrapper functions or effectify it fully after `ConfigPaths.Service` lands. ## Instance cleanup -- [ ] `project/instance.ts` - remove `Instance.state(...)` once `env/index.ts` is migrated. -- [ ] `project/state.ts` - delete the bespoke per-instance state helper after the last production caller is gone. -- [ ] `test/project/state.test.ts` - replace or delete the old `Instance.state(...)` tests after the removal. 
+- [ ] `project/instance.ts` - keep shrinking the legacy ALS / Promise cache after the remaining `Instance.*` callers move over. ## Notes diff --git a/packages/opencode/specs/effect/migration.md b/packages/opencode/specs/effect/migration.md index 105a82290b..947eef5a15 100644 --- a/packages/opencode/specs/effect/migration.md +++ b/packages/opencode/specs/effect/migration.md @@ -9,7 +9,7 @@ Use `InstanceState` (from `src/effect/instance-state.ts`) for services that need Use `makeRuntime` (from `src/effect/run-service.ts`) to create a per-service `ManagedRuntime` that lazily initializes and shares layers via a global `memoMap`. Returns `{ runPromise, runFork, runCallback }`. - Global services (no per-directory state): Account, Auth, AppFileSystem, Installation, Truncate, Worktree -- Instance-scoped (per-directory state via InstanceState): Agent, Bus, Command, Config, File, FileTime, FileWatcher, Format, LSP, MCP, Permission, Plugin, ProviderAuth, Pty, Question, SessionStatus, Skill, Snapshot, ToolRegistry, Vcs +- Instance-scoped (per-directory state via InstanceState): Agent, Bus, Command, Config, File, FileWatcher, Format, LSP, MCP, Permission, Plugin, ProviderAuth, Pty, Question, SessionStatus, Skill, Snapshot, ToolRegistry, Vcs Rule of thumb: if two open directories should not share one copy of the service, it needs `InstanceState`. @@ -19,53 +19,43 @@ See `instance-context.md` for the phased plan to remove the legacy ALS / promise ## Service shape -Every service follows the same pattern — a single namespace with the service definition, layer, `runPromise`, and async facade functions: +Every service follows the same pattern: one module, flat top-level exports, traced Effect methods, and a self-reexport at the bottom when the file is the public module. 
```ts -export namespace Foo { - export interface Interface { - readonly get: (id: FooID) => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/Foo") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - // For instance-scoped services: - const state = yield* InstanceState.make( - Effect.fn("Foo.state")(() => Effect.succeed({ ... })), - ) - - const get = Effect.fn("Foo.get")(function* (id: FooID) { - const s = yield* InstanceState.get(state) - // ... - }) - - return Service.of({ get }) - }), - ) - - // Optional: wire dependencies - export const defaultLayer = layer.pipe(Layer.provide(FooDep.layer)) - - // Per-service runtime (inside the namespace) - const { runPromise } = makeRuntime(Service, defaultLayer) - - // Async facade functions - export async function get(id: FooID) { - return runPromise((svc) => svc.get(id)) - } +export interface Interface { + readonly get: (id: FooID) => Effect.Effect } + +export class Service extends Context.Service()("@opencode/Foo") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const state = yield* InstanceState.make( + Effect.fn("Foo.state")(() => Effect.succeed({ ... })), + ) + + const get = Effect.fn("Foo.get")(function* (id: FooID) { + const s = yield* InstanceState.get(state) + // ... + }) + + return Service.of({ get }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(FooDep.layer)) + +export * as Foo from "." ``` Rules: -- Keep everything in one namespace, one file — no separate `service.ts` / `index.ts` split -- `runPromise` goes inside the namespace (not exported unless tests need it) -- Facade functions are plain `async function` — no `fn()` wrappers -- Use `Effect.fn("Namespace.method")` for all Effect functions (for tracing) -- No `Layer.fresh` — InstanceState handles per-directory isolation +- Keep the service surface in one module; prefer flat top-level exports over `export namespace Foo { ... 
}` +- Use `Effect.fn("Foo.method")` for Effect methods +- Use a self-reexport (`export * as Foo from "."` or `"./foo"`) for the public namespace projection +- Avoid service-local `makeRuntime(...)` facades unless a file is still intentionally in the older migration phase +- No `Layer.fresh` for normal per-directory isolation; use `InstanceState` ## Schema → Zod interop @@ -195,7 +185,6 @@ This checklist is only about the service shape migration. Many of these services - [x] `Config` — `config/config.ts` - [x] `Discovery` — `skill/discovery.ts` (dependency-only layer, no standalone runtime) - [x] `File` — `file/index.ts` -- [x] `FileTime` — `file/time.ts` - [x] `FileWatcher` — `file/watcher.ts` - [x] `Format` — `format/index.ts` - [x] `Installation` — `installation/index.ts` @@ -267,7 +256,7 @@ Tool-specific filesystem cleanup notes live in `tools.md`. ## Destroying the facades -This phase is still broadly open. As of 2026-04-13 there are still 15 `makeRuntime(...)` call sites under `src/`, with 13 still in scope for facade removal. The live checklist now lives in `facades.md`. +This phase is no longer broadly open. There are 5 `makeRuntime(...)` call sites under `src/`, and only a small subset are still ordinary facade-removal targets. The live checklist now lives in `facades.md`. These facades exist because cyclic imports used to force each service to build its own independent runtime. Now that the layer DAG is acyclic and `AppRuntime` (`src/effect/app-runtime.ts`) composes everything into one `ManagedRuntime`, we're removing them. @@ -298,12 +287,11 @@ For each service, the migration is roughly: - `ShareNext` — migrated 2026-04-11. Swapped remaining async callers to `AppRuntime.runPromise(ShareNext.Service.use(...))`, removed the `makeRuntime(...)` facade, and kept instance bootstrap on the shared app runtime. - `SessionTodo` — migrated 2026-04-10. 
Already matched the target service shape in `session/todo.ts`: single namespace, traced Effect methods, and no `makeRuntime(...)` facade remained; checklist updated to reflect the completed migration. - `Storage` — migrated 2026-04-10. One production caller (`Session.diff`) and all storage.test.ts tests converted to effectful style. Facades and `makeRuntime` removed. -- `SessionRunState` — migrated 2026-04-11. Single caller in `server/instance/session.ts` converted; facade removed. -- `Account` — migrated 2026-04-11. Callers in `server/instance/experimental.ts` and `cli/cmd/account.ts` converted; facade removed. +- `SessionRunState` — migrated 2026-04-11. Single caller in `server/routes/instance/session.ts` converted; facade removed. +- `Account` — migrated 2026-04-11. Callers in `server/routes/instance/experimental.ts` and `cli/cmd/account.ts` converted; facade removed. - `Instruction` — migrated 2026-04-11. Test-only callers converted; facade removed. -- `FileTime` — migrated 2026-04-11. Test-only callers converted; facade removed. - `FileWatcher` — migrated 2026-04-11. Callers in `project/bootstrap.ts` and test converted; facade removed. -- `Question` — migrated 2026-04-11. Callers in `server/instance/question.ts` and test converted; facade removed. +- `Question` — migrated 2026-04-11. Callers in `server/routes/instance/question.ts` and test converted; facade removed. - `Truncate` — migrated 2026-04-11. Caller in `tool/tool.ts` and test converted; facade removed. ## Route handler effectification diff --git a/packages/opencode/specs/effect/namespace-treeshake.md b/packages/opencode/specs/effect/namespace-treeshake.md deleted file mode 100644 index ef78c762bb..0000000000 --- a/packages/opencode/specs/effect/namespace-treeshake.md +++ /dev/null @@ -1,256 +0,0 @@ -# Namespace → self-reexport migration - -Migrate every `export namespace Foo { ... 
}` to flat top-level exports plus a -single self-reexport line at the bottom of the same file: - -```ts -export * as Foo from "./foo" -``` - -No barrel `index.ts` files. No cross-directory indirection. Consumers keep the -exact same `import { Foo } from "../foo/foo"` ergonomics. - -## Why this pattern - -We tested three options against Bun, esbuild, Rollup (what Vite uses under the -hood), Bun's runtime, and Node's native TypeScript runner. - -``` - heavy.ts loaded? - A. namespace B. barrel C. self-reexport -Bun bundler YES YES no -esbuild YES YES no -Rollup (Vite) YES YES no -Bun runtime YES YES no -Node --experimental-strip-types SYNTAX ERROR YES no -``` - -- **`export namespace`** compiles to an IIFE. Bundlers see one opaque function - call and can't analyze what's used. Node's native TS runner rejects the - syntax outright: `SyntaxError: TypeScript namespace declaration is not -supported in strip-only mode`. -- **Barrel `index.ts`** files (`export * as Foo from "./foo"` in a separate - file) force every re-exported sibling to evaluate when you import one name. - Siblings with side effects (top-level imports of SDKs, etc.) always load. -- **Self-reexport** keeps the file as plain ESM. Bundlers see static named - exports. The module is only pulled in when something actually imports from - it. There is no barrel hop, so no sibling contamination and no circular - import hazard. - -Bundle overhead for the self-reexport wrapper is roughly 240 bytes per module -(`Object.defineProperty` namespace proxy). At ~100 modules that's ~24KB — -negligible for a CLI binary. - -## The pattern - -### Before - -```ts -// src/permission/arity.ts -export namespace BashArity { - export function prefix(tokens: string[]) { ... } -} -``` - -### After - -```ts -// src/permission/arity.ts -export function prefix(tokens: string[]) { ... 
} - -export * as BashArity from "./arity" -``` - -Consumers don't change at all: - -```ts -import { BashArity } from "@/permission/arity" -BashArity.prefix(...) // still works -``` - -Editors still auto-import `BashArity` like any named export, because the file -does have a named `BashArity` export at the module top level. - -### Odd but harmless - -`BashArity.BashArity.BashArity.prefix(...)` compiles and runs because the -namespace contains a re-export of itself. Nobody would write that. Not a -problem. - -## Why this is different from what we tried first - -An earlier pass used sibling barrel files (`index.ts` with `export * as ...`). -That turned out to be wrong for our constraints: - -1. The barrel file always loads all its sibling modules when you import - through it, even if you only need one. For our CLI this is exactly the - cost we're trying to avoid. -2. Barrel + sibling imports made it very easy to accidentally create circular - imports that only surface as `ReferenceError` at runtime, not at - typecheck. - -The self-reexport has none of those issues. There is no indirection. The -file and the namespace are the same unit. - -## Why this matters for startup - -The worst import chain in the codebase looks like: - -``` -src/index.ts - └── FormatError from src/cli/error.ts - ├── { Provider } from provider/provider.ts (~1700 lines) - │ ├── 20+ @ai-sdk/* packages - │ ├── @aws-sdk/credential-providers - │ ├── google-auth-library - │ └── more - ├── { Config } from config/config.ts (~1600 lines) - └── { MCP } from mcp/mcp.ts (~900 lines) -``` - -All of that currently gets pulled in just to do `.isInstance()` on a handful -of error classes. The namespace IIFE shape is the main reason bundlers cannot -strip the unused parts. Self-reexport + flat ESM fixes it. - -## Automation - -From `packages/opencode`: - -```bash -bun script/unwrap-namespace.ts [--dry-run] -``` - -The script: - -1. Uses ast-grep to locate the `export namespace Foo { ... }` block accurately. -2. 
Removes the `export namespace Foo {` line and the matching closing `}`. -3. Dedents the body by one indent level (2 spaces). -4. Rewrites `Foo.Bar` self-references inside the file to just `Bar`. -5. Appends `export * as Foo from "./"` at the bottom of the file. -6. Never creates a barrel `index.ts`. - -### Typical flow for one file - -```bash -# 1. Preview -bun script/unwrap-namespace.ts src/permission/arity.ts --dry-run - -# 2. Apply -bun script/unwrap-namespace.ts src/permission/arity.ts - -# 3. Verify -cd packages/opencode -bunx --bun tsgo --noEmit -bun run --conditions=browser ./src/index.ts generate -bun run test -``` - -### Consumer imports usually don't need to change - -Most consumers already import straight from the file, e.g.: - -```ts -import { BashArity } from "@/permission/arity" -import { Config } from "@/config/config" -``` - -Because the file itself now does `export * as Foo from "./foo"`, those imports -keep working with zero edits. - -The only edits needed are when a consumer was importing through a previous -barrel (`"@/config"` or `"../config"` resolving to `config/index.ts`). In -that case, repoint it at the file: - -```ts -// before -import { Config } from "@/config" - -// after -import { Config } from "@/config/config" -``` - -### Dynamic imports in tests - -If a test did `const { Foo } = await import("../../src/x/y")`, the destructure -still works because of the self-reexport. No change required. - -## Verification checklist (per PR) - -Run all of these locally before pushing: - -```bash -cd packages/opencode -bunx --bun tsgo --noEmit -bun run --conditions=browser ./src/index.ts generate -bun run test -``` - -Also do a quick grep in `src/`, `test/`, and `script/` to make sure no -consumer is still importing the namespace from an old barrel path that no -longer exports it. 
- -The SDK build step (`bun run --conditions=browser ./src/index.ts generate`) -evaluates every module eagerly and is the most reliable way to catch circular -import regressions at runtime — the typechecker does not catch these. - -## Rules for new code - -- No new `export namespace`. -- Every module directory has a single canonical file — typically - `dir/index.ts` — with flat top-level exports and a self-reexport at the - bottom: - `export * as Foo from "."` -- Consumers import from the directory: - `import { Foo } from "@/dir"` or `import { Foo } from "../dir"`. -- No sibling barrel files. If a directory has multiple independent - namespaces, they each get their own file (e.g. `config/config.ts`, - `config/plugin.ts`) and their own self-reexport; the `index.ts` in that - directory stays minimal or does not exist. -- If a file needs a sibling, import the sibling file directly: - `import * as Sibling from "./sibling"`, not `from "."`. - -### Why `dir/index.ts` + `"."` is fine for us - -A single-file module (e.g. `pty/`) can live entirely in `dir/index.ts` -with `export * as Foo from "."` at the bottom. Consumers write the -short form: - -```ts -import { Pty } from "@/pty" -``` - -This works in Bun runtime, Bun build, esbuild, and Rollup. It does NOT -work under Node's `--experimental-strip-types` runner: - -``` -node --experimental-strip-types entry.ts - ERR_UNSUPPORTED_DIR_IMPORT: Directory import '/.../pty' is not supported -``` - -Node requires an explicit file or a `package.json#exports` map for ESM. -We don't care about that target right now because the opencode CLI is -built with Bun and the web apps are built with Vite/Rollup. If we ever -want to run raw `.ts` through Node, we'll need to either use explicit -`.ts` extensions everywhere or add per-directory `package.json` exports -maps. - -### When NOT to collapse to `index.ts` - -Some directories contain multiple independent namespaces where -`dir/index.ts` would be misleading. 
Examples: - -- `config/` has `Config`, `ConfigPaths`, `ConfigMarkdown`, `ConfigPlugin`, - `ConfigKeybinds`. Each lives in its own file with its own self-reexport - (`config/config.ts`, `config/plugin.ts`, etc.). Consumers import the - specific one: `import { ConfigPlugin } from "@/config/plugin"`. -- Same shape for `session/`, `server/`, etc. - -Collapsing one of those into `index.ts` would mean picking a single -"canonical" namespace for the directory, which breaks the symmetry and -hides the other files. - -## Scope - -There are still dozens of `export namespace` files left across the codebase. -Each one is its own small PR. Do them one at a time, verified locally, rather -than batching by directory. diff --git a/packages/opencode/specs/effect/routes.md b/packages/opencode/specs/effect/routes.md index f6a61d2342..3bf7e1b556 100644 --- a/packages/opencode/specs/effect/routes.md +++ b/packages/opencode/specs/effect/routes.md @@ -39,28 +39,26 @@ This eliminates multiple `runPromise` round-trips and lets handlers compose natu ## Current route files -Current instance route files live under `src/server/instance`, not `server/routes`. +Current instance route files live under `src/server/routes/instance`. 
-The main migration targets are: +Files that are already mostly on the intended service-yielding shape: -- [ ] `server/instance/session.ts` — heaviest; still has many direct facade calls for Session, SessionPrompt, SessionRevert, SessionCompaction, SessionShare, SessionSummary, Agent, Bus -- [ ] `server/instance/global.ts` — still has direct facade calls for Config and instance lifecycle actions -- [ ] `server/instance/provider.ts` — still has direct facade calls for Config and Provider -- [ ] `server/instance/question.ts` — partially converted; still worth tracking here until it consistently uses the composed style -- [ ] `server/instance/pty.ts` — still calls Pty facades directly -- [ ] `server/instance/experimental.ts` — mixed state; some handlers are already composed, others still use facades +- [x] `server/routes/instance/question.ts` — handlers yield `Question.Service` +- [x] `server/routes/instance/provider.ts` — handlers yield `Provider.Service`, `ProviderAuth.Service`, and `Config.Service` +- [x] `server/routes/instance/permission.ts` — handlers yield `Permission.Service` +- [x] `server/routes/instance/mcp.ts` — handlers mostly yield `MCP.Service` +- [x] `server/routes/instance/pty.ts` — handlers yield `Pty.Service` -Additional route files that still participate in the migration: +Files still worth tracking here: -- [ ] `server/instance/index.ts` — Vcs, Agent, Skill, LSP, Format -- [ ] `server/instance/file.ts` — Ripgrep, File, LSP -- [ ] `server/instance/mcp.ts` — MCP facade-heavy -- [ ] `server/instance/permission.ts` — Permission -- [ ] `server/instance/workspace.ts` — Workspace -- [ ] `server/instance/tui.ts` — Bus and Session -- [ ] `server/instance/middleware.ts` — Session and Workspace lookups +- [ ] `server/routes/instance/session.ts` — still the heaviest mixed file; many handlers are composed, but the file still mixes patterns and has direct `Bus.publish(...)` / `Session.list(...)` usage +- [ ] `server/routes/instance/index.ts` — mostly converted, 
but still has direct `Instance.dispose()` / `Instance.*` reads for `/instance/dispose` and `/path` +- [ ] `server/routes/instance/file.ts` — most handlers yield services, but `/find` still passes `Instance.directory` directly into ripgrep and `/find/symbol` is still stubbed +- [ ] `server/routes/instance/experimental.ts` — mixed state; many handlers are composed, but some still rely on `runRequest(...)` or direct `Instance.project` reads +- [ ] `server/routes/instance/middleware.ts` — still enters the instance via `Instance.provide(...)` +- [ ] `server/routes/global.ts` — still uses `Instance.disposeAll()` and remains partly outside the fully-composed style ## Notes -- Some handlers already use `AppRuntime.runPromise(Effect.gen(...))` in isolated places. Keep pushing those files toward one consistent style. -- Route conversion is closely tied to facade removal. As services lose `makeRuntime`-backed async exports, route handlers should switch to yielding the service directly. +- Route conversion is now less about facade removal and more about removing the remaining direct `Instance.*` reads, `Instance.provide(...)` boundaries, and small Promise-style bridges inside route files. +- `jsonRequest(...)` / `runRequest(...)` already provide a good intermediate shape for many handlers. The remaining cleanup is mostly consistency work in the heavier files. diff --git a/packages/opencode/specs/effect/server-package.md b/packages/opencode/specs/effect/server-package.md index 10be7b9aed..06e89c18de 100644 --- a/packages/opencode/specs/effect/server-package.md +++ b/packages/opencode/specs/effect/server-package.md @@ -40,13 +40,13 @@ Everything still lives in `packages/opencode`. 
Important current facts: - there is no `packages/core` or `packages/cli` workspace yet -- `packages/server` now exists as a minimal scaffold package, but it does not own any real route contracts, handlers, or runtime composition yet +- there is no `packages/server` workspace yet on this branch - the main host server is still Hono-based in `src/server/server.ts` - current OpenAPI generation is Hono-based through `Server.openapi()` and `cli/cmd/generate.ts` - the Effect runtime and app layer are centralized in `src/effect/app-runtime.ts` and `src/effect/run-service.ts` -- there is already one experimental Effect `HttpApi` slice at `src/server/instance/httpapi/question.ts` -- that experimental slice is mounted under `/experimental/httpapi/question` -- that experimental slice already has an end-to-end test at `test/server/question-httpapi.test.ts` +- there are already bridged Effect `HttpApi` slices under `src/server/routes/instance/httpapi/*` +- those slices are mounted into the Hono server behind `OPENCODE_EXPERIMENTAL_HTTPAPI` +- the bridge currently covers `question`, `permission`, `provider`, partial `config`, and partial `project` routes This means the package split should start from an extraction path, not from greenfield package ownership. 
@@ -209,17 +209,19 @@ Current host and route composition: - `src/server/server.ts` - `src/server/control/index.ts` -- `src/server/instance/index.ts` +- `src/server/routes/instance/index.ts` - `src/server/middleware.ts` - `src/server/adapter.bun.ts` - `src/server/adapter.node.ts` -Current experimental `HttpApi` slice: +Current bridged `HttpApi` slices: -- `src/server/instance/httpapi/question.ts` -- `src/server/instance/httpapi/index.ts` -- `src/server/instance/experimental.ts` -- `test/server/question-httpapi.test.ts` +- `src/server/routes/instance/httpapi/question.ts` +- `src/server/routes/instance/httpapi/permission.ts` +- `src/server/routes/instance/httpapi/provider.ts` +- `src/server/routes/instance/httpapi/config.ts` +- `src/server/routes/instance/httpapi/project.ts` +- `src/server/routes/instance/httpapi/server.ts` Current OpenAPI flow: @@ -245,7 +247,7 @@ Keep in `packages/opencode` for now: - `src/server/server.ts` - `src/server/control/index.ts` -- `src/server/instance/*.ts` +- `src/server/routes/**/*.ts` - `src/server/middleware.ts` - `src/server/adapter.*.ts` - `src/effect/app-runtime.ts` @@ -305,14 +307,13 @@ Bad early migration targets: ## First vertical slice -The first slice for the package split is the existing experimental `question` group. +The first slice for the package split is still the existing `question` `HttpApi` group. 
Why `question` first: - it already exists as an experimental `HttpApi` slice - it already follows the desired contract and implementation split in one file - it is already mounted through the current Hono host -- it already has an end-to-end test - it is JSON-only - it has low blast radius @@ -357,7 +358,7 @@ Done means: Scope: -- extract the pure `HttpApi` contract from `src/server/instance/httpapi/question.ts` +- extract the pure `HttpApi` contract from `src/server/routes/instance/httpapi/question.ts` - place it in `packages/server/src/definition/question.ts` - aggregate it in `packages/server/src/definition/api.ts` - generate OpenAPI in `packages/server/src/openapi.ts` @@ -399,8 +400,9 @@ Scope: - replace local experimental question route wiring in `packages/opencode` - keep the same mount path: -- `/experimental/httpapi/question` -- `/experimental/httpapi/question/doc` +- `/question` +- `/question/:requestID/reply` +- `/question/:requestID/reject` Rules: @@ -569,7 +571,7 @@ For package-split PRs, validate the smallest useful thing. Typical validation for the first waves: - `bun typecheck` in the touched package directory or directories -- the relevant route test, especially `test/server/question-httpapi.test.ts` +- the relevant server / route coverage for the migrated slice - merged OpenAPI coverage if the PR touches spec generation Do not run tests from repo root. 
diff --git a/packages/opencode/specs/effect/tools.md b/packages/opencode/specs/effect/tools.md index e97e0d23e0..7b47831709 100644 --- a/packages/opencode/specs/effect/tools.md +++ b/packages/opencode/specs/effect/tools.md @@ -36,7 +36,7 @@ This keeps tool tests aligned with the production service graph and makes follow ## Exported tools -These exported tool definitions already exist in `src/tool` and are on the current Effect-native `Tool.define(...)` path: +These exported tool definitions currently use `Tool.define(...)` in `src/tool`: - [x] `apply_patch.ts` - [x] `bash.ts` @@ -45,7 +45,6 @@ These exported tool definitions already exist in `src/tool` and are on the curre - [x] `glob.ts` - [x] `grep.ts` - [x] `invalid.ts` -- [x] `ls.ts` - [x] `lsp.ts` - [x] `multiedit.ts` - [x] `plan.ts` @@ -60,7 +59,7 @@ These exported tool definitions already exist in `src/tool` and are on the curre Notes: -- `batch.ts` is no longer a current tool file and should not be tracked here. +- There is no current `ls.ts` tool file on this branch. - `truncate.ts` is an Effect service used by tools, not a tool definition itself. - `mcp-exa.ts`, `external-directory.ts`, and `schema.ts` are support modules, not standalone tool definitions. 
@@ -73,7 +72,7 @@ Current spot cleanups worth tracking: - [ ] `read.ts` — still bridges to Node stream / `readline` helpers and Promise-based binary detection - [ ] `bash.ts` — already uses Effect child-process primitives; only keep tracking shell-specific platform bridges and parser/loading details as they come up - [ ] `webfetch.ts` — already uses `HttpClient`; remaining work is limited to smaller boundary helpers like HTML text extraction -- [ ] `file/ripgrep.ts` — adjacent to tool migration; still has raw fs/process usage that affects `grep.ts` and `ls.ts` +- [ ] `file/ripgrep.ts` — adjacent to tool migration; still has raw fs/process usage that affects `grep.ts` and file-search routes - [ ] `patch/index.ts` — adjacent to tool migration; still has raw fs usage behind patch application Notable items that are already effectively on the target path and do not need separate migration bullets right now: @@ -83,7 +82,6 @@ Notable items that are already effectively on the target path and do not need se - `write.ts` - `codesearch.ts` - `websearch.ts` -- `ls.ts` - `multiedit.ts` - `edit.ts` diff --git a/packages/opencode/src/account/account.ts b/packages/opencode/src/account/account.ts index 657c61b1e5..a0aed88cba 100644 --- a/packages/opencode/src/account/account.ts +++ b/packages/opencode/src/account/account.ts @@ -181,10 +181,10 @@ export interface Interface { export class Service extends Context.Service()("@opencode/Account") {} -export const layer: Layer.Layer = Layer.effect( +export const layer: Layer.Layer = Layer.effect( Service, Effect.gen(function* () { - const repo = yield* AccountRepo + const repo = yield* AccountRepo.Service const http = yield* HttpClient.HttpClient const httpRead = withTransientReadRetry(http) const httpOk = HttpClient.filterStatusOk(http) @@ -452,3 +452,5 @@ export const layer: Layer.Layer = Parameters>[0] const ACCOUNT_STATE_ID = 1 -export namespace AccountRepo { - export interface Service { - readonly active: () => Effect.Effect, 
AccountRepoError> - readonly list: () => Effect.Effect - readonly remove: (accountID: AccountID) => Effect.Effect - readonly use: (accountID: AccountID, orgID: Option.Option) => Effect.Effect - readonly getRow: (accountID: AccountID) => Effect.Effect, AccountRepoError> - readonly persistToken: (input: { - accountID: AccountID - accessToken: AccessToken - refreshToken: RefreshToken - expiry: Option.Option - }) => Effect.Effect - readonly persistAccount: (input: { - id: AccountID - email: string - url: string - accessToken: AccessToken - refreshToken: RefreshToken - expiry: number - orgID: Option.Option - }) => Effect.Effect - } +export interface Interface { + readonly active: () => Effect.Effect, AccountRepoError> + readonly list: () => Effect.Effect + readonly remove: (accountID: AccountID) => Effect.Effect + readonly use: (accountID: AccountID, orgID: Option.Option) => Effect.Effect + readonly getRow: (accountID: AccountID) => Effect.Effect, AccountRepoError> + readonly persistToken: (input: { + accountID: AccountID + accessToken: AccessToken + refreshToken: RefreshToken + expiry: Option.Option + }) => Effect.Effect + readonly persistAccount: (input: { + id: AccountID + email: string + url: string + accessToken: AccessToken + refreshToken: RefreshToken + expiry: number + orgID: Option.Option + }) => Effect.Effect } -export class AccountRepo extends Context.Service()("@opencode/AccountRepo") { - static readonly layer: Layer.Layer = Layer.effect( - AccountRepo, - Effect.gen(function* () { - const decode = Schema.decodeUnknownSync(Info) +export class Service extends Context.Service()("@opencode/AccountRepo") {} - const query = (f: DbTransactionCallback) => - Effect.try({ - try: () => Database.use(f), - catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), +export const layer: Layer.Layer = Layer.effect( + Service, + Effect.gen(function* () { + const decode = Schema.decodeUnknownSync(Info) + + const query = (f: DbTransactionCallback) 
=> + Effect.try({ + try: () => Database.use(f), + catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), + }) + + const tx = (f: DbTransactionCallback) => + Effect.try({ + try: () => Database.transaction(f), + catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), + }) + + const current = (db: DbClient) => { + const state = db.select().from(AccountStateTable).where(eq(AccountStateTable.id, ACCOUNT_STATE_ID)).get() + if (!state?.active_account_id) return + const account = db.select().from(AccountTable).where(eq(AccountTable.id, state.active_account_id)).get() + if (!account) return + return { ...account, active_org_id: state.active_org_id ?? null } + } + + const state = (db: DbClient, accountID: AccountID, orgID: Option.Option) => { + const id = Option.getOrNull(orgID) + return db + .insert(AccountStateTable) + .values({ id: ACCOUNT_STATE_ID, active_account_id: accountID, active_org_id: id }) + .onConflictDoUpdate({ + target: AccountStateTable.id, + set: { active_account_id: accountID, active_org_id: id }, }) + .run() + } - const tx = (f: DbTransactionCallback) => - Effect.try({ - try: () => Database.transaction(f), - catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), - }) + const active = Effect.fn("AccountRepo.active")(() => + query((db) => current(db)).pipe(Effect.map((row) => (row ? Option.some(decode(row)) : Option.none()))), + ) - const current = (db: DbClient) => { - const state = db.select().from(AccountStateTable).where(eq(AccountStateTable.id, ACCOUNT_STATE_ID)).get() - if (!state?.active_account_id) return - const account = db.select().from(AccountTable).where(eq(AccountTable.id, state.active_account_id)).get() - if (!account) return - return { ...account, active_org_id: state.active_org_id ?? 
null } - } + const list = Effect.fn("AccountRepo.list")(() => + query((db) => + db + .select() + .from(AccountTable) + .all() + .map((row: AccountRow) => decode({ ...row, active_org_id: null })), + ), + ) - const state = (db: DbClient, accountID: AccountID, orgID: Option.Option) => { - const id = Option.getOrNull(orgID) - return db - .insert(AccountStateTable) - .values({ id: ACCOUNT_STATE_ID, active_account_id: accountID, active_org_id: id }) - .onConflictDoUpdate({ - target: AccountStateTable.id, - set: { active_account_id: accountID, active_org_id: id }, - }) + const remove = Effect.fn("AccountRepo.remove")((accountID: AccountID) => + tx((db) => { + db.update(AccountStateTable) + .set({ active_account_id: null, active_org_id: null }) + .where(eq(AccountStateTable.active_account_id, accountID)) .run() - } + db.delete(AccountTable).where(eq(AccountTable.id, accountID)).run() + }).pipe(Effect.asVoid), + ) - const active = Effect.fn("AccountRepo.active")(() => - query((db) => current(db)).pipe(Effect.map((row) => (row ? 
Option.some(decode(row)) : Option.none()))), - ) + const use = Effect.fn("AccountRepo.use")((accountID: AccountID, orgID: Option.Option) => + query((db) => state(db, accountID, orgID)).pipe(Effect.asVoid), + ) - const list = Effect.fn("AccountRepo.list")(() => - query((db) => - db - .select() - .from(AccountTable) - .all() - .map((row: AccountRow) => decode({ ...row, active_org_id: null })), - ), - ) + const getRow = Effect.fn("AccountRepo.getRow")((accountID: AccountID) => + query((db) => db.select().from(AccountTable).where(eq(AccountTable.id, accountID)).get()).pipe( + Effect.map(Option.fromNullishOr), + ), + ) - const remove = Effect.fn("AccountRepo.remove")((accountID: AccountID) => - tx((db) => { - db.update(AccountStateTable) - .set({ active_account_id: null, active_org_id: null }) - .where(eq(AccountStateTable.active_account_id, accountID)) - .run() - db.delete(AccountTable).where(eq(AccountTable.id, accountID)).run() - }).pipe(Effect.asVoid), - ) + const persistToken = Effect.fn("AccountRepo.persistToken")((input) => + query((db) => + db + .update(AccountTable) + .set({ + access_token: input.accessToken, + refresh_token: input.refreshToken, + token_expiry: Option.getOrNull(input.expiry), + }) + .where(eq(AccountTable.id, input.accountID)) + .run(), + ).pipe(Effect.asVoid), + ) - const use = Effect.fn("AccountRepo.use")((accountID: AccountID, orgID: Option.Option) => - query((db) => state(db, accountID, orgID)).pipe(Effect.asVoid), - ) + const persistAccount = Effect.fn("AccountRepo.persistAccount")((input) => + tx((db) => { + const url = normalizeServerUrl(input.url) - const getRow = Effect.fn("AccountRepo.getRow")((accountID: AccountID) => - query((db) => db.select().from(AccountTable).where(eq(AccountTable.id, accountID)).get()).pipe( - Effect.map(Option.fromNullishOr), - ), - ) - - const persistToken = Effect.fn("AccountRepo.persistToken")((input) => - query((db) => - db - .update(AccountTable) - .set({ - access_token: input.accessToken, - 
refresh_token: input.refreshToken, - token_expiry: Option.getOrNull(input.expiry), - }) - .where(eq(AccountTable.id, input.accountID)) - .run(), - ).pipe(Effect.asVoid), - ) - - const persistAccount = Effect.fn("AccountRepo.persistAccount")((input) => - tx((db) => { - const url = normalizeServerUrl(input.url) - - db.insert(AccountTable) - .values({ - id: input.id, + db.insert(AccountTable) + .values({ + id: input.id, + email: input.email, + url, + access_token: input.accessToken, + refresh_token: input.refreshToken, + token_expiry: input.expiry, + }) + .onConflictDoUpdate({ + target: AccountTable.id, + set: { email: input.email, url, access_token: input.accessToken, refresh_token: input.refreshToken, token_expiry: input.expiry, - }) - .onConflictDoUpdate({ - target: AccountTable.id, - set: { - email: input.email, - url, - access_token: input.accessToken, - refresh_token: input.refreshToken, - token_expiry: input.expiry, - }, - }) - .run() - void state(db, input.id, input.orgID) - }).pipe(Effect.asVoid), - ) + }, + }) + .run() + void state(db, input.id, input.orgID) + }).pipe(Effect.asVoid), + ) - return AccountRepo.of({ - active, - list, - remove, - use, - getRow, - persistToken, - persistAccount, - }) - }), - ) -} + return Service.of({ + active, + list, + remove, + use, + getRow, + persistToken, + persistAccount, + }) + }), +) + +export * as AccountRepo from "./repo" diff --git a/packages/opencode/src/auth/auth.ts b/packages/opencode/src/auth/auth.ts deleted file mode 100644 index 598178fad1..0000000000 --- a/packages/opencode/src/auth/auth.ts +++ /dev/null @@ -1,95 +0,0 @@ -import path from "path" -import { Effect, Layer, Record, Result, Schema, Context } from "effect" -import { zod } from "@/util/effect-zod" -import { Global } from "../global" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" - -export const OAUTH_DUMMY_KEY = "opencode-oauth-dummy-key" - -const file = path.join(Global.Path.data, "auth.json") - -const fail = (message: string) => 
(cause: unknown) => new AuthError({ message, cause }) - -export class Oauth extends Schema.Class("OAuth")({ - type: Schema.Literal("oauth"), - refresh: Schema.String, - access: Schema.String, - expires: Schema.Number, - accountId: Schema.optional(Schema.String), - enterpriseUrl: Schema.optional(Schema.String), -}) {} - -export class Api extends Schema.Class("ApiAuth")({ - type: Schema.Literal("api"), - key: Schema.String, - metadata: Schema.optional(Schema.Record(Schema.String, Schema.String)), -}) {} - -export class WellKnown extends Schema.Class("WellKnownAuth")({ - type: Schema.Literal("wellknown"), - key: Schema.String, - token: Schema.String, -}) {} - -const _Info = Schema.Union([Oauth, Api, WellKnown]).annotate({ discriminator: "type", identifier: "Auth" }) -export const Info = Object.assign(_Info, { zod: zod(_Info) }) -export type Info = Schema.Schema.Type - -export class AuthError extends Schema.TaggedErrorClass()("AuthError", { - message: Schema.String, - cause: Schema.optional(Schema.Defect), -}) {} - -export interface Interface { - readonly get: (providerID: string) => Effect.Effect - readonly all: () => Effect.Effect, AuthError> - readonly set: (key: string, info: Info) => Effect.Effect - readonly remove: (key: string) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/Auth") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const fsys = yield* AppFileSystem.Service - const decode = Schema.decodeUnknownOption(Info) - - const all = Effect.fn("Auth.all")(function* () { - if (process.env.OPENCODE_AUTH_CONTENT) { - try { - return JSON.parse(process.env.OPENCODE_AUTH_CONTENT) - } catch (err) {} - } - - const data = (yield* fsys.readJson(file).pipe(Effect.orElseSucceed(() => ({})))) as Record - return Record.filterMap(data, (value) => Result.fromOption(decode(value), () => undefined)) - }) - - const get = Effect.fn("Auth.get")(function* (providerID: string) { - return (yield* all())[providerID] - 
}) - - const set = Effect.fn("Auth.set")(function* (key: string, info: Info) { - const norm = key.replace(/\/+$/, "") - const data = yield* all() - if (norm !== key) delete data[key] - delete data[norm + "/"] - yield* fsys - .writeJson(file, { ...data, [norm]: info }, 0o600) - .pipe(Effect.mapError(fail("Failed to write auth data"))) - }) - - const remove = Effect.fn("Auth.remove")(function* (key: string) { - const norm = key.replace(/\/+$/, "") - const data = yield* all() - delete data[key] - delete data[norm] - yield* fsys.writeJson(file, data, 0o600).pipe(Effect.mapError(fail("Failed to write auth data"))) - }) - - return Service.of({ get, all, set, remove }) - }), -) - -export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) diff --git a/packages/opencode/src/auth/index.ts b/packages/opencode/src/auth/index.ts index 9174745fd8..5b4b5120f8 100644 --- a/packages/opencode/src/auth/index.ts +++ b/packages/opencode/src/auth/index.ts @@ -1,2 +1,97 @@ -export * as Auth from "./auth" -export { OAUTH_DUMMY_KEY } from "./auth" +import path from "path" +import { Effect, Layer, Record, Result, Schema, Context } from "effect" +import { zod } from "@/util/effect-zod" +import { Global } from "../global" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" + +export const OAUTH_DUMMY_KEY = "opencode-oauth-dummy-key" + +const file = path.join(Global.Path.data, "auth.json") + +const fail = (message: string) => (cause: unknown) => new AuthError({ message, cause }) + +export class Oauth extends Schema.Class("OAuth")({ + type: Schema.Literal("oauth"), + refresh: Schema.String, + access: Schema.String, + expires: Schema.Number, + accountId: Schema.optional(Schema.String), + enterpriseUrl: Schema.optional(Schema.String), +}) {} + +export class Api extends Schema.Class("ApiAuth")({ + type: Schema.Literal("api"), + key: Schema.String, + metadata: Schema.optional(Schema.Record(Schema.String, Schema.String)), +}) {} + +export class WellKnown extends 
Schema.Class("WellKnownAuth")({ + type: Schema.Literal("wellknown"), + key: Schema.String, + token: Schema.String, +}) {} + +const _Info = Schema.Union([Oauth, Api, WellKnown]).annotate({ discriminator: "type", identifier: "Auth" }) +export const Info = Object.assign(_Info, { zod: zod(_Info) }) +export type Info = Schema.Schema.Type + +export class AuthError extends Schema.TaggedErrorClass()("AuthError", { + message: Schema.String, + cause: Schema.optional(Schema.Defect), +}) {} + +export interface Interface { + readonly get: (providerID: string) => Effect.Effect + readonly all: () => Effect.Effect, AuthError> + readonly set: (key: string, info: Info) => Effect.Effect + readonly remove: (key: string) => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/Auth") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const fsys = yield* AppFileSystem.Service + const decode = Schema.decodeUnknownOption(Info) + + const all = Effect.fn("Auth.all")(function* () { + if (process.env.OPENCODE_AUTH_CONTENT) { + try { + return JSON.parse(process.env.OPENCODE_AUTH_CONTENT) + } catch (err) {} + } + + const data = (yield* fsys.readJson(file).pipe(Effect.orElseSucceed(() => ({})))) as Record + return Record.filterMap(data, (value) => Result.fromOption(decode(value), () => undefined)) + }) + + const get = Effect.fn("Auth.get")(function* (providerID: string) { + return (yield* all())[providerID] + }) + + const set = Effect.fn("Auth.set")(function* (key: string, info: Info) { + const norm = key.replace(/\/+$/, "") + const data = yield* all() + if (norm !== key) delete data[key] + delete data[norm + "/"] + yield* fsys + .writeJson(file, { ...data, [norm]: info }, 0o600) + .pipe(Effect.mapError(fail("Failed to write auth data"))) + }) + + const remove = Effect.fn("Auth.remove")(function* (key: string) { + const norm = key.replace(/\/+$/, "") + const data = yield* all() + delete data[key] + delete data[norm] + yield* 
fsys.writeJson(file, data, 0o600).pipe(Effect.mapError(fail("Failed to write auth data"))) + }) + + return Service.of({ get, all, set, remove }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) + +export * as Auth from "." diff --git a/packages/opencode/src/cli/cmd/account.ts b/packages/opencode/src/cli/cmd/account.ts index 89680ebe0a..38c28032cd 100644 --- a/packages/opencode/src/cli/cmd/account.ts +++ b/packages/opencode/src/cli/cmd/account.ts @@ -1,8 +1,8 @@ import { cmd } from "./cmd" import { Duration, Effect, Match, Option } from "effect" import { UI } from "../ui" -import { AccountID, Account, OrgID, PollExpired, type PollResult } from "@/account" -import { type AccountError } from "@/account/schema" +import { Account } from "@/account/account" +import { AccountID, OrgID, PollExpired, type PollResult, type AccountError } from "@/account/schema" import { AppRuntime } from "@/effect/app-runtime" import * as Prompt from "../effect/prompt" import open from "open" diff --git a/packages/opencode/src/cli/cmd/tui/app.tsx b/packages/opencode/src/cli/cmd/tui/app.tsx index 8255c007d0..a58ff05648 100644 --- a/packages/opencode/src/cli/cmd/tui/app.tsx +++ b/packages/opencode/src/cli/cmd/tui/app.tsx @@ -148,7 +148,16 @@ export function tui(input: { - + Promise }) { }) local.model.set({ providerID, modelID }, { recent: true }) } - // Handle --session without --fork immediately (fork is handled in createEffect below) if (args.sessionID && !args.fork) { route.navigate({ type: "session", @@ -420,12 +428,8 @@ function App(props: { onSnapshot?: () => Promise }) { aliases: ["clear"], }, onSelect: () => { - const current = promptRef.current - // Don't require focus - if there's any text, preserve it - const currentPrompt = current?.current?.input ? 
current.current : undefined route.navigate({ type: "home", - initialPrompt: currentPrompt, }) dialog.clear() }, @@ -602,7 +606,7 @@ function App(props: { onSnapshot?: () => Promise }) { category: "System", }, { - title: "Toggle theme mode", + title: mode() === "dark" ? "Switch to light mode" : "Switch to dark mode", value: "theme.switch_mode", onSelect: (dialog) => { setMode(mode() === "dark" ? "light" : "dark") diff --git a/packages/opencode/src/cli/cmd/tui/component/bg-pulse.tsx b/packages/opencode/src/cli/cmd/tui/component/bg-pulse.tsx new file mode 100644 index 0000000000..541ecea4e1 --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/component/bg-pulse.tsx @@ -0,0 +1,130 @@ +import { BoxRenderable, RGBA } from "@opentui/core" +import { createMemo, createSignal, For, onCleanup, onMount } from "solid-js" +import { tint, useTheme } from "@tui/context/theme" + +const PERIOD = 4600 +const RINGS = 3 +const WIDTH = 3.8 +const TAIL = 9.5 +const AMP = 0.55 +const TAIL_AMP = 0.16 +const BREATH_AMP = 0.05 +const BREATH_SPEED = 0.0008 +// Offset so bg ring emits from GO center at the moment the logo pulse peaks. 
+const PHASE_OFFSET = 0.29 + +export type BgPulseMask = { + x: number + y: number + width: number + height: number + pad?: number + strength?: number +} + +export function BgPulse(props: { centerX?: number; centerY?: number; masks?: BgPulseMask[] }) { + const { theme } = useTheme() + const [now, setNow] = createSignal(performance.now()) + const [size, setSize] = createSignal<{ width: number; height: number }>({ width: 0, height: 0 }) + let box: BoxRenderable | undefined + + const timer = setInterval(() => setNow(performance.now()), 50) + onCleanup(() => clearInterval(timer)) + + const sync = () => { + if (!box) return + setSize({ width: box.width, height: box.height }) + } + + onMount(() => { + sync() + box?.on("resize", sync) + }) + + onCleanup(() => { + box?.off("resize", sync) + }) + + const grid = createMemo(() => { + const t = now() + const w = size().width + const h = size().height + if (w === 0 || h === 0) return [] as RGBA[][] + const cxv = props.centerX ?? w / 2 + const cyv = props.centerY ?? h / 2 + const reach = Math.hypot(Math.max(cxv, w - cxv), Math.max(cyv, h - cyv) * 2) + TAIL + const ringStates = Array.from({ length: RINGS }, (_, i) => { + const offset = i / RINGS + const phase = (t / PERIOD + offset - PHASE_OFFSET + 1) % 1 + const envelope = Math.sin(phase * Math.PI) + const eased = envelope * envelope * (3 - 2 * envelope) + return { + head: phase * reach, + eased, + } + }) + const normalizedMasks = props.masks?.map((m) => { + const pad = m.pad ?? 2 + return { + left: m.x - pad, + right: m.x + m.width + pad, + top: m.y - pad, + bottom: m.y + m.height + pad, + pad, + strength: m.strength ?? 0.85, + } + }) + const rows = [] as RGBA[][] + for (let y = 0; y < h; y++) { + const row = [] as RGBA[] + for (let x = 0; x < w; x++) { + const dx = x + 0.5 - cxv + const dy = (y + 0.5 - cyv) * 2 + const dist = Math.hypot(dx, dy) + let level = 0 + for (const ring of ringStates) { + const delta = dist - ring.head + const crest = Math.abs(delta) < WIDTH ? 
0.5 + 0.5 * Math.cos((delta / WIDTH) * Math.PI) : 0 + const tail = delta < 0 && delta > -TAIL ? (1 + delta / TAIL) ** 2.3 : 0 + level += (crest * AMP + tail * TAIL_AMP) * ring.eased + } + const edgeFalloff = Math.max(0, 1 - (dist / (reach * 0.85)) ** 2) + const breath = (0.5 + 0.5 * Math.sin(t * BREATH_SPEED)) * BREATH_AMP + let maskAtten = 1 + if (normalizedMasks) { + for (const m of normalizedMasks) { + if (x < m.left || x > m.right || y < m.top || y > m.bottom) continue + const inX = Math.min(x - m.left, m.right - x) + const inY = Math.min(y - m.top, m.bottom - y) + const edge = Math.min(inX / m.pad, inY / m.pad, 1) + const eased = edge * edge * (3 - 2 * edge) + const reduce = 1 - m.strength * eased + if (reduce < maskAtten) maskAtten = reduce + } + } + const strength = Math.min(1, ((level / RINGS) * edgeFalloff + breath * edgeFalloff) * maskAtten) + row.push(tint(theme.backgroundPanel, theme.primary, strength * 0.7)) + } + rows.push(row) + } + return rows + }) + + return ( + (box = item)} width="100%" height="100%"> + + {(row) => ( + + + {(color) => ( + + {" "} + + )} + + + )} + + + ) +} diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-command.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-command.tsx index f42ba15ec0..49bf42c63e 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-command.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-command.tsx @@ -63,6 +63,7 @@ function init() { useKeyboard((evt) => { if (suspended()) return if (dialog.stack.length > 0) return + if (evt.defaultPrevented) return for (const option of entries()) { if (!isEnabled(option)) continue if (option.keybind && keybind.match(option.keybind, evt)) { diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-go-upsell.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-go-upsell.tsx index 2d200ca3b8..ace4b090bc 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-go-upsell.tsx +++ 
b/packages/opencode/src/cli/cmd/tui/component/dialog-go-upsell.tsx @@ -1,12 +1,16 @@ -import { RGBA, TextAttributes } from "@opentui/core" +import { BoxRenderable, RGBA, TextAttributes } from "@opentui/core" import { useKeyboard } from "@opentui/solid" import open from "open" -import { createSignal } from "solid-js" +import { createSignal, onCleanup, onMount } from "solid-js" import { selectedForeground, useTheme } from "@tui/context/theme" import { useDialog, type DialogContext } from "@tui/ui/dialog" import { Link } from "@tui/ui/link" +import { GoLogo } from "./logo" +import { BgPulse, type BgPulseMask } from "./bg-pulse" const GO_URL = "https://opencode.ai/go" +const PAD_X = 3 +const PAD_TOP_OUTER = 1 export type DialogGoUpsellProps = { onClose?: (dontShowAgain?: boolean) => void @@ -27,62 +31,116 @@ export function DialogGoUpsell(props: DialogGoUpsellProps) { const dialog = useDialog() const { theme } = useTheme() const fg = selectedForeground(theme) - const [selected, setSelected] = createSignal(0) + const [selected, setSelected] = createSignal<"dismiss" | "subscribe">("subscribe") + const [center, setCenter] = createSignal<{ x: number; y: number } | undefined>() + const [masks, setMasks] = createSignal([]) + let content: BoxRenderable | undefined + let logoBox: BoxRenderable | undefined + let headingBox: BoxRenderable | undefined + let descBox: BoxRenderable | undefined + let buttonsBox: BoxRenderable | undefined + + const sync = () => { + if (!content || !logoBox) return + setCenter({ + x: logoBox.x - content.x + logoBox.width / 2, + y: logoBox.y - content.y + logoBox.height / 2 + PAD_TOP_OUTER, + }) + const next: BgPulseMask[] = [] + const baseY = PAD_TOP_OUTER + for (const b of [headingBox, descBox, buttonsBox]) { + if (!b) continue + next.push({ + x: b.x - content.x, + y: b.y - content.y + baseY, + width: b.width, + height: b.height, + pad: 2, + strength: 0.78, + }) + } + setMasks(next) + } + + onMount(() => { + sync() + for (const b of [content, 
logoBox, headingBox, descBox, buttonsBox]) b?.on("resize", sync) + }) + + onCleanup(() => { + for (const b of [content, logoBox, headingBox, descBox, buttonsBox]) b?.off("resize", sync) + }) useKeyboard((evt) => { if (evt.name === "left" || evt.name === "right" || evt.name === "tab") { - setSelected((s) => (s === 0 ? 1 : 0)) + setSelected((s) => (s === "subscribe" ? "dismiss" : "subscribe")) return } - if (evt.name !== "return") return - if (selected() === 0) subscribe(props, dialog) - else dismiss(props, dialog) + if (evt.name === "return") { + if (selected() === "subscribe") subscribe(props, dialog) + else dismiss(props, dialog) + } }) return ( - - - - Free limit reached - - dialog.clear()}> - esc - + (content = item)}> + + - - - Subscribe to OpenCode Go to keep going with reliable access to the best open-source models, starting at - $5/month. - - + + (headingBox = item)} flexDirection="row" justifyContent="space-between"> + + Free limit reached + + dialog.clear()}> + esc + + + (descBox = item)} gap={0}> + + Subscribe to + + OpenCode Go + + for reliable access to the + + best open-source models, starting at $5/month. 
+ + + (logoBox = item)}> + + - - - setSelected(0)} - onMouseUp={() => subscribe(props, dialog)} - > - - subscribe - - - setSelected(1)} - onMouseUp={() => dismiss(props, dialog)} - > - (buttonsBox = item)} flexDirection="row" justifyContent="space-between"> + setSelected("dismiss")} + onMouseUp={() => dismiss(props, dialog)} > - don't show again - + + don't show again + + + setSelected("subscribe")} + onMouseUp={() => subscribe(props, dialog)} + > + + subscribe + + diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx index 75c79dcdd8..32342e7724 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-session-list.tsx @@ -113,7 +113,11 @@ export function DialogSessionList() { const today = new Date().toDateString() return sessions() .filter((x) => x.parentID === undefined) - .toSorted((a, b) => b.time.updated - a.time.updated) + .toSorted((a, b) => { + const updatedDay = new Date(b.time.updated).setHours(0, 0, 0, 0) - new Date(a.time.updated).setHours(0, 0, 0, 0) + if (updatedDay !== 0) return updatedDay + return b.time.created - a.time.created + }) .map((x) => { const workspace = x.workspaceID ? 
project.workspace.get(x.workspaceID) : undefined @@ -135,15 +139,10 @@ export function DialogSessionList() { {desc}{" "} - ■ + ● ) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx index ad5cd45782..a16c98a9f4 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-create.tsx @@ -139,7 +139,13 @@ export async function restoreWorkspaceSession(input: { total: result.data.total, }) - await Promise.all([input.project.workspace.sync(), input.sync.session.refresh()]).catch((err) => { + input.project.workspace.set(input.workspaceID) + + try { + await input.sync.bootstrap({ fatal: false }) + } catch (e) {} + + await Promise.all([input.project.workspace.sync(), input.sync.session.sync(input.sessionID)]).catch((err) => { log.error("session restore refresh failed", { workspaceID: input.workspaceID, sessionID: input.sessionID, @@ -229,6 +235,10 @@ export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) = }) const result = await sdk.client.experimental.workspace.create({ type, branch: null }).catch((err) => { + toast.show({ + message: "Creating workspace failed", + variant: "error", + }) log.error("workspace create request failed", { type, error: errorData(err), diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx new file mode 100644 index 0000000000..7a21798534 --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-unavailable.tsx @@ -0,0 +1,81 @@ +import { TextAttributes } from "@opentui/core" +import { useKeyboard } from "@opentui/solid" +import { createStore } from "solid-js/store" +import { For } from "solid-js" +import { useTheme } from "../context/theme" +import { useDialog } from "../ui/dialog" + +export 
function DialogWorkspaceUnavailable(props: { onRestore?: () => boolean | void | Promise }) { + const dialog = useDialog() + const { theme } = useTheme() + const [store, setStore] = createStore({ + active: "restore" as "cancel" | "restore", + }) + + const options = ["cancel", "restore"] as const + + async function confirm() { + if (store.active === "cancel") { + dialog.clear() + return + } + const result = await props.onRestore?.() + if (result === false) return + } + + useKeyboard((evt) => { + if (evt.name === "return") { + evt.preventDefault() + evt.stopPropagation() + void confirm() + return + } + if (evt.name === "left") { + evt.preventDefault() + evt.stopPropagation() + setStore("active", "cancel") + return + } + if (evt.name === "right") { + evt.preventDefault() + evt.stopPropagation() + setStore("active", "restore") + } + }) + + return ( + + + + Workspace Unavailable + + dialog.clear()}> + esc + + + + This session is attached to a workspace that is no longer available. + + + Would you like to restore this session into a new workspace? 
+ + + + {(item) => ( + { + setStore("active", item) + void confirm() + }} + > + {item} + + )} + + + + ) +} diff --git a/packages/opencode/src/cli/cmd/tui/component/logo.tsx b/packages/opencode/src/cli/cmd/tui/component/logo.tsx index e53974871a..bee104a35d 100644 --- a/packages/opencode/src/cli/cmd/tui/component/logo.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/logo.tsx @@ -1,8 +1,61 @@ import { BoxRenderable, MouseButton, MouseEvent, RGBA, TextAttributes } from "@opentui/core" -import { For, createMemo, createSignal, onCleanup, type JSX } from "solid-js" +import { For, createMemo, createSignal, onCleanup, onMount, type JSX } from "solid-js" import { useTheme, tint } from "@tui/context/theme" import * as Sound from "@tui/util/sound" -import { logo } from "@/cli/logo" +import { go, logo } from "@/cli/logo" + +export type LogoShape = { + left: string[] + right: string[] +} + +type ShimmerConfig = { + period: number + rings: number + sweepFraction: number + coreWidth: number + coreAmp: number + softWidth: number + softAmp: number + tail: number + tailAmp: number + haloWidth: number + haloOffset: number + haloAmp: number + breathBase: number + noise: number + ambientAmp: number + ambientCenter: number + ambientWidth: number + shadowMix: number + primaryMix: number + originX: number + originY: number +} + +const shimmerConfig: ShimmerConfig = { + period: 4600, + rings: 2, + sweepFraction: 1, + coreWidth: 1.2, + coreAmp: 1.9, + softWidth: 10, + softAmp: 1.6, + tail: 5, + tailAmp: 0.64, + haloWidth: 4.3, + haloOffset: 0.6, + haloAmp: 0.16, + breathBase: 0.04, + noise: 0.1, + ambientAmp: 0.36, + ambientCenter: 0.5, + ambientWidth: 0.34, + shadowMix: 0.1, + primaryMix: 0.3, + originX: 4.5, + originY: 13.5, +} // Shadow markers (rendered chars in parens): // _ = full shadow cell (space with bg=shadow) @@ -74,9 +127,6 @@ type Frame = { spark: number } -const LEFT = logo.left[0]?.length ?? 
0 -const FULL = logo.left.map((line, i) => line + " ".repeat(GAP) + logo.right[i]) -const SPAN = Math.hypot(FULL[0]?.length ?? 0, FULL.length * 2) * 0.94 const NEAR = [ [1, 0], [1, 1], @@ -140,7 +190,7 @@ function noise(x: number, y: number, t: number) { } function lit(char: string) { - return char !== " " && char !== "_" && char !== "~" + return char !== " " && char !== "_" && char !== "~" && char !== "," } function key(x: number, y: number) { @@ -188,12 +238,12 @@ function route(list: Array<{ x: number; y: number }>) { return path } -function mapGlyphs() { +function mapGlyphs(full: string[]) { const cells = [] as Array<{ x: number; y: number }> - for (let y = 0; y < FULL.length; y++) { - for (let x = 0; x < (FULL[y]?.length ?? 0); x++) { - if (lit(FULL[y]?.[x] ?? " ")) cells.push({ x, y }) + for (let y = 0; y < full.length; y++) { + for (let x = 0; x < (full[y]?.length ?? 0); x++) { + if (lit(full[y]?.[x] ?? " ")) cells.push({ x, y }) } } @@ -237,9 +287,25 @@ function mapGlyphs() { return { glyph, trace, center } } -const MAP = mapGlyphs() +type LogoContext = { + LEFT: number + FULL: string[] + SPAN: number + MAP: ReturnType + shape: LogoShape +} -function shimmer(x: number, y: number, frame: Frame) { +function build(shape: LogoShape): LogoContext { + const LEFT = shape.left[0]?.length ?? 0 + const FULL = shape.left.map((line, i) => line + " ".repeat(GAP) + shape.right[i]) + const SPAN = Math.hypot(FULL[0]?.length ?? 
0, FULL.length * 2) * 0.94 + return { LEFT, FULL, SPAN, MAP: mapGlyphs(FULL), shape } +} + +const DEFAULT = build(logo) +const GO = build(go) + +function shimmer(x: number, y: number, frame: Frame, ctx: LogoContext) { return frame.list.reduce((best, item) => { const age = frame.t - item.at if (age < SHIMMER_IN || age > LIFE) return best @@ -247,7 +313,7 @@ function shimmer(x: number, y: number, frame: Frame) { const dy = y * 2 + 1 - item.y const dist = Math.hypot(dx, dy) const p = age / LIFE - const r = SPAN * (1 - (1 - p) ** EXPAND) + const r = ctx.SPAN * (1 - (1 - p) ** EXPAND) const lag = r - dist if (lag < 0.18 || lag > SHIMMER_OUT) return best const band = Math.exp(-(((lag - 1.05) / 0.68) ** 2)) @@ -258,19 +324,19 @@ function shimmer(x: number, y: number, frame: Frame) { }, 0) } -function remain(x: number, y: number, item: Release, t: number) { +function remain(x: number, y: number, item: Release, t: number, ctx: LogoContext) { const age = t - item.at if (age < 0 || age > LIFE) return 0 const p = age / LIFE const dx = x + 0.5 - item.x - 0.5 const dy = y * 2 + 1 - item.y * 2 - 1 const dist = Math.hypot(dx, dy) - const r = SPAN * (1 - (1 - p) ** EXPAND) + const r = ctx.SPAN * (1 - (1 - p) ** EXPAND) if (dist > r) return 1 return clamp((r - dist) / 1.35 < 1 ? 
1 - (r - dist) / 1.35 : 0) } -function wave(x: number, y: number, frame: Frame, live: boolean) { +function wave(x: number, y: number, frame: Frame, live: boolean, ctx: LogoContext) { return frame.list.reduce((sum, item) => { const age = frame.t - item.at if (age < 0 || age > LIFE) return sum @@ -278,7 +344,7 @@ function wave(x: number, y: number, frame: Frame, live: boolean) { const dx = x + 0.5 - item.x const dy = y * 2 + 1 - item.y const dist = Math.hypot(dx, dy) - const r = SPAN * (1 - (1 - p) ** EXPAND) + const r = ctx.SPAN * (1 - (1 - p) ** EXPAND) const fade = (1 - p) ** 1.32 const j = 1.02 + noise(x + item.x * 0.7, y + item.y * 0.7, item.at * 0.002 + age * 0.06) * 0.52 const edge = Math.exp(-(((dist - r) / WIDTH) ** 2)) * GAIN * fade * item.force * j @@ -292,7 +358,7 @@ function wave(x: number, y: number, frame: Frame, live: boolean) { }, 0) } -function field(x: number, y: number, frame: Frame) { +function field(x: number, y: number, frame: Frame, ctx: LogoContext) { const held = frame.hold const rest = frame.release const item = held ?? rest @@ -326,11 +392,11 @@ function field(x: number, y: number, frame: Frame) { Math.max(0, noise(item.x * 3.1, item.y * 2.7, frame.t * 1.7) - 0.72) * Math.exp(-(dist * dist) / 0.15) * lerp(0.08, 0.42, body) - const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1 + const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t, ctx) : 1 return (core + shell + ember + ring + fork + glitch + lash + flicker - dim) * fade } -function pick(x: number, y: number, frame: Frame) { +function pick(x: number, y: number, frame: Frame, ctx: LogoContext) { const held = frame.hold const rest = frame.release const item = held ?? rest @@ -339,26 +405,26 @@ function pick(x: number, y: number, frame: Frame) { const dx = x + 0.5 - item.x - 0.5 const dy = y * 2 + 1 - item.y * 2 - 1 const dist = Math.hypot(dx, dy) - const fade = frame.release && !frame.hold ? 
remain(x, y, frame.release, frame.t) : 1 + const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t, ctx) : 1 return Math.exp(-(dist * dist) / 1.7) * lerp(0.2, 0.96, rise) * fade } -function select(x: number, y: number) { - const direct = MAP.glyph.get(key(x, y)) +function select(x: number, y: number, ctx: LogoContext) { + const direct = ctx.MAP.glyph.get(key(x, y)) if (direct !== undefined) return direct - const near = NEAR.map(([dx, dy]) => MAP.glyph.get(key(x + dx, y + dy))).find( + const near = NEAR.map(([dx, dy]) => ctx.MAP.glyph.get(key(x + dx, y + dy))).find( (item): item is number => item !== undefined, ) return near } -function trace(x: number, y: number, frame: Frame) { +function trace(x: number, y: number, frame: Frame, ctx: LogoContext) { const held = frame.hold const rest = frame.release const item = held ?? rest if (!item || item.glyph === undefined) return 0 - const step = MAP.trace.get(key(x, y)) + const step = ctx.MAP.trace.get(key(x, y)) if (!step || step.glyph !== item.glyph || step.l < 2) return 0 const age = frame.t - item.at const rise = held ? ramp(age, HOLD, CHARGE) : rest!.rise @@ -368,29 +434,125 @@ function trace(x: number, y: number, frame: Frame) { const dist = Math.min(Math.abs(step.i - head), step.l - Math.abs(step.i - head)) const tail = (head - TAIL + step.l) % step.l const lag = Math.min(Math.abs(step.i - tail), step.l - Math.abs(step.i - tail)) - const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1 + const fade = frame.release && !frame.hold ? 
remain(x, y, frame.release, frame.t, ctx) : 1 const core = Math.exp(-((dist / 1.05) ** 2)) * lerp(0.8, 2.35, rise) const glow = Math.exp(-((dist / 1.85) ** 2)) * lerp(0.08, 0.34, rise) const trail = Math.exp(-((lag / 1.45) ** 2)) * lerp(0.04, 0.42, rise) return (core + glow + trail) * appear * fade } -function bloom(x: number, y: number, frame: Frame) { +function idle( + x: number, + pixelY: number, + frame: Frame, + ctx: LogoContext, + state: IdleState, +): { glow: number; peak: number; primary: number } { + const cfg = state.cfg + const dx = x + 0.5 - cfg.originX + const dy = pixelY - cfg.originY + const dist = Math.hypot(dx, dy) + const angle = Math.atan2(dy, dx) + const wob1 = noise(x * 0.32, pixelY * 0.25, frame.t * 0.0005) - 0.5 + const wob2 = noise(x * 0.12, pixelY * 0.08, frame.t * 0.00022) - 0.5 + const ripple = Math.sin(angle * 3 + frame.t * 0.0012) * 0.3 + const jitter = (wob1 * 0.55 + wob2 * 0.32 + ripple * 0.18) * cfg.noise + const traveled = dist + jitter + let glow = 0 + let peak = 0 + let halo = 0 + let primary = 0 + let ambient = 0 + for (const active of state.active) { + const head = active.head + const eased = active.eased + const delta = traveled - head + // Use shallower exponent (1.6 vs 2) for softer edges on the Gaussians + // so adjacent pixels have smaller brightness deltas + const core = Math.exp(-(Math.abs(delta / cfg.coreWidth) ** 1.8)) + const soft = Math.exp(-(Math.abs(delta / cfg.softWidth) ** 1.6)) + const tailRange = cfg.tail * 2.6 + const tail = delta < 0 && delta > -tailRange ? 
(1 + delta / tailRange) ** 2.6 : 0 + const haloDelta = delta + cfg.haloOffset + const haloBand = Math.exp(-(Math.abs(haloDelta / cfg.haloWidth) ** 1.6)) + glow += (soft * cfg.softAmp + tail * cfg.tailAmp) * eased + peak += core * cfg.coreAmp * eased + halo += haloBand * cfg.haloAmp * eased + // Primary-tinted fringe follows the halo (which trails behind the core) and the tail + primary += (haloBand + tail * 0.6) * eased + ambient += active.ambient + } + ambient /= state.rings + return { + glow: glow / state.rings, + peak: cfg.breathBase + ambient + (peak + halo) / state.rings, + primary: (primary / state.rings) * cfg.primaryMix, + } +} + +function bloom(x: number, y: number, frame: Frame, ctx: LogoContext) { const item = frame.glow if (!item) return 0 - const glyph = MAP.glyph.get(key(x, y)) + const glyph = ctx.MAP.glyph.get(key(x, y)) if (glyph !== item.glyph) return 0 const age = frame.t - item.at if (age < 0 || age > GLOW_OUT) return 0 const p = age / GLOW_OUT const flash = (1 - p) ** 2 - const dx = x + 0.5 - MAP.center.get(item.glyph)!.x - const dy = y * 2 + 1 - MAP.center.get(item.glyph)!.y + const dx = x + 0.5 - ctx.MAP.center.get(item.glyph)!.x + const dy = y * 2 + 1 - ctx.MAP.center.get(item.glyph)!.y const bias = Math.exp(-((Math.hypot(dx, dy) / 2.8) ** 2)) return lerp(item.force, item.force * 0.18, p) * lerp(0.72, 1.1, bias) * flash } -export function Logo() { +type IdleState = { + cfg: ShimmerConfig + reach: number + rings: number + active: Array<{ + head: number + eased: number + ambient: number + }> +} + +function buildIdleState(t: number, ctx: LogoContext): IdleState { + const cfg = shimmerConfig + const w = ctx.FULL[0]?.length ?? 
1 + const h = ctx.FULL.length * 2 + const corners: [number, number][] = [ + [0, 0], + [w, 0], + [0, h], + [w, h], + ] + let maxCorner = 0 + for (const [cx, cy] of corners) { + const d = Math.hypot(cx - cfg.originX, cy - cfg.originY) + if (d > maxCorner) maxCorner = d + } + const reach = maxCorner + cfg.tail * 2 + const rings = Math.max(1, Math.floor(cfg.rings)) + const active = [] as IdleState["active"] + for (let i = 0; i < rings; i++) { + const offset = i / rings + const cyclePhase = (t / cfg.period + offset) % 1 + if (cyclePhase >= cfg.sweepFraction) continue + const phase = cyclePhase / cfg.sweepFraction + const envelope = Math.sin(phase * Math.PI) + const eased = envelope * envelope * (3 - 2 * envelope) + const d = (phase - cfg.ambientCenter) / cfg.ambientWidth + active.push({ + head: phase * reach, + eased, + ambient: Math.abs(d) < 1 ? (1 - d * d) ** 2 * cfg.ambientAmp : 0, + }) + } + return { cfg, reach, rings, active } +} + +export function Logo(props: { shape?: LogoShape; ink?: RGBA; idle?: boolean } = {}) { + const ctx = props.shape ? 
build(props.shape) : DEFAULT const { theme } = useTheme() const [rings, setRings] = createSignal([]) const [hold, setHold] = createSignal() @@ -430,6 +592,7 @@ export function Logo() { } if (!live) setRelease(undefined) if (live || hold() || release() || glow()) return + if (props.idle) return stop() } @@ -438,8 +601,20 @@ export function Logo() { timer = setInterval(tick, 16) } + onCleanup(() => { + stop() + hum = false + Sound.dispose() + }) + + onMount(() => { + if (!props.idle) return + setNow(performance.now()) + start() + }) + const hit = (x: number, y: number) => { - const char = FULL[y]?.[x] + const char = ctx.FULL[y]?.[x] return char !== undefined && char !== " " } @@ -448,7 +623,7 @@ export function Logo() { if (last) burst(last.x, last.y) setNow(t) if (!last) setRelease(undefined) - setHold({ x, y, at: t, glyph: select(x, y) }) + setHold({ x, y, at: t, glyph: select(x, y, ctx) }) hum = false start() } @@ -508,6 +683,8 @@ export function Logo() { } }) + const idleState = createMemo(() => (props.idle ? buildIdleState(frame().t, ctx) : undefined)) + const renderLine = ( line: string, y: number, @@ -516,24 +693,64 @@ export function Logo() { off: number, frame: Frame, dusk: Frame, + state: IdleState | undefined, ): JSX.Element[] => { const shadow = tint(theme.background, ink, 0.25) const attrs = bold ? TextAttributes.BOLD : undefined return Array.from(line).map((char, i) => { - const h = field(off + i, y, frame) - const n = wave(off + i, y, frame, lit(char)) + h - const s = wave(off + i, y, dusk, false) + h - const p = lit(char) ? pick(off + i, y, frame) : 0 - const e = lit(char) ? trace(off + i, y, frame) : 0 - const b = lit(char) ? bloom(off + i, y, frame) : 0 - const q = shimmer(off + i, y, frame) + if (char === " ") { + return ( + + {char} + + ) + } + + const h = field(off + i, y, frame, ctx) + const charLit = lit(char) + // Sub-pixel sampling: cells are 2 pixels tall. Sample at top (y*2) and bottom (y*2+1) pixel rows. + const pulseTop = state ? 
idle(off + i, y * 2, frame, ctx, state) : { glow: 0, peak: 0, primary: 0 } + const pulseBot = state ? idle(off + i, y * 2 + 1, frame, ctx, state) : { glow: 0, peak: 0, primary: 0 } + const peakMixTop = charLit ? Math.min(1, pulseTop.peak) : 0 + const peakMixBot = charLit ? Math.min(1, pulseBot.peak) : 0 + const primaryMixTop = charLit ? Math.min(1, pulseTop.primary) : 0 + const primaryMixBot = charLit ? Math.min(1, pulseBot.primary) : 0 + // Layer primary tint first, then white peak on top — so the halo/tail pulls toward primary, + // while the bright core stays pure white + const inkTopTint = primaryMixTop > 0 ? tint(ink, theme.primary, primaryMixTop) : ink + const inkBotTint = primaryMixBot > 0 ? tint(ink, theme.primary, primaryMixBot) : ink + const inkTop = peakMixTop > 0 ? tint(inkTopTint, PEAK, peakMixTop) : inkTopTint + const inkBot = peakMixBot > 0 ? tint(inkBotTint, PEAK, peakMixBot) : inkBotTint + // For the non-peak-aware brightness channels, use the average of top/bot + const pulse = { + glow: (pulseTop.glow + pulseBot.glow) / 2, + peak: (pulseTop.peak + pulseBot.peak) / 2, + primary: (pulseTop.primary + pulseBot.primary) / 2, + } + const peakMix = charLit ? Math.min(1, pulse.peak) : 0 + const primaryMix = charLit ? Math.min(1, pulse.primary) : 0 + const inkPrimary = primaryMix > 0 ? tint(ink, theme.primary, primaryMix) : ink + const inkTinted = peakMix > 0 ? tint(inkPrimary, PEAK, peakMix) : inkPrimary + const shadowMixCfg = state?.cfg.shadowMix ?? shimmerConfig.shadowMix + const shadowMixTop = Math.min(1, pulseTop.peak * shadowMixCfg) + const shadowMixBot = Math.min(1, pulseBot.peak * shadowMixCfg) + const shadowTop = shadowMixTop > 0 ? tint(shadow, PEAK, shadowMixTop) : shadow + const shadowBot = shadowMixBot > 0 ? tint(shadow, PEAK, shadowMixBot) : shadow + const shadowMix = Math.min(1, pulse.peak * shadowMixCfg) + const shadowTinted = shadowMix > 0 ? 
tint(shadow, PEAK, shadowMix) : shadow + const n = wave(off + i, y, frame, charLit, ctx) + h + const s = wave(off + i, y, dusk, false, ctx) + h + const p = charLit ? pick(off + i, y, frame, ctx) : 0 + const e = charLit ? trace(off + i, y, frame, ctx) : 0 + const b = charLit ? bloom(off + i, y, frame, ctx) : 0 + const q = shimmer(off + i, y, frame, ctx) if (char === "_") { return ( @@ -545,8 +762,8 @@ export function Logo() { if (char === "^") { return ( @@ -557,34 +774,60 @@ export function Logo() { if (char === "~") { return ( - + ) } - if (char === " ") { + if (char === ",") { return ( - - {char} + + ▄ + + ) + } + + // Solid █: render as ▀ so the top pixel (fg) and bottom pixel (bg) can carry independent shimmer values + if (char === "█") { + return ( + + ▀ + + ) + } + + // ▀ top-half-lit: fg uses top-pixel sample, bg stays transparent/panel + if (char === "▀") { + return ( + + ▀ + + ) + } + + // ▄ bottom-half-lit: fg uses bottom-pixel sample + if (char === "▄") { + return ( + + ▄ ) } return ( - + {char} ) }) } - onCleanup(() => { - stop() - hum = false - Sound.dispose() - }) - const mouse = (evt: MouseEvent) => { if (!box) return if ((evt.type === "down" || evt.type === "drag") && evt.button === MouseButton.LEFT) { @@ -613,17 +856,28 @@ export function Logo() { position="absolute" top={0} left={0} - width={FULL[0]?.length ?? 0} - height={FULL.length} + width={ctx.FULL[0]?.length ?? 0} + height={ctx.FULL.length} zIndex={1} onMouse={mouse} /> - + {(line, index) => ( - {renderLine(line, index(), theme.textMuted, false, 0, frame(), dusk())} - {renderLine(logo.right[index()], index(), theme.text, true, LEFT + GAP, frame(), dusk())} + {renderLine(line, index(), props.ink ?? theme.textMuted, !!props.ink, 0, frame(), dusk(), idleState())} + + + {renderLine( + ctx.shape.right[index()], + index(), + props.ink ?? 
theme.text, + true, + ctx.LEFT + GAP, + frame(), + dusk(), + idleState(), + )} )} @@ -631,3 +885,9 @@ export function Logo() { ) } + +export function GoLogo() { + const { theme } = useTheme() + const base = tint(theme.background, theme.text, 0.62) + return +} diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index 82c4a7222f..2e08e66a4a 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -1,18 +1,19 @@ -import { BoxRenderable, TextareaRenderable, MouseEvent, PasteEvent, decodePasteBytes } from "@opentui/core" +import { BoxRenderable, RGBA, TextareaRenderable, MouseEvent, PasteEvent, decodePasteBytes } from "@opentui/core" import { createEffect, createMemo, onMount, createSignal, onCleanup, on, Show, Switch, Match } from "solid-js" import "opentui-spinner/solid" import path from "path" import { fileURLToPath } from "url" import { Filesystem } from "@/util" import { useLocal } from "@tui/context/local" -import { useTheme } from "@tui/context/theme" +import { tint, useTheme } from "@tui/context/theme" import { EmptyBorder, SplitBorder } from "@tui/component/border" import { useSDK } from "@tui/context/sdk" import { useRoute } from "@tui/context/route" +import { useProject } from "@tui/context/project" import { useSync } from "@tui/context/sync" import { useEvent } from "@tui/context/event" import { MessageID, PartID } from "@/session/schema" -import { createStore, produce } from "solid-js/store" +import { createStore, produce, unwrap } from "solid-js/store" import { useKeybind } from "@tui/context/keybind" import { usePromptHistory, type PromptInfo } from "./history" import { assign } from "./part" @@ -35,8 +36,11 @@ import { DialogProvider as DialogProviderConnect } from "../dialog-provider" import { DialogAlert } from "../../ui/dialog-alert" import { useToast } from "../../ui/toast" import 
{ useKV } from "../../context/kv" +import { createFadeIn } from "../../util/signal" import { useTextareaKeybindings } from "../textarea-keybindings" import { DialogSkill } from "../dialog-skill" +import { DialogWorkspaceCreate, restoreWorkspaceSession } from "../dialog-workspace-create" +import { DialogWorkspaceUnavailable } from "../dialog-workspace-unavailable" import { useArgs } from "@tui/context/args" export type PromptProps = { @@ -75,6 +79,12 @@ function randomIndex(count: number) { return Math.floor(Math.random() * count) } +function fadeColor(color: RGBA, alpha: number) { + return RGBA.fromValues(color.r, color.g, color.b, color.a * alpha) +} + +let stashed: { prompt: PromptInfo; cursor: number } | undefined + export function Prompt(props: PromptProps) { let input: TextareaRenderable let anchor: BoxRenderable @@ -85,6 +95,7 @@ export function Prompt(props: PromptProps) { const args = useArgs() const sdk = useSDK() const route = useRoute() + const project = useProject() const sync = useSync() const dialog = useDialog() const toast = useToast() @@ -95,6 +106,7 @@ export function Prompt(props: PromptProps) { const renderer = useRenderer() const { theme, syntax } = useTheme() const kv = useKV() + const animationsEnabled = createMemo(() => kv.get("animations_enabled", true)) const list = createMemo(() => props.placeholders?.normal ?? []) const shell = createMemo(() => props.placeholders?.shell ?? 
[]) const [auto, setAuto] = createSignal() @@ -233,9 +245,11 @@ export function Prompt(props: PromptProps) { keybind: "input_submit", category: "Prompt", hidden: true, - onSelect: (dialog) => { + onSelect: async (dialog) => { if (!input.focused) return - void submit() + const handled = await submit() + if (!handled) return + dialog.clear() }, }, @@ -433,26 +447,47 @@ export function Prompt(props: PromptProps) { }, } + onMount(() => { + const saved = stashed + stashed = undefined + if (store.prompt.input) return + if (saved && saved.prompt.input) { + input.setText(saved.prompt.input) + setStore("prompt", saved.prompt) + restoreExtmarksFromParts(saved.prompt.parts) + input.cursorOffset = saved.cursor + } + }) + onCleanup(() => { + if (store.prompt.input) { + stashed = { prompt: unwrap(store.prompt), cursor: input.cursorOffset } + } props.ref?.(undefined) }) createEffect(() => { if (!input || input.isDestroyed) return if (props.visible === false || dialog.stack.length > 0) { - input.blur() + if (input.focused) input.blur() return } // Slot/plugin updates can remount the background prompt while a dialog is open. // Keep focus with the dialog and let the prompt reclaim it after the dialog closes. - input.focus() + if (!input.focused) input.focus() }) createEffect(() => { if (!input || input.isDestroyed) return + const capture = + store.mode === "normal" + ? auto()?.visible + ? (["escape", "navigate", "submit", "tab"] as const) + : (["tab"] as const) + : undefined input.traits = { - capture: auto()?.visible ? ["escape", "navigate", "submit", "tab"] : undefined, + capture, suspend: !!props.disabled || store.mode === "shell", status: store.mode === "shell" ? 
"SHELL" : undefined, } @@ -599,20 +634,48 @@ export function Prompt(props: PromptProps) { setStore("prompt", "input", input.plainText) syncExtmarksWithPromptParts() } - if (props.disabled) return - if (autocomplete?.visible) return - if (!store.prompt.input) return + if (props.disabled) return false + if (autocomplete?.visible) return false + if (!store.prompt.input) return false const agent = local.agent.current() - if (!agent) return + if (!agent) return false const trimmed = store.prompt.input.trim() if (trimmed === "exit" || trimmed === "quit" || trimmed === ":q") { void exit() - return + return true } const selectedModel = local.model.current() if (!selectedModel) { void promptModelWarning() - return + return false + } + + const workspaceSession = props.sessionID ? sync.session.get(props.sessionID) : undefined + const workspaceID = workspaceSession?.workspaceID + const workspaceStatus = workspaceID ? (project.workspace.status(workspaceID) ?? "error") : undefined + if (props.sessionID && workspaceID && workspaceStatus !== "connected") { + dialog.replace(() => ( + { + dialog.replace(() => ( + + restoreWorkspaceSession({ + dialog, + sdk, + sync, + project, + toast, + workspaceID: nextWorkspaceID, + sessionID: props.sessionID!, + }) + } + /> + )) + }} + /> + )) + return false } let sessionID = props.sessionID @@ -627,7 +690,7 @@ export function Prompt(props: PromptProps) { variant: "error", }) - return + return true } sessionID = res.data.id @@ -741,6 +804,7 @@ export function Prompt(props: PromptProps) { }) }, 50) input.clear() + return true } const exit = useExit() @@ -841,6 +905,14 @@ export function Prompt(props: PromptProps) { return !!current }) + const agentMetaAlpha = createFadeIn(() => !!local.agent.current(), animationsEnabled) + const modelMetaAlpha = createFadeIn(() => !!local.agent.current() && store.mode === "normal", animationsEnabled) + const variantMetaAlpha = createFadeIn( + () => !!local.agent.current() && store.mode === "normal" && 
showVariant(), + animationsEnabled, + ) + const borderHighlight = createMemo(() => tint(theme.border, highlight(), agentMetaAlpha())) + const placeholderText = createMemo(() => { if (props.showPlaceholder === false) return undefined if (store.mode === "shell") { @@ -901,7 +973,7 @@ export function Prompt(props: PromptProps) { (anchor = r)} visible={props.visible !== false}> }> {(agent) => ( <> - {store.mode === "shell" ? "Shell" : Locale.titlecase(agent().name)} + + {store.mode === "shell" ? "Shell" : Locale.titlecase(agent().name)} + - + · + {local.model.parsed().model} - {currentProviderLabel()} + {currentProviderLabel()} - · + · - {local.model.variant.current()} + + {local.model.variant.current()} + @@ -1146,7 +1226,7 @@ export function Prompt(props: PromptProps) { ) { const data = { ...raw } if (!("tui" in data)) return data @@ -90,37 +89,44 @@ async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) { acc.result.plugin_origins = plugins } -async function loadState(ctx: { directory: string }) { - let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) - const directories = await ConfigPaths.directories(ctx.directory) - const custom = customPath() - await migrateTuiConfig({ directories, custom, cwd: ctx.directory }) - // Re-compute after migration since migrateTuiConfig may have created new tui.json files - projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory) +const loadState = Effect.fn("TuiConfig.loadState")(function* (ctx: { directory: string }) { + // Every config dir we may read from: global config dir, any `.opencode` + // folders between cwd and home, and OPENCODE_CONFIG_DIR. + const directories = yield* ConfigPaths.directories(ctx.directory) + yield* Effect.promise(() => migrateTuiConfig({ directories, cwd: ctx.directory })) + + const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? 
[] : yield* ConfigPaths.files("tui", ctx.directory) const acc: Acc = { result: {}, } + // 1. Global tui config (lowest precedence). for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) { - await mergeFile(acc, file, ctx) + yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie) } - if (custom) { - await mergeFile(acc, custom, ctx) - log.debug("loaded custom tui config", { path: custom }) + // 2. Explicit OPENCODE_TUI_CONFIG override, if set. + if (Flag.OPENCODE_TUI_CONFIG) { + const configFile = Flag.OPENCODE_TUI_CONFIG + yield* Effect.promise(() => mergeFile(acc, configFile, ctx)).pipe(Effect.orDie) + log.debug("loaded custom tui config", { path: configFile }) } + // 3. Project tui files, applied root-first so the closest file wins. for (const file of projectFiles) { - await mergeFile(acc, file, ctx) + yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie) } + // 4. `.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while + // walking up the tree. Also returned below so callers can install plugin + // dependencies from each location. const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) for (const dir of dirs) { if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue for (const file of ConfigPaths.fileInDirectory(dir, "tui")) { - await mergeFile(acc, file, ctx) + yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie) } } @@ -139,20 +145,25 @@ async function loadState(ctx: { directory: string }) { config: acc.result, dirs: acc.result.plugin?.length ? 
dirs : [], } -} +}) export const layer = Layer.effect( Service, Effect.gen(function* () { const directory = yield* CurrentWorkingDirectory const npm = yield* Npm.Service - const data = yield* Effect.promise(() => loadState({ directory })) + const data = yield* loadState({ directory }) const deps = yield* Effect.forEach( data.dirs, (dir) => npm .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], + add: [ + { + name: "@opencode-ai/plugin", + version: InstallationLocal ? undefined : InstallationVersion, + }, + ], }) .pipe(Effect.forkScoped), { @@ -169,7 +180,7 @@ export const layer = Layer.effect( }).pipe(Effect.withSpan("TuiConfig.layer")), ) -export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer)) +export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer), Layer.provide(AppFileSystem.defaultLayer)) const { runPromise } = makeRuntime(Service, defaultLayer) @@ -191,23 +202,18 @@ async function loadFile(filepath: string): Promise { } async function load(text: string, configFilepath: string): Promise { - return ConfigParse.load(Info, text, { - type: "path", - path: configFilepath, - missing: "empty", - normalize: (data) => { + return ConfigVariable.substitute({ text, type: "path", path: configFilepath, missing: "empty" }) + .then((expanded) => ConfigParse.jsonc(expanded, configFilepath)) + .then((data) => { if (!isRecord(data)) return {} // Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json // (mirroring the old opencode.json shape) still get their settings applied. 
- return normalize(data) - }, - }) + return ConfigParse.schema(Info, normalize(data), configFilepath) + }) .then((data) => resolvePlugins(data, configFilepath)) .catch((error) => { log.warn("invalid tui config", { path: configFilepath, error }) return {} }) } - -export * as TuiConfig from "./tui" diff --git a/packages/opencode/src/cli/cmd/tui/context/local.tsx b/packages/opencode/src/cli/cmd/tui/context/local.tsx index bb73c65378..9104837641 100644 --- a/packages/opencode/src/cli/cmd/tui/context/local.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/local.tsx @@ -75,7 +75,9 @@ export const { use: useLocal, provider: LocalProvider } = createSimpleContext({ }, move(direction: 1 | -1) { batch(() => { - let next = agents().findIndex((x) => x.name === agentStore.current) + direction + const current = this.current() + if (!current) return + let next = agents().findIndex((x) => x.name === current.name) + direction if (next < 0) next = agents().length - 1 if (next >= agents().length) next = 0 const value = agents()[next] diff --git a/packages/opencode/src/cli/cmd/tui/context/project.tsx b/packages/opencode/src/cli/cmd/tui/context/project.tsx index 26e5c075d7..22dd94bc82 100644 --- a/packages/opencode/src/cli/cmd/tui/context/project.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/project.tsx @@ -10,18 +10,21 @@ export const { use: useProject, provider: ProjectProvider } = createSimpleContex name: "Project", init: () => { const sdk = useSDK() + + const defaultPath = { + home: "", + state: "", + config: "", + worktree: "", + directory: sdk.directory ?? "", + } satisfies Path + const [store, setStore] = createStore({ project: { id: undefined as string | undefined, }, instance: { - path: { - home: "", - state: "", - config: "", - worktree: "", - directory: sdk.directory ?? 
"", - } satisfies Path, + path: defaultPath, }, workspace: { current: undefined as string | undefined, @@ -38,7 +41,7 @@ export const { use: useProject, provider: ProjectProvider } = createSimpleContex ]) batch(() => { - setStore("instance", "path", reconcile(path.data!)) + setStore("instance", "path", reconcile(path.data || defaultPath)) setStore("project", "id", project.data?.id) }) } diff --git a/packages/opencode/src/cli/cmd/tui/context/route.tsx b/packages/opencode/src/cli/cmd/tui/context/route.tsx index e9f463a13f..35be17801b 100644 --- a/packages/opencode/src/cli/cmd/tui/context/route.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/route.tsx @@ -1,16 +1,16 @@ -import { createStore } from "solid-js/store" +import { createStore, reconcile } from "solid-js/store" import { createSimpleContext } from "./helper" import type { PromptInfo } from "../component/prompt/history" export type HomeRoute = { type: "home" - initialPrompt?: PromptInfo + prompt?: PromptInfo } export type SessionRoute = { type: "session" sessionID: string - initialPrompt?: PromptInfo + prompt?: PromptInfo } export type PluginRoute = { @@ -23,13 +23,14 @@ export type Route = HomeRoute | SessionRoute | PluginRoute export const { use: useRoute, provider: RouteProvider } = createSimpleContext({ name: "Route", - init: () => { + init: (props: { initialRoute?: Route }) => { const [store, setStore] = createStore( - process.env["OPENCODE_ROUTE"] - ? JSON.parse(process.env["OPENCODE_ROUTE"]) - : { - type: "home", - }, + props.initialRoute ?? + (process.env["OPENCODE_ROUTE"] + ? 
JSON.parse(process.env["OPENCODE_ROUTE"]) + : { + type: "home", + }), ) return { @@ -37,7 +38,7 @@ export const { use: useRoute, provider: RouteProvider } = createSimpleContext({ return store }, navigate(route: Route) { - setStore(route) + setStore(reconcile(route)) }, } }, diff --git a/packages/opencode/src/cli/cmd/tui/context/sdk.tsx b/packages/opencode/src/cli/cmd/tui/context/sdk.tsx index 14d3062886..6a240ceef8 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sdk.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sdk.tsx @@ -2,6 +2,7 @@ import { createOpencodeClient } from "@opencode-ai/sdk/v2" import type { GlobalEvent } from "@opencode-ai/sdk/v2" import { createSimpleContext } from "./helper" import { createGlobalEmitter } from "@solid-primitives/event-bus" +import { Flag } from "@/flag/flag" import { batch, onCleanup, onMount } from "solid-js" export type EventSource = { @@ -39,6 +40,8 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ let queue: GlobalEvent[] = [] let timer: Timer | undefined let last = 0 + const retryDelay = 1000 + const maxRetryDelay = 30000 const flush = () => { if (queue.length === 0) return @@ -73,9 +76,20 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ const ctrl = new AbortController() sse = ctrl ;(async () => { + let attempt = 0 while (true) { if (abort.signal.aborted || ctrl.signal.aborted) break - const events = await sdk.global.event({ signal: ctrl.signal }) + + const events = await sdk.global.event({ + signal: ctrl.signal, + sseMaxRetryAttempts: 0, + }) + + if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { + // Start syncing workspaces, it's important to do this after + // we've started listening to events + await sdk.sync.start().catch(() => {}) + } for await (const event of events.stream) { if (ctrl.signal.aborted) break @@ -84,6 +98,12 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ if (timer) clearTimeout(timer) if (queue.length > 0) 
flush() + attempt += 1 + if (abort.signal.aborted || ctrl.signal.aborted) break + + // Exponential backoff + const backoff = Math.min(retryDelay * 2 ** (attempt - 1), maxRetryDelay) + await new Promise((resolve) => setTimeout(resolve, backoff)) } })().catch(() => {}) } @@ -92,6 +112,12 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ if (props.events) { const unsub = await props.events.subscribe(handleEvent) onCleanup(unsub) + + if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { + // Start syncing workspaces, it's important to do this after + // we've started listening to events + await sdk.sync.start().catch(() => {}) + } } else { startSSE() } diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index b5734e67d0..d2a7e5c4d0 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -27,7 +27,7 @@ import { createSimpleContext } from "./helper" import type { Snapshot } from "@/snapshot" import { useExit } from "./exit" import { useArgs } from "./args" -import { batch, createEffect, on } from "solid-js" +import { batch, onMount } from "solid-js" import { Log } from "@/util" import { emptyConsoleState, type ConsoleState } from "@/config/console-state" @@ -108,6 +108,9 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ const project = useProject() const sdk = useSDK() + const fullSyncedSessions = new Set() + let syncedWorkspace = project.workspace.current() + event.subscribe((event) => { switch (event.type) { case "server.instance.disposed": @@ -350,9 +353,13 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ const exit = useExit() const args = useArgs() - async function bootstrap() { - console.log("bootstrapping") + async function bootstrap(input: { fatal?: boolean } = {}) { + const fatal = input.fatal ?? 
true const workspace = project.workspace.current() + if (workspace !== syncedWorkspace) { + fullSyncedSessions.clear() + syncedWorkspace = workspace + } const start = Date.now() - 30 * 24 * 60 * 60 * 1000 const sessionListPromise = sdk.client.session .list({ start: start }) @@ -441,20 +448,17 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ name: e instanceof Error ? e.name : undefined, stack: e instanceof Error ? e.stack : undefined, }) - await exit(e) + if (fatal) { + await exit(e) + } else { + throw e + } }) } - const fullSyncedSessions = new Set() - createEffect( - on( - () => project.workspace.current(), - () => { - fullSyncedSessions.clear() - void bootstrap() - }, - ), - ) + onMount(() => { + void bootstrap() + }) const result = { data: store, @@ -463,6 +467,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ return store.status }, get ready() { + return true if (process.env.OPENCODE_FAST_BOOT) return true return store.status !== "loading" }, diff --git a/packages/opencode/src/cli/cmd/tui/context/theme.tsx b/packages/opencode/src/cli/cmd/tui/context/theme.tsx index 679be8f254..04670429da 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/theme.tsx @@ -397,7 +397,7 @@ export const { use: useTheme, provider: ThemeProvider } = createSimpleContext({ if (store.lock) return apply(mode) } - renderer.on(CliRenderEvents.THEME_MODE, handle) + // renderer.on(CliRenderEvents.THEME_MODE, handle) const refresh = () => { renderer.clearPaletteCache() diff --git a/packages/opencode/src/cli/cmd/tui/layer.ts b/packages/opencode/src/cli/cmd/tui/layer.ts index 734106f8a6..64cba08e82 100644 --- a/packages/opencode/src/cli/cmd/tui/layer.ts +++ b/packages/opencode/src/cli/cmd/tui/layer.ts @@ -1,6 +1,6 @@ import { Layer } from "effect" import { TuiConfig } from "./config/tui" -import { Npm } from "@opencode-ai/shared/npm" +import { Npm } from "@/npm" import { 
Observability } from "@/effect/observability" export const CliLayer = Observability.layer.pipe(Layer.merge(TuiConfig.layer), Layer.provide(Npm.defaultLayer)) diff --git a/packages/opencode/src/cli/cmd/tui/plugin/api.tsx b/packages/opencode/src/cli/cmd/tui/plugin/api.tsx index d2b495ca31..5bea483807 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/api.tsx +++ b/packages/opencode/src/cli/cmd/tui/plugin/api.tsx @@ -91,7 +91,7 @@ function routeCurrent(route: ReturnType): TuiPluginApi["route"] name: "session", params: { sessionID: route.data.sessionID, - initialPrompt: route.data.initialPrompt, + prompt: route.data.prompt, }, } } diff --git a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts index e1b2eca1dd..e4a0e59eb1 100644 --- a/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts +++ b/packages/opencode/src/cli/cmd/tui/plugin/runtime.ts @@ -918,113 +918,113 @@ async function installPluginBySpec( } } -export namespace TuiPluginRuntime { - let dir = "" - let loaded: Promise | undefined - let runtime: RuntimeState | undefined - export const Slot = View +let dir = "" +let loaded: Promise | undefined +let runtime: RuntimeState | undefined +export const Slot = View - export async function init(input: { api: HostPluginApi; config: TuiConfig.Info }) { - const cwd = process.cwd() - if (loaded) { - if (dir !== cwd) { - throw new Error(`TuiPluginRuntime.init() called with a different working directory. expected=${dir} got=${cwd}`) - } - return loaded +export async function init(input: { api: HostPluginApi; config: TuiConfig.Info }) { + const cwd = process.cwd() + if (loaded) { + if (dir !== cwd) { + throw new Error(`TuiPluginRuntime.init() called with a different working directory. 
expected=${dir} got=${cwd}`) } - - dir = cwd - loaded = load(input) return loaded } - export function list() { - if (!runtime) return [] - return listPluginStatus(runtime) - } + dir = cwd + loaded = load(input) + return loaded +} - export async function activatePlugin(id: string) { - return activatePluginById(runtime, id, true) - } +export function list() { + if (!runtime) return [] + return listPluginStatus(runtime) +} - export async function deactivatePlugin(id: string) { - return deactivatePluginById(runtime, id, true) - } +export async function activatePlugin(id: string) { + return activatePluginById(runtime, id, true) +} - export async function addPlugin(spec: string) { - return addPluginBySpec(runtime, spec) - } +export async function deactivatePlugin(id: string) { + return deactivatePluginById(runtime, id, true) +} - export async function installPlugin(spec: string, options?: { global?: boolean }) { - return installPluginBySpec(runtime, spec, options?.global) - } +export async function addPlugin(spec: string) { + return addPluginBySpec(runtime, spec) +} - export async function dispose() { - const task = loaded - loaded = undefined - dir = "" - if (task) await task - const state = runtime - runtime = undefined - if (!state) return - const queue = [...state.plugins].reverse() - for (const plugin of queue) { - await deactivatePluginEntry(state, plugin, false) - } - } +export async function installPlugin(spec: string, options?: { global?: boolean }) { + return installPluginBySpec(runtime, spec, options?.global) +} - async function load(input: { api: Api; config: TuiConfig.Info }) { - const { api, config } = input - const cwd = process.cwd() - const slots = setupSlots(api) - const next: RuntimeState = { - directory: cwd, - api, - slots, - plugins: [], - plugins_by_id: new Map(), - pending: new Map(), - } - runtime = next - try { - await Instance.provide({ - directory: cwd, - fn: async () => { - const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? 
[]) - if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { - log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) - } - - for (const item of INTERNAL_TUI_PLUGINS) { - log.info("loading internal tui plugin", { id: item.id }) - const entry = loadInternalPlugin(item) - const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) - addPluginEntry(next, { - id: entry.id, - load: entry, - meta, - themes: {}, - plugin: entry.module.tui, - enabled: true, - }) - } - - const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) - await addExternalPluginEntries(next, ready) - - applyInitialPluginEnabledState(next, config) - for (const plugin of next.plugins) { - if (!plugin.enabled) continue - // Keep plugin execution sequential for deterministic side effects: - // command registration order affects keybind/command precedence, - // route registration is last-wins when ids collide, - // and hook chains rely on stable plugin ordering. - await activatePluginEntry(next, plugin, false) - } - }, - }) - } catch (error) { - fail("failed to load tui plugins", { directory: cwd, error }) - } +export async function dispose() { + const task = loaded + loaded = undefined + dir = "" + if (task) await task + const state = runtime + runtime = undefined + if (!state) return + const queue = [...state.plugins].reverse() + for (const plugin of queue) { + await deactivatePluginEntry(state, plugin, false) } } + +async function load(input: { api: Api; config: TuiConfig.Info }) { + const { api, config } = input + const cwd = process.cwd() + const slots = setupSlots(api) + const next: RuntimeState = { + directory: cwd, + api, + slots, + plugins: [], + plugins_by_id: new Map(), + pending: new Map(), + } + runtime = next + try { + await Instance.provide({ + directory: cwd, + fn: async () => { + const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? 
[]) + if (Flag.OPENCODE_PURE && config.plugin_origins?.length) { + log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length }) + } + + for (const item of INTERNAL_TUI_PLUGINS) { + log.info("loading internal tui plugin", { id: item.id }) + const entry = loadInternalPlugin(item) + const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id) + addPluginEntry(next, { + id: entry.id, + load: entry, + meta, + themes: {}, + plugin: entry.module.tui, + enabled: true, + }) + } + + const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies()) + await addExternalPluginEntries(next, ready) + + applyInitialPluginEnabledState(next, config) + for (const plugin of next.plugins) { + if (!plugin.enabled) continue + // Keep plugin execution sequential for deterministic side effects: + // command registration order affects keybind/command precedence, + // route registration is last-wins when ids collide, + // and hook chains rely on stable plugin ordering. + await activatePluginEntry(next, plugin, false) + } + }, + }) + } catch (error) { + fail("failed to load tui plugins", { directory: cwd, error }) + } +} + +export * as TuiPluginRuntime from "./runtime" diff --git a/packages/opencode/src/cli/cmd/tui/routes/home.tsx b/packages/opencode/src/cli/cmd/tui/routes/home.tsx index 1cce7fb396..2f0ff07e9a 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/home.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/home.tsx @@ -10,7 +10,6 @@ import { usePromptRef } from "../context/prompt" import { useLocal } from "../context/local" import { TuiPluginRuntime } from "../plugin" -// TODO: what is the best way to do this? 
let once = false const placeholder = { normal: ["Fix a TODO in the codebase", "What is the tech stack of this project?", "Fix broken tests"], @@ -31,8 +30,8 @@ export function Home() { setRef(r) promptRef.set(r) if (once || !r) return - if (route.initialPrompt) { - r.set(route.initialPrompt) + if (route.prompt) { + r.set(route.prompt) once = true return } diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx index 0ce33a59a9..8d1e4438c8 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-fork-from-timeline.tsx @@ -38,7 +38,7 @@ export function DialogForkFromTimeline(props: { sessionID: string; onMove: (mess messageID: message.id, }) const parts = sync.data.part[message.id] ?? [] - const initialPrompt = parts.reduce( + const prompt = parts.reduce( (agg, part) => { if (part.type === "text") { if (!part.synthetic) agg.input += part.text @@ -51,7 +51,7 @@ export function DialogForkFromTimeline(props: { sessionID: string; onMove: (mess route.navigate({ sessionID: forked.data!.id, type: "session", - initialPrompt, + prompt, }) dialog.clear() }, diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx index 412b4d87eb..aeea2f52ad 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/dialog-message.tsx @@ -81,25 +81,23 @@ export function DialogMessage(props: { sessionID: props.sessionID, messageID: props.messageID, }) - const initialPrompt = (() => { - const msg = message() - if (!msg) return undefined - const parts = sync.data.part[msg.id] - return parts.reduce( - (agg, part) => { - if (part.type === "text") { - if (!part.synthetic) agg.input += part.text - } - if (part.type === 
"file") agg.parts.push(part) - return agg - }, - { input: "", parts: [] as PromptInfo["parts"] }, - ) - })() + const msg = message() + const prompt = msg + ? sync.data.part[msg.id].reduce( + (agg, part) => { + if (part.type === "text") { + if (!part.synthetic) agg.input += part.text + } + if (part.type === "file") agg.parts.push(part) + return agg + }, + { input: "", parts: [] as PromptInfo["parts"] }, + ) + : undefined route.navigate({ sessionID: result.data!.id, type: "session", - initialPrompt, + prompt, }) dialog.clear() }, diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx index b0514bf1b1..ccca4d1eba 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx @@ -54,7 +54,6 @@ import { useSDK } from "@tui/context/sdk" import { useCommandDialog } from "@tui/component/dialog-command" import type { DialogContext } from "@tui/ui/dialog" import { useKeybind } from "@tui/context/keybind" -import { parsePatch } from "diff" import { useDialog } from "../../ui/dialog" import { TodoItem } from "../../component/todo-item" import { DialogMessage } from "./dialog-message" @@ -88,6 +87,7 @@ import { getScrollAcceleration } from "../../util/scroll" import { TuiPluginRuntime } from "../../plugin" import { DialogGoUpsell } from "../../component/dialog-go-upsell" import { SessionRetry } from "@/session/retry" +import { getRevertDiffFiles } from "../../util/revert-diff" addDefaultParsers(parsers.parsers) @@ -181,27 +181,32 @@ export function Session() { const sdk = useSDK() createEffect(async () => { - await sdk.client.session - .get({ sessionID: route.sessionID }, { throwOnError: true }) - .then((x) => { - project.workspace.set(x.data?.workspaceID) - }) - .then(() => sync.session.sync(route.sessionID)) - .then(() => { - if (scroll) scroll.scrollBy(100_000) - }) - .catch((e) => { - console.error(e) - toast.show({ - 
message: `Session not found: ${route.sessionID}`, - variant: "error", - }) - return navigate({ type: "home" }) + const previousWorkspace = project.workspace.current() + const result = await sdk.client.session.get({ sessionID: route.sessionID }, { throwOnError: true }) + if (!result.data) { + toast.show({ + message: `Session not found: ${route.sessionID}`, + variant: "error", }) + navigate({ type: "home" }) + return + } + + if (result.data.workspaceID !== previousWorkspace) { + project.workspace.set(result.data.workspaceID) + + // Sync all the data for this workspace. Note that this + // workspace may not exist anymore which is why this is not + // fatal. If it doesn't we still want to show the session + // (which will be non-interactive) + try { + await sync.bootstrap({ fatal: false }) + } catch (e) {} + } + await sync.session.sync(route.sessionID) + if (scroll) scroll.scrollBy(100_000) }) - // Handle initial prompt from fork - let seeded = false let lastSwitch: string | undefined = undefined event.on("message.part.updated", (evt) => { const part = evt.properties.part @@ -219,14 +224,15 @@ export function Session() { } }) + let seeded = false let scroll: ScrollBoxRenderable let prompt: PromptRef | undefined const bind = (r: PromptRef | undefined) => { prompt = r promptRef.set(r) - if (seeded || !route.initialPrompt || !r) return + if (seeded || !route.prompt || !r) return seeded = true - r.set(route.initialPrompt) + r.set(route.prompt) } const keybind = useKeybind() const dialog = useDialog() @@ -991,31 +997,7 @@ export function Session() { const revertInfo = createMemo(() => session()?.revert) const revertMessageID = createMemo(() => revertInfo()?.messageID) - const revertDiffFiles = createMemo(() => { - const diffText = revertInfo()?.diff ?? 
"" - if (!diffText) return [] - - try { - const patches = parsePatch(diffText) - return patches.map((patch) => { - const filename = patch.newFileName || patch.oldFileName || "unknown" - const cleanFilename = filename.replace(/^[ab]\//, "") - return { - filename: cleanFilename, - additions: patch.hunks.reduce( - (sum, hunk) => sum + hunk.lines.filter((line) => line.startsWith("+")).length, - 0, - ), - deletions: patch.hunks.reduce( - (sum, hunk) => sum + hunk.lines.filter((line) => line.startsWith("-")).length, - 0, - ), - } - }) - } catch { - return [] - } - }) + const revertDiffFiles = createMemo(() => getRevertDiffFiles(revertInfo()?.diff ?? "")) const revertRevertedMessages = createMemo(() => { const messageID = revertMessageID() diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/sidebar.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/sidebar.tsx index 06bc270644..6d92752efe 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/sidebar.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/sidebar.tsx @@ -1,17 +1,31 @@ +import { useProject } from "@tui/context/project" import { useSync } from "@tui/context/sync" import { createMemo, Show } from "solid-js" import { useTheme } from "../../context/theme" import { useTuiConfig } from "../../context/tui-config" -import { InstallationVersion } from "@/installation/version" +import { InstallationChannel, InstallationVersion } from "@/installation/version" import { TuiPluginRuntime } from "../../plugin" import { getScrollAcceleration } from "../../util/scroll" export function Sidebar(props: { sessionID: string; overlay?: boolean }) { + const project = useProject() const sync = useSync() const { theme } = useTheme() const tuiConfig = useTuiConfig() const session = createMemo(() => sync.session.get(props.sessionID)) + const workspaceStatus = () => { + const workspaceID = session()?.workspaceID + if (!workspaceID) return "error" + return project.workspace.status(workspaceID) ?? 
"error" + } + const workspaceLabel = () => { + const workspaceID = session()?.workspaceID + if (!workspaceID) return "unknown" + const info = project.workspace.get(workspaceID) + if (!info) return "unknown" + return `${info.type}: ${info.name}` + } const scrollAcceleration = createMemo(() => getScrollAcceleration(tuiConfig)) return ( @@ -48,6 +62,15 @@ export function Sidebar(props: { sessionID: string; overlay?: boolean }) { {session()!.title} + + {props.sessionID} + + + + {" "} + {workspaceLabel()} + + {session()!.share!.url} diff --git a/packages/opencode/src/cli/cmd/tui/thread.ts b/packages/opencode/src/cli/cmd/tui/thread.ts index 96ceb905c5..e3e9eb8117 100644 --- a/packages/opencode/src/cli/cmd/tui/thread.ts +++ b/packages/opencode/src/cli/cmd/tui/thread.ts @@ -15,6 +15,7 @@ import type { EventSource } from "./context/sdk" import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32" import { writeHeapSnapshot } from "v8" import { TuiConfig } from "./config/tui" +import { OPENCODE_PROCESS_ROLE, OPENCODE_RUN_ID, ensureRunID, sanitizedProcessEnv } from "@/util/opencode-process" declare global { const OPENCODE_WORKER_PATH: string @@ -129,11 +130,13 @@ export const TuiThreadCommand = cmd({ return } const cwd = Filesystem.resolve(process.cwd()) + const env = sanitizedProcessEnv({ + [OPENCODE_PROCESS_ROLE]: "worker", + [OPENCODE_RUN_ID]: ensureRunID(), + }) const worker = new Worker(file, { - env: Object.fromEntries( - Object.entries(process.env).filter((entry): entry is [string, string] => entry[1] !== undefined), - ), + env, }) worker.onerror = (e) => { Log.Default.error("thread error", { diff --git a/packages/opencode/src/cli/cmd/tui/util/revert-diff.ts b/packages/opencode/src/cli/cmd/tui/util/revert-diff.ts new file mode 100644 index 0000000000..6ee1737f0b --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/util/revert-diff.ts @@ -0,0 +1,18 @@ +import { parsePatch } from "diff" + +export function getRevertDiffFiles(diffText: string) { + if 
(!diffText) return [] + + try { + return parsePatch(diffText).map((patch) => { + const filename = [patch.newFileName, patch.oldFileName].find((item) => item && item !== "/dev/null") ?? "unknown" + return { + filename: filename.replace(/^[ab]\//, ""), + additions: patch.hunks.reduce((sum, hunk) => sum + hunk.lines.filter((line) => line.startsWith("+")).length, 0), + deletions: patch.hunks.reduce((sum, hunk) => sum + hunk.lines.filter((line) => line.startsWith("-")).length, 0), + } + }) + } catch { + return [] + } +} diff --git a/packages/opencode/src/cli/cmd/tui/util/signal.ts b/packages/opencode/src/cli/cmd/tui/util/signal.ts index 15b57886d6..7d20ae04ba 100644 --- a/packages/opencode/src/cli/cmd/tui/util/signal.ts +++ b/packages/opencode/src/cli/cmd/tui/util/signal.ts @@ -1,7 +1,41 @@ -import { createSignal, type Accessor } from "solid-js" +import { createEffect, createSignal, on, onCleanup, type Accessor } from "solid-js" import { debounce, type Scheduled } from "@solid-primitives/scheduled" export function createDebouncedSignal(value: T, ms: number): [Accessor, Scheduled<[value: T]>] { const [get, set] = createSignal(value) return [get, debounce((v: T) => set(() => v), ms)] } + +export function createFadeIn(show: Accessor, enabled: Accessor) { + const [alpha, setAlpha] = createSignal(show() ? 
1 : 0) + let revealed = show() + + createEffect( + on([show, enabled], ([visible, animate]) => { + if (!visible) { + setAlpha(0) + return + } + + if (!animate || revealed) { + revealed = true + setAlpha(1) + return + } + + const start = performance.now() + revealed = true + setAlpha(0) + + const timer = setInterval(() => { + const progress = Math.min((performance.now() - start) / 160, 1) + setAlpha(progress * progress * (3 - 2 * progress)) + if (progress >= 1) clearInterval(timer) + }, 16) + + onCleanup(() => clearInterval(timer)) + }), + ) + + return alpha +} diff --git a/packages/opencode/src/cli/cmd/tui/worker.ts b/packages/opencode/src/cli/cmd/tui/worker.ts index 393a407eb0..8cec99c615 100644 --- a/packages/opencode/src/cli/cmd/tui/worker.ts +++ b/packages/opencode/src/cli/cmd/tui/worker.ts @@ -11,6 +11,9 @@ import { Flag } from "@/flag/flag" import { writeHeapSnapshot } from "node:v8" import { Heap } from "@/cli/heap" import { AppRuntime } from "@/effect/app-runtime" +import { ensureProcessMetadata } from "@/util/opencode-process" + +ensureProcessMetadata("worker") await Log.init({ print: process.argv.includes("--print-logs"), diff --git a/packages/opencode/src/cli/logo.ts b/packages/opencode/src/cli/logo.ts index 44fb93c15b..a58a8cf995 100644 --- a/packages/opencode/src/cli/logo.ts +++ b/packages/opencode/src/cli/logo.ts @@ -3,4 +3,9 @@ export const logo = { right: [" ▄ ", "█▀▀▀ █▀▀█ █▀▀█ █▀▀█", "█___ █__█ █__█ █^^^", "▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀"], } -export const marks = "_^~" +export const go = { + left: [" ", "█▀▀▀", "█_^█", "▀▀▀▀"], + right: [" ", "█▀▀█", "█__█", "▀▀▀▀"], +} + +export const marks = "_^~," diff --git a/packages/opencode/src/config/agent.ts b/packages/opencode/src/config/agent.ts index f754f009d4..9053b19fc1 100644 --- a/packages/opencode/src/config/agent.ts +++ b/packages/opencode/src/config/agent.ts @@ -15,7 +15,7 @@ const log = Log.create({ service: "config" }) export const Info = z .object({ - model: ConfigModelID.optional(), + model: 
ConfigModelID.zod.optional(), variant: z .string() .optional() diff --git a/packages/opencode/src/config/command.ts b/packages/opencode/src/config/command.ts index 9799250567..3e0adccc30 100644 --- a/packages/opencode/src/config/command.ts +++ b/packages/opencode/src/config/command.ts @@ -1,10 +1,12 @@ export * as ConfigCommand from "./command" import { Log } from "../util" -import z from "zod" +import { Schema } from "effect" import { NamedError } from "@opencode-ai/shared/util/error" import { Glob } from "@opencode-ai/shared/util/glob" import { Bus } from "@/bus" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" import { configEntryNameFromPath } from "./entry-name" import { InvalidError } from "./error" import * as ConfigMarkdown from "./markdown" @@ -12,15 +14,15 @@ import { ConfigModelID } from "./model-id" const log = Log.create({ service: "config" }) -export const Info = z.object({ - template: z.string(), - description: z.string().optional(), - agent: z.string().optional(), - model: ConfigModelID.optional(), - subtask: z.boolean().optional(), -}) +export const Info = Schema.Struct({ + template: Schema.String, + description: Schema.optional(Schema.String), + agent: Schema.optional(Schema.String), + model: Schema.optional(ConfigModelID), + subtask: Schema.optional(Schema.Boolean), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Info = z.infer +export type Info = Schema.Schema.Type export async function load(dir: string) { const result: Record = {} @@ -49,7 +51,7 @@ export async function load(dir: string) { ...md.data, template: md.content.trim(), } - const parsed = Info.safeParse(config) + const parsed = Info.zod.safeParse(config) if (parsed.success) { result[config.name] = parsed.data continue diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 3cbc539600..179c6a6093 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ 
-12,32 +12,35 @@ import { Auth } from "../auth" import { Env } from "../env" import { applyEdits, modify } from "jsonc-parser" import { Instance, type InstanceContext } from "../project/instance" -import * as LSPServer from "../lsp/server" import { InstallationLocal, InstallationVersion } from "@/installation/version" import { existsSync } from "fs" import { GlobalBus } from "@/bus/global" import { Event } from "../server/event" -import { Account } from "@/account" +import { Account } from "@/account/account" import { isRecord } from "@/util/record" -import { InvalidError, JsonError } from "./error" import type { ConsoleState } from "./console-state" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { InstanceState } from "@/effect" import { Context, Duration, Effect, Exit, Fiber, Layer, Option } from "effect" import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" import { InstanceRef } from "@/effect/instance-ref" -import { Npm } from "@opencode-ai/shared/npm" import { ConfigAgent } from "./agent" +import { ConfigCommand } from "./command" +import { ConfigFormatter } from "./formatter" +import { ConfigLayout } from "./layout" +import { ConfigLSP } from "./lsp" +import { ConfigManaged } from "./managed" import { ConfigMCP } from "./mcp" import { ConfigModelID } from "./model-id" -import { ConfigPlugin } from "./plugin" -import { ConfigManaged } from "./managed" -import { ConfigCommand } from "./command" import { ConfigParse } from "./parse" -import { ConfigPermission } from "./permission" -import { ConfigProvider } from "./provider" -import { ConfigSkills } from "./skills" import { ConfigPaths } from "./paths" +import { ConfigPermission } from "./permission" +import { ConfigPlugin } from "./plugin" +import { ConfigProvider } from "./provider" +import { ConfigServer } from "./server" +import { ConfigSkills } from "./skills" +import { ConfigVariable } from "./variable" +import { Npm } from "@/npm" const log = Log.create({ service: 
"config" }) @@ -72,23 +75,9 @@ async function resolveLoadedPlugins( return config } -export const Server = z - .object({ - port: z.number().int().positive().optional().describe("Port to listen on"), - hostname: z.string().optional().describe("Hostname to listen on"), - mdns: z.boolean().optional().describe("Enable mDNS service discovery"), - mdnsDomain: z.string().optional().describe("Custom domain name for mDNS service (default: opencode.local)"), - cors: z.array(z.string()).optional().describe("Additional domains to allow for CORS"), - }) - .strict() - .meta({ - ref: "ServerConfig", - }) - -export const Layout = z.enum(["auto", "stretch"]).meta({ - ref: "LayoutConfig", -}) -export type Layout = z.infer +export const Server = ConfigServer.Server.zod +export const Layout = ConfigLayout.Layout.zod +export type Layout = ConfigLayout.Layout export const Info = z .object({ @@ -96,10 +85,10 @@ export const Info = z logLevel: Log.Level.optional().describe("Log level"), server: Server.optional().describe("Server configuration for opencode serve and web commands"), command: z - .record(z.string(), ConfigCommand.Info) + .record(z.string(), ConfigCommand.Info.zod) .optional() .describe("Command configuration, see https://opencode.ai/docs/commands"), - skills: ConfigSkills.Info.optional().describe("Additional skill folder paths"), + skills: ConfigSkills.Info.zod.optional().describe("Additional skill folder paths"), watcher: z .object({ ignore: z.array(z.string()).optional(), @@ -112,7 +101,7 @@ export const Info = z "Enable or disable snapshot tracking. When false, filesystem snapshots are not recorded and undoing or reverting will not undo/redo file changes. Defaults to true.", ), // User-facing plugin config is stored as Specs; provenance gets attached later while configs are merged. 
- plugin: ConfigPlugin.Spec.array().optional(), + plugin: ConfigPlugin.Spec.zod.array().optional(), share: z .enum(["manual", "auto", "disabled"]) .optional() @@ -134,10 +123,10 @@ export const Info = z .array(z.string()) .optional() .describe("When set, ONLY these providers will be enabled. All other providers will be ignored"), - model: ConfigModelID.describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(), - small_model: ConfigModelID.describe( - "Small model to use for tasks like title generation in the format of provider/model", - ).optional(), + model: ConfigModelID.zod.describe("Model to use in the format of provider/model, eg anthropic/claude-2").optional(), + small_model: ConfigModelID.zod + .describe("Small model to use for tasks like title generation in the format of provider/model") + .optional(), default_agent: z .string() .optional() @@ -170,14 +159,14 @@ export const Info = z .optional() .describe("Agent configuration, see https://opencode.ai/docs/agents"), provider: z - .record(z.string(), ConfigProvider.Info) + .record(z.string(), ConfigProvider.Info.zod) .optional() .describe("Custom provider configurations and model overrides"), mcp: z .record( z.string(), z.union([ - ConfigMCP.Info, + ConfigMCP.Info.zod, z .object({ enabled: z.boolean(), @@ -187,56 +176,8 @@ export const Info = z ) .optional() .describe("MCP (Model Context Protocol) server configurations"), - formatter: z - .union([ - z.literal(false), - z.record( - z.string(), - z.object({ - disabled: z.boolean().optional(), - command: z.array(z.string()).optional(), - environment: z.record(z.string(), z.string()).optional(), - extensions: z.array(z.string()).optional(), - }), - ), - ]) - .optional(), - lsp: z - .union([ - z.literal(false), - z.record( - z.string(), - z.union([ - z.object({ - disabled: z.literal(true), - }), - z.object({ - command: z.array(z.string()), - extensions: z.array(z.string()).optional(), - disabled: z.boolean().optional(), - env: 
z.record(z.string(), z.string()).optional(), - initialization: z.record(z.string(), z.any()).optional(), - }), - ]), - ), - ]) - .optional() - .refine( - (data) => { - if (!data) return true - if (typeof data === "boolean") return true - const serverIds = new Set(Object.values(LSPServer).map((s) => s.id)) - - return Object.entries(data).every(([id, config]) => { - if (config.disabled) return true - if (serverIds.has(id)) return true - return Boolean(config.extensions) - }) - }, - { - error: "For custom LSP servers, 'extensions' array is required.", - }, - ), + formatter: ConfigFormatter.Info.zod.optional(), + lsp: ConfigLSP.Info.zod.optional(), instructions: z.array(z.string()).optional().describe("Additional instruction files or patterns to include"), layout: Layout.optional().describe("@deprecated Always uses stretch layout."), permission: ConfigPermission.Info.optional(), @@ -375,24 +316,16 @@ export const layer = Layer.effect( text: string, options: { path: string } | { dir: string; source: string }, ) { - if (!("path" in options)) { - return yield* Effect.promise(() => - ConfigParse.load(Info, text, { - type: "virtual", - dir: options.dir, - source: options.source, - normalize: normalizeLoadedConfig, - }), - ) - } - - const data = yield* Effect.promise(() => - ConfigParse.load(Info, text, { - type: "path", - path: options.path, - normalize: normalizeLoadedConfig, - }), + const source = "path" in options ? options.path : options.source + const expanded = yield* Effect.promise(() => + ConfigVariable.substitute( + "path" in options ? 
{ text, type: "path", path: options.path } : { text, type: "virtual", ...options }, + ), ) + const parsed = ConfigParse.jsonc(expanded, source) + const data = ConfigParse.schema(Info, normalizeLoadedConfig(parsed, source), source) + if (!("path" in options)) return data + yield* Effect.promise(() => resolveLoadedPlugins(data, options.path)) if (!data.$schema) { data.$schema = "https://opencode.ai/config.json" @@ -468,260 +401,266 @@ export const layer = Layer.effect( } }) - const loadInstanceState = Effect.fn("Config.loadInstanceState")(function* (ctx: InstanceContext) { - const auth = yield* authSvc.all().pipe(Effect.orDie) + const loadInstanceState = Effect.fn("Config.loadInstanceState")( + function* (ctx: InstanceContext) { + const auth = yield* authSvc.all().pipe(Effect.orDie) - let result: Info = {} - const consoleManagedProviders = new Set() - let activeOrgName: string | undefined + let result: Info = {} + const consoleManagedProviders = new Set() + let activeOrgName: string | undefined - const pluginScopeForSource = Effect.fnUntraced(function* (source: string) { - if (source.startsWith("http://") || source.startsWith("https://")) return "global" - if (source === "OPENCODE_CONFIG_CONTENT") return "local" - if (yield* InstanceRef.use((ctx) => Effect.succeed(Instance.containsPath(source, ctx)))) return "local" - return "global" - }) - - const mergePluginOrigins = Effect.fnUntraced(function* ( - source: string, - // mergePluginOrigins receives raw Specs from one config source, before provenance for this merge step - // is attached. - list: ConfigPlugin.Spec[] | undefined, - // Scope can be inferred from the source path, but some callers already know whether the config should - // behave as global or local and can pass that explicitly. - kind?: ConfigPlugin.Scope, - ) { - if (!list?.length) return - const hit = kind ?? 
(yield* pluginScopeForSource(source)) - // Merge newly seen plugin origins with previously collected ones, then dedupe by plugin identity while - // keeping the winning source/scope metadata for downstream installs, writes, and diagnostics. - const plugins = ConfigPlugin.deduplicatePluginOrigins([ - ...(result.plugin_origins ?? []), - ...list.map((spec) => ({ spec, source, scope: hit })), - ]) - result.plugin = plugins.map((item) => item.spec) - result.plugin_origins = plugins - }) - - const merge = (source: string, next: Info, kind?: ConfigPlugin.Scope) => { - result = mergeConfigConcatArrays(result, next) - return mergePluginOrigins(source, next.plugin, kind) - } - - for (const [key, value] of Object.entries(auth)) { - if (value.type === "wellknown") { - const url = key.replace(/\/+$/, "") - process.env[value.key] = value.token - log.debug("fetching remote config", { url: `${url}/.well-known/opencode` }) - const response = yield* Effect.promise(() => fetch(`${url}/.well-known/opencode`)) - if (!response.ok) { - throw new Error(`failed to fetch remote config from ${url}: ${response.status}`) - } - const wellknown = (yield* Effect.promise(() => response.json())) as { config?: Record } - const remoteConfig = wellknown.config ?? 
{} - if (!remoteConfig.$schema) remoteConfig.$schema = "https://opencode.ai/config.json" - const source = `${url}/.well-known/opencode` - const next = yield* loadConfig(JSON.stringify(remoteConfig), { - dir: path.dirname(source), - source, - }) - yield* merge(source, next, "global") - log.debug("loaded remote config from well-known", { url }) - } - } - - const global = yield* getGlobal() - yield* merge(Global.Path.config, global, "global") - - if (Flag.OPENCODE_CONFIG) { - yield* merge(Flag.OPENCODE_CONFIG, yield* loadFile(Flag.OPENCODE_CONFIG)) - log.debug("loaded custom config", { path: Flag.OPENCODE_CONFIG }) - } - - if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { - for (const file of yield* Effect.promise(() => - ConfigPaths.projectFiles("opencode", ctx.directory, ctx.worktree), - )) { - yield* merge(file, yield* loadFile(file), "local") - } - } - - result.agent = result.agent || {} - result.mode = result.mode || {} - result.plugin = result.plugin || [] - - const directories = yield* Effect.promise(() => ConfigPaths.directories(ctx.directory, ctx.worktree)) - - if (Flag.OPENCODE_CONFIG_DIR) { - log.debug("loading config from OPENCODE_CONFIG_DIR", { path: Flag.OPENCODE_CONFIG_DIR }) - } - - const deps: Fiber.Fiber[] = [] - - for (const dir of directories) { - if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) { - for (const file of ["opencode.json", "opencode.jsonc"]) { - const source = path.join(dir, file) - log.debug(`loading config from ${source}`) - yield* merge(source, yield* loadFile(source)) - result.agent ??= {} - result.mode ??= {} - result.plugin ??= [] - } - } - - yield* ensureGitignore(dir).pipe(Effect.orDie) - - const dep = yield* npmSvc - .install(dir, { - add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)], - }) - .pipe( - Effect.exit, - Effect.tap((exit) => - Exit.isFailure(exit) - ? 
Effect.sync(() => { - log.warn("background dependency install failed", { dir, error: String(exit.cause) }) - }) - : Effect.void, - ), - Effect.asVoid, - Effect.forkDetach, - ) - deps.push(dep) - - result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) - result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.load(dir))) - result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.loadMode(dir))) - // Auto-discovered plugins under `.opencode/plugin(s)` are already local files, so ConfigPlugin.load - // returns normalized Specs and we only need to attach origin metadata here. - const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) - yield* mergePluginOrigins(dir, list) - } - - if (process.env.OPENCODE_CONFIG_CONTENT) { - const source = "OPENCODE_CONFIG_CONTENT" - const next = yield* loadConfig(process.env.OPENCODE_CONFIG_CONTENT, { - dir: ctx.directory, - source, + const pluginScopeForSource = Effect.fnUntraced(function* (source: string) { + if (source.startsWith("http://") || source.startsWith("https://")) return "global" + if (source === "OPENCODE_CONFIG_CONTENT") return "local" + if (yield* InstanceRef.use((ctx) => Effect.succeed(Instance.containsPath(source, ctx)))) return "local" + return "global" }) - yield* merge(source, next, "local") - log.debug("loaded custom config from OPENCODE_CONFIG_CONTENT") - } - const activeAccount = Option.getOrUndefined( - yield* accountSvc.active().pipe(Effect.catch(() => Effect.succeed(Option.none()))), - ) - if (activeAccount?.active_org_id) { - const accountID = activeAccount.id - const orgID = activeAccount.active_org_id - const url = activeAccount.url - yield* Effect.gen(function* () { - const [configOpt, tokenOpt] = yield* Effect.all( - [accountSvc.config(accountID, orgID), accountSvc.token(accountID)], - { concurrency: 2 }, - ) - if (Option.isSome(tokenOpt)) { - process.env["OPENCODE_CONSOLE_TOKEN"] = 
tokenOpt.value - yield* env.set("OPENCODE_CONSOLE_TOKEN", tokenOpt.value) - } + const mergePluginOrigins = Effect.fnUntraced(function* ( + source: string, + // mergePluginOrigins receives raw Specs from one config source, before provenance for this merge step + // is attached. + list: ConfigPlugin.Spec[] | undefined, + // Scope can be inferred from the source path, but some callers already know whether the config should + // behave as global or local and can pass that explicitly. + kind?: ConfigPlugin.Scope, + ) { + if (!list?.length) return + const hit = kind ?? (yield* pluginScopeForSource(source)) + // Merge newly seen plugin origins with previously collected ones, then dedupe by plugin identity while + // keeping the winning source/scope metadata for downstream installs, writes, and diagnostics. + const plugins = ConfigPlugin.deduplicatePluginOrigins([ + ...(result.plugin_origins ?? []), + ...list.map((spec) => ({ spec, source, scope: hit })), + ]) + result.plugin = plugins.map((item) => item.spec) + result.plugin_origins = plugins + }) - if (Option.isSome(configOpt)) { - const source = `${url}/api/config` - const next = yield* loadConfig(JSON.stringify(configOpt.value), { + const merge = (source: string, next: Info, kind?: ConfigPlugin.Scope) => { + result = mergeConfigConcatArrays(result, next) + return mergePluginOrigins(source, next.plugin, kind) + } + + for (const [key, value] of Object.entries(auth)) { + if (value.type === "wellknown") { + const url = key.replace(/\/+$/, "") + process.env[value.key] = value.token + log.debug("fetching remote config", { url: `${url}/.well-known/opencode` }) + const response = yield* Effect.promise(() => fetch(`${url}/.well-known/opencode`)) + if (!response.ok) { + throw new Error(`failed to fetch remote config from ${url}: ${response.status}`) + } + const wellknown = (yield* Effect.promise(() => response.json())) as { config?: Record } + const remoteConfig = wellknown.config ?? 
{} + if (!remoteConfig.$schema) remoteConfig.$schema = "https://opencode.ai/config.json" + const source = `${url}/.well-known/opencode` + const next = yield* loadConfig(JSON.stringify(remoteConfig), { dir: path.dirname(source), source, }) - for (const providerID of Object.keys(next.provider ?? {})) { - consoleManagedProviders.add(providerID) - } yield* merge(source, next, "global") + log.debug("loaded remote config from well-known", { url }) } - }).pipe( - Effect.withSpan("Config.loadActiveOrgConfig"), - Effect.catch((err) => { - log.debug("failed to fetch remote account config", { - error: err instanceof Error ? err.message : String(err), + } + + const global = yield* getGlobal() + yield* merge(Global.Path.config, global, "global") + + if (Flag.OPENCODE_CONFIG) { + yield* merge(Flag.OPENCODE_CONFIG, yield* loadFile(Flag.OPENCODE_CONFIG)) + log.debug("loaded custom config", { path: Flag.OPENCODE_CONFIG }) + } + + if (!Flag.OPENCODE_DISABLE_PROJECT_CONFIG) { + for (const file of yield* ConfigPaths.files("opencode", ctx.directory, ctx.worktree).pipe(Effect.orDie)) { + yield* merge(file, yield* loadFile(file), "local") + } + } + + result.agent = result.agent || {} + result.mode = result.mode || {} + result.plugin = result.plugin || [] + + const directories = yield* ConfigPaths.directories(ctx.directory, ctx.worktree) + + if (Flag.OPENCODE_CONFIG_DIR) { + log.debug("loading config from OPENCODE_CONFIG_DIR", { path: Flag.OPENCODE_CONFIG_DIR }) + } + + const deps: Fiber.Fiber[] = [] + + for (const dir of directories) { + if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) { + for (const file of ["opencode.json", "opencode.jsonc"]) { + const source = path.join(dir, file) + log.debug(`loading config from ${source}`) + yield* merge(source, yield* loadFile(source)) + result.agent ??= {} + result.mode ??= {} + result.plugin ??= [] + } + } + + yield* ensureGitignore(dir).pipe(Effect.orDie) + + const dep = yield* npmSvc + .install(dir, { + add: [ + { + name: 
"@opencode-ai/plugin", + version: InstallationLocal ? undefined : InstallationVersion, + }, + ], }) - return Effect.void - }), - ) - } + .pipe( + Effect.exit, + Effect.tap((exit) => + Exit.isFailure(exit) + ? Effect.sync(() => { + log.warn("background dependency install failed", { dir, error: String(exit.cause) }) + }) + : Effect.void, + ), + Effect.asVoid, + Effect.forkDetach, + ) + deps.push(dep) - const managedDir = ConfigManaged.managedConfigDir() - if (existsSync(managedDir)) { - for (const file of ["opencode.json", "opencode.jsonc"]) { - const source = path.join(managedDir, file) - yield* merge(source, yield* loadFile(source), "global") + result.command = mergeDeep(result.command ?? {}, yield* Effect.promise(() => ConfigCommand.load(dir))) + result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.load(dir))) + result.agent = mergeDeep(result.agent ?? {}, yield* Effect.promise(() => ConfigAgent.loadMode(dir))) + // Auto-discovered plugins under `.opencode/plugin(s)` are already local files, so ConfigPlugin.load + // returns normalized Specs and we only need to attach origin metadata here. 
+ const list = yield* Effect.promise(() => ConfigPlugin.load(dir)) + yield* mergePluginOrigins(dir, list) } - } - // macOS managed preferences (.mobileconfig deployed via MDM) override everything - const managed = yield* Effect.promise(() => ConfigManaged.readManagedPreferences()) - if (managed) { - result = mergeConfigConcatArrays( - result, - yield* loadConfig(managed.text, { - dir: path.dirname(managed.source), - source: managed.source, - }), + if (process.env.OPENCODE_CONFIG_CONTENT) { + const source = "OPENCODE_CONFIG_CONTENT" + const next = yield* loadConfig(process.env.OPENCODE_CONFIG_CONTENT, { + dir: ctx.directory, + source, + }) + yield* merge(source, next, "local") + log.debug("loaded custom config from OPENCODE_CONFIG_CONTENT") + } + + const activeAccount = Option.getOrUndefined( + yield* accountSvc.active().pipe(Effect.catch(() => Effect.succeed(Option.none()))), ) - } + if (activeAccount?.active_org_id) { + const accountID = activeAccount.id + const orgID = activeAccount.active_org_id + const url = activeAccount.url + yield* Effect.gen(function* () { + const [configOpt, tokenOpt] = yield* Effect.all( + [accountSvc.config(accountID, orgID), accountSvc.token(accountID)], + { concurrency: 2 }, + ) + if (Option.isSome(tokenOpt)) { + process.env["OPENCODE_CONSOLE_TOKEN"] = tokenOpt.value + yield* env.set("OPENCODE_CONSOLE_TOKEN", tokenOpt.value) + } - for (const [name, mode] of Object.entries(result.mode ?? {})) { - result.agent = mergeDeep(result.agent ?? {}, { - [name]: { - ...mode, - mode: "primary" as const, - }, - }) - } + if (Option.isSome(configOpt)) { + const source = `${url}/api/config` + const next = yield* loadConfig(JSON.stringify(configOpt.value), { + dir: path.dirname(source), + source, + }) + for (const providerID of Object.keys(next.provider ?? 
{})) { + consoleManagedProviders.add(providerID) + } + yield* merge(source, next, "global") + } + }).pipe( + Effect.withSpan("Config.loadActiveOrgConfig"), + Effect.catch((err) => { + log.debug("failed to fetch remote account config", { + error: err instanceof Error ? err.message : String(err), + }) + return Effect.void + }), + ) + } - if (Flag.OPENCODE_PERMISSION) { - result.permission = mergeDeep(result.permission ?? {}, JSON.parse(Flag.OPENCODE_PERMISSION)) - } - - if (result.tools) { - const perms: Record = {} - for (const [tool, enabled] of Object.entries(result.tools)) { - const action: ConfigPermission.Action = enabled ? "allow" : "deny" - if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { - perms.edit = action - continue + const managedDir = ConfigManaged.managedConfigDir() + if (existsSync(managedDir)) { + for (const file of ["opencode.json", "opencode.jsonc"]) { + const source = path.join(managedDir, file) + yield* merge(source, yield* loadFile(source), "global") } - perms[tool] = action } - result.permission = mergeDeep(perms, result.permission ?? {}) - } - if (!result.username) result.username = os.userInfo().username + // macOS managed preferences (.mobileconfig deployed via MDM) override everything + const managed = yield* Effect.promise(() => ConfigManaged.readManagedPreferences()) + if (managed) { + result = mergeConfigConcatArrays( + result, + yield* loadConfig(managed.text, { + dir: path.dirname(managed.source), + source: managed.source, + }), + ) + } - if (result.autoshare === true && !result.share) { - result.share = "auto" - } + for (const [name, mode] of Object.entries(result.mode ?? {})) { + result.agent = mergeDeep(result.agent ?? 
{}, { + [name]: { + ...mode, + mode: "primary" as const, + }, + }) + } - if (Flag.OPENCODE_DISABLE_AUTOCOMPACT) { - result.compaction = { ...result.compaction, auto: false } - } - if (Flag.OPENCODE_DISABLE_PRUNE) { - result.compaction = { ...result.compaction, prune: false } - } + if (Flag.OPENCODE_PERMISSION) { + result.permission = mergeDeep(result.permission ?? {}, JSON.parse(Flag.OPENCODE_PERMISSION)) + } - return { - config: result, - directories, - deps, - consoleState: { - consoleManagedProviders: Array.from(consoleManagedProviders), - activeOrgName, - switchableOrgCount: 0, - }, - } - }) + if (result.tools) { + const perms: Record = {} + for (const [tool, enabled] of Object.entries(result.tools)) { + const action: ConfigPermission.Action = enabled ? "allow" : "deny" + if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") { + perms.edit = action + continue + } + perms[tool] = action + } + result.permission = mergeDeep(perms, result.permission ?? {}) + } + + if (!result.username) result.username = os.userInfo().username + + if (result.autoshare === true && !result.share) { + result.share = "auto" + } + + if (Flag.OPENCODE_DISABLE_AUTOCOMPACT) { + result.compaction = { ...result.compaction, auto: false } + } + if (Flag.OPENCODE_DISABLE_PRUNE) { + result.compaction = { ...result.compaction, prune: false } + } + + return { + config: result, + directories, + deps, + consoleState: { + consoleManagedProviders: Array.from(consoleManagedProviders), + activeOrgName, + switchableOrgCount: 0, + }, + } + }, + Effect.provideService(AppFileSystem.Service, fs), + ) const state = yield* InstanceState.make( Effect.fn("Config.state")(function* (ctx) { - return yield* loadInstanceState(ctx) + return yield* loadInstanceState(ctx).pipe(Effect.orDie) }), ) @@ -773,17 +712,16 @@ export const layer = Layer.effect( const updateGlobal = Effect.fn("Config.updateGlobal")(function* (config: Info) { const file = globalConfigFile() const before = (yield* 
readConfigFile(file)) ?? "{}" - const input = writable(config) let next: Info if (!file.endsWith(".jsonc")) { - const existing = ConfigParse.parse(Info, before, file) - const merged = mergeDeep(writable(existing), input) + const existing = ConfigParse.schema(Info, ConfigParse.jsonc(before, file), file) + const merged = mergeDeep(writable(existing), writable(config)) yield* fs.writeFileString(file, JSON.stringify(merged, null, 2)).pipe(Effect.orDie) next = merged } else { - const updated = patchJsonc(before, input) - next = ConfigParse.parse(Info, updated, file) + const updated = patchJsonc(before, writable(config)) + next = ConfigParse.schema(Info, ConfigParse.jsonc(updated, file), file) yield* fs.writeFileString(file, updated).pipe(Effect.orDie) } diff --git a/packages/opencode/src/config/console-state.ts b/packages/opencode/src/config/console-state.ts index cf96a4e305..08668afe4e 100644 --- a/packages/opencode/src/config/console-state.ts +++ b/packages/opencode/src/config/console-state.ts @@ -1,15 +1,16 @@ -import z from "zod" +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" -export const ConsoleState = z.object({ - consoleManagedProviders: z.array(z.string()), - activeOrgName: z.string().optional(), - switchableOrgCount: z.number().int().nonnegative(), -}) +export class ConsoleState extends Schema.Class("ConsoleState")({ + consoleManagedProviders: Schema.mutable(Schema.Array(Schema.String)), + activeOrgName: Schema.optional(Schema.String), + switchableOrgCount: Schema.Number, +}) { + static readonly zod = zod(this) +} -export type ConsoleState = z.infer - -export const emptyConsoleState: ConsoleState = { +export const emptyConsoleState: ConsoleState = ConsoleState.make({ consoleManagedProviders: [], activeOrgName: undefined, switchableOrgCount: 0, -} +}) diff --git a/packages/opencode/src/config/formatter.ts b/packages/opencode/src/config/formatter.ts new file mode 100644 index 0000000000..8c1f09a247 --- /dev/null +++ 
b/packages/opencode/src/config/formatter.ts @@ -0,0 +1,17 @@ +export * as ConfigFormatter from "./formatter" + +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" + +export const Entry = Schema.Struct({ + disabled: Schema.optional(Schema.Boolean), + command: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + environment: Schema.optional(Schema.Record(Schema.String, Schema.String)), + extensions: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) + +export const Info = Schema.Union([Schema.Boolean, Schema.Record(Schema.String, Entry)]).pipe( + withStatics((s) => ({ zod: zod(s) })), +) +export type Info = Schema.Schema.Type diff --git a/packages/opencode/src/config/index.ts b/packages/opencode/src/config/index.ts index c4a1c608b1..a05c29d25c 100644 --- a/packages/opencode/src/config/index.ts +++ b/packages/opencode/src/config/index.ts @@ -2,6 +2,8 @@ export * as Config from "./config" export * as ConfigAgent from "./agent" export * as ConfigCommand from "./command" export * as ConfigError from "./error" +export * as ConfigFormatter from "./formatter" +export * as ConfigLSP from "./lsp" export * as ConfigVariable from "./variable" export { ConfigManaged } from "./managed" export * as ConfigMarkdown from "./markdown" diff --git a/packages/opencode/src/config/keybinds.ts b/packages/opencode/src/config/keybinds.ts index cb146b7cae..8a22289d2a 100644 --- a/packages/opencode/src/config/keybinds.ts +++ b/packages/opencode/src/config/keybinds.ts @@ -106,7 +106,12 @@ export const Keybinds = z input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"), input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"), input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"), - input_undo: 
z.string().optional().default("ctrl+-,super+z").describe("Undo in input"), + input_undo: z + .string() + .optional() + // On Windows prepend ctrl+z since terminal_suspend releases the binding. + .default(process.platform === "win32" ? "ctrl+z,ctrl+-,super+z" : "ctrl+-,super+z") + .describe("Undo in input"), input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"), input_word_forward: z .string() @@ -144,7 +149,12 @@ export const Keybinds = z session_child_cycle: z.string().optional().default("right").describe("Go to next child session"), session_child_cycle_reverse: z.string().optional().default("left").describe("Go to previous child session"), session_parent: z.string().optional().default("up").describe("Go to parent session"), - terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"), + terminal_suspend: z + .string() + .optional() + .default("ctrl+z") + .transform((v) => (process.platform === "win32" ? "none" : v)) + .describe("Suspend terminal"), terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"), tips_toggle: z.string().optional().default("h").describe("Toggle tips on home screen"), plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"), diff --git a/packages/opencode/src/config/layout.ts b/packages/opencode/src/config/layout.ts new file mode 100644 index 0000000000..49c34b6639 --- /dev/null +++ b/packages/opencode/src/config/layout.ts @@ -0,0 +1,10 @@ +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" + +export const Layout = Schema.Literals(["auto", "stretch"]) + .annotate({ identifier: "LayoutConfig" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Layout = Schema.Schema.Type + +export * as ConfigLayout from "./layout" diff --git a/packages/opencode/src/config/lsp.ts b/packages/opencode/src/config/lsp.ts new file mode 100644 index 
0000000000..1cf93177e4 --- /dev/null +++ b/packages/opencode/src/config/lsp.ts @@ -0,0 +1,45 @@ +export * as ConfigLSP from "./lsp" + +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" +import * as LSPServer from "../lsp/server" + +export const Disabled = Schema.Struct({ + disabled: Schema.Literal(true), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) + +export const Entry = Schema.Union([ + Disabled, + Schema.Struct({ + command: Schema.mutable(Schema.Array(Schema.String)), + extensions: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + disabled: Schema.optional(Schema.Boolean), + env: Schema.optional(Schema.Record(Schema.String, Schema.String)), + initialization: Schema.optional(Schema.Record(Schema.String, Schema.Unknown)), + }), +]).pipe(withStatics((s) => ({ zod: zod(s) }))) + +/** + * For custom (non-builtin) LSP server entries, `extensions` is required so the + * client knows which files the server should attach to. Builtin server IDs and + * explicitly disabled entries are exempt. + */ +export const requiresExtensionsForCustomServers = Schema.makeFilter< + boolean | Record> +>((data) => { + if (typeof data === "boolean") return undefined + const serverIds = new Set(Object.values(LSPServer).map((server) => server.id)) + const ok = Object.entries(data).every(([id, config]) => { + if ("disabled" in config && config.disabled) return true + if (serverIds.has(id)) return true + return "extensions" in config && Boolean(config.extensions) + }) + return ok ? undefined : "For custom LSP servers, 'extensions' array is required." 
+}) + +export const Info = Schema.Union([Schema.Boolean, Schema.Record(Schema.String, Entry)]) + .check(requiresExtensionsForCustomServers) + .pipe(withStatics((s) => ({ zod: zod(s) }))) + +export type Info = Schema.Schema.Type diff --git a/packages/opencode/src/config/mcp.ts b/packages/opencode/src/config/mcp.ts index 5036cd6e4f..8b77bc4c28 100644 --- a/packages/opencode/src/config/mcp.ts +++ b/packages/opencode/src/config/mcp.ts @@ -1,68 +1,62 @@ -import z from "zod" +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" -export const Local = z - .object({ - type: z.literal("local").describe("Type of MCP server connection"), - command: z.string().array().describe("Command and arguments to run the MCP server"), - environment: z - .record(z.string(), z.string()) - .optional() - .describe("Environment variables to set when running the MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpLocalConfig", - }) +export class Local extends Schema.Class("McpLocalConfig")({ + type: Schema.Literal("local").annotate({ description: "Type of MCP server connection" }), + command: Schema.mutable(Schema.Array(Schema.String)).annotate({ + description: "Command and arguments to run the MCP server", + }), + environment: Schema.optional(Schema.Record(Schema.String, Schema.String)).annotate({ + description: "Environment variables to set when running the MCP server", + }), + enabled: Schema.optional(Schema.Boolean).annotate({ + description: "Enable or disable the MCP server on startup", + }), + timeout: Schema.optional(Schema.Number).annotate({ + description: "Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified.", + }), +}) { + static readonly zod = zod(this) +} -export const OAuth = z - .object({ - clientId: z - .string() - .optional() - .describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."), - clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"), - scope: z.string().optional().describe("OAuth scopes to request during authorization"), - redirectUri: z - .string() - .optional() - .describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."), - }) - .strict() - .meta({ - ref: "McpOAuthConfig", - }) -export type OAuth = z.infer +export class OAuth extends Schema.Class("McpOAuthConfig")({ + clientId: Schema.optional(Schema.String).annotate({ + description: "OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted.", + }), + clientSecret: Schema.optional(Schema.String).annotate({ + description: "OAuth client secret (if required by the authorization server)", + }), + scope: Schema.optional(Schema.String).annotate({ description: "OAuth scopes to request during authorization" }), + redirectUri: Schema.optional(Schema.String).annotate({ + description: "OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback).", + }), +}) { + static readonly zod = zod(this) +} -export const Remote = z - .object({ - type: z.literal("remote").describe("Type of MCP server connection"), - url: z.string().describe("URL of the remote MCP server"), - enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"), - headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"), - oauth: z - .union([OAuth, z.literal(false)]) - .optional() - .describe("OAuth authentication configuration for the MCP server. 
Set to false to disable OAuth auto-detection."), - timeout: z - .number() - .int() - .positive() - .optional() - .describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."), - }) - .strict() - .meta({ - ref: "McpRemoteConfig", - }) +export class Remote extends Schema.Class("McpRemoteConfig")({ + type: Schema.Literal("remote").annotate({ description: "Type of MCP server connection" }), + url: Schema.String.annotate({ description: "URL of the remote MCP server" }), + enabled: Schema.optional(Schema.Boolean).annotate({ + description: "Enable or disable the MCP server on startup", + }), + headers: Schema.optional(Schema.Record(Schema.String, Schema.String)).annotate({ + description: "Headers to send with the request", + }), + oauth: Schema.optional(Schema.Union([OAuth, Schema.Literal(false)])).annotate({ + description: "OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection.", + }), + timeout: Schema.optional(Schema.Number).annotate({ + description: "Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified.", + }), +}) { + static readonly zod = zod(this) +} -export const Info = z.discriminatedUnion("type", [Local, Remote]) -export type Info = z.infer +export const Info = Schema.Union([Local, Remote]) + .annotate({ discriminator: "type" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Info = Schema.Schema.Type export * as ConfigMCP from "./mcp" diff --git a/packages/opencode/src/config/model-id.ts b/packages/opencode/src/config/model-id.ts index 909e9aa929..3ad9e035ce 100644 --- a/packages/opencode/src/config/model-id.ts +++ b/packages/opencode/src/config/model-id.ts @@ -1,3 +1,14 @@ +import { Schema } from "effect" import z from "zod" +import { zod, ZodOverride } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" -export const ConfigModelID = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) +// The original Zod schema carried an external $ref pointing at the models.dev +// JSON schema. That external reference is not a named SDK component — it is a +// literal pointer to an outside schema — so the walker cannot re-derive it +// from AST metadata. Preserve the exact original Zod via ZodOverride. 
+export const ConfigModelID = Schema.String.annotate({ + [ZodOverride]: z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) + +export type ConfigModelID = Schema.Schema.Type diff --git a/packages/opencode/src/config/parse.ts b/packages/opencode/src/config/parse.ts index 65cc483859..7472029ead 100644 --- a/packages/opencode/src/config/parse.ts +++ b/packages/opencode/src/config/parse.ts @@ -1,80 +1,44 @@ export * as ConfigParse from "./parse" -import { type ParseError as JsoncParseError, parse as parseJsonc, printParseErrorCode } from "jsonc-parser" +import { type ParseError as JsoncParseError, parse as parseJsoncImpl, printParseErrorCode } from "jsonc-parser" import z from "zod" -import { ConfigVariable } from "./variable" import { InvalidError, JsonError } from "./error" type Schema = z.ZodType -type VariableMode = "error" | "empty" -export type LoadOptions = - | { - type: "path" - path: string - missing?: VariableMode - normalize?: (data: unknown, source: string) => unknown - } - | { - type: "virtual" - dir: string - source: string - missing?: VariableMode - normalize?: (data: unknown, source: string) => unknown - } - -function issues(text: string, errors: JsoncParseError[]) { - const lines = text.split("\n") - return errors - .map((e) => { - const beforeOffset = text.substring(0, e.offset).split("\n") - const line = beforeOffset.length - const column = beforeOffset[beforeOffset.length - 1].length + 1 - const problemLine = lines[line - 1] - - const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}` - if (!problemLine) return error - - return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^` - }) - .join("\n") -} - -export function parse(schema: Schema, text: string, filepath: string): T { +export function jsonc(text: string, filepath: string): unknown { const errors: JsoncParseError[] = [] - const data = parseJsonc(text, errors, { 
allowTrailingComma: true }) + const data = parseJsoncImpl(text, errors, { allowTrailingComma: true }) if (errors.length) { + const lines = text.split("\n") + const issues = errors + .map((e) => { + const beforeOffset = text.substring(0, e.offset).split("\n") + const line = beforeOffset.length + const column = beforeOffset[beforeOffset.length - 1].length + 1 + const problemLine = lines[line - 1] + + const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}` + if (!problemLine) return error + + return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^` + }) + .join("\n") throw new JsonError({ path: filepath, - message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${issues(text, errors)}\n--- End ---`, + message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${issues}\n--- End ---`, }) } + return data +} + +export function schema(schema: Schema, data: unknown, source: string): T { const parsed = schema.safeParse(data) if (parsed.success) return parsed.data throw new InvalidError({ - path: filepath, + path: source, issues: parsed.error.issues, }) } - -export async function load(schema: Schema, text: string, options: LoadOptions): Promise { - const source = options.type === "path" ? options.path : options.source - const expanded = await ConfigVariable.substitute( - text, - options.type === "path" ? { type: "path", path: options.path } : options, - options.missing, - ) - const data = parse(z.unknown(), expanded, source) - const normalized = options.normalize ? 
options.normalize(data, source) : data - const parsed = schema.safeParse(normalized) - if (!parsed.success) { - throw new InvalidError({ - path: source, - issues: parsed.error.issues, - }) - } - - return parsed.data -} diff --git a/packages/opencode/src/config/paths.ts b/packages/opencode/src/config/paths.ts index dcf0c940f2..db4b914f76 100644 --- a/packages/opencode/src/config/paths.ts +++ b/packages/opencode/src/config/paths.ts @@ -6,33 +6,41 @@ import { Flag } from "@/flag/flag" import { Global } from "@/global" import { unique } from "remeda" import { JsonError } from "./error" +import * as Effect from "effect/Effect" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" -export async function projectFiles(name: string, directory: string, worktree?: string) { - return Filesystem.findUp([`${name}.json`, `${name}.jsonc`], directory, worktree, { rootFirst: true }) -} +export const files = Effect.fn("ConfigPaths.projectFiles")(function* ( + name: string, + directory: string, + worktree?: string, +) { + const afs = yield* AppFileSystem.Service + return (yield* afs.up({ + targets: [`${name}.jsonc`, `${name}.json`], + start: directory, + stop: worktree, + })).toReversed() +}) -export async function directories(directory: string, worktree?: string) { +export const directories = Effect.fn("ConfigPaths.directories")(function* (directory: string, worktree?: string) { + const afs = yield* AppFileSystem.Service return unique([ Global.Path.config, ...(!Flag.OPENCODE_DISABLE_PROJECT_CONFIG - ? await Array.fromAsync( - Filesystem.up({ - targets: [".opencode"], - start: directory, - stop: worktree, - }), - ) + ? 
yield* afs.up({ + targets: [".opencode"], + start: directory, + stop: worktree, + }) : []), - ...(await Array.fromAsync( - Filesystem.up({ - targets: [".opencode"], - start: Global.Path.home, - stop: Global.Path.home, - }), - )), + ...(yield* afs.up({ + targets: [".opencode"], + start: Global.Path.home, + stop: Global.Path.home, + })), ...(Flag.OPENCODE_CONFIG_DIR ? [Flag.OPENCODE_CONFIG_DIR] : []), ]) -} +}) export function fileInDirectory(dir: string, name: string) { return [path.join(dir, `${name}.json`), path.join(dir, `${name}.jsonc`)] diff --git a/packages/opencode/src/config/permission.ts b/packages/opencode/src/config/permission.ts index af01f6f2a3..7cfbaec01f 100644 --- a/packages/opencode/src/config/permission.ts +++ b/packages/opencode/src/config/permission.ts @@ -1,5 +1,8 @@ export * as ConfigPermission from "./permission" +import { Schema } from "effect" import z from "zod" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" const permissionPreprocess = (val: unknown) => { if (typeof val === "object" && val !== null && !Array.isArray(val)) { @@ -8,20 +11,20 @@ const permissionPreprocess = (val: unknown) => { return val } -export const Action = z.enum(["ask", "allow", "deny"]).meta({ - ref: "PermissionActionConfig", -}) -export type Action = z.infer +export const Action = Schema.Literals(["ask", "allow", "deny"]) + .annotate({ identifier: "PermissionActionConfig" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Action = Schema.Schema.Type -export const Object = z.record(z.string(), Action).meta({ - ref: "PermissionObjectConfig", -}) -export type Object = z.infer +export const Object = Schema.Record(Schema.String, Action) + .annotate({ identifier: "PermissionObjectConfig" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Object = Schema.Schema.Type -export const Rule = z.union([Action, Object]).meta({ - ref: "PermissionRuleConfig", -}) -export type Rule = z.infer +export const Rule = 
Schema.Union([Action, Object]) + .annotate({ identifier: "PermissionRuleConfig" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Rule = Schema.Schema.Type const transform = (x: unknown): Record => { if (typeof x === "string") return { "*": x as Action } @@ -41,25 +44,25 @@ export const Info = z z .object({ __originalKeys: z.string().array().optional(), - read: Rule.optional(), - edit: Rule.optional(), - glob: Rule.optional(), - grep: Rule.optional(), - list: Rule.optional(), - bash: Rule.optional(), - task: Rule.optional(), - external_directory: Rule.optional(), - todowrite: Action.optional(), - question: Action.optional(), - webfetch: Action.optional(), - websearch: Action.optional(), - codesearch: Action.optional(), - lsp: Rule.optional(), - doom_loop: Action.optional(), - skill: Rule.optional(), + read: Rule.zod.optional(), + edit: Rule.zod.optional(), + glob: Rule.zod.optional(), + grep: Rule.zod.optional(), + list: Rule.zod.optional(), + bash: Rule.zod.optional(), + task: Rule.zod.optional(), + external_directory: Rule.zod.optional(), + todowrite: Action.zod.optional(), + question: Action.zod.optional(), + webfetch: Action.zod.optional(), + websearch: Action.zod.optional(), + codesearch: Action.zod.optional(), + lsp: Rule.zod.optional(), + doom_loop: Action.zod.optional(), + skill: Rule.zod.optional(), }) - .catchall(Rule) - .or(Action), + .catchall(Rule.zod) + .or(Action.zod), ) .transform(transform) .meta({ diff --git a/packages/opencode/src/config/plugin.ts b/packages/opencode/src/config/plugin.ts index 7d335bcc53..4277c1cd6d 100644 --- a/packages/opencode/src/config/plugin.ts +++ b/packages/opencode/src/config/plugin.ts @@ -1,16 +1,20 @@ import { Glob } from "@opencode-ai/shared/util/glob" -import z from "zod" +import { Schema } from "effect" import { pathToFileURL } from "url" import { isPathPluginSpec, parsePluginSpecifier, resolvePathPluginTarget } from "@/plugin/shared" +import { zod } from "@/util/effect-zod" +import { withStatics } from 
"@/util/schema" import path from "path" -const Options = z.record(z.string(), z.unknown()) -export type Options = z.infer +export const Options = Schema.Record(Schema.String, Schema.Unknown).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Options = Schema.Schema.Type // Spec is the user-config value: either just a plugin identifier, or the identifier plus inline options. // It answers "what should we load?" but says nothing about where that value came from. -export const Spec = z.union([z.string(), z.tuple([z.string(), Options])]) -export type Spec = z.infer +export const Spec = Schema.Union([Schema.String, Schema.mutable(Schema.Tuple([Schema.String, Options]))]).pipe( + withStatics((s) => ({ zod: zod(s) })), +) +export type Spec = Schema.Schema.Type export type Scope = "global" | "local" diff --git a/packages/opencode/src/config/provider.ts b/packages/opencode/src/config/provider.ts index 877677519f..212e716251 100644 --- a/packages/opencode/src/config/provider.ts +++ b/packages/opencode/src/config/provider.ts @@ -1,120 +1,114 @@ -import z from "zod" +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" -export const Model = z - .object({ - id: z.string(), - name: z.string(), - family: z.string().optional(), - release_date: z.string(), - attachment: z.boolean(), - reasoning: z.boolean(), - temperature: z.boolean(), - tool_call: z.boolean(), - interleaved: z - .union([ - z.literal(true), - z - .object({ - field: z.enum(["reasoning_content", "reasoning_details"]), - }) - .strict(), - ]) - .optional(), - cost: z - .object({ - input: z.number(), - output: z.number(), - cache_read: z.number().optional(), - cache_write: z.number().optional(), - context_over_200k: z - .object({ - input: z.number(), - output: z.number(), - cache_read: z.number().optional(), - cache_write: z.number().optional(), - }) - .optional(), - }) - .optional(), - limit: z.object({ - context: z.number(), - input: 
z.number().optional(), - output: z.number(), +const PositiveInt = Schema.Number.check(Schema.isInt()).check(Schema.isGreaterThan(0)) + +export const Model = Schema.Struct({ + id: Schema.optional(Schema.String), + name: Schema.optional(Schema.String), + family: Schema.optional(Schema.String), + release_date: Schema.optional(Schema.String), + attachment: Schema.optional(Schema.Boolean), + reasoning: Schema.optional(Schema.Boolean), + temperature: Schema.optional(Schema.Boolean), + tool_call: Schema.optional(Schema.Boolean), + interleaved: Schema.optional( + Schema.Union([ + Schema.Literal(true), + Schema.Struct({ + field: Schema.Literals(["reasoning_content", "reasoning_details"]), + }), + ]), + ), + cost: Schema.optional( + Schema.Struct({ + input: Schema.Number, + output: Schema.Number, + cache_read: Schema.optional(Schema.Number), + cache_write: Schema.optional(Schema.Number), + context_over_200k: Schema.optional( + Schema.Struct({ + input: Schema.Number, + output: Schema.Number, + cache_read: Schema.optional(Schema.Number), + cache_write: Schema.optional(Schema.Number), + }), + ), }), - modalities: z - .object({ - input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])), - }) - .optional(), - experimental: z.boolean().optional(), - status: z.enum(["alpha", "beta", "deprecated"]).optional(), - provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(), - options: z.record(z.string(), z.any()), - headers: z.record(z.string(), z.string()).optional(), - variants: z - .record( - z.string(), - z - .object({ - disabled: z.boolean().optional().describe("Disable this variant for the model"), - }) - .catchall(z.any()), - ) - .optional() - .describe("Variant-specific configuration"), - }) - .partial() + ), + limit: Schema.optional( + Schema.Struct({ + context: Schema.Number, + input: Schema.optional(Schema.Number), + output: Schema.Number, + }), + ), + modalities: 
Schema.optional( + Schema.Struct({ + input: Schema.mutable(Schema.Array(Schema.Literals(["text", "audio", "image", "video", "pdf"]))), + output: Schema.mutable(Schema.Array(Schema.Literals(["text", "audio", "image", "video", "pdf"]))), + }), + ), + experimental: Schema.optional(Schema.Boolean), + status: Schema.optional(Schema.Literals(["alpha", "beta", "deprecated"])), + provider: Schema.optional( + Schema.Struct({ npm: Schema.optional(Schema.String), api: Schema.optional(Schema.String) }), + ), + options: Schema.optional(Schema.Record(Schema.String, Schema.Any)), + headers: Schema.optional(Schema.Record(Schema.String, Schema.String)), + variants: Schema.optional( + Schema.Record( + Schema.String, + Schema.StructWithRest( + Schema.Struct({ + disabled: Schema.optional(Schema.Boolean).annotate({ description: "Disable this variant for the model" }), + }), + [Schema.Record(Schema.String, Schema.Any)], + ), + ).annotate({ description: "Variant-specific configuration" }), + ), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) -export const Info = z - .object({ - api: z.string().optional(), - name: z.string(), - env: z.array(z.string()), - id: z.string(), - npm: z.string().optional(), - whitelist: z.array(z.string()).optional(), - blacklist: z.array(z.string()).optional(), - options: z - .object({ - apiKey: z.string().optional(), - baseURL: z.string().optional(), - enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"), - setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"), - timeout: z - .union([ - z - .number() - .int() - .positive() - .describe( - "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). 
Set to false to disable timeout.", - ), - z.literal(false).describe("Disable timeout for this provider entirely."), - ]) - .optional() - .describe( +export class Info extends Schema.Class("ProviderConfig")({ + api: Schema.optional(Schema.String), + name: Schema.optional(Schema.String), + env: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + id: Schema.optional(Schema.String), + npm: Schema.optional(Schema.String), + whitelist: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + blacklist: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + options: Schema.optional( + Schema.StructWithRest( + Schema.Struct({ + apiKey: Schema.optional(Schema.String), + baseURL: Schema.optional(Schema.String), + enterpriseUrl: Schema.optional(Schema.String).annotate({ + description: "GitHub Enterprise URL for copilot authentication", + }), + setCacheKey: Schema.optional(Schema.Boolean).annotate({ + description: "Enable promptCacheKey for this provider (default false)", + }), + timeout: Schema.optional( + Schema.Union([PositiveInt, Schema.Literal(false)]).annotate({ + description: + "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", + }), + ).annotate({ + description: "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", - ), - chunkTimeout: z - .number() - .int() - .positive() - .optional() - .describe( + }), + chunkTimeout: Schema.optional(PositiveInt).annotate({ + description: "Timeout in milliseconds between streamed SSE chunks for this provider. 
If no chunk arrives within this window, the request is aborted.", - ), - }) - .catchall(z.any()) - .optional(), - models: z.record(z.string(), Model).optional(), - }) - .partial() - .strict() - .meta({ - ref: "ProviderConfig", - }) - -export type Info = z.infer + }), + }), + [Schema.Record(Schema.String, Schema.Any)], + ), + ), + models: Schema.optional(Schema.Record(Schema.String, Model)), +}) { + static readonly zod = zod(this) +} export * as ConfigProvider from "./provider" diff --git a/packages/opencode/src/config/server.ts b/packages/opencode/src/config/server.ts new file mode 100644 index 0000000000..969a79964b --- /dev/null +++ b/packages/opencode/src/config/server.ts @@ -0,0 +1,20 @@ +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" + +export class Server extends Schema.Class("ServerConfig")({ + port: Schema.optional(Schema.Number.check(Schema.isInt()).check(Schema.isGreaterThan(0))).annotate({ + description: "Port to listen on", + }), + hostname: Schema.optional(Schema.String).annotate({ description: "Hostname to listen on" }), + mdns: Schema.optional(Schema.Boolean).annotate({ description: "Enable mDNS service discovery" }), + mdnsDomain: Schema.optional(Schema.String).annotate({ + description: "Custom domain name for mDNS service (default: opencode.local)", + }), + cors: Schema.optional(Schema.mutable(Schema.Array(Schema.String))).annotate({ + description: "Additional domains to allow for CORS", + }), +}) { + static readonly zod = zod(this) +} + +export * as ConfigServer from "./server" diff --git a/packages/opencode/src/config/skills.ts b/packages/opencode/src/config/skills.ts index 38cbf99e7d..f29d854f50 100644 --- a/packages/opencode/src/config/skills.ts +++ b/packages/opencode/src/config/skills.ts @@ -1,13 +1,16 @@ -import z from "zod" +import { Schema } from "effect" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" -export const Info = z.object({ - paths: 
z.array(z.string()).optional().describe("Additional paths to skill folders"), - urls: z - .array(z.string()) - .optional() - .describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"), -}) +export const Info = Schema.Struct({ + paths: Schema.optional(Schema.Array(Schema.String)).annotate({ + description: "Additional paths to skill folders", + }), + urls: Schema.optional(Schema.Array(Schema.String)).annotate({ + description: "URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)", + }), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) -export type Info = z.infer +export type Info = Schema.Schema.Type export * as ConfigSkills from "./skills" diff --git a/packages/opencode/src/config/variable.ts b/packages/opencode/src/config/variable.ts index e016e33a21..e52db6147c 100644 --- a/packages/opencode/src/config/variable.ts +++ b/packages/opencode/src/config/variable.ts @@ -16,6 +16,11 @@ type ParseSource = dir: string } +type SubstituteInput = ParseSource & { + text: string + missing?: "error" | "empty" +} + function source(input: ParseSource) { return input.type === "path" ? input.path : input.source } @@ -25,8 +30,9 @@ function dir(input: ParseSource) { } /** Apply {env:VAR} and {file:path} substitutions to config text. */ -export async function substitute(text: string, input: ParseSource, missing: "error" | "empty" = "error") { - text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => { +export async function substitute(input: SubstituteInput) { + const missing = input.missing ?? 
"error" + let text = input.text.replace(/\{env:([^}]+)\}/g, (_, varName) => { return process.env[varName] || "" }) diff --git a/packages/opencode/src/control-plane/types.ts b/packages/opencode/src/control-plane/types.ts index 3961cd0e2a..07acd5ce58 100644 --- a/packages/opencode/src/control-plane/types.ts +++ b/packages/opencode/src/control-plane/types.ts @@ -28,7 +28,7 @@ export type WorkspaceAdaptor = { name: string description: string configure(info: WorkspaceInfo): WorkspaceInfo | Promise - create(info: WorkspaceInfo, env: Record, from?: WorkspaceInfo): Promise + create(info: WorkspaceInfo, env: Record, from?: WorkspaceInfo): Promise remove(info: WorkspaceInfo): Promise target(info: WorkspaceInfo): Target | Promise } diff --git a/packages/opencode/src/control-plane/workspace-context.ts b/packages/opencode/src/control-plane/workspace-context.ts index 3d4fa5baef..85ef596e7a 100644 --- a/packages/opencode/src/control-plane/workspace-context.ts +++ b/packages/opencode/src/control-plane/workspace-context.ts @@ -2,13 +2,13 @@ import { LocalContext } from "../util" import type { WorkspaceID } from "../control-plane/schema" export interface WorkspaceContext { - workspaceID: WorkspaceID + workspaceID: WorkspaceID | undefined } const context = LocalContext.create("instance") export const WorkspaceContext = { - async provide(input: { workspaceID: WorkspaceID; fn: () => R }): Promise { + async provide(input: { workspaceID?: WorkspaceID; fn: () => R }): Promise { return context.provide({ workspaceID: input.workspaceID }, () => input.fn()) }, diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index 3af11707e8..e94d6c2c93 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -7,7 +7,7 @@ import { BusEvent } from "@/bus/bus-event" import { GlobalBus } from "@/bus/global" import { Auth } from "@/auth" import { SyncEvent } from "@/sync" -import { 
EventTable } from "@/sync/event.sql" +import { EventSequenceTable, EventTable } from "@/sync/event.sql" import { Flag } from "@/flag/flag" import { Log } from "@/util" import { Filesystem } from "@/util" @@ -23,8 +23,8 @@ import { SessionTable } from "@/session/session.sql" import { SessionID } from "@/session/schema" import { errorData } from "@/util/error" import { AppRuntime } from "@/effect/app-runtime" -import { EventSequenceTable } from "@/sync/event.sql" import { waitEvent } from "./util" +import { WorkspaceContext } from "./workspace-context" export const Info = WorkspaceInfo.meta({ ref: "Workspace", @@ -34,7 +34,6 @@ export type Info = z.infer export const ConnectionStatus = z.object({ workspaceID: WorkspaceID.zod, status: z.enum(["connected", "connecting", "disconnected", "error"]), - error: z.string().optional(), }) export type ConnectionStatus = z.infer @@ -116,6 +115,8 @@ export const create = fn(CreateInput, async (input) => { OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))), OPENCODE_WORKSPACE_ID: config.id, OPENCODE_EXPERIMENTAL_WORKSPACES: "true", + OTEL_EXPORTER_OTLP_HEADERS: process.env.OTEL_EXPORTER_OTLP_HEADERS, + OTEL_EXPORTER_OTLP_ENDPOINT: process.env.OTEL_EXPORTER_OTLP_ENDPOINT, } await adaptor.create(config, env) @@ -298,22 +299,13 @@ export function list(project: Project.Info) { db.select().from(WorkspaceTable).where(eq(WorkspaceTable.project_id, project.id)).all(), ) const spaces = rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id)) - - for (const space of spaces) startSync(space) return spaces } -function lookup(id: WorkspaceID) { +export const get = fn(WorkspaceID.zod, async (id) => { const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) if (!row) return return fromRow(row) -} - -export const get = fn(WorkspaceID.zod, async (id) => { - const space = lookup(id) - if (!space) return - startSync(space) - return space }) 
export const remove = fn(WorkspaceID.zod, async (id) => { @@ -345,10 +337,10 @@ const connections = new Map() const aborts = new Map() const TIMEOUT = 5000 -function setStatus(id: WorkspaceID, status: ConnectionStatus["status"], error?: string) { +function setStatus(id: WorkspaceID, status: ConnectionStatus["status"]) { const prev = connections.get(id) - if (prev?.status === status && prev?.error === error) return - const next = { workspaceID: id, status, error } + if (prev?.status === status) return + const next = { workspaceID: id, status } connections.set(id, next) if (status === "error") { @@ -425,68 +417,145 @@ function route(url: string | URL, path: string) { return next } -async function syncWorkspace(space: Info, signal: AbortSignal) { +async function connectSSE(url: URL | string, headers: HeadersInit | undefined, signal: AbortSignal) { + const res = await fetch(route(url, "/global/event"), { + method: "GET", + headers, + signal, + }) + + if (!res.ok) throw new Error(`Workspace sync HTTP failure: ${res.status}`) + if (!res.body) throw new Error("No response body from global sync") + + return res.body +} + +async function syncHistory(space: Info, url: URL | string, headers: HeadersInit | undefined, signal: AbortSignal) { + const sessionIDs = Database.use((db) => + db + .select({ id: SessionTable.id }) + .from(SessionTable) + .where(eq(SessionTable.workspace_id, space.id)) + .all() + .map((row) => row.id), + ) + const state = sessionIDs.length + ? 
Object.fromEntries( + Database.use((db) => + db.select().from(EventSequenceTable).where(inArray(EventSequenceTable.aggregate_id, sessionIDs)).all(), + ).map((row) => [row.aggregate_id, row.seq]), + ) + : {} + + log.info("syncing workspace history", { + workspaceID: space.id, + sessions: sessionIDs.length, + known: Object.keys(state).length, + }) + + const requestHeaders = new Headers(headers) + requestHeaders.set("content-type", "application/json") + + const res = await fetch(route(url, "/sync/history"), { + method: "POST", + headers: requestHeaders, + body: JSON.stringify(state), + signal, + }) + + if (!res.ok) { + const body = await res.text() + throw new Error(`Workspace history HTTP failure: ${res.status} ${body}`) + } + + const events = await res.json() + + return WorkspaceContext.provide({ + workspaceID: space.id, + fn: () => { + for (const event of events) { + SyncEvent.replay( + { + id: event.id, + aggregateID: event.aggregate_id, + seq: event.seq, + type: event.type, + data: event.data, + }, + { publish: true }, + ) + } + }, + }) + + log.info("workspace history synced", { + workspaceID: space.id, + events: events.length, + }) +} + +async function syncWorkspaceLoop(space: Info, signal: AbortSignal) { + const adaptor = await getAdaptor(space.projectID, space.type) + const target = await adaptor.target(space) + + if (target.type === "local") return null + + let attempt = 0 + while (!signal.aborted) { log.info("connecting to global sync", { workspace: space.name }) setStatus(space.id, "connecting") - const adaptor = await getAdaptor(space.projectID, space.type) - const target = await adaptor.target(space) - - if (target.type === "local") return - - const res = await fetch(route(target.url, "/global/event"), { - method: "GET", - headers: target.headers, - signal, - }).catch((err: unknown) => { - setStatus(space.id, "error", err instanceof Error ? 
err.message : String(err)) - + let stream + try { + stream = await connectSSE(target.url, target.headers, signal) + await syncHistory(space, target.url, target.headers, signal) + } catch (err) { + stream = null + setStatus(space.id, "error") log.info("failed to connect to global sync", { workspace: space.name, - error: err, + err, }) - return undefined - }) - - if (!res || !res.ok || !res.body) { - const error = !res ? "No response from global sync" : `Global sync HTTP ${res.status}` - log.info("failed to connect to global sync", { workspace: space.name, error }) - setStatus(space.id, "error", error) - await sleep(1000) - continue } - log.info("global sync connected", { workspace: space.name }) - setStatus(space.id, "connected") + if (stream) { + attempt = 0 - await parseSSE(res.body, signal, (evt: any) => { - try { - if (!("payload" in evt)) return + log.info("global sync connected", { workspace: space.name }) + setStatus(space.id, "connected") - if (evt.payload.type === "sync") { - SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) + await parseSSE(stream, signal, (evt: any) => { + try { + if (!("payload" in evt)) return + if (evt.payload.type === "server.heartbeat") return + + if (evt.payload.type === "sync") { + SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent) + } + + GlobalBus.emit("event", { + directory: evt.directory, + project: evt.project, + workspace: space.id, + payload: evt.payload, + }) + } catch (err) { + log.info("failed to replay global event", { + workspaceID: space.id, + error: err, + }) } + }) - GlobalBus.emit("event", { - directory: evt.directory, - project: evt.project, - workspace: space.id, - payload: evt.payload, - }) - } catch (err) { - log.info("failed to replay global event", { - workspaceID: space.id, - error: err, - }) - } - }) + log.info("disconnected from global sync: " + space.id) + setStatus(space.id, "disconnected") + } - log.info("disconnected from global sync: " + space.id) - 
setStatus(space.id, "disconnected") - - // TODO: Implement exponential backoff - await sleep(1000) + // Back off reconnect attempts up to 2 minutes while the workspace + // stays unavailable. + await sleep(Math.min(120_000, 1_000 * 2 ** attempt)) + attempt += 1 } } @@ -498,7 +567,7 @@ async function startSync(space: Info) { if (target.type === "local") { void Filesystem.exists(target.directory).then((exists) => { - setStatus(space.id, exists ? "connected" : "error", exists ? undefined : "directory does not exist") + setStatus(space.id, exists ? "connected" : "error") }) return } @@ -510,10 +579,10 @@ async function startSync(space: Info) { const abort = new AbortController() aborts.set(space.id, abort) - void syncWorkspace(space, abort.signal).catch((error) => { + void syncWorkspaceLoop(space, abort.signal).catch((error) => { aborts.delete(space.id) - setStatus(space.id, "error", String(error)) + setStatus(space.id, "error") log.warn("workspace listener failed", { workspaceID: space.id, error, @@ -527,4 +596,19 @@ function stopSync(id: WorkspaceID) { connections.delete(id) } +export function startWorkspaceSyncing(projectID: ProjectID) { + const spaces = Database.use((db) => + db + .select({ workspace: WorkspaceTable }) + .from(WorkspaceTable) + .innerJoin(SessionTable, eq(SessionTable.workspace_id, WorkspaceTable.id)) + .where(eq(WorkspaceTable.project_id, projectID)) + .all(), + ) + + for (const row of new Map(spaces.map((row) => [row.workspace.id, row.workspace])).values()) { + void startSync(fromRow(row)) + } +} + export * as Workspace from "./workspace" diff --git a/packages/opencode/src/effect/app-runtime.ts b/packages/opencode/src/effect/app-runtime.ts index f06c41e319..d68e00a323 100644 --- a/packages/opencode/src/effect/app-runtime.ts +++ b/packages/opencode/src/effect/app-runtime.ts @@ -1,15 +1,14 @@ import { Layer, ManagedRuntime } from "effect" -import { attach, memoMap } from "./run-service" +import { attach } from "./run-service" import * as 
Observability from "./observability" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { Bus } from "@/bus" import { Auth } from "@/auth" -import { Account } from "@/account" +import { Account } from "@/account/account" import { Config } from "@/config" import { Git } from "@/git" import { Ripgrep } from "@/file/ripgrep" -import { FileTime } from "@/file/time" import { File } from "@/file" import { FileWatcher } from "@/file/watcher" import { Storage } from "@/storage" @@ -47,7 +46,8 @@ import { Pty } from "@/pty" import { Installation } from "@/installation" import { ShareNext } from "@/share" import { SessionShare } from "@/share" -import { Npm } from "@opencode-ai/shared/npm" +import { Npm } from "@/npm" +import { memoMap } from "./memo-map" export const AppLayer = Layer.mergeAll( Npm.defaultLayer, @@ -58,7 +58,6 @@ export const AppLayer = Layer.mergeAll( Config.defaultLayer, Git.defaultLayer, Ripgrep.defaultLayer, - FileTime.defaultLayer, File.defaultLayer, FileWatcher.defaultLayer, Storage.defaultLayer, diff --git a/packages/opencode/src/effect/bootstrap-runtime.ts b/packages/opencode/src/effect/bootstrap-runtime.ts index 62b71e58b1..37698c43a5 100644 --- a/packages/opencode/src/effect/bootstrap-runtime.ts +++ b/packages/opencode/src/effect/bootstrap-runtime.ts @@ -1,5 +1,4 @@ import { Layer, ManagedRuntime } from "effect" -import { memoMap } from "./run-service" import { Plugin } from "@/plugin" import { LSP } from "@/lsp" @@ -12,6 +11,7 @@ import { Snapshot } from "@/snapshot" import { Bus } from "@/bus" import { Config } from "@/config" import * as Observability from "./observability" +import { memoMap } from "./memo-map" export const BootstrapLayer = Layer.mergeAll( Config.defaultLayer, diff --git a/packages/opencode/src/effect/logger.ts b/packages/opencode/src/effect/logger.ts index 21e0fc43ac..0e58b8acb4 100644 --- a/packages/opencode/src/effect/logger.ts +++ b/packages/opencode/src/effect/logger.ts @@ -3,6 +3,8 @@ import { Log } from 
"@/util" type Fields = Record +const normalizeKey = (key: string) => (key === "sessionID" ? "session.id" : key) + export interface Handle { readonly debug: (msg?: unknown, extra?: Fields) => Effect.Effect readonly info: (msg?: unknown, extra?: Fields) => Effect.Effect @@ -12,7 +14,11 @@ export interface Handle { } const clean = (input?: Fields): Fields => - Object.fromEntries(Object.entries(input ?? {}).filter((entry) => entry[1] !== undefined && entry[1] !== null)) + Object.fromEntries( + Object.entries(input ?? {}) + .filter((entry) => entry[1] !== undefined && entry[1] !== null) + .map(([key, value]) => [normalizeKey(key), value]), + ) const text = (input: unknown): string => { // oxlint-disable-next-line no-base-to-string diff --git a/packages/opencode/src/effect/memo-map.ts b/packages/opencode/src/effect/memo-map.ts new file mode 100644 index 0000000000..c797dbf42e --- /dev/null +++ b/packages/opencode/src/effect/memo-map.ts @@ -0,0 +1,3 @@ +import { Layer } from "effect" + +export const memoMap = Layer.makeMemoMapUnsafe() diff --git a/packages/opencode/src/effect/observability.ts b/packages/opencode/src/effect/observability.ts index efd16ffc09..1c385d60ae 100644 --- a/packages/opencode/src/effect/observability.ts +++ b/packages/opencode/src/effect/observability.ts @@ -4,9 +4,11 @@ import { OtlpLogger, OtlpSerialization } from "effect/unstable/observability" import * as EffectLogger from "./logger" import { Flag } from "@/flag/flag" import { InstallationChannel, InstallationVersion } from "@/installation/version" +import { ensureProcessMetadata } from "@/util/opencode-process" const base = Flag.OTEL_EXPORTER_OTLP_ENDPOINT export const enabled = !!base +const processID = crypto.randomUUID() const headers = Flag.OTEL_EXPORTER_OTLP_HEADERS ? 
Flag.OTEL_EXPORTER_OTLP_HEADERS.split(",").reduce( @@ -19,26 +21,34 @@ const headers = Flag.OTEL_EXPORTER_OTLP_HEADERS ) : undefined -const resource = { - serviceName: "opencode", - serviceVersion: InstallationVersion, - attributes: { - "deployment.environment.name": InstallationChannel, - "opencode.client": Flag.OPENCODE_CLIENT, - }, +function resource() { + const processMetadata = ensureProcessMetadata("main") + return { + serviceName: "opencode", + serviceVersion: InstallationVersion, + attributes: { + "deployment.environment.name": InstallationChannel, + "opencode.client": Flag.OPENCODE_CLIENT, + "opencode.process_role": processMetadata.processRole, + "opencode.run_id": processMetadata.runID, + "service.instance.id": processID, + }, + } } -const logs = Logger.layer( - [ - EffectLogger.logger, - OtlpLogger.make({ - url: `${base}/v1/logs`, - resource, - headers, - }), - ], - { mergeWithExisting: false }, -).pipe(Layer.provide(OtlpSerialization.layerJson), Layer.provide(FetchHttpClient.layer)) +function logs() { + return Logger.layer( + [ + EffectLogger.logger, + OtlpLogger.make({ + url: `${base}/v1/logs`, + resource: resource(), + headers, + }), + ], + { mergeWithExisting: false }, + ).pipe(Layer.provide(OtlpSerialization.layerJson), Layer.provide(FetchHttpClient.layer)) +} const traces = async () => { const NodeSdk = await import("@effect/opentelemetry/NodeSdk") @@ -58,7 +68,7 @@ const traces = async () => { context.setGlobalContextManager(mgr) return NodeSdk.layer(() => ({ - resource, + resource: resource(), spanProcessor: new SdkBase.BatchSpanProcessor( new OTLP.OTLPTraceExporter({ url: `${base}/v1/traces`, @@ -73,7 +83,7 @@ export const layer = !base : Layer.unwrap( Effect.gen(function* () { const trace = yield* Effect.promise(traces) - return Layer.mergeAll(trace, logs) + return Layer.mergeAll(trace, logs()) }), ) diff --git a/packages/opencode/src/effect/run-service.ts b/packages/opencode/src/effect/run-service.ts index 28265f9b27..98ff83ea59 100644 --- 
a/packages/opencode/src/effect/run-service.ts +++ b/packages/opencode/src/effect/run-service.ts @@ -6,8 +6,7 @@ import { InstanceRef, WorkspaceRef } from "./instance-ref" import * as Observability from "./observability" import { WorkspaceContext } from "@/control-plane/workspace-context" import type { InstanceContext } from "@/project/instance" - -export const memoMap = Layer.makeMemoMapUnsafe() +import { memoMap } from "./memo-map" type Refs = { instance?: InstanceContext diff --git a/packages/opencode/src/cli/effect/runtime.ts b/packages/opencode/src/effect/runtime.ts similarity index 90% rename from packages/opencode/src/cli/effect/runtime.ts rename to packages/opencode/src/effect/runtime.ts index 57b9f8ede9..ad7872f0b5 100644 --- a/packages/opencode/src/cli/effect/runtime.ts +++ b/packages/opencode/src/effect/runtime.ts @@ -1,7 +1,6 @@ -import { Observability } from "@/effect/observability" +import { Observability } from "./observability" import { Layer, type Context, ManagedRuntime, type Effect } from "effect" - -export const memoMap = Layer.makeMemoMapUnsafe() +import { memoMap } from "./memo-map" export function makeRuntime(service: Context.Service, layer: Layer.Layer) { let rt: ManagedRuntime.ManagedRuntime | undefined diff --git a/packages/opencode/src/file/index.ts b/packages/opencode/src/file/index.ts index 2f30b5400d..af4fbf76c8 100644 --- a/packages/opencode/src/file/index.ts +++ b/packages/opencode/src/file/index.ts @@ -356,8 +356,9 @@ export const layer = Layer.effect( ) const scan = Effect.fn("File.scan")(function* () { - if (Instance.directory === path.parse(Instance.directory).root) return - const isGlobalHome = Instance.directory === Global.Path.home && Instance.project.id === "global" + const ctx = yield* InstanceState.context + if (ctx.directory === path.parse(ctx.directory).root) return + const isGlobalHome = ctx.directory === Global.Path.home && ctx.project.id === "global" const next: Entry = { files: [], dirs: [] } if (isGlobalHome) { @@ 
-366,14 +367,14 @@ export const layer = Layer.effect( const ignoreNested = new Set(["node_modules", "dist", "build", "target", "vendor"]) const shouldIgnoreName = (name: string) => name.startsWith(".") || protectedNames.has(name) const shouldIgnoreNested = (name: string) => name.startsWith(".") || ignoreNested.has(name) - const top = yield* appFs.readDirectoryEntries(Instance.directory).pipe(Effect.orElseSucceed(() => [])) + const top = yield* appFs.readDirectoryEntries(ctx.directory).pipe(Effect.orElseSucceed(() => [])) for (const entry of top) { if (entry.type !== "directory") continue if (shouldIgnoreName(entry.name)) continue dirs.add(entry.name + "/") - const base = path.join(Instance.directory, entry.name) + const base = path.join(ctx.directory, entry.name) const children = yield* appFs.readDirectoryEntries(base).pipe(Effect.orElseSucceed(() => [])) for (const child of children) { if (child.type !== "directory") continue @@ -384,7 +385,7 @@ export const layer = Layer.effect( next.dirs = Array.from(dirs).toSorted() } else { - const files = yield* rg.files({ cwd: Instance.directory }).pipe( + const files = yield* rg.files({ cwd: ctx.directory }).pipe( Stream.runCollect, Effect.map((chunk) => [...chunk]), ) @@ -416,7 +417,7 @@ export const layer = Layer.effect( }) const gitText = Effect.fnUntraced(function* (args: string[]) { - return (yield* git.run(args, { cwd: Instance.directory })).text() + return (yield* git.run(args, { cwd: (yield* InstanceState.context).directory })).text() }) const init = Effect.fn("File.init")(function* () { @@ -424,7 +425,8 @@ export const layer = Layer.effect( }) const status = Effect.fn("File.status")(function* () { - if (Instance.project.vcs !== "git") return [] + const ctx = yield* InstanceState.context + if (ctx.project.vcs !== "git") return [] const diffOutput = yield* gitText([ "-c", @@ -463,7 +465,7 @@ export const layer = Layer.effect( if (untrackedOutput.trim()) { for (const file of untrackedOutput.trim().split("\n")) { const 
content = yield* appFs - .readFileString(path.join(Instance.directory, file)) + .readFileString(path.join(ctx.directory, file)) .pipe(Effect.catch(() => Effect.succeed(undefined))) if (content === undefined) continue changed.push({ @@ -498,19 +500,22 @@ export const layer = Layer.effect( } return changed.map((item) => { - const full = path.isAbsolute(item.path) ? item.path : path.join(Instance.directory, item.path) + const full = path.isAbsolute(item.path) ? item.path : path.join(ctx.directory, item.path) return { ...item, - path: path.relative(Instance.directory, full), + path: path.relative(ctx.directory, full), } }) }) const read: Interface["read"] = Effect.fn("File.read")(function* (file: string) { using _ = log.time("read", { file }) - const full = path.join(Instance.directory, file) + const ctx = yield* InstanceState.context + const full = path.join(ctx.directory, file) - if (!Instance.containsPath(full)) throw new Error("Access denied: path escapes project directory") + if (!Instance.containsPath(full, ctx)) { + throw new Error("Access denied: path escapes project directory") + } if (isImageByExtension(file)) { const exists = yield* appFs.existsSafe(full) @@ -553,13 +558,13 @@ export const layer = Layer.effect( Effect.catch(() => Effect.succeed("")), ) - if (Instance.project.vcs === "git") { + if (ctx.project.vcs === "git") { let diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--", file]) if (!diff.trim()) { diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--staged", "--", file]) } if (diff.trim()) { - const original = yield* git.show(Instance.directory, "HEAD", file) + const original = yield* git.show(ctx.directory, "HEAD", file) const patch = structuredPatch(file, file, original, content, "old", "new", { context: Infinity, ignoreWhitespace: true, @@ -573,21 +578,24 @@ export const layer = Layer.effect( }) const list = Effect.fn("File.list")(function* (dir?: string) { + const ctx = yield* InstanceState.context const exclude = 
[".git", ".DS_Store"] let ignored = (_: string) => false - if (Instance.project.vcs === "git") { + if (ctx.project.vcs === "git") { const ig = ignore() - const gitignore = path.join(Instance.project.worktree, ".gitignore") + const gitignore = path.join(ctx.worktree, ".gitignore") const gitignoreText = yield* appFs.readFileString(gitignore).pipe(Effect.catch(() => Effect.succeed(""))) if (gitignoreText) ig.add(gitignoreText) - const ignoreFile = path.join(Instance.project.worktree, ".ignore") + const ignoreFile = path.join(ctx.worktree, ".ignore") const ignoreText = yield* appFs.readFileString(ignoreFile).pipe(Effect.catch(() => Effect.succeed(""))) if (ignoreText) ig.add(ignoreText) ignored = ig.ignores.bind(ig) } - const resolved = dir ? path.join(Instance.directory, dir) : Instance.directory - if (!Instance.containsPath(resolved)) throw new Error("Access denied: path escapes project directory") + const resolved = dir ? path.join(ctx.directory, dir) : ctx.directory + if (!Instance.containsPath(resolved, ctx)) { + throw new Error("Access denied: path escapes project directory") + } const entries = yield* appFs.readDirectoryEntries(resolved).pipe(Effect.orElseSucceed(() => [])) @@ -595,7 +603,7 @@ export const layer = Layer.effect( for (const entry of entries) { if (exclude.includes(entry.name)) continue const absolute = path.join(resolved, entry.name) - const file = path.relative(Instance.directory, absolute) + const file = path.relative(ctx.directory, absolute) const type = entry.type === "directory" ? 
"directory" : "file" nodes.push({ name: entry.name, diff --git a/packages/opencode/src/file/ripgrep.ts b/packages/opencode/src/file/ripgrep.ts index ac450108e1..3f16f6c501 100644 --- a/packages/opencode/src/file/ripgrep.ts +++ b/packages/opencode/src/file/ripgrep.ts @@ -7,6 +7,7 @@ import { ripgrep } from "ripgrep" import { Filesystem } from "@/util" import { Log } from "@/util" +import { sanitizedProcessEnv } from "@/util/opencode-process" const log = Log.create({ service: "ripgrep" }) @@ -157,9 +158,7 @@ type WorkerError = { } function env() { - const env = Object.fromEntries( - Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined), - ) + const env = sanitizedProcessEnv() delete env.RIPGREP_CONFIG_PATH return env } diff --git a/packages/opencode/src/file/ripgrep.worker.ts b/packages/opencode/src/file/ripgrep.worker.ts index 62094c7acc..21a3aef5cc 100644 --- a/packages/opencode/src/file/ripgrep.worker.ts +++ b/packages/opencode/src/file/ripgrep.worker.ts @@ -1,9 +1,8 @@ import { ripgrep } from "ripgrep" +import { sanitizedProcessEnv } from "@/util/opencode-process" function env() { - const env = Object.fromEntries( - Object.entries(process.env).filter((item): item is [string, string] => item[1] !== undefined), - ) + const env = sanitizedProcessEnv() delete env.RIPGREP_CONFIG_PATH return env } diff --git a/packages/opencode/src/file/time.ts b/packages/opencode/src/file/time.ts deleted file mode 100644 index cc26682d57..0000000000 --- a/packages/opencode/src/file/time.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { DateTime, Effect, Layer, Option, Semaphore, Context } from "effect" -import { InstanceState } from "@/effect" -import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { Flag } from "@/flag/flag" -import type { SessionID } from "@/session/schema" -import { Log } from "../util" - -const log = Log.create({ service: "file.time" }) - -export type Stamp = { - readonly read: Date - readonly mtime: number | 
undefined - readonly size: number | undefined -} - -const session = (reads: Map>, sessionID: SessionID) => { - const value = reads.get(sessionID) - if (value) return value - - const next = new Map() - reads.set(sessionID, next) - return next -} - -interface State { - reads: Map> - locks: Map -} - -export interface Interface { - readonly read: (sessionID: SessionID, file: string) => Effect.Effect - readonly get: (sessionID: SessionID, file: string) => Effect.Effect - readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect - readonly withLock: (filepath: string, fn: () => Effect.Effect) => Effect.Effect -} - -export class Service extends Context.Service()("@opencode/FileTime") {} - -export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const fsys = yield* AppFileSystem.Service - const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK - - const stamp = Effect.fnUntraced(function* (file: string) { - const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void)) - return { - read: yield* DateTime.nowAsDate, - mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined, - size: info ? 
Number(info.size) : undefined, - } - }) - const state = yield* InstanceState.make( - Effect.fn("FileTime.state")(() => - Effect.succeed({ - reads: new Map>(), - locks: new Map(), - }), - ), - ) - - const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) { - filepath = AppFileSystem.normalizePath(filepath) - const locks = (yield* InstanceState.get(state)).locks - const lock = locks.get(filepath) - if (lock) return lock - - const next = Semaphore.makeUnsafe(1) - locks.set(filepath, next) - return next - }) - - const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) { - file = AppFileSystem.normalizePath(file) - const reads = (yield* InstanceState.get(state)).reads - log.info("read", { sessionID, file }) - session(reads, sessionID).set(file, yield* stamp(file)) - }) - - const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) { - file = AppFileSystem.normalizePath(file) - const reads = (yield* InstanceState.get(state)).reads - return reads.get(sessionID)?.get(file)?.read - }) - - const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) { - if (disableCheck) return - filepath = AppFileSystem.normalizePath(filepath) - - const reads = (yield* InstanceState.get(state)).reads - const time = reads.get(sessionID)?.get(filepath) - if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`) - - const next = yield* stamp(filepath) - const changed = next.mtime !== time.mtime || next.size !== time.size - if (!changed) return - - throw new Error( - `File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? 
next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`, - ) - }) - - const withLock = Effect.fn("FileTime.withLock")(function* (filepath: string, fn: () => Effect.Effect) { - return yield* fn().pipe((yield* getLock(filepath)).withPermits(1)) - }) - - return Service.of({ read, get, assert, withLock }) - }), -).pipe(Layer.orDie) - -export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer)) - -export * as FileTime from "./time" diff --git a/packages/opencode/src/file/watcher.ts b/packages/opencode/src/file/watcher.ts index 3e3da444a5..dc20333758 100644 --- a/packages/opencode/src/file/watcher.ts +++ b/packages/opencode/src/file/watcher.ts @@ -19,145 +19,145 @@ import { Log } from "../util" declare const OPENCODE_LIBC: string | undefined -export namespace FileWatcher { - const log = Log.create({ service: "file.watcher" }) - const SUBSCRIBE_TIMEOUT_MS = 10_000 +const log = Log.create({ service: "file.watcher" }) +const SUBSCRIBE_TIMEOUT_MS = 10_000 - export const Event = { - Updated: BusEvent.define( - "file.watcher.updated", - z.object({ - file: z.string(), - event: z.union([z.literal("add"), z.literal("change"), z.literal("unlink")]), - }), - ), - } - - const watcher = lazy((): typeof import("@parcel/watcher") | undefined => { - try { - const binding = require( - `@parcel/watcher-${process.platform}-${process.arch}${process.platform === "linux" ? 
`-${OPENCODE_LIBC || "glibc"}` : ""}`, - ) - return createWrapper(binding) as typeof import("@parcel/watcher") - } catch (error) { - log.error("failed to load watcher binding", { error }) - return - } - }) - - function getBackend() { - if (process.platform === "win32") return "windows" - if (process.platform === "darwin") return "fs-events" - if (process.platform === "linux") return "inotify" - } - - function protecteds(dir: string) { - return Protected.paths().filter((item) => { - const rel = path.relative(dir, item) - return rel !== "" && !rel.startsWith("..") && !path.isAbsolute(rel) - }) - } - - export const hasNativeBinding = () => !!watcher() - - export interface Interface { - readonly init: () => Effect.Effect - } - - export class Service extends Context.Service()("@opencode/FileWatcher") {} - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const config = yield* Config.Service - const git = yield* Git.Service - - const state = yield* InstanceState.make( - Effect.fn("FileWatcher.state")( - function* () { - if (yield* Flag.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER) return - - log.info("init", { directory: Instance.directory }) - - const backend = getBackend() - if (!backend) { - log.error("watcher backend not supported", { directory: Instance.directory, platform: process.platform }) - return - } - - const w = watcher() - if (!w) return - - log.info("watcher backend", { directory: Instance.directory, platform: process.platform, backend }) - - const subs: ParcelWatcher.AsyncSubscription[] = [] - yield* Effect.addFinalizer(() => - Effect.promise(() => Promise.allSettled(subs.map((sub) => sub.unsubscribe()))), - ) - - const cb: ParcelWatcher.SubscribeCallback = Instance.bind((err, evts) => { - if (err) return - for (const evt of evts) { - if (evt.type === "create") void Bus.publish(Event.Updated, { file: evt.path, event: "add" }) - if (evt.type === "update") void Bus.publish(Event.Updated, { file: evt.path, event: "change" }) - if 
(evt.type === "delete") void Bus.publish(Event.Updated, { file: evt.path, event: "unlink" }) - } - }) - - const subscribe = (dir: string, ignore: string[]) => { - const pending = w.subscribe(dir, cb, { ignore, backend }) - return Effect.gen(function* () { - const sub = yield* Effect.promise(() => pending) - subs.push(sub) - }).pipe( - Effect.timeout(SUBSCRIBE_TIMEOUT_MS), - Effect.catchCause((cause) => { - log.error("failed to subscribe", { dir, cause: Cause.pretty(cause) }) - pending.then((s) => s.unsubscribe()).catch(() => {}) - return Effect.void - }), - ) - } - - const cfg = yield* config.get() - const cfgIgnores = cfg.watcher?.ignore ?? [] - - if (yield* Flag.OPENCODE_EXPERIMENTAL_FILEWATCHER) { - yield* subscribe(Instance.directory, [ - ...FileIgnore.PATTERNS, - ...cfgIgnores, - ...protecteds(Instance.directory), - ]) - } - - if (Instance.project.vcs === "git") { - const result = yield* git.run(["rev-parse", "--git-dir"], { - cwd: Instance.project.worktree, - }) - const vcsDir = - result.exitCode === 0 ? 
path.resolve(Instance.project.worktree, result.text().trim()) : undefined - if (vcsDir && !cfgIgnores.includes(".git") && !cfgIgnores.includes(vcsDir)) { - const ignore = (yield* Effect.promise(() => readdir(vcsDir).catch(() => []))).filter( - (entry) => entry !== "HEAD", - ) - yield* subscribe(vcsDir, ignore) - } - } - }, - Effect.catchCause((cause) => { - log.error("failed to init watcher service", { cause: Cause.pretty(cause) }) - return Effect.void - }), - ), - ) - - return Service.of({ - init: Effect.fn("FileWatcher.init")(function* () { - yield* InstanceState.get(state) - }), - }) +export const Event = { + Updated: BusEvent.define( + "file.watcher.updated", + z.object({ + file: z.string(), + event: z.union([z.literal("add"), z.literal("change"), z.literal("unlink")]), }), - ) - - export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer), Layer.provide(Git.defaultLayer)) + ), } + +const watcher = lazy((): typeof import("@parcel/watcher") | undefined => { + try { + const binding = require( + `@parcel/watcher-${process.platform}-${process.arch}${process.platform === "linux" ? 
`-${OPENCODE_LIBC || "glibc"}` : ""}`, + ) + return createWrapper(binding) as typeof import("@parcel/watcher") + } catch (error) { + log.error("failed to load watcher binding", { error }) + return + } +}) + +function getBackend() { + if (process.platform === "win32") return "windows" + if (process.platform === "darwin") return "fs-events" + if (process.platform === "linux") return "inotify" +} + +function protecteds(dir: string) { + return Protected.paths().filter((item) => { + const rel = path.relative(dir, item) + return rel !== "" && !rel.startsWith("..") && !path.isAbsolute(rel) + }) +} + +export const hasNativeBinding = () => !!watcher() + +export interface Interface { + readonly init: () => Effect.Effect +} + +export class Service extends Context.Service()("@opencode/FileWatcher") {} + +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const config = yield* Config.Service + const git = yield* Git.Service + + const state = yield* InstanceState.make( + Effect.fn("FileWatcher.state")( + function* () { + if (yield* Flag.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER) return + + log.info("init", { directory: Instance.directory }) + + const backend = getBackend() + if (!backend) { + log.error("watcher backend not supported", { directory: Instance.directory, platform: process.platform }) + return + } + + const w = watcher() + if (!w) return + + log.info("watcher backend", { directory: Instance.directory, platform: process.platform, backend }) + + const subs: ParcelWatcher.AsyncSubscription[] = [] + yield* Effect.addFinalizer(() => + Effect.promise(() => Promise.allSettled(subs.map((sub) => sub.unsubscribe()))), + ) + + const cb: ParcelWatcher.SubscribeCallback = Instance.bind((err, evts) => { + if (err) return + for (const evt of evts) { + if (evt.type === "create") void Bus.publish(Event.Updated, { file: evt.path, event: "add" }) + if (evt.type === "update") void Bus.publish(Event.Updated, { file: evt.path, event: "change" }) + if (evt.type === 
"delete") void Bus.publish(Event.Updated, { file: evt.path, event: "unlink" }) + } + }) + + const subscribe = (dir: string, ignore: string[]) => { + const pending = w.subscribe(dir, cb, { ignore, backend }) + return Effect.gen(function* () { + const sub = yield* Effect.promise(() => pending) + subs.push(sub) + }).pipe( + Effect.timeout(SUBSCRIBE_TIMEOUT_MS), + Effect.catchCause((cause) => { + log.error("failed to subscribe", { dir, cause: Cause.pretty(cause) }) + pending.then((s) => s.unsubscribe()).catch(() => {}) + return Effect.void + }), + ) + } + + const cfg = yield* config.get() + const cfgIgnores = cfg.watcher?.ignore ?? [] + + if (yield* Flag.OPENCODE_EXPERIMENTAL_FILEWATCHER) { + yield* subscribe(Instance.directory, [ + ...FileIgnore.PATTERNS, + ...cfgIgnores, + ...protecteds(Instance.directory), + ]) + } + + if (Instance.project.vcs === "git") { + const result = yield* git.run(["rev-parse", "--git-dir"], { + cwd: Instance.project.worktree, + }) + const vcsDir = + result.exitCode === 0 ? 
path.resolve(Instance.project.worktree, result.text().trim()) : undefined + if (vcsDir && !cfgIgnores.includes(".git") && !cfgIgnores.includes(vcsDir)) { + const ignore = (yield* Effect.promise(() => readdir(vcsDir).catch(() => []))).filter( + (entry) => entry !== "HEAD", + ) + yield* subscribe(vcsDir, ignore) + } + } + }, + Effect.catchCause((cause) => { + log.error("failed to init watcher service", { cause: Cause.pretty(cause) }) + return Effect.void + }), + ), + ) + + return Service.of({ + init: Effect.fn("FileWatcher.init")(function* () { + yield* InstanceState.get(state) + }), + }) + }), +) + +export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer), Layer.provide(Git.defaultLayer)) + +export * as FileWatcher from "./watcher" diff --git a/packages/opencode/src/flag/flag.ts b/packages/opencode/src/flag/flag.ts index 21923f982f..72c8931f5b 100644 --- a/packages/opencode/src/flag/flag.ts +++ b/packages/opencode/src/flag/flag.ts @@ -10,153 +10,98 @@ function falsy(key: string) { return value === "false" || value === "0" } -export namespace Flag { - export const OTEL_EXPORTER_OTLP_ENDPOINT = process.env["OTEL_EXPORTER_OTLP_ENDPOINT"] - export const OTEL_EXPORTER_OTLP_HEADERS = process.env["OTEL_EXPORTER_OTLP_HEADERS"] - - export const OPENCODE_AUTO_SHARE = truthy("OPENCODE_AUTO_SHARE") - export const OPENCODE_AUTO_HEAP_SNAPSHOT = truthy("OPENCODE_AUTO_HEAP_SNAPSHOT") - export const OPENCODE_GIT_BASH_PATH = process.env["OPENCODE_GIT_BASH_PATH"] - export const OPENCODE_CONFIG = process.env["OPENCODE_CONFIG"] - export declare const OPENCODE_PURE: boolean - export declare const OPENCODE_TUI_CONFIG: string | undefined - export declare const OPENCODE_CONFIG_DIR: string | undefined - export declare const OPENCODE_PLUGIN_META_FILE: string | undefined - export const OPENCODE_CONFIG_CONTENT = process.env["OPENCODE_CONFIG_CONTENT"] - export const OPENCODE_DISABLE_AUTOUPDATE = truthy("OPENCODE_DISABLE_AUTOUPDATE") - export const OPENCODE_ALWAYS_NOTIFY_UPDATE = 
truthy("OPENCODE_ALWAYS_NOTIFY_UPDATE") - export const OPENCODE_DISABLE_PRUNE = truthy("OPENCODE_DISABLE_PRUNE") - export const OPENCODE_DISABLE_TERMINAL_TITLE = truthy("OPENCODE_DISABLE_TERMINAL_TITLE") - export const OPENCODE_SHOW_TTFD = truthy("OPENCODE_SHOW_TTFD") - export const OPENCODE_PERMISSION = process.env["OPENCODE_PERMISSION"] - export const OPENCODE_DISABLE_DEFAULT_PLUGINS = truthy("OPENCODE_DISABLE_DEFAULT_PLUGINS") - export const OPENCODE_DISABLE_LSP_DOWNLOAD = truthy("OPENCODE_DISABLE_LSP_DOWNLOAD") - export const OPENCODE_ENABLE_EXPERIMENTAL_MODELS = truthy("OPENCODE_ENABLE_EXPERIMENTAL_MODELS") - export const OPENCODE_DISABLE_AUTOCOMPACT = truthy("OPENCODE_DISABLE_AUTOCOMPACT") - export const OPENCODE_DISABLE_MODELS_FETCH = truthy("OPENCODE_DISABLE_MODELS_FETCH") - export const OPENCODE_DISABLE_MOUSE = truthy("OPENCODE_DISABLE_MOUSE") - export const OPENCODE_DISABLE_CLAUDE_CODE = truthy("OPENCODE_DISABLE_CLAUDE_CODE") - export const OPENCODE_DISABLE_CLAUDE_CODE_PROMPT = - OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_PROMPT") - export const OPENCODE_DISABLE_CLAUDE_CODE_SKILLS = - OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_SKILLS") - export const OPENCODE_DISABLE_EXTERNAL_SKILLS = - OPENCODE_DISABLE_CLAUDE_CODE_SKILLS || truthy("OPENCODE_DISABLE_EXTERNAL_SKILLS") - export declare const OPENCODE_DISABLE_PROJECT_CONFIG: boolean - export const OPENCODE_FAKE_VCS = process.env["OPENCODE_FAKE_VCS"] - export declare const OPENCODE_CLIENT: string - export const OPENCODE_SERVER_PASSWORD = process.env["OPENCODE_SERVER_PASSWORD"] - export const OPENCODE_SERVER_USERNAME = process.env["OPENCODE_SERVER_USERNAME"] - export const OPENCODE_ENABLE_QUESTION_TOOL = truthy("OPENCODE_ENABLE_QUESTION_TOOL") - - // Experimental - export const OPENCODE_EXPERIMENTAL = truthy("OPENCODE_EXPERIMENTAL") - export const OPENCODE_EXPERIMENTAL_FILEWATCHER = Config.boolean("OPENCODE_EXPERIMENTAL_FILEWATCHER").pipe( - 
Config.withDefault(false), - ) - export const OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER = Config.boolean( - "OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER", - ).pipe(Config.withDefault(false)) - export const OPENCODE_EXPERIMENTAL_ICON_DISCOVERY = - OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_ICON_DISCOVERY") - - const copy = process.env["OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT"] - export const OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT = - copy === undefined ? process.platform === "win32" : truthy("OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT") - export const OPENCODE_ENABLE_EXA = - truthy("OPENCODE_ENABLE_EXA") || OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_EXA") - export const OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS = number("OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS") - export const OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX = number("OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX") - export const OPENCODE_EXPERIMENTAL_OXFMT = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_OXFMT") - export const OPENCODE_EXPERIMENTAL_LSP_TY = truthy("OPENCODE_EXPERIMENTAL_LSP_TY") - export const OPENCODE_EXPERIMENTAL_LSP_TOOL = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_LSP_TOOL") - export const OPENCODE_DISABLE_FILETIME_CHECK = Config.boolean("OPENCODE_DISABLE_FILETIME_CHECK").pipe( - Config.withDefault(false), - ) - export const OPENCODE_EXPERIMENTAL_PLAN_MODE = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE") - export const OPENCODE_EXPERIMENTAL_MARKDOWN = !falsy("OPENCODE_EXPERIMENTAL_MARKDOWN") - export const OPENCODE_MODELS_URL = process.env["OPENCODE_MODELS_URL"] - export const OPENCODE_MODELS_PATH = process.env["OPENCODE_MODELS_PATH"] - export const OPENCODE_DISABLE_EMBEDDED_WEB_UI = truthy("OPENCODE_DISABLE_EMBEDDED_WEB_UI") - export const OPENCODE_DB = process.env["OPENCODE_DB"] - export const OPENCODE_DISABLE_CHANNEL_DB = truthy("OPENCODE_DISABLE_CHANNEL_DB") - export const OPENCODE_SKIP_MIGRATIONS = 
truthy("OPENCODE_SKIP_MIGRATIONS") - export const OPENCODE_STRICT_CONFIG_DEPS = truthy("OPENCODE_STRICT_CONFIG_DEPS") - - export const OPENCODE_WORKSPACE_ID = process.env["OPENCODE_WORKSPACE_ID"] - export const OPENCODE_EXPERIMENTAL_HTTPAPI = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_HTTPAPI") - export const OPENCODE_EXPERIMENTAL_WORKSPACES = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES") - - function number(key: string) { - const value = process.env[key] - if (!value) return undefined - const parsed = Number(value) - return Number.isInteger(parsed) && parsed > 0 ? parsed : undefined - } +function number(key: string) { + const value = process.env[key] + if (!value) return undefined + const parsed = Number(value) + return Number.isInteger(parsed) && parsed > 0 ? parsed : undefined } -// Dynamic getter for OPENCODE_DISABLE_PROJECT_CONFIG -// This must be evaluated at access time, not module load time, -// because external tooling may set this env var at runtime -Object.defineProperty(Flag, "OPENCODE_DISABLE_PROJECT_CONFIG", { - get() { +const OPENCODE_EXPERIMENTAL = truthy("OPENCODE_EXPERIMENTAL") +const OPENCODE_DISABLE_CLAUDE_CODE = truthy("OPENCODE_DISABLE_CLAUDE_CODE") +const OPENCODE_DISABLE_CLAUDE_CODE_SKILLS = + OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_SKILLS") +const copy = process.env["OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT"] + +export const Flag = { + OTEL_EXPORTER_OTLP_ENDPOINT: process.env["OTEL_EXPORTER_OTLP_ENDPOINT"], + OTEL_EXPORTER_OTLP_HEADERS: process.env["OTEL_EXPORTER_OTLP_HEADERS"], + + OPENCODE_AUTO_SHARE: truthy("OPENCODE_AUTO_SHARE"), + OPENCODE_AUTO_HEAP_SNAPSHOT: truthy("OPENCODE_AUTO_HEAP_SNAPSHOT"), + OPENCODE_GIT_BASH_PATH: process.env["OPENCODE_GIT_BASH_PATH"], + OPENCODE_CONFIG: process.env["OPENCODE_CONFIG"], + OPENCODE_CONFIG_CONTENT: process.env["OPENCODE_CONFIG_CONTENT"], + OPENCODE_DISABLE_AUTOUPDATE: truthy("OPENCODE_DISABLE_AUTOUPDATE"), + 
OPENCODE_ALWAYS_NOTIFY_UPDATE: truthy("OPENCODE_ALWAYS_NOTIFY_UPDATE"), + OPENCODE_DISABLE_PRUNE: truthy("OPENCODE_DISABLE_PRUNE"), + OPENCODE_DISABLE_TERMINAL_TITLE: truthy("OPENCODE_DISABLE_TERMINAL_TITLE"), + OPENCODE_SHOW_TTFD: truthy("OPENCODE_SHOW_TTFD"), + OPENCODE_PERMISSION: process.env["OPENCODE_PERMISSION"], + OPENCODE_DISABLE_DEFAULT_PLUGINS: truthy("OPENCODE_DISABLE_DEFAULT_PLUGINS"), + OPENCODE_DISABLE_LSP_DOWNLOAD: truthy("OPENCODE_DISABLE_LSP_DOWNLOAD"), + OPENCODE_ENABLE_EXPERIMENTAL_MODELS: truthy("OPENCODE_ENABLE_EXPERIMENTAL_MODELS"), + OPENCODE_DISABLE_AUTOCOMPACT: truthy("OPENCODE_DISABLE_AUTOCOMPACT"), + OPENCODE_DISABLE_MODELS_FETCH: truthy("OPENCODE_DISABLE_MODELS_FETCH"), + OPENCODE_DISABLE_MOUSE: truthy("OPENCODE_DISABLE_MOUSE"), + OPENCODE_DISABLE_CLAUDE_CODE, + OPENCODE_DISABLE_CLAUDE_CODE_PROMPT: OPENCODE_DISABLE_CLAUDE_CODE || truthy("OPENCODE_DISABLE_CLAUDE_CODE_PROMPT"), + OPENCODE_DISABLE_CLAUDE_CODE_SKILLS, + OPENCODE_DISABLE_EXTERNAL_SKILLS: OPENCODE_DISABLE_CLAUDE_CODE_SKILLS || truthy("OPENCODE_DISABLE_EXTERNAL_SKILLS"), + OPENCODE_FAKE_VCS: process.env["OPENCODE_FAKE_VCS"], + OPENCODE_SERVER_PASSWORD: process.env["OPENCODE_SERVER_PASSWORD"], + OPENCODE_SERVER_USERNAME: process.env["OPENCODE_SERVER_USERNAME"], + OPENCODE_ENABLE_QUESTION_TOOL: truthy("OPENCODE_ENABLE_QUESTION_TOOL"), + + // Experimental + OPENCODE_EXPERIMENTAL, + OPENCODE_EXPERIMENTAL_FILEWATCHER: Config.boolean("OPENCODE_EXPERIMENTAL_FILEWATCHER").pipe( + Config.withDefault(false), + ), + OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER: Config.boolean("OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER").pipe( + Config.withDefault(false), + ), + OPENCODE_EXPERIMENTAL_ICON_DISCOVERY: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_ICON_DISCOVERY"), + OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT: + copy === undefined ? 
process.platform === "win32" : truthy("OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT"), + OPENCODE_ENABLE_EXA: truthy("OPENCODE_ENABLE_EXA") || OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_EXA"), + OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS: number("OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS"), + OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX: number("OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX"), + OPENCODE_EXPERIMENTAL_OXFMT: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_OXFMT"), + OPENCODE_EXPERIMENTAL_LSP_TY: truthy("OPENCODE_EXPERIMENTAL_LSP_TY"), + OPENCODE_EXPERIMENTAL_LSP_TOOL: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_LSP_TOOL"), + OPENCODE_EXPERIMENTAL_PLAN_MODE: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE"), + OPENCODE_EXPERIMENTAL_MARKDOWN: !falsy("OPENCODE_EXPERIMENTAL_MARKDOWN"), + OPENCODE_MODELS_URL: process.env["OPENCODE_MODELS_URL"], + OPENCODE_MODELS_PATH: process.env["OPENCODE_MODELS_PATH"], + OPENCODE_DISABLE_EMBEDDED_WEB_UI: truthy("OPENCODE_DISABLE_EMBEDDED_WEB_UI"), + OPENCODE_DB: process.env["OPENCODE_DB"], + OPENCODE_DISABLE_CHANNEL_DB: truthy("OPENCODE_DISABLE_CHANNEL_DB"), + OPENCODE_SKIP_MIGRATIONS: truthy("OPENCODE_SKIP_MIGRATIONS"), + OPENCODE_STRICT_CONFIG_DEPS: truthy("OPENCODE_STRICT_CONFIG_DEPS"), + + OPENCODE_WORKSPACE_ID: process.env["OPENCODE_WORKSPACE_ID"], + OPENCODE_EXPERIMENTAL_HTTPAPI: truthy("OPENCODE_EXPERIMENTAL_HTTPAPI"), + OPENCODE_EXPERIMENTAL_WORKSPACES: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES"), + + // Evaluated at access time (not module load) because tests, the CLI, and + // external tooling set these env vars at runtime. 
+ get OPENCODE_DISABLE_PROJECT_CONFIG() { return truthy("OPENCODE_DISABLE_PROJECT_CONFIG") }, - enumerable: true, - configurable: false, -}) - -// Dynamic getter for OPENCODE_TUI_CONFIG -// This must be evaluated at access time, not module load time, -// because tests and external tooling may set this env var at runtime -Object.defineProperty(Flag, "OPENCODE_TUI_CONFIG", { - get() { + get OPENCODE_TUI_CONFIG() { return process.env["OPENCODE_TUI_CONFIG"] }, - enumerable: true, - configurable: false, -}) - -// Dynamic getter for OPENCODE_CONFIG_DIR -// This must be evaluated at access time, not module load time, -// because external tooling may set this env var at runtime -Object.defineProperty(Flag, "OPENCODE_CONFIG_DIR", { - get() { + get OPENCODE_CONFIG_DIR() { return process.env["OPENCODE_CONFIG_DIR"] }, - enumerable: true, - configurable: false, -}) - -// Dynamic getter for OPENCODE_PURE -// This must be evaluated at access time, not module load time, -// because the CLI can set this flag at runtime -Object.defineProperty(Flag, "OPENCODE_PURE", { - get() { + get OPENCODE_PURE() { return truthy("OPENCODE_PURE") }, - enumerable: true, - configurable: false, -}) - -// Dynamic getter for OPENCODE_PLUGIN_META_FILE -// This must be evaluated at access time, not module load time, -// because tests and external tooling may set this env var at runtime -Object.defineProperty(Flag, "OPENCODE_PLUGIN_META_FILE", { - get() { + get OPENCODE_PLUGIN_META_FILE() { return process.env["OPENCODE_PLUGIN_META_FILE"] }, - enumerable: true, - configurable: false, -}) - -// Dynamic getter for OPENCODE_CLIENT -// This must be evaluated at access time, not module load time, -// because some commands override the client at runtime -Object.defineProperty(Flag, "OPENCODE_CLIENT", { - get() { + get OPENCODE_CLIENT() { return process.env["OPENCODE_CLIENT"] ?? 
"cli" }, - enumerable: true, - configurable: false, -}) +} diff --git a/packages/opencode/src/format/formatter.ts b/packages/opencode/src/format/formatter.ts index 36249db7db..03f8365274 100644 --- a/packages/opencode/src/format/formatter.ts +++ b/packages/opencode/src/format/formatter.ts @@ -1,15 +1,17 @@ import { Npm } from "../npm" -import { Instance } from "../project/instance" +import type { InstanceContext } from "../project/instance" import { Filesystem } from "../util" import { Process } from "../util" import { which } from "../util/which" import { Flag } from "@/flag/flag" +export interface Context extends Pick {} + export interface Info { name: string environment?: Record extensions: string[] - enabled(): Promise + enabled(context: Context): Promise } export const gofmt: Info = { @@ -65,8 +67,8 @@ export const prettier: Info = { ".graphql", ".gql", ], - async enabled() { - const items = await Filesystem.findUp("package.json", Instance.directory, Instance.worktree) + async enabled(context) { + const items = await Filesystem.findUp("package.json", context.directory, context.worktree) for (const item of items) { const json = await Filesystem.readJson<{ dependencies?: Record @@ -87,9 +89,9 @@ export const oxfmt: Info = { BUN_BE_BUN: "1", }, extensions: [".js", ".jsx", ".mjs", ".cjs", ".ts", ".tsx", ".mts", ".cts"], - async enabled() { + async enabled(context) { if (!Flag.OPENCODE_EXPERIMENTAL_OXFMT) return false - const items = await Filesystem.findUp("package.json", Instance.directory, Instance.worktree) + const items = await Filesystem.findUp("package.json", context.directory, context.worktree) for (const item of items) { const json = await Filesystem.readJson<{ dependencies?: Record @@ -137,10 +139,10 @@ export const biome: Info = { ".graphql", ".gql", ], - async enabled() { + async enabled(context) { const configs = ["biome.json", "biome.jsonc"] for (const config of configs) { - const found = await Filesystem.findUp(config, Instance.directory, 
Instance.worktree) + const found = await Filesystem.findUp(config, context.directory, context.worktree) if (found.length > 0) { const bin = await Npm.which("@biomejs/biome") if (bin) return [bin, "format", "--write", "$FILE"] @@ -163,8 +165,8 @@ export const zig: Info = { export const clang: Info = { name: "clang-format", extensions: [".c", ".cc", ".cpp", ".cxx", ".c++", ".h", ".hh", ".hpp", ".hxx", ".h++", ".ino", ".C", ".H"], - async enabled() { - const items = await Filesystem.findUp(".clang-format", Instance.directory, Instance.worktree) + async enabled(context) { + const items = await Filesystem.findUp(".clang-format", context.directory, context.worktree) if (items.length > 0) { const match = which("clang-format") if (match) return [match, "-i", "$FILE"] @@ -186,11 +188,11 @@ export const ktlint: Info = { export const ruff: Info = { name: "ruff", extensions: [".py", ".pyi"], - async enabled() { + async enabled(context) { if (!which("ruff")) return false const configs = ["pyproject.toml", "ruff.toml", ".ruff.toml"] for (const config of configs) { - const found = await Filesystem.findUp(config, Instance.directory, Instance.worktree) + const found = await Filesystem.findUp(config, context.directory, context.worktree) if (found.length > 0) { if (config === "pyproject.toml") { const content = await Filesystem.readText(found[0]) @@ -202,7 +204,7 @@ export const ruff: Info = { } const deps = ["requirements.txt", "pyproject.toml", "Pipfile"] for (const dep of deps) { - const found = await Filesystem.findUp(dep, Instance.directory, Instance.worktree) + const found = await Filesystem.findUp(dep, context.directory, context.worktree) if (found.length > 0) { const content = await Filesystem.readText(found[0]) if (content.includes("ruff")) return ["ruff", "format", "$FILE"] @@ -233,8 +235,8 @@ export const rlang: Info = { export const uvformat: Info = { name: "uv", extensions: [".py", ".pyi"], - async enabled() { - if (await ruff.enabled()) return false + async 
enabled(context) { + if (await ruff.enabled(context)) return false const uv = which("uv") if (uv == null) return false const output = await Process.run([uv, "format", "--help"], { nothrow: true }) @@ -286,9 +288,9 @@ export const dart: Info = { export const ocamlformat: Info = { name: "ocamlformat", extensions: [".ml", ".mli"], - async enabled() { + async enabled(context) { if (!which("ocamlformat")) return false - const items = await Filesystem.findUp(".ocamlformat", Instance.directory, Instance.worktree) + const items = await Filesystem.findUp(".ocamlformat", context.directory, context.worktree) if (items.length > 0) return ["ocamlformat", "-i", "$FILE"] return false }, @@ -357,8 +359,8 @@ export const rustfmt: Info = { export const pint: Info = { name: "pint", extensions: [".php"], - async enabled() { - const items = await Filesystem.findUp("composer.json", Instance.directory, Instance.worktree) + async enabled(context) { + const items = await Filesystem.findUp("composer.json", context.directory, context.worktree) for (const item of items) { const json = await Filesystem.readJson<{ require?: Record diff --git a/packages/opencode/src/format/index.ts b/packages/opencode/src/format/index.ts index d0ae59d05e..85934ce9c9 100644 --- a/packages/opencode/src/format/index.ts +++ b/packages/opencode/src/format/index.ts @@ -37,47 +37,14 @@ export const layer = Layer.effect( const spawner = yield* ChildProcessSpawner.ChildProcessSpawner const state = yield* InstanceState.make( - Effect.fn("Format.state")(function* (_ctx) { + Effect.fn("Format.state")(function* (ctx) { const commands: Record = {} const formatters: Record = {} - const cfg = yield* config.get() - - if (cfg.formatter !== false) { - for (const item of Object.values(Formatter)) { - formatters[item.name] = item - } - for (const [name, item] of Object.entries(cfg.formatter ?? {})) { - // Ruff and uv are both the same formatter, so disabling either should disable both. 
- if (["ruff", "uv"].includes(name) && (cfg.formatter?.ruff?.disabled || cfg.formatter?.uv?.disabled)) { - // TODO combine formatters so shared backends like Ruff/uv don't need linked disable handling here. - delete formatters.ruff - delete formatters.uv - continue - } - if (item.disabled) { - delete formatters[name] - continue - } - const info = mergeDeep(formatters[name] ?? {}, { - extensions: [], - ...item, - }) - - formatters[name] = { - ...info, - name, - enabled: async () => info.command ?? false, - } - } - } else { - log.info("all formatters are disabled") - } - async function getCommand(item: Formatter.Info) { let cmd = commands[item.name] if (cmd === false || cmd === undefined) { - cmd = await item.enabled() + cmd = await item.enabled(ctx) commands[item.name] = cmd } return cmd @@ -149,6 +116,48 @@ export const layer = Layer.effect( }) } + const cfg = yield* config.get() + + if (!cfg.formatter) { + log.info("all formatters are disabled") + log.info("init") + return { + formatters, + isEnabled, + formatFile, + } + } + + for (const item of Object.values(Formatter)) { + formatters[item.name] = item + } + + if (cfg.formatter !== true) { + for (const [name, item] of Object.entries(cfg.formatter)) { + const builtIn = Formatter[name as keyof typeof Formatter] + + // Ruff and uv are both the same formatter, so disabling either should disable both. + if (["ruff", "uv"].includes(name) && (cfg.formatter.ruff?.disabled || cfg.formatter.uv?.disabled)) { + // TODO combine formatters so shared backends like Ruff/uv don't need linked disable handling here. + delete formatters.ruff + delete formatters.uv + continue + } + if (item.disabled) { + delete formatters[name] + continue + } + const info = mergeDeep(builtIn ?? { extensions: [] }, item) + + formatters[name] = { + ...info, + name, + extensions: info.extensions ?? [], + enabled: builtIn && !info.command ? builtIn.enabled : async (_context) => info.command ?? 
false, + } + } + } + log.info("init") return { diff --git a/packages/opencode/src/index.ts b/packages/opencode/src/index.ts index 67de87c2aa..0a3a927b46 100644 --- a/packages/opencode/src/index.ts +++ b/packages/opencode/src/index.ts @@ -38,6 +38,9 @@ import { errorMessage } from "./util/error" import { PluginCommand } from "./cli/cmd/plug" import { Heap } from "./cli/heap" import { drizzle } from "drizzle-orm/bun-sqlite" +import { ensureProcessMetadata } from "./util/opencode-process" + +const processMetadata = ensureProcessMetadata("main") process.on("unhandledRejection", (e) => { Log.Default.error("rejection", { @@ -108,6 +111,8 @@ const cli = yargs(args) Log.Default.info("opencode", { version: InstallationVersion, args: process.argv.slice(2), + process_role: processMetadata.processRole, + run_id: processMetadata.runID, }) const marker = path.join(Global.Path.data, "opencode.db") diff --git a/packages/opencode/src/lsp/client.ts b/packages/opencode/src/lsp/client.ts index 59a64ca1ed..b20e8ae7f0 100644 --- a/packages/opencode/src/lsp/client.ts +++ b/packages/opencode/src/lsp/client.ts @@ -11,7 +11,6 @@ import z from "zod" import type * as LSPServer from "./server" import { NamedError } from "@opencode-ai/shared/util/error" import { withTimeout } from "../util/timeout" -import { Instance } from "../project/instance" import { Filesystem } from "../util" const DIAGNOSTICS_DEBOUNCE_MS = 150 @@ -39,7 +38,7 @@ export const Event = { ), } -export async function create(input: { serverID: string; server: LSPServer.Handle; root: string }) { +export async function create(input: { serverID: string; server: LSPServer.Handle; root: string; directory: string }) { const l = log.clone().tag("serverID", input.serverID) l.info("starting client") @@ -145,33 +144,33 @@ export async function create(input: { serverID: string; server: LSPServer.Handle return connection }, notify: { - async open(input: { path: string }) { - input.path = path.isAbsolute(input.path) ? 
input.path : path.resolve(Instance.directory, input.path) - const text = await Filesystem.readText(input.path) - const extension = path.extname(input.path) + async open(request: { path: string }) { + request.path = path.isAbsolute(request.path) ? request.path : path.resolve(input.directory, request.path) + const text = await Filesystem.readText(request.path) + const extension = path.extname(request.path) const languageId = LANGUAGE_EXTENSIONS[extension] ?? "plaintext" - const version = files[input.path] + const version = files[request.path] if (version !== undefined) { - log.info("workspace/didChangeWatchedFiles", input) + log.info("workspace/didChangeWatchedFiles", request) await connection.sendNotification("workspace/didChangeWatchedFiles", { changes: [ { - uri: pathToFileURL(input.path).href, + uri: pathToFileURL(request.path).href, type: 2, // Changed }, ], }) const next = version + 1 - files[input.path] = next + files[request.path] = next log.info("textDocument/didChange", { - path: input.path, + path: request.path, version: next, }) await connection.sendNotification("textDocument/didChange", { textDocument: { - uri: pathToFileURL(input.path).href, + uri: pathToFileURL(request.path).href, version: next, }, contentChanges: [{ text }], @@ -179,36 +178,36 @@ export async function create(input: { serverID: string; server: LSPServer.Handle return } - log.info("workspace/didChangeWatchedFiles", input) + log.info("workspace/didChangeWatchedFiles", request) await connection.sendNotification("workspace/didChangeWatchedFiles", { changes: [ { - uri: pathToFileURL(input.path).href, + uri: pathToFileURL(request.path).href, type: 1, // Created }, ], }) - log.info("textDocument/didOpen", input) - diagnostics.delete(input.path) + log.info("textDocument/didOpen", request) + diagnostics.delete(request.path) await connection.sendNotification("textDocument/didOpen", { textDocument: { - uri: pathToFileURL(input.path).href, + uri: pathToFileURL(request.path).href, languageId, 
version: 0, text, }, }) - files[input.path] = 0 + files[request.path] = 0 return }, }, get diagnostics() { return diagnostics }, - async waitForDiagnostics(input: { path: string }) { + async waitForDiagnostics(request: { path: string }) { const normalizedPath = Filesystem.normalizePath( - path.isAbsolute(input.path) ? input.path : path.resolve(Instance.directory, input.path), + path.isAbsolute(request.path) ? request.path : path.resolve(input.directory, request.path), ) log.info("waiting for diagnostics", { path: normalizedPath }) let unsub: () => void diff --git a/packages/opencode/src/lsp/diagnostic.ts b/packages/opencode/src/lsp/diagnostic.ts new file mode 100644 index 0000000000..4bc085e788 --- /dev/null +++ b/packages/opencode/src/lsp/diagnostic.ts @@ -0,0 +1,29 @@ +import * as LSPClient from "./client" + +const MAX_PER_FILE = 20 + +export function pretty(diagnostic: LSPClient.Diagnostic) { + const severityMap = { + 1: "ERROR", + 2: "WARN", + 3: "INFO", + 4: "HINT", + } + + const severity = severityMap[diagnostic.severity || 1] + const line = diagnostic.range.start.line + 1 + const col = diagnostic.range.start.character + 1 + + return `${severity} [${line}:${col}] ${diagnostic.message}` +} + +export function report(file: string, issues: LSPClient.Diagnostic[]) { + const errors = issues.filter((item) => item.severity === 1) + if (errors.length === 0) return "" + const limited = errors.slice(0, MAX_PER_FILE) + const more = errors.length - MAX_PER_FILE + const suffix = more > 0 ? `\n... 
and ${more} more` : "" + return `\n${limited.map(pretty).join("\n")}${suffix}\n` +} + +export * as Diagnostic from "./diagnostic" diff --git a/packages/opencode/src/lsp/lsp.ts b/packages/opencode/src/lsp/lsp.ts index d895e73256..aa519f9f7e 100644 --- a/packages/opencode/src/lsp/lsp.ts +++ b/packages/opencode/src/lsp/lsp.ts @@ -7,12 +7,12 @@ import { pathToFileURL, fileURLToPath } from "url" import * as LSPServer from "./server" import z from "zod" import { Config } from "../config" -import { Instance } from "../project/instance" import { Flag } from "@/flag/flag" import { Process } from "../util" import { spawn as lspspawn } from "./launch" import { Effect, Layer, Context } from "effect" import { InstanceState } from "@/effect" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" const log = Log.create({ service: "lsp" }) @@ -162,12 +162,12 @@ export const layer = Layer.effect( const config = yield* Config.Service const state = yield* InstanceState.make( - Effect.fn("LSP.state")(function* () { + Effect.fn("LSP.state")(function* (ctx) { const cfg = yield* config.get() const servers: Record = {} - if (cfg.lsp === false) { + if (!cfg.lsp) { log.info("all LSPs are disabled") } else { for (const server of Object.values(LSPServer)) { @@ -176,25 +176,27 @@ export const layer = Layer.effect( filterExperimentalServers(servers) - for (const [name, item] of Object.entries(cfg.lsp ?? {})) { - const existing = servers[name] - if (item.disabled) { - log.info(`LSP server ${name} is disabled`) - delete servers[name] - continue - } - servers[name] = { - ...existing, - id: name, - root: existing?.root ?? (async () => Instance.directory), - extensions: item.extensions ?? existing?.extensions ?? 
[], - spawn: async (root) => ({ - process: lspspawn(item.command[0], item.command.slice(1), { - cwd: root, - env: { ...process.env, ...item.env }, + if (cfg.lsp !== true) { + for (const [name, item] of Object.entries(cfg.lsp)) { + const existing = servers[name] + if (item.disabled) { + log.info(`LSP server ${name} is disabled`) + delete servers[name] + continue + } + servers[name] = { + ...existing, + id: name, + root: existing?.root ?? (async (_file, ctx) => ctx.directory), + extensions: item.extensions ?? existing?.extensions ?? [], + spawn: async (root) => ({ + process: lspspawn(item.command[0], item.command.slice(1), { + cwd: root, + env: { ...process.env, ...item.env }, + }), + initialization: item.initialization, }), - initialization: item.initialization, - }), + } } } @@ -223,7 +225,13 @@ export const layer = Layer.effect( ) const getClients = Effect.fnUntraced(function* (file: string) { - if (!Instance.containsPath(file)) return [] as LSPClient.Info[] + const ctx = yield* InstanceState.context + if ( + !AppFileSystem.contains(ctx.directory, file) && + (ctx.worktree === "/" || !AppFileSystem.contains(ctx.worktree, file)) + ) { + return [] as LSPClient.Info[] + } const s = yield* InstanceState.get(state) return yield* Effect.promise(async () => { const extension = path.parse(file).ext || file @@ -231,7 +239,7 @@ export const layer = Layer.effect( async function schedule(server: LSPServer.Info, root: string, key: string) { const handle = await server - .spawn(root) + .spawn(root, ctx) .then((value) => { if (!value) s.broken.add(key) return value @@ -249,6 +257,7 @@ export const layer = Layer.effect( serverID: server.id, server: handle, root, + directory: ctx.directory, }).catch(async (err) => { s.broken.add(key) await Process.stop(handle.process) @@ -271,7 +280,7 @@ export const layer = Layer.effect( for (const server of Object.values(s.servers)) { if (server.extensions.length && !server.extensions.includes(extension)) continue - const root = await 
server.root(file) + const root = await server.root(file, ctx) if (!root) continue if (s.broken.has(root + server.id)) continue @@ -324,13 +333,14 @@ export const layer = Layer.effect( }) const status = Effect.fn("LSP.status")(function* () { + const ctx = yield* InstanceState.context const s = yield* InstanceState.get(state) const result: Status[] = [] for (const client of s.clients) { result.push({ id: client.serverID, name: s.servers[client.serverID].id, - root: path.relative(Instance.directory, client.root), + root: path.relative(ctx.directory, client.root), status: "connected", }) } @@ -338,12 +348,13 @@ export const layer = Layer.effect( }) const hasClients = Effect.fn("LSP.hasClients")(function* (file: string) { + const ctx = yield* InstanceState.context const s = yield* InstanceState.get(state) return yield* Effect.promise(async () => { const extension = path.parse(file).ext || file for (const server of Object.values(s.servers)) { if (server.extensions.length && !server.extensions.includes(extension)) continue - const root = await server.root(file) + const root = await server.root(file, ctx) if (!root) continue if (s.broken.has(root + server.id)) continue return true @@ -505,30 +516,4 @@ export const layer = Layer.effect( export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer)) -export namespace Diagnostic { - const MAX_PER_FILE = 20 - - export function pretty(diagnostic: LSPClient.Diagnostic) { - const severityMap = { - 1: "ERROR", - 2: "WARN", - 3: "INFO", - 4: "HINT", - } - - const severity = severityMap[diagnostic.severity || 1] - const line = diagnostic.range.start.line + 1 - const col = diagnostic.range.start.character + 1 - - return `${severity} [${line}:${col}] ${diagnostic.message}` - } - - export function report(file: string, issues: LSPClient.Diagnostic[]) { - const errors = issues.filter((item) => item.severity === 1) - if (errors.length === 0) return "" - const limited = errors.slice(0, MAX_PER_FILE) - const more = errors.length 
- MAX_PER_FILE - const suffix = more > 0 ? `\n... and ${more} more` : "" - return `\n${limited.map(pretty).join("\n")}${suffix}\n` - } -} +export * as Diagnostic from "./diagnostic" diff --git a/packages/opencode/src/lsp/server.ts b/packages/opencode/src/lsp/server.ts index 760e8eaba0..9182368063 100644 --- a/packages/opencode/src/lsp/server.ts +++ b/packages/opencode/src/lsp/server.ts @@ -6,7 +6,7 @@ import { Log } from "../util" import { text } from "node:stream/consumers" import fs from "fs/promises" import { Filesystem } from "../util" -import { Instance } from "../project/instance" +import type { InstanceContext } from "../project/instance" import { Flag } from "../flag/flag" import { Archive } from "../util" import { Process } from "../util" @@ -29,15 +29,15 @@ export interface Handle { initialization?: Record } -type RootFunction = (file: string) => Promise +type RootFunction = (file: string, ctx: InstanceContext) => Promise const NearestRoot = (includePatterns: string[], excludePatterns?: string[]): RootFunction => { - return async (file) => { + return async (file, ctx) => { if (excludePatterns) { const excludedFiles = Filesystem.up({ targets: excludePatterns, start: path.dirname(file), - stop: Instance.directory, + stop: ctx.directory, }) const excluded = await excludedFiles.next() await excludedFiles.return() @@ -46,11 +46,11 @@ const NearestRoot = (includePatterns: string[], excludePatterns?: string[]): Roo const files = Filesystem.up({ targets: includePatterns, start: path.dirname(file), - stop: Instance.directory, + stop: ctx.directory, }) const first = await files.next() await files.return() - if (!first.value) return Instance.directory + if (!first.value) return ctx.directory return path.dirname(first.value) } } @@ -60,16 +60,16 @@ export interface Info { extensions: string[] global?: boolean root: RootFunction - spawn(root: string): Promise + spawn(root: string, ctx: InstanceContext): Promise } export const Deno: Info = { id: "deno", - root: async 
(file) => { + root: async (file, ctx) => { const files = Filesystem.up({ targets: ["deno.json", "deno.jsonc"], start: path.dirname(file), - stop: Instance.directory, + stop: ctx.directory, }) const first = await files.next() await files.return() @@ -98,8 +98,8 @@ export const Typescript: Info = { ["deno.json", "deno.jsonc"], ), extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"], - async spawn(root) { - const tsserver = Module.resolve("typescript/lib/tsserver.js", Instance.directory) + async spawn(root, ctx) { + const tsserver = Module.resolve("typescript/lib/tsserver.js", ctx.directory) log.info("typescript server", { tsserver }) if (!tsserver) return const bin = await Npm.which("typescript-language-server") @@ -154,8 +154,8 @@ export const ESLint: Info = { id: "eslint", root: NearestRoot(["package-lock.json", "bun.lockb", "bun.lock", "pnpm-lock.yaml", "yarn.lock"]), extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts", ".vue"], - async spawn(root) { - const eslint = Module.resolve("eslint", Instance.directory) + async spawn(root, ctx) { + const eslint = Module.resolve("eslint", ctx.directory) if (!eslint) return log.info("spawning eslint server") const serverPath = path.join(Global.Path.bin, "vscode-eslint", "server", "out", "eslintServer.js") @@ -219,7 +219,7 @@ export const Oxlint: Info = { "package.json", ]), extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts", ".vue", ".astro", ".svelte"], - async spawn(root) { + async spawn(root, ctx) { const ext = process.platform === "win32" ? 
".cmd" : "" const serverTarget = path.join("node_modules", ".bin", "oxc_language_server" + ext) @@ -232,7 +232,7 @@ export const Oxlint: Info = { const candidates = Filesystem.up({ targets: [target], start: root, - stop: Instance.worktree, + stop: ctx.worktree, }) const first = await candidates.next() await candidates.return() @@ -344,10 +344,10 @@ export const Biome: Info = { export const Gopls: Info = { id: "gopls", - root: async (file) => { - const work = await NearestRoot(["go.work"])(file) + root: async (file, ctx) => { + const work = await NearestRoot(["go.work"])(file, ctx) if (work) return work - return NearestRoot(["go.mod", "go.sum"])(file) + return NearestRoot(["go.mod", "go.sum"])(file, ctx) }, extensions: [".go"], async spawn(root) { @@ -810,8 +810,8 @@ export const SourceKit: Info = { export const RustAnalyzer: Info = { id: "rust", - root: async (root) => { - const crateRoot = await NearestRoot(["Cargo.toml", "Cargo.lock"])(root) + root: async (file, ctx) => { + const crateRoot = await NearestRoot(["Cargo.toml", "Cargo.lock"])(file, ctx) if (crateRoot === undefined) { return undefined } @@ -834,7 +834,7 @@ export const RustAnalyzer: Info = { currentDir = parentDir // Stop if we've gone above the app root - if (!currentDir.startsWith(Instance.worktree)) break + if (!currentDir.startsWith(ctx.worktree)) break } return crateRoot @@ -1031,8 +1031,8 @@ export const Astro: Info = { id: "astro", extensions: [".astro"], root: NearestRoot(["package-lock.json", "bun.lockb", "bun.lock", "pnpm-lock.yaml", "yarn.lock"]), - async spawn(root) { - const tsserver = Module.resolve("typescript/lib/tsserver.js", Instance.directory) + async spawn(root, ctx) { + const tsserver = Module.resolve("typescript/lib/tsserver.js", ctx.directory) if (!tsserver) { log.info("typescript not found, required for Astro language server") return @@ -1067,7 +1067,7 @@ export const Astro: Info = { export const JDTLS: Info = { id: "jdtls", - root: async (file) => { + root: async (file, ctx) 
=> { // Without exclusions, NearestRoot defaults to instance directory so we can't // distinguish between a) no project found and b) project found at instance dir. // So we can't choose the root from (potential) monorepo markers first. @@ -1077,12 +1077,12 @@ export const JDTLS: Info = { const exclusionsForMonorepos = gradleMarkers.concat(settingsMarkers) const [projectRoot, wrapperRoot, settingsRoot] = await Promise.all([ - NearestRoot( - ["pom.xml", "build.gradle", "build.gradle.kts", ".project", ".classpath"], - exclusionsForMonorepos, - )(file), - NearestRoot(gradleMarkers, settingsMarkers)(file), - NearestRoot(settingsMarkers)(file), + NearestRoot(["pom.xml", "build.gradle", "build.gradle.kts", ".project", ".classpath"], exclusionsForMonorepos)( + file, + ctx, + ), + NearestRoot(gradleMarkers, settingsMarkers)(file, ctx), + NearestRoot(settingsMarkers)(file, ctx), ]) // If projectRoot is undefined we know we are in a monorepo or no project at all. @@ -1189,18 +1189,18 @@ export const JDTLS: Info = { export const KotlinLS: Info = { id: "kotlin-ls", extensions: [".kt", ".kts"], - root: async (file) => { + root: async (file, ctx) => { // 1) Nearest Gradle root (multi-project or included build) - const settingsRoot = await NearestRoot(["settings.gradle.kts", "settings.gradle"])(file) + const settingsRoot = await NearestRoot(["settings.gradle.kts", "settings.gradle"])(file, ctx) if (settingsRoot) return settingsRoot // 2) Gradle wrapper (strong root signal) - const wrapperRoot = await NearestRoot(["gradlew", "gradlew.bat"])(file) + const wrapperRoot = await NearestRoot(["gradlew", "gradlew.bat"])(file, ctx) if (wrapperRoot) return wrapperRoot // 3) Single-project or module-level build - const buildRoot = await NearestRoot(["build.gradle.kts", "build.gradle"])(file) + const buildRoot = await NearestRoot(["build.gradle.kts", "build.gradle"])(file, ctx) if (buildRoot) return buildRoot // 4) Maven fallback - return NearestRoot(["pom.xml"])(file) + return 
NearestRoot(["pom.xml"])(file, ctx) }, async spawn(root) { const distPath = path.join(Global.Path.bin, "kotlin-ls") @@ -1539,7 +1539,7 @@ export const Ocaml: Info = { export const BashLS: Info = { id: "bash", extensions: [".sh", ".bash", ".zsh", ".ksh"], - root: async () => Instance.directory, + root: async (_file, ctx) => ctx.directory, async spawn(root) { let binary = which("bash-language-server") const args: string[] = [] @@ -1734,7 +1734,7 @@ export const TexLab: Info = { export const DockerfileLS: Info = { id: "dockerfile", extensions: [".dockerfile", "Dockerfile"], - root: async () => Instance.directory, + root: async (_file, ctx) => ctx.directory, async spawn(root) { let binary = which("docker-langserver") const args: string[] = [] @@ -1799,16 +1799,16 @@ export const Clojure: Info = { export const Nixd: Info = { id: "nixd", extensions: [".nix"], - root: async (file) => { + root: async (file, ctx) => { // First, look for flake.nix - the most reliable Nix project root indicator - const flakeRoot = await NearestRoot(["flake.nix"])(file) - if (flakeRoot && flakeRoot !== Instance.directory) return flakeRoot + const flakeRoot = await NearestRoot(["flake.nix"])(file, ctx) + if (flakeRoot && flakeRoot !== ctx.directory) return flakeRoot // If no flake.nix, fall back to git repository root - if (Instance.worktree && Instance.worktree !== Instance.directory) return Instance.worktree + if (ctx.worktree && ctx.worktree !== ctx.directory) return ctx.worktree // Finally, use the instance directory as fallback - return Instance.directory + return ctx.directory }, async spawn(root) { const nixd = which("nixd") diff --git a/packages/opencode/src/mcp/index.ts b/packages/opencode/src/mcp/index.ts index ba53e7c0b5..09fcfc756a 100644 --- a/packages/opencode/src/mcp/index.ts +++ b/packages/opencode/src/mcp/index.ts @@ -14,7 +14,6 @@ import { ConfigMCP } from "../config/mcp" import { Log } from "../util" import { NamedError } from "@opencode-ai/shared/util/error" import z from 
"zod/v4" -import { Instance } from "../project/instance" import { Installation } from "../installation" import { InstallationVersion } from "../installation/version" import { withTimeout } from "@/util/timeout" @@ -391,7 +390,7 @@ export const layer = Layer.effect( mcp: ConfigMCP.Info & { type: "local" }, ) { const [cmd, ...args] = mcp.command - const cwd = Instance.directory + const cwd = yield* InstanceState.directory const transport = new StdioClientTransport({ stderr: "pipe", command: cmd, diff --git a/packages/opencode/src/npm/index.ts b/packages/opencode/src/npm/index.ts index 425b27f420..d92099bc3c 100644 --- a/packages/opencode/src/npm/index.ts +++ b/packages/opencode/src/npm/index.ts @@ -1,198 +1,277 @@ -import semver from "semver" -import z from "zod" -import { NamedError } from "@opencode-ai/shared/util/error" -import { Global } from "../global" -import { Log } from "../util" +export * as Npm from "." + import path from "path" -import { readdir, rm } from "fs/promises" -import { Filesystem } from "@/util" -import { Flock } from "@opencode-ai/shared/util/flock" +import semver from "semver" +import { Effect, Schema, Context, Layer, Option, FileSystem } from "effect" +import { NodeFileSystem } from "@effect/platform-node" +import { AppFileSystem } from "@opencode-ai/shared/filesystem" +import { Global } from "@opencode-ai/shared/global" +import { EffectFlock } from "@opencode-ai/shared/util/effect-flock" + +import { makeRuntime } from "../effect/runtime" + +export class InstallFailedError extends Schema.TaggedErrorClass()("NpmInstallFailedError", { + add: Schema.Array(Schema.String).pipe(Schema.optional), + dir: Schema.String, + cause: Schema.optional(Schema.Defect), +}) {} + +export interface EntryPoint { + readonly directory: string + readonly entrypoint: Option.Option +} + +export interface Interface { + readonly add: (pkg: string) => Effect.Effect + readonly install: ( + dir: string, + input?: { + add: { + name: string + version?: string + }[] + }, + ) 
=> Effect.Effect + readonly outdated: (pkg: string, cachedVersion: string) => Effect.Effect + readonly which: (pkg: string) => Effect.Effect> +} + +export class Service extends Context.Service()("@opencode/Npm") {} -const log = Log.create({ service: "npm" }) const illegal = process.platform === "win32" ? new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined -export const InstallFailedError = NamedError.create( - "NpmInstallFailedError", - z.object({ - pkg: z.string(), - }), -) - export function sanitize(pkg: string) { if (!illegal) return pkg return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? "_" : char)).join("") } -function directory(pkg: string) { - return path.join(Global.Path.cache, "packages", sanitize(pkg)) -} - -function resolveEntryPoint(name: string, dir: string) { - let entrypoint: string | undefined +const resolveEntryPoint = (name: string, dir: string): EntryPoint => { + let entrypoint: Option.Option try { - entrypoint = typeof Bun !== "undefined" ? import.meta.resolve(name, dir) : import.meta.resolve(dir) - } catch {} - const result = { + const resolved = typeof Bun !== "undefined" ? 
import.meta.resolve(name, dir) : import.meta.resolve(dir) + entrypoint = Option.some(resolved) + } catch { + entrypoint = Option.none() + } + return { directory: dir, entrypoint, } - return result } -export async function outdated(pkg: string, cachedVersion: string): Promise { - const response = await fetch(`https://registry.npmjs.org/${pkg}`) - if (!response.ok) { - log.warn("Failed to resolve latest version, using cached", { pkg, cachedVersion }) - return false - } - - const data = (await response.json()) as { "dist-tags"?: { latest?: string } } - const latestVersion = data?.["dist-tags"]?.latest - if (!latestVersion) { - log.warn("No latest version found, using cached", { pkg, cachedVersion }) - return false - } - - const range = /[\s^~*xX<>|=]/.test(cachedVersion) - if (range) return !semver.satisfies(latestVersion, cachedVersion) - - return semver.lt(cachedVersion, latestVersion) +interface ArboristNode { + name: string + path: string } -export async function add(pkg: string) { - const { Arborist } = await import("@npmcli/arborist") - const dir = directory(pkg) - await using _ = await Flock.acquire(`npm-install:${Filesystem.resolve(dir)}`) - log.info("installing package", { - pkg, - }) +interface ArboristTree { + edgesOut: Map +} - const arborist = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - const tree = await arborist.loadVirtual().catch(() => {}) - if (tree) { - const first = tree.edgesOut.values().next().value?.to - if (first) { - return resolveEntryPoint(first.name, first.path) - } - } +export const layer = Layer.effect( + Service, + Effect.gen(function* () { + const afs = yield* AppFileSystem.Service + const global = yield* Global.Service + const fs = yield* FileSystem.FileSystem + const flock = yield* EffectFlock.Service + const directory = (pkg: string) => path.join(global.cache, "packages", sanitize(pkg)) + const reify = (input: { dir: string; add?: string[] }) => + 
Effect.gen(function* () { + yield* flock.acquire(`npm-install:${input.dir}`) + const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) + const arborist = new Arborist({ + path: input.dir, + binLinks: true, + progress: false, + savePrefix: "", + ignoreScripts: true, + }) + return yield* Effect.tryPromise({ + try: () => + arborist.reify({ + add: input?.add || [], + save: true, + saveType: "prod", + }), + catch: (cause) => + new InstallFailedError({ + cause, + add: input?.add, + dir: input.dir, + }), + }) as Effect.Effect + }).pipe( + Effect.withSpan("Npm.reify", { + attributes: input, + }), + ) - const result = await arborist - .reify({ - add: [pkg], - save: true, - saveType: "prod", + const outdated = Effect.fn("Npm.outdated")(function* (pkg: string, cachedVersion: string) { + const response = yield* Effect.tryPromise({ + try: () => fetch(`https://registry.npmjs.org/${pkg}`), + catch: () => undefined, + }).pipe(Effect.orElseSucceed(() => undefined)) + + if (!response || !response.ok) { + return false + } + + const data = yield* Effect.tryPromise({ + try: () => response.json() as Promise<{ "dist-tags"?: { latest?: string } }>, + catch: () => undefined, + }).pipe(Effect.orElseSucceed(() => undefined)) + + const latestVersion = data?.["dist-tags"]?.latest + if (!latestVersion) { + return false + } + + const range = /[\s^~*xX<>|=]/.test(cachedVersion) + if (range) return !semver.satisfies(latestVersion, cachedVersion) + + return semver.lt(cachedVersion, latestVersion) }) - .catch((cause) => { - throw new InstallFailedError( - { pkg }, - { - cause, - }, + + const add = Effect.fn("Npm.add")(function* (pkg: string) { + const dir = directory(pkg) + + const tree = yield* reify({ dir, add: [pkg] }) + const first = tree.edgesOut.values().next().value?.to + if (!first) return yield* new InstallFailedError({ add: [pkg], dir }) + return resolveEntryPoint(first.name, first.path) + }, Effect.scoped) + + const install: Interface["install"] = 
Effect.fn("Npm.install")(function* (dir, input) { + const canWrite = yield* afs.access(dir, { writable: true }).pipe( + Effect.as(true), + Effect.orElseSucceed(() => false), + ) + if (!canWrite) return + + const add = input?.add.map((pkg) => [pkg.name, pkg.version].filter(Boolean).join("@")) ?? [] + yield* Effect.gen(function* () { + const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) + if (!nodeModulesExists) { + yield* reify({ add, dir }) + return + } + }).pipe(Effect.withSpan("Npm.checkNodeModules")) + + yield* Effect.gen(function* () { + const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) + const lock = yield* afs.readJson(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) + + const pkgAny = pkg as any + const lockAny = lock as any + const declared = new Set([ + ...Object.keys(pkgAny?.dependencies || {}), + ...Object.keys(pkgAny?.devDependencies || {}), + ...Object.keys(pkgAny?.peerDependencies || {}), + ...Object.keys(pkgAny?.optionalDependencies || {}), + ...(input?.add || []).map((pkg) => pkg.name), + ]) + + const root = lockAny?.packages?.[""] || {} + const locked = new Set([ + ...Object.keys(root?.dependencies || {}), + ...Object.keys(root?.devDependencies || {}), + ...Object.keys(root?.peerDependencies || {}), + ...Object.keys(root?.optionalDependencies || {}), + ]) + + for (const name of declared) { + if (!locked.has(name)) { + yield* reify({ dir, add }) + return + } + } + }).pipe(Effect.withSpan("Npm.checkDirty")) + + return + }, Effect.scoped) + + const which = Effect.fn("Npm.which")(function* (pkg: string) { + const dir = directory(pkg) + const binDir = path.join(dir, "node_modules", ".bin") + + const pick = Effect.fnUntraced(function* () { + const files = yield* fs.readDirectory(binDir).pipe(Effect.catch(() => Effect.succeed([] as string[]))) + + if (files.length === 0) return Option.none() + if (files.length === 1) return Option.some(files[0]) + + 
const pkgJson = yield* afs.readJson(path.join(dir, "node_modules", pkg, "package.json")).pipe(Effect.option) + + if (Option.isSome(pkgJson)) { + const parsed = pkgJson.value as { bin?: string | Record } + if (parsed?.bin) { + const unscoped = pkg.startsWith("@") ? pkg.split("/")[1] : pkg + const bin = parsed.bin + if (typeof bin === "string") return Option.some(unscoped) + const keys = Object.keys(bin) + if (keys.length === 1) return Option.some(keys[0]) + return bin[unscoped] ? Option.some(unscoped) : Option.some(keys[0]) + } + } + + return Option.some(files[0]) + }) + + return yield* Effect.gen(function* () { + const bin = yield* pick() + if (Option.isSome(bin)) { + return Option.some(path.join(binDir, bin.value)) + } + + yield* fs.remove(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => {})) + + yield* add(pkg) + + const resolved = yield* pick() + if (Option.isNone(resolved)) return Option.none() + return Option.some(path.join(binDir, resolved.value)) + }).pipe( + Effect.scoped, + Effect.orElseSucceed(() => Option.none()), ) }) - const first = result.edgesOut.values().next().value?.to - if (!first) throw new InstallFailedError({ pkg }) - return resolveEntryPoint(first.name, first.path) -} - -export async function install(dir: string) { - await using _ = await Flock.acquire(`npm-install:${dir}`) - log.info("checking dependencies", { dir }) - - const reify = async () => { - const { Arborist } = await import("@npmcli/arborist") - const arb = new Arborist({ - path: dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, + return Service.of({ + add, + install, + outdated, + which, }) - await arb.reify().catch(() => {}) - } + }), +) - if (!(await Filesystem.exists(path.join(dir, "node_modules")))) { - log.info("node_modules missing, reifying") - await reify() - return - } +export const defaultLayer = layer.pipe( + Layer.provide(EffectFlock.layer), + Layer.provide(AppFileSystem.layer), + Layer.provide(Global.layer), + 
Layer.provide(NodeFileSystem.layer), +) - type PackageDeps = Record - type PackageJson = { - dependencies?: PackageDeps - devDependencies?: PackageDeps - peerDependencies?: PackageDeps - optionalDependencies?: PackageDeps - } - const pkg: PackageJson = await Filesystem.readJson(path.join(dir, "package.json")).catch(() => ({})) - const lock: { packages?: Record } = await Filesystem.readJson<{ - packages?: Record - }>(path.join(dir, "package-lock.json")).catch(() => ({})) +const { runPromise } = makeRuntime(Service, defaultLayer) - const declared = new Set([ - ...Object.keys(pkg.dependencies || {}), - ...Object.keys(pkg.devDependencies || {}), - ...Object.keys(pkg.peerDependencies || {}), - ...Object.keys(pkg.optionalDependencies || {}), - ]) - - const root = lock.packages?.[""] || {} - const locked = new Set([ - ...Object.keys(root.dependencies || {}), - ...Object.keys(root.devDependencies || {}), - ...Object.keys(root.peerDependencies || {}), - ...Object.keys(root.optionalDependencies || {}), - ]) - - for (const name of declared) { - if (!locked.has(name)) { - log.info("dependency not in lock file, reifying", { name }) - await reify() - return - } - } - - log.info("dependencies in sync") +export async function install(...args: Parameters) { + return runPromise((svc) => svc.install(...args)) } -export async function which(pkg: string) { - const dir = directory(pkg) - const binDir = path.join(dir, "node_modules", ".bin") - - const pick = async () => { - const files = await readdir(binDir).catch(() => []) - if (files.length === 0) return undefined - if (files.length === 1) return files[0] - // Multiple binaries — resolve from package.json bin field like npx does - const pkgJson = await Filesystem.readJson<{ bin?: string | Record }>( - path.join(dir, "node_modules", pkg, "package.json"), - ).catch(() => undefined) - if (pkgJson?.bin) { - const unscoped = pkg.startsWith("@") ? 
pkg.split("/")[1] : pkg - const bin = pkgJson.bin - if (typeof bin === "string") return unscoped - const keys = Object.keys(bin) - if (keys.length === 1) return keys[0] - return bin[unscoped] ? unscoped : keys[0] - } - return files[0] +export async function add(...args: Parameters) { + const entry = await runPromise((svc) => svc.add(...args)) + return { + directory: entry.directory, + entrypoint: Option.getOrUndefined(entry.entrypoint), } - - const bin = await pick() - if (bin) return path.join(binDir, bin) - - await rm(path.join(dir, "package-lock.json"), { force: true }) - await add(pkg) - const resolved = await pick() - if (!resolved) return - return path.join(binDir, resolved) } -export * as Npm from "." +export async function outdated(...args: Parameters) { + return runPromise((svc) => svc.outdated(...args)) +} + +export async function which(...args: Parameters) { + const resolved = await runPromise((svc) => svc.which(...args)) + return Option.getOrUndefined(resolved) +} diff --git a/packages/opencode/src/patch/index.ts b/packages/opencode/src/patch/index.ts index cec24614d8..19e1d7555b 100644 --- a/packages/opencode/src/patch/index.ts +++ b/packages/opencode/src/patch/index.ts @@ -1 +1,680 @@ -export * as Patch from "./patch" +import z from "zod" +import * as path from "path" +import * as fs from "fs/promises" +import { readFileSync } from "fs" +import { Log } from "../util" + +const log = Log.create({ service: "patch" }) + +// Schema definitions +export const PatchSchema = z.object({ + patchText: z.string().describe("The full patch text that describes all changes to be made"), +}) + +export type PatchParams = z.infer + +// Core types matching the Rust implementation +export interface ApplyPatchArgs { + patch: string + hunks: Hunk[] + workdir?: string +} + +export type Hunk = + | { type: "add"; path: string; contents: string } + | { type: "delete"; path: string } + | { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] } + +export 
interface UpdateFileChunk { + old_lines: string[] + new_lines: string[] + change_context?: string + is_end_of_file?: boolean +} + +export interface ApplyPatchAction { + changes: Map + patch: string + cwd: string +} + +export type ApplyPatchFileChange = + | { type: "add"; content: string } + | { type: "delete"; content: string } + | { type: "update"; unified_diff: string; move_path?: string; new_content: string } + +export interface AffectedPaths { + added: string[] + modified: string[] + deleted: string[] +} + +export enum ApplyPatchError { + ParseError = "ParseError", + IoError = "IoError", + ComputeReplacements = "ComputeReplacements", + ImplicitInvocation = "ImplicitInvocation", +} + +export enum MaybeApplyPatch { + Body = "Body", + ShellParseError = "ShellParseError", + PatchParseError = "PatchParseError", + NotApplyPatch = "NotApplyPatch", +} + +export enum MaybeApplyPatchVerified { + Body = "Body", + ShellParseError = "ShellParseError", + CorrectnessError = "CorrectnessError", + NotApplyPatch = "NotApplyPatch", +} + +// Parser implementation +function parsePatchHeader( + lines: string[], + startIdx: number, +): { filePath: string; movePath?: string; nextIdx: number } | null { + const line = lines[startIdx] + + if (line.startsWith("*** Add File:")) { + const filePath = line.slice("*** Add File:".length).trim() + return filePath ? { filePath, nextIdx: startIdx + 1 } : null + } + + if (line.startsWith("*** Delete File:")) { + const filePath = line.slice("*** Delete File:".length).trim() + return filePath ? { filePath, nextIdx: startIdx + 1 } : null + } + + if (line.startsWith("*** Update File:")) { + const filePath = line.slice("*** Update File:".length).trim() + let movePath: string | undefined + let nextIdx = startIdx + 1 + + // Check for move directive + if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) { + movePath = lines[nextIdx].slice("*** Move to:".length).trim() + nextIdx++ + } + + return filePath ? 
{ filePath, movePath, nextIdx } : null + } + + return null +} + +function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } { + const chunks: UpdateFileChunk[] = [] + let i = startIdx + + while (i < lines.length && !lines[i].startsWith("***")) { + if (lines[i].startsWith("@@")) { + // Parse context line + const contextLine = lines[i].substring(2).trim() + i++ + + const oldLines: string[] = [] + const newLines: string[] = [] + let isEndOfFile = false + + // Parse change lines + while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) { + const changeLine = lines[i] + + if (changeLine === "*** End of File") { + isEndOfFile = true + i++ + break + } + + if (changeLine.startsWith(" ")) { + // Keep line - appears in both old and new + const content = changeLine.substring(1) + oldLines.push(content) + newLines.push(content) + } else if (changeLine.startsWith("-")) { + // Remove line - only in old + oldLines.push(changeLine.substring(1)) + } else if (changeLine.startsWith("+")) { + // Add line - only in new + newLines.push(changeLine.substring(1)) + } + + i++ + } + + chunks.push({ + old_lines: oldLines, + new_lines: newLines, + change_context: contextLine || undefined, + is_end_of_file: isEndOfFile || undefined, + }) + } else { + i++ + } + } + + return { chunks, nextIdx: i } +} + +function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } { + let content = "" + let i = startIdx + + while (i < lines.length && !lines[i].startsWith("***")) { + if (lines[i].startsWith("+")) { + content += lines[i].substring(1) + "\n" + } + i++ + } + + // Remove trailing newline + if (content.endsWith("\n")) { + content = content.slice(0, -1) + } + + return { content, nextIdx: i } +} + +function stripHeredoc(input: string): string { + // Match heredoc patterns like: cat <<'EOF'\n...\nEOF or < line.trim() === beginMarker) + const endIdx = lines.findIndex((line) => 
line.trim() === endMarker) + + if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) { + throw new Error("Invalid patch format: missing Begin/End markers") + } + + // Parse content between markers + i = beginIdx + 1 + + while (i < endIdx) { + const header = parsePatchHeader(lines, i) + if (!header) { + i++ + continue + } + + if (lines[i].startsWith("*** Add File:")) { + const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx) + hunks.push({ + type: "add", + path: header.filePath, + contents: content, + }) + i = nextIdx + } else if (lines[i].startsWith("*** Delete File:")) { + hunks.push({ + type: "delete", + path: header.filePath, + }) + i = header.nextIdx + } else if (lines[i].startsWith("*** Update File:")) { + const { chunks, nextIdx } = parseUpdateFileChunks(lines, header.nextIdx) + hunks.push({ + type: "update", + path: header.filePath, + move_path: header.movePath, + chunks, + }) + i = nextIdx + } else { + i++ + } + } + + return { hunks } +} + +// Apply patch functionality +export function maybeParseApplyPatch( + argv: string[], +): + | { type: MaybeApplyPatch.Body; args: ApplyPatchArgs } + | { type: MaybeApplyPatch.PatchParseError; error: Error } + | { type: MaybeApplyPatch.NotApplyPatch } { + const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"] + + // Direct invocation: apply_patch + if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) { + try { + const { hunks } = parsePatch(argv[1]) + return { + type: MaybeApplyPatch.Body, + args: { + patch: argv[1], + hunks, + }, + } + } catch (error) { + return { + type: MaybeApplyPatch.PatchParseError, + error: error as Error, + } + } + } + + // Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...' 
+ if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") { + // Simple extraction - in real implementation would need proper bash parsing + const script = argv[2] + const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/) + + if (heredocMatch) { + const patchContent = heredocMatch[2] + try { + const { hunks } = parsePatch(patchContent) + return { + type: MaybeApplyPatch.Body, + args: { + patch: patchContent, + hunks, + }, + } + } catch (error) { + return { + type: MaybeApplyPatch.PatchParseError, + error: error as Error, + } + } + } + } + + return { type: MaybeApplyPatch.NotApplyPatch } +} + +// File content manipulation +interface ApplyPatchFileUpdate { + unified_diff: string + content: string +} + +export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate { + // Read original file content + let originalContent: string + try { + originalContent = readFileSync(filePath, "utf-8") + } catch (error) { + throw new Error(`Failed to read file ${filePath}: ${error}`, { cause: error }) + } + + let originalLines = originalContent.split("\n") + + // Drop trailing empty element for consistent line counting + if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") { + originalLines.pop() + } + + const replacements = computeReplacements(originalLines, filePath, chunks) + let newLines = applyReplacements(originalLines, replacements) + + // Ensure trailing newline + if (newLines.length === 0 || newLines[newLines.length - 1] !== "") { + newLines.push("") + } + + const newContent = newLines.join("\n") + + // Generate unified diff + const unifiedDiff = generateUnifiedDiff(originalContent, newContent) + + return { + unified_diff: unifiedDiff, + content: newContent, + } +} + +function computeReplacements( + originalLines: string[], + filePath: string, + chunks: UpdateFileChunk[], +): Array<[number, number, string[]]> { + const replacements: Array<[number, number, 
string[]]> = [] + let lineIndex = 0 + + for (const chunk of chunks) { + // Handle context-based seeking + if (chunk.change_context) { + const contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex) + if (contextIdx === -1) { + throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`) + } + lineIndex = contextIdx + 1 + } + + // Handle pure addition (no old lines) + if (chunk.old_lines.length === 0) { + const insertionIdx = + originalLines.length > 0 && originalLines[originalLines.length - 1] === "" + ? originalLines.length - 1 + : originalLines.length + replacements.push([insertionIdx, 0, chunk.new_lines]) + continue + } + + // Try to match old lines in the file + let pattern = chunk.old_lines + let newSlice = chunk.new_lines + let found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) + + // Retry without trailing empty line if not found + if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") { + pattern = pattern.slice(0, -1) + if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") { + newSlice = newSlice.slice(0, -1) + } + found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) + } + + if (found !== -1) { + replacements.push([found, pattern.length, newSlice]) + lineIndex = found + pattern.length + } else { + throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`) + } + } + + // Sort replacements by index to apply in order + replacements.sort((a, b) => a[0] - b[0]) + + return replacements +} + +function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] { + // Apply replacements in reverse order to avoid index shifting + const result = [...lines] + + for (let i = replacements.length - 1; i >= 0; i--) { + const [startIdx, oldLen, newSegment] = replacements[i] + + // Remove old lines + result.splice(startIdx, oldLen) + + // Insert new lines + for (let j = 0; j < 
newSegment.length; j++) { + result.splice(startIdx + j, 0, newSegment[j]) + } + } + + return result +} + +// Normalize Unicode punctuation to ASCII equivalents (like Rust's normalize_unicode) +function normalizeUnicode(str: string): string { + return str + .replace(/[\u2018\u2019\u201A\u201B]/g, "'") // single quotes + .replace(/[\u201C\u201D\u201E\u201F]/g, '"') // double quotes + .replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, "-") // dashes + .replace(/\u2026/g, "...") // ellipsis + .replace(/\u00A0/g, " ") // non-breaking space +} + +type Comparator = (a: string, b: string) => boolean + +function tryMatch(lines: string[], pattern: string[], startIndex: number, compare: Comparator, eof: boolean): number { + // If EOF anchor, try matching from end of file first + if (eof) { + const fromEnd = lines.length - pattern.length + if (fromEnd >= startIndex) { + let matches = true + for (let j = 0; j < pattern.length; j++) { + if (!compare(lines[fromEnd + j], pattern[j])) { + matches = false + break + } + } + if (matches) return fromEnd + } + } + + // Forward search from startIndex + for (let i = startIndex; i <= lines.length - pattern.length; i++) { + let matches = true + for (let j = 0; j < pattern.length; j++) { + if (!compare(lines[i + j], pattern[j])) { + matches = false + break + } + } + if (matches) return i + } + + return -1 +} + +function seekSequence(lines: string[], pattern: string[], startIndex: number, eof = false): number { + if (pattern.length === 0) return -1 + + // Pass 1: exact match + const exact = tryMatch(lines, pattern, startIndex, (a, b) => a === b, eof) + if (exact !== -1) return exact + + // Pass 2: rstrip (trim trailing whitespace) + const rstrip = tryMatch(lines, pattern, startIndex, (a, b) => a.trimEnd() === b.trimEnd(), eof) + if (rstrip !== -1) return rstrip + + // Pass 3: trim (both ends) + const trim = tryMatch(lines, pattern, startIndex, (a, b) => a.trim() === b.trim(), eof) + if (trim !== -1) return trim + + // Pass 4: normalized 
(Unicode punctuation to ASCII) + const normalized = tryMatch( + lines, + pattern, + startIndex, + (a, b) => normalizeUnicode(a.trim()) === normalizeUnicode(b.trim()), + eof, + ) + return normalized +} + +function generateUnifiedDiff(oldContent: string, newContent: string): string { + const oldLines = oldContent.split("\n") + const newLines = newContent.split("\n") + + // Simple diff generation - in a real implementation you'd use a proper diff algorithm + let diff = "@@ -1 +1 @@\n" + + // Find changes (simplified approach) + const maxLen = Math.max(oldLines.length, newLines.length) + let hasChanges = false + + for (let i = 0; i < maxLen; i++) { + const oldLine = oldLines[i] || "" + const newLine = newLines[i] || "" + + if (oldLine !== newLine) { + if (oldLine) diff += `-${oldLine}\n` + if (newLine) diff += `+${newLine}\n` + hasChanges = true + } else if (oldLine) { + diff += ` ${oldLine}\n` + } + } + + return hasChanges ? diff : "" +} + +// Apply hunks to filesystem +export async function applyHunksToFiles(hunks: Hunk[]): Promise { + if (hunks.length === 0) { + throw new Error("No files were modified.") + } + + const added: string[] = [] + const modified: string[] = [] + const deleted: string[] = [] + + for (const hunk of hunks) { + switch (hunk.type) { + case "add": + // Create parent directories + const addDir = path.dirname(hunk.path) + if (addDir !== "." && addDir !== "/") { + await fs.mkdir(addDir, { recursive: true }) + } + + await fs.writeFile(hunk.path, hunk.contents, "utf-8") + added.push(hunk.path) + log.info(`Added file: ${hunk.path}`) + break + + case "delete": + await fs.unlink(hunk.path) + deleted.push(hunk.path) + log.info(`Deleted file: ${hunk.path}`) + break + + case "update": + const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks) + + if (hunk.move_path) { + // Handle file move + const moveDir = path.dirname(hunk.move_path) + if (moveDir !== "." 
&& moveDir !== "/") { + await fs.mkdir(moveDir, { recursive: true }) + } + + await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8") + await fs.unlink(hunk.path) + modified.push(hunk.move_path) + log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`) + } else { + // Regular update + await fs.writeFile(hunk.path, fileUpdate.content, "utf-8") + modified.push(hunk.path) + log.info(`Updated file: ${hunk.path}`) + } + break + } + } + + return { added, modified, deleted } +} + +// Main patch application function +export async function applyPatch(patchText: string): Promise { + const { hunks } = parsePatch(patchText) + return applyHunksToFiles(hunks) +} + +// Async version of maybeParseApplyPatchVerified +export async function maybeParseApplyPatchVerified( + argv: string[], + cwd: string, +): Promise< + | { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction } + | { type: MaybeApplyPatchVerified.CorrectnessError; error: Error } + | { type: MaybeApplyPatchVerified.NotApplyPatch } +> { + // Detect implicit patch invocation (raw patch without apply_patch command) + if (argv.length === 1) { + try { + parsePatch(argv[0]) + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: new Error(ApplyPatchError.ImplicitInvocation), + } + } catch { + // Not a patch, continue + } + } + + const result = maybeParseApplyPatch(argv) + + switch (result.type) { + case MaybeApplyPatch.Body: + const { args } = result + const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd + const changes = new Map() + + for (const hunk of args.hunks) { + const resolvedPath = path.resolve( + effectiveCwd, + hunk.type === "update" && hunk.move_path ? 
hunk.move_path : hunk.path, + ) + + switch (hunk.type) { + case "add": + changes.set(resolvedPath, { + type: "add", + content: hunk.contents, + }) + break + + case "delete": + // For delete, we need to read the current content + const deletePath = path.resolve(effectiveCwd, hunk.path) + try { + const content = await fs.readFile(deletePath, "utf-8") + changes.set(resolvedPath, { + type: "delete", + content, + }) + } catch { + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: new Error(`Failed to read file for deletion: ${deletePath}`), + } + } + break + + case "update": + const updatePath = path.resolve(effectiveCwd, hunk.path) + try { + const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks) + changes.set(resolvedPath, { + type: "update", + unified_diff: fileUpdate.unified_diff, + move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined, + new_content: fileUpdate.content, + }) + } catch (error) { + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: error as Error, + } + } + break + } + } + + return { + type: MaybeApplyPatchVerified.Body, + action: { + changes, + patch: args.patch, + cwd: effectiveCwd, + }, + } + + case MaybeApplyPatch.PatchParseError: + return { + type: MaybeApplyPatchVerified.CorrectnessError, + error: result.error, + } + + case MaybeApplyPatch.NotApplyPatch: + return { type: MaybeApplyPatchVerified.NotApplyPatch } + } +} + +export * as Patch from "." 
diff --git a/packages/opencode/src/patch/patch.ts b/packages/opencode/src/patch/patch.ts deleted file mode 100644 index 1dc99b4da9..0000000000 --- a/packages/opencode/src/patch/patch.ts +++ /dev/null @@ -1,678 +0,0 @@ -import z from "zod" -import * as path from "path" -import * as fs from "fs/promises" -import { readFileSync } from "fs" -import { Log } from "../util" - -const log = Log.create({ service: "patch" }) - -// Schema definitions -export const PatchSchema = z.object({ - patchText: z.string().describe("The full patch text that describes all changes to be made"), -}) - -export type PatchParams = z.infer - -// Core types matching the Rust implementation -export interface ApplyPatchArgs { - patch: string - hunks: Hunk[] - workdir?: string -} - -export type Hunk = - | { type: "add"; path: string; contents: string } - | { type: "delete"; path: string } - | { type: "update"; path: string; move_path?: string; chunks: UpdateFileChunk[] } - -export interface UpdateFileChunk { - old_lines: string[] - new_lines: string[] - change_context?: string - is_end_of_file?: boolean -} - -export interface ApplyPatchAction { - changes: Map - patch: string - cwd: string -} - -export type ApplyPatchFileChange = - | { type: "add"; content: string } - | { type: "delete"; content: string } - | { type: "update"; unified_diff: string; move_path?: string; new_content: string } - -export interface AffectedPaths { - added: string[] - modified: string[] - deleted: string[] -} - -export enum ApplyPatchError { - ParseError = "ParseError", - IoError = "IoError", - ComputeReplacements = "ComputeReplacements", - ImplicitInvocation = "ImplicitInvocation", -} - -export enum MaybeApplyPatch { - Body = "Body", - ShellParseError = "ShellParseError", - PatchParseError = "PatchParseError", - NotApplyPatch = "NotApplyPatch", -} - -export enum MaybeApplyPatchVerified { - Body = "Body", - ShellParseError = "ShellParseError", - CorrectnessError = "CorrectnessError", - NotApplyPatch = "NotApplyPatch", -} - 
-// Parser implementation -function parsePatchHeader( - lines: string[], - startIdx: number, -): { filePath: string; movePath?: string; nextIdx: number } | null { - const line = lines[startIdx] - - if (line.startsWith("*** Add File:")) { - const filePath = line.slice("*** Add File:".length).trim() - return filePath ? { filePath, nextIdx: startIdx + 1 } : null - } - - if (line.startsWith("*** Delete File:")) { - const filePath = line.slice("*** Delete File:".length).trim() - return filePath ? { filePath, nextIdx: startIdx + 1 } : null - } - - if (line.startsWith("*** Update File:")) { - const filePath = line.slice("*** Update File:".length).trim() - let movePath: string | undefined - let nextIdx = startIdx + 1 - - // Check for move directive - if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) { - movePath = lines[nextIdx].slice("*** Move to:".length).trim() - nextIdx++ - } - - return filePath ? { filePath, movePath, nextIdx } : null - } - - return null -} - -function parseUpdateFileChunks(lines: string[], startIdx: number): { chunks: UpdateFileChunk[]; nextIdx: number } { - const chunks: UpdateFileChunk[] = [] - let i = startIdx - - while (i < lines.length && !lines[i].startsWith("***")) { - if (lines[i].startsWith("@@")) { - // Parse context line - const contextLine = lines[i].substring(2).trim() - i++ - - const oldLines: string[] = [] - const newLines: string[] = [] - let isEndOfFile = false - - // Parse change lines - while (i < lines.length && !lines[i].startsWith("@@") && !lines[i].startsWith("***")) { - const changeLine = lines[i] - - if (changeLine === "*** End of File") { - isEndOfFile = true - i++ - break - } - - if (changeLine.startsWith(" ")) { - // Keep line - appears in both old and new - const content = changeLine.substring(1) - oldLines.push(content) - newLines.push(content) - } else if (changeLine.startsWith("-")) { - // Remove line - only in old - oldLines.push(changeLine.substring(1)) - } else if (changeLine.startsWith("+")) 
{ - // Add line - only in new - newLines.push(changeLine.substring(1)) - } - - i++ - } - - chunks.push({ - old_lines: oldLines, - new_lines: newLines, - change_context: contextLine || undefined, - is_end_of_file: isEndOfFile || undefined, - }) - } else { - i++ - } - } - - return { chunks, nextIdx: i } -} - -function parseAddFileContent(lines: string[], startIdx: number): { content: string; nextIdx: number } { - let content = "" - let i = startIdx - - while (i < lines.length && !lines[i].startsWith("***")) { - if (lines[i].startsWith("+")) { - content += lines[i].substring(1) + "\n" - } - i++ - } - - // Remove trailing newline - if (content.endsWith("\n")) { - content = content.slice(0, -1) - } - - return { content, nextIdx: i } -} - -function stripHeredoc(input: string): string { - // Match heredoc patterns like: cat <<'EOF'\n...\nEOF or < line.trim() === beginMarker) - const endIdx = lines.findIndex((line) => line.trim() === endMarker) - - if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) { - throw new Error("Invalid patch format: missing Begin/End markers") - } - - // Parse content between markers - i = beginIdx + 1 - - while (i < endIdx) { - const header = parsePatchHeader(lines, i) - if (!header) { - i++ - continue - } - - if (lines[i].startsWith("*** Add File:")) { - const { content, nextIdx } = parseAddFileContent(lines, header.nextIdx) - hunks.push({ - type: "add", - path: header.filePath, - contents: content, - }) - i = nextIdx - } else if (lines[i].startsWith("*** Delete File:")) { - hunks.push({ - type: "delete", - path: header.filePath, - }) - i = header.nextIdx - } else if (lines[i].startsWith("*** Update File:")) { - const { chunks, nextIdx } = parseUpdateFileChunks(lines, header.nextIdx) - hunks.push({ - type: "update", - path: header.filePath, - move_path: header.movePath, - chunks, - }) - i = nextIdx - } else { - i++ - } - } - - return { hunks } -} - -// Apply patch functionality -export function maybeParseApplyPatch( - argv: string[], -): 
- | { type: MaybeApplyPatch.Body; args: ApplyPatchArgs } - | { type: MaybeApplyPatch.PatchParseError; error: Error } - | { type: MaybeApplyPatch.NotApplyPatch } { - const APPLY_PATCH_COMMANDS = ["apply_patch", "applypatch"] - - // Direct invocation: apply_patch - if (argv.length === 2 && APPLY_PATCH_COMMANDS.includes(argv[0])) { - try { - const { hunks } = parsePatch(argv[1]) - return { - type: MaybeApplyPatch.Body, - args: { - patch: argv[1], - hunks, - }, - } - } catch (error) { - return { - type: MaybeApplyPatch.PatchParseError, - error: error as Error, - } - } - } - - // Bash heredoc form: bash -lc 'apply_patch <<"EOF" ...' - if (argv.length === 3 && argv[0] === "bash" && argv[1] === "-lc") { - // Simple extraction - in real implementation would need proper bash parsing - const script = argv[2] - const heredocMatch = script.match(/apply_patch\s*<<['"](\w+)['"]\s*\n([\s\S]*?)\n\1/) - - if (heredocMatch) { - const patchContent = heredocMatch[2] - try { - const { hunks } = parsePatch(patchContent) - return { - type: MaybeApplyPatch.Body, - args: { - patch: patchContent, - hunks, - }, - } - } catch (error) { - return { - type: MaybeApplyPatch.PatchParseError, - error: error as Error, - } - } - } - } - - return { type: MaybeApplyPatch.NotApplyPatch } -} - -// File content manipulation -interface ApplyPatchFileUpdate { - unified_diff: string - content: string -} - -export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate { - // Read original file content - let originalContent: string - try { - originalContent = readFileSync(filePath, "utf-8") - } catch (error) { - throw new Error(`Failed to read file ${filePath}: ${error}`, { cause: error }) - } - - let originalLines = originalContent.split("\n") - - // Drop trailing empty element for consistent line counting - if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") { - originalLines.pop() - } - - const replacements = 
computeReplacements(originalLines, filePath, chunks) - let newLines = applyReplacements(originalLines, replacements) - - // Ensure trailing newline - if (newLines.length === 0 || newLines[newLines.length - 1] !== "") { - newLines.push("") - } - - const newContent = newLines.join("\n") - - // Generate unified diff - const unifiedDiff = generateUnifiedDiff(originalContent, newContent) - - return { - unified_diff: unifiedDiff, - content: newContent, - } -} - -function computeReplacements( - originalLines: string[], - filePath: string, - chunks: UpdateFileChunk[], -): Array<[number, number, string[]]> { - const replacements: Array<[number, number, string[]]> = [] - let lineIndex = 0 - - for (const chunk of chunks) { - // Handle context-based seeking - if (chunk.change_context) { - const contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex) - if (contextIdx === -1) { - throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`) - } - lineIndex = contextIdx + 1 - } - - // Handle pure addition (no old lines) - if (chunk.old_lines.length === 0) { - const insertionIdx = - originalLines.length > 0 && originalLines[originalLines.length - 1] === "" - ? 
originalLines.length - 1 - : originalLines.length - replacements.push([insertionIdx, 0, chunk.new_lines]) - continue - } - - // Try to match old lines in the file - let pattern = chunk.old_lines - let newSlice = chunk.new_lines - let found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) - - // Retry without trailing empty line if not found - if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") { - pattern = pattern.slice(0, -1) - if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") { - newSlice = newSlice.slice(0, -1) - } - found = seekSequence(originalLines, pattern, lineIndex, chunk.is_end_of_file) - } - - if (found !== -1) { - replacements.push([found, pattern.length, newSlice]) - lineIndex = found + pattern.length - } else { - throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`) - } - } - - // Sort replacements by index to apply in order - replacements.sort((a, b) => a[0] - b[0]) - - return replacements -} - -function applyReplacements(lines: string[], replacements: Array<[number, number, string[]]>): string[] { - // Apply replacements in reverse order to avoid index shifting - const result = [...lines] - - for (let i = replacements.length - 1; i >= 0; i--) { - const [startIdx, oldLen, newSegment] = replacements[i] - - // Remove old lines - result.splice(startIdx, oldLen) - - // Insert new lines - for (let j = 0; j < newSegment.length; j++) { - result.splice(startIdx + j, 0, newSegment[j]) - } - } - - return result -} - -// Normalize Unicode punctuation to ASCII equivalents (like Rust's normalize_unicode) -function normalizeUnicode(str: string): string { - return str - .replace(/[\u2018\u2019\u201A\u201B]/g, "'") // single quotes - .replace(/[\u201C\u201D\u201E\u201F]/g, '"') // double quotes - .replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, "-") // dashes - .replace(/\u2026/g, "...") // ellipsis - .replace(/\u00A0/g, " ") // non-breaking space -} - -type 
Comparator = (a: string, b: string) => boolean - -function tryMatch(lines: string[], pattern: string[], startIndex: number, compare: Comparator, eof: boolean): number { - // If EOF anchor, try matching from end of file first - if (eof) { - const fromEnd = lines.length - pattern.length - if (fromEnd >= startIndex) { - let matches = true - for (let j = 0; j < pattern.length; j++) { - if (!compare(lines[fromEnd + j], pattern[j])) { - matches = false - break - } - } - if (matches) return fromEnd - } - } - - // Forward search from startIndex - for (let i = startIndex; i <= lines.length - pattern.length; i++) { - let matches = true - for (let j = 0; j < pattern.length; j++) { - if (!compare(lines[i + j], pattern[j])) { - matches = false - break - } - } - if (matches) return i - } - - return -1 -} - -function seekSequence(lines: string[], pattern: string[], startIndex: number, eof = false): number { - if (pattern.length === 0) return -1 - - // Pass 1: exact match - const exact = tryMatch(lines, pattern, startIndex, (a, b) => a === b, eof) - if (exact !== -1) return exact - - // Pass 2: rstrip (trim trailing whitespace) - const rstrip = tryMatch(lines, pattern, startIndex, (a, b) => a.trimEnd() === b.trimEnd(), eof) - if (rstrip !== -1) return rstrip - - // Pass 3: trim (both ends) - const trim = tryMatch(lines, pattern, startIndex, (a, b) => a.trim() === b.trim(), eof) - if (trim !== -1) return trim - - // Pass 4: normalized (Unicode punctuation to ASCII) - const normalized = tryMatch( - lines, - pattern, - startIndex, - (a, b) => normalizeUnicode(a.trim()) === normalizeUnicode(b.trim()), - eof, - ) - return normalized -} - -function generateUnifiedDiff(oldContent: string, newContent: string): string { - const oldLines = oldContent.split("\n") - const newLines = newContent.split("\n") - - // Simple diff generation - in a real implementation you'd use a proper diff algorithm - let diff = "@@ -1 +1 @@\n" - - // Find changes (simplified approach) - const maxLen = 
Math.max(oldLines.length, newLines.length) - let hasChanges = false - - for (let i = 0; i < maxLen; i++) { - const oldLine = oldLines[i] || "" - const newLine = newLines[i] || "" - - if (oldLine !== newLine) { - if (oldLine) diff += `-${oldLine}\n` - if (newLine) diff += `+${newLine}\n` - hasChanges = true - } else if (oldLine) { - diff += ` ${oldLine}\n` - } - } - - return hasChanges ? diff : "" -} - -// Apply hunks to filesystem -export async function applyHunksToFiles(hunks: Hunk[]): Promise { - if (hunks.length === 0) { - throw new Error("No files were modified.") - } - - const added: string[] = [] - const modified: string[] = [] - const deleted: string[] = [] - - for (const hunk of hunks) { - switch (hunk.type) { - case "add": - // Create parent directories - const addDir = path.dirname(hunk.path) - if (addDir !== "." && addDir !== "/") { - await fs.mkdir(addDir, { recursive: true }) - } - - await fs.writeFile(hunk.path, hunk.contents, "utf-8") - added.push(hunk.path) - log.info(`Added file: ${hunk.path}`) - break - - case "delete": - await fs.unlink(hunk.path) - deleted.push(hunk.path) - log.info(`Deleted file: ${hunk.path}`) - break - - case "update": - const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks) - - if (hunk.move_path) { - // Handle file move - const moveDir = path.dirname(hunk.move_path) - if (moveDir !== "." 
&& moveDir !== "/") { - await fs.mkdir(moveDir, { recursive: true }) - } - - await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8") - await fs.unlink(hunk.path) - modified.push(hunk.move_path) - log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`) - } else { - // Regular update - await fs.writeFile(hunk.path, fileUpdate.content, "utf-8") - modified.push(hunk.path) - log.info(`Updated file: ${hunk.path}`) - } - break - } - } - - return { added, modified, deleted } -} - -// Main patch application function -export async function applyPatch(patchText: string): Promise { - const { hunks } = parsePatch(patchText) - return applyHunksToFiles(hunks) -} - -// Async version of maybeParseApplyPatchVerified -export async function maybeParseApplyPatchVerified( - argv: string[], - cwd: string, -): Promise< - | { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction } - | { type: MaybeApplyPatchVerified.CorrectnessError; error: Error } - | { type: MaybeApplyPatchVerified.NotApplyPatch } -> { - // Detect implicit patch invocation (raw patch without apply_patch command) - if (argv.length === 1) { - try { - parsePatch(argv[0]) - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: new Error(ApplyPatchError.ImplicitInvocation), - } - } catch { - // Not a patch, continue - } - } - - const result = maybeParseApplyPatch(argv) - - switch (result.type) { - case MaybeApplyPatch.Body: - const { args } = result - const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd - const changes = new Map() - - for (const hunk of args.hunks) { - const resolvedPath = path.resolve( - effectiveCwd, - hunk.type === "update" && hunk.move_path ? 
hunk.move_path : hunk.path, - ) - - switch (hunk.type) { - case "add": - changes.set(resolvedPath, { - type: "add", - content: hunk.contents, - }) - break - - case "delete": - // For delete, we need to read the current content - const deletePath = path.resolve(effectiveCwd, hunk.path) - try { - const content = await fs.readFile(deletePath, "utf-8") - changes.set(resolvedPath, { - type: "delete", - content, - }) - } catch { - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: new Error(`Failed to read file for deletion: ${deletePath}`), - } - } - break - - case "update": - const updatePath = path.resolve(effectiveCwd, hunk.path) - try { - const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks) - changes.set(resolvedPath, { - type: "update", - unified_diff: fileUpdate.unified_diff, - move_path: hunk.move_path ? path.resolve(effectiveCwd, hunk.move_path) : undefined, - new_content: fileUpdate.content, - }) - } catch (error) { - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: error as Error, - } - } - break - } - } - - return { - type: MaybeApplyPatchVerified.Body, - action: { - changes, - patch: args.patch, - cwd: effectiveCwd, - }, - } - - case MaybeApplyPatch.PatchParseError: - return { - type: MaybeApplyPatchVerified.CorrectnessError, - error: result.error, - } - - case MaybeApplyPatch.NotApplyPatch: - return { type: MaybeApplyPatchVerified.NotApplyPatch } - } -} diff --git a/packages/opencode/src/plugin/github-copilot/copilot.ts b/packages/opencode/src/plugin/github-copilot/copilot.ts index c9b7e3c1c7..9b6f54459d 100644 --- a/packages/opencode/src/plugin/github-copilot/copilot.ts +++ b/packages/opencode/src/plugin/github-copilot/copilot.ts @@ -1,6 +1,5 @@ import type { Hooks, PluginInput } from "@opencode-ai/plugin" import type { Model } from "@opencode-ai/sdk/v2" -import { Installation } from "@/installation" import { InstallationVersion } from "@/installation/version" import { iife } from "@/util/iife" import 
{ Log } from "../../util" @@ -335,6 +334,13 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise { if (incoming.model.api.id.includes("gpt")) { output.maxOutputTokens = undefined } + + // GitHub Copilot's /v1/messages shim rejects the GA `eager_input_streaming` + // field on tool definitions ("Extra inputs are not permitted"). Opt out of + // the @ai-sdk/anthropic default so it stops injecting the field. + if (incoming.model.api.npm === "@ai-sdk/anthropic") { + output.options.toolStreaming = false + } }, "chat.headers": async (incoming, output) => { if (!incoming.model.providerID.includes("github-copilot")) return diff --git a/packages/opencode/src/plugin/github-copilot/models.ts b/packages/opencode/src/plugin/github-copilot/models.ts index 71d21afbe4..0aac0d3f5e 100644 --- a/packages/opencode/src/plugin/github-copilot/models.ts +++ b/packages/opencode/src/plugin/github-copilot/models.ts @@ -10,6 +10,11 @@ export const schema = z.object({ // every version looks like: `{model.id}-YYYY-MM-DD` version: z.string(), supported_endpoints: z.array(z.string()).optional(), + policy: z + .object({ + state: z.string().optional(), + }) + .optional(), capabilities: z.object({ family: z.string(), limits: z.object({ @@ -122,7 +127,9 @@ export async function get( }) const result = { ...existing } - const remote = new Map(data.data.filter((m) => m.model_picker_enabled).map((m) => [m.id, m] as const)) + const remote = new Map( + data.data.filter((m) => m.model_picker_enabled && m.policy?.state !== "disabled").map((m) => [m.id, m] as const), + ) // prune existing models whose api.id isn't in the endpoint response for (const [key, model] of Object.entries(result)) { diff --git a/packages/opencode/src/plugin/loader.ts b/packages/opencode/src/plugin/loader.ts index 0245d311e0..e61612561b 100644 --- a/packages/opencode/src/plugin/loader.ts +++ b/packages/opencode/src/plugin/loader.ts @@ -12,31 +12,41 @@ import { ConfigPlugin } from "@/config/plugin" import { 
InstallationVersion } from "@/installation/version" export namespace PluginLoader { + // A normalized plugin declaration derived from config before any filesystem or npm work happens. export type Plan = { spec: string options: ConfigPlugin.Options | undefined deprecated: boolean } + + // A plugin that has been resolved to a concrete target and entrypoint on disk. export type Resolved = Plan & { source: PluginSource target: string entry: string pkg?: PluginPackage } + + // A plugin target we could inspect, but which does not expose the requested kind of entrypoint. export type Missing = Plan & { source: PluginSource target: string pkg?: PluginPackage message: string } + + // A resolved plugin whose module has been imported successfully. export type Loaded = Resolved & { mod: Record } type Candidate = { origin: ConfigPlugin.Origin; plan: Plan } type Report = { + // Called before each attempt so callers can log initial load attempts and retries uniformly. start?: (candidate: Candidate, retry: boolean) => void + // Called when the package exists but does not provide the requested entrypoint. missing?: (candidate: Candidate, retry: boolean, message: string, resolved: Missing) => void + // Called for operational failures such as install, compatibility, or dynamic import errors. error?: ( candidate: Candidate, retry: boolean, @@ -46,11 +56,16 @@ export namespace PluginLoader { ) => void } + // Normalize a config item into the loader's internal representation. function plan(item: ConfigPlugin.Spec): Plan { const spec = ConfigPlugin.pluginSpecifier(item) return { spec, options: ConfigPlugin.pluginOptions(item), deprecated: isDeprecatedPlugin(spec) } } + // Resolve a configured plugin into a concrete entrypoint that can later be imported. + // + // The stages here intentionally separate install/target resolution, entrypoint detection, + // and compatibility checks so callers can report the exact reason a plugin was skipped. 
export async function resolve( plan: Plan, kind: PluginKind, @@ -59,6 +74,7 @@ export namespace PluginLoader { | { ok: false; stage: "missing"; value: Missing } | { ok: false; stage: "install" | "entry" | "compatibility"; error: unknown } > { + // First make sure the plugin exists locally, installing npm plugins on demand. let target = "" try { target = await resolvePluginTarget(plan.spec) @@ -67,6 +83,7 @@ export namespace PluginLoader { } if (!target) return { ok: false, stage: "install", error: new Error(`Plugin ${plan.spec} target is empty`) } + // Then inspect the target for the requested server/tui entrypoint. let base try { base = await createPluginEntry(plan.spec, target, kind) @@ -86,6 +103,8 @@ export namespace PluginLoader { }, } + // npm plugins can declare which opencode versions they support; file plugins are treated + // as local development code and skip this compatibility gate. if (base.source === "npm") { try { await checkPluginCompatibility(base.target, InstallationVersion, base.pkg) @@ -96,6 +115,7 @@ export namespace PluginLoader { return { ok: true, value: { ...plan, source: base.source, target: base.target, entry: base.entry, pkg: base.pkg } } } + // Import the resolved module only after all earlier validation has succeeded. export async function load(row: Resolved): Promise<{ ok: true; value: Loaded } | { ok: false; error: unknown }> { let mod try { @@ -107,6 +127,8 @@ export namespace PluginLoader { return { ok: true, value: { ...row, mod } } } + // Run one candidate through the full pipeline: resolve, optionally surface a missing entry, + // import the module, and finally let the caller transform the loaded plugin into any result type. async function attempt( candidate: Candidate, kind: PluginKind, @@ -116,11 +138,17 @@ export namespace PluginLoader { report: Report | undefined, ): Promise { const plan = candidate.plan + + // Deprecated plugin packages are silently ignored because they are now built in. 
if (plan.deprecated) return + report?.start?.(candidate, retry) + const resolved = await resolve(plan, kind) if (!resolved.ok) { if (resolved.stage === "missing") { + // Missing entrypoints are handled separately so callers can still inspect package metadata, + // for example to load theme files from a tui plugin package that has no code entrypoint. if (missing) { const value = await missing(resolved.value, candidate.origin, retry) if (value !== undefined) return value @@ -131,11 +159,15 @@ export namespace PluginLoader { report?.error?.(candidate, retry, resolved.stage, resolved.error) return } + const loaded = await load(resolved.value) if (!loaded.ok) { report?.error?.(candidate, retry, "load", loaded.error, resolved.value) return } + + // The default behavior is to return the successfully loaded plugin as-is, but callers can + // provide a finisher to adapt the result into a more specific runtime shape. if (!finish) return loaded.value as R return finish(loaded.value, candidate.origin, retry) } @@ -149,6 +181,11 @@ export namespace PluginLoader { report?: Report } + // Resolve and load all configured plugins in parallel. + // + // If `wait` is provided, file-based plugins that initially failed are retried once after the + // caller finishes preparing dependencies. This supports local plugins that depend on an install + // step happening elsewhere before their entrypoint becomes loadable. export async function loadExternal(input: Input): Promise { const candidates = input.items.map((origin) => ({ origin, plan: plan(origin.spec) })) const list: Array> = [] @@ -160,6 +197,9 @@ export namespace PluginLoader { let deps: Promise | undefined for (let i = 0; i < candidates.length; i++) { if (out[i] !== undefined) continue + + // Only local file plugins are retried. npm plugins already attempted installation during + // the first pass, while file plugins may need the caller's dependency preparation to finish. 
const candidate = candidates[i] if (!candidate || pluginSource(candidate.plan.spec) !== "file") continue deps ??= input.wait() @@ -167,6 +207,8 @@ export namespace PluginLoader { out[i] = await attempt(candidate, input.kind, true, input.finish, input.missing, input.report) } } + + // Drop skipped/failed entries while preserving the successful result order. const ready: R[] = [] for (const item of out) if (item !== undefined) ready.push(item) return ready diff --git a/packages/opencode/src/plugin/shared.ts b/packages/opencode/src/plugin/shared.ts index 11f36c41ae..ca821216d4 100644 --- a/packages/opencode/src/plugin/shared.ts +++ b/packages/opencode/src/plugin/shared.ts @@ -2,9 +2,9 @@ import path from "path" import { fileURLToPath, pathToFileURL } from "url" import npa from "npm-package-arg" import semver from "semver" -import { Npm } from "../npm" import { Filesystem } from "@/util" import { isRecord } from "@/util/record" +import { Npm } from "@/npm" // Old npm package names for plugins that are now built-in export const DEPRECATED_PLUGIN_PACKAGES = ["opencode-openai-codex-auth", "opencode-copilot-auth"] diff --git a/packages/opencode/src/project/instance.ts b/packages/opencode/src/project/instance.ts index 056eede01b..1c51096204 100644 --- a/packages/opencode/src/project/instance.ts +++ b/packages/opencode/src/project/instance.ts @@ -96,7 +96,7 @@ export const Instance = { if (AppFileSystem.contains(instance.directory, filepath)) return true // Non-git projects set worktree to "/" which would match ANY absolute path. // Skip worktree check in this case to preserve external_directory permissions. 
- if (Instance.worktree === "/") return false + if (instance.worktree === "/") return false return AppFileSystem.contains(instance.worktree, filepath) }, /** diff --git a/packages/opencode/src/project/project.ts b/packages/opencode/src/project/project.ts index f838d9ab43..6a2132274a 100644 --- a/packages/opencode/src/project/project.ts +++ b/packages/opencode/src/project/project.ts @@ -8,46 +8,52 @@ import { BusEvent } from "@/bus/bus-event" import { GlobalBus } from "@/bus/global" import { which } from "../util/which" import { ProjectID } from "./schema" -import { Effect, Layer, Path, Scope, Context, Stream } from "effect" +import { Effect, Layer, Path, Scope, Context, Stream, Types, Schema } from "effect" import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process" import { NodePath } from "@effect/platform-node" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner" +import { zod } from "@/util/effect-zod" +import { withStatics } from "@/util/schema" const log = Log.create({ service: "project" }) -export const Info = z - .object({ - id: ProjectID.zod, - worktree: z.string(), - vcs: z.literal("git").optional(), - name: z.string().optional(), - icon: z - .object({ - url: z.string().optional(), - override: z.string().optional(), - color: z.string().optional(), - }) - .optional(), - commands: z - .object({ - start: z.string().optional().describe("Startup script to run when creating a new workspace (worktree)"), - }) - .optional(), - time: z.object({ - created: z.number(), - updated: z.number(), - initialized: z.number().optional(), - }), - sandboxes: z.array(z.string()), - }) - .meta({ - ref: "Project", - }) -export type Info = z.infer +const ProjectVcs = Schema.Literal("git") + +const ProjectIcon = Schema.Struct({ + url: Schema.optional(Schema.String), + override: Schema.optional(Schema.String), + color: Schema.optional(Schema.String), +}) + +const ProjectCommands = 
Schema.Struct({ + start: Schema.optional( + Schema.String.annotate({ description: "Startup script to run when creating a new workspace (worktree)" }), + ), +}) + +const ProjectTime = Schema.Struct({ + created: Schema.Number, + updated: Schema.Number, + initialized: Schema.optional(Schema.Number), +}) + +export const Info = Schema.Struct({ + id: ProjectID, + worktree: Schema.String, + vcs: Schema.optional(ProjectVcs), + name: Schema.optional(Schema.String), + icon: Schema.optional(ProjectIcon), + commands: Schema.optional(ProjectCommands), + time: ProjectTime, + sandboxes: Schema.Array(Schema.String), +}) + .annotate({ identifier: "Project" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Info = Types.DeepMutable> export const Event = { - Updated: BusEvent.define("project.updated", Info), + Updated: BusEvent.define("project.updated", Info.zod), } type Row = typeof ProjectTable.$inferSelect @@ -58,7 +64,7 @@ export function fromRow(row: Row): Info { return { id: row.id, worktree: row.worktree, - vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined, + vcs: row.vcs ? Schema.decodeUnknownSync(ProjectVcs)(row.vcs) : undefined, name: row.name ?? 
undefined, icon, time: { @@ -74,8 +80,8 @@ export function fromRow(row: Row): Info { export const UpdateInput = z.object({ projectID: ProjectID.zod, name: z.string().optional(), - icon: Info.shape.icon.optional(), - commands: Info.shape.commands.optional(), + icon: zod(ProjectIcon).optional(), + commands: zod(ProjectCommands).optional(), }) export type UpdateInput = z.infer @@ -139,7 +145,7 @@ export const layer: Layer.Layer< }), ) - const fakeVcs = Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS) + const fakeVcs = Schema.decodeUnknownSync(Schema.optional(ProjectVcs))(Flag.OPENCODE_FAKE_VCS) const resolveGitPath = (cwd: string, name: string) => { if (!name) return cwd diff --git a/packages/opencode/src/project/vcs.ts b/packages/opencode/src/project/vcs.ts index b1375a7b78..ba028f7e8e 100644 --- a/packages/opencode/src/project/vcs.ts +++ b/packages/opencode/src/project/vcs.ts @@ -8,7 +8,6 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { FileWatcher } from "@/file/watcher" import { Git } from "@/git" import { Log } from "@/util" -import { Instance } from "./instance" import z from "zod" const log = Log.create({ service: "vcs" }) @@ -205,21 +204,17 @@ export const layer: Layer.Layer("ProviderAuthAuth static readonly zod = zod(this) } +export const AuthorizeInput = Schema.Struct({ + method: Schema.Number.annotate({ description: "Auth method index" }), + inputs: Schema.optional(Schema.Record(Schema.String, Schema.String)).annotate({ description: "Prompt inputs" }), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type AuthorizeInput = Schema.Schema.Type + +export const CallbackInput = Schema.Struct({ + method: Schema.Number.annotate({ description: "Auth method index" }), + code: Schema.optional(Schema.String).annotate({ description: "OAuth authorization code" }), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type CallbackInput = Schema.Schema.Type + export const OauthMissing = NamedError.create("ProviderAuthOauthMissing", 
z.object({ providerID: ProviderID.zod })) export const OauthCodeMissing = NamedError.create( @@ -86,12 +98,12 @@ type Hook = NonNullable export interface Interface { readonly methods: () => Effect.Effect - readonly authorize: (input: { - providerID: ProviderID - method: number - inputs?: Record - }) => Effect.Effect - readonly callback: (input: { providerID: ProviderID; method: number; code?: string }) => Effect.Effect + readonly authorize: ( + input: { + providerID: ProviderID + } & AuthorizeInput, + ) => Effect.Effect + readonly callback: (input: { providerID: ProviderID } & CallbackInput) => Effect.Effect } interface State { @@ -153,11 +165,9 @@ export const layer: Layer.Layer = ) }) - const authorize = Effect.fn("ProviderAuth.authorize")(function* (input: { - providerID: ProviderID - method: number - inputs?: Record - }) { + const authorize = Effect.fn("ProviderAuth.authorize")(function* ( + input: { providerID: ProviderID } & AuthorizeInput, + ) { const { hooks, pending } = yield* InstanceState.get(state) const method = hooks[input.providerID].methods[input.method] if (method.type !== "oauth") return @@ -180,11 +190,7 @@ export const layer: Layer.Layer = } }) - const callback = Effect.fn("ProviderAuth.callback")(function* (input: { - providerID: ProviderID - method: number - code?: string - }) { + const callback = Effect.fn("ProviderAuth.callback")(function* (input: { providerID: ProviderID } & CallbackInput) { const pending = (yield* InstanceState.get(state)).pending const match = pending.get(input.providerID) if (!match) return yield* Effect.fail(new OauthMissing({ providerID: input.providerID })) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index a7297634e7..8f6e1556ad 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -13,17 +13,18 @@ import { type LanguageModelV3 } from "@ai-sdk/provider" import * as ModelsDev from "./models" import { Auth } 
from "../auth" import { Env } from "../env" -import { Instance } from "../project/instance" import { InstallationVersion } from "../installation/version" import { Flag } from "../flag/flag" +import { zod } from "@/util/effect-zod" import { iife } from "@/util/iife" import { Global } from "../global" import path from "path" -import { Effect, Layer, Context } from "effect" +import { Effect, Layer, Context, Schema, Types } from "effect" import { EffectBridge } from "@/effect" import { InstanceState } from "@/effect" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { isRecord } from "@/util/record" +import { withStatics } from "@/util/schema" import * as ProviderTransform from "./transform" import { ModelID, ProviderID } from "./schema" @@ -388,6 +389,17 @@ function custom(dep: CustomDep): Record { }, } }), + llmgateway: () => + Effect.succeed({ + autoload: false, + options: { + headers: { + "HTTP-Referer": "https://opencode.ai/", + "X-Title": "opencode", + "X-Source": "opencode", + }, + }, + }), openrouter: () => Effect.succeed({ autoload: false, @@ -524,6 +536,7 @@ function custom(dep: CustomDep): Record { const token = apiKey ?? (yield* dep.get("GITLAB_TOKEN")) const providerConfig = (yield* dep.config()).provider?.["gitlab"] + const directory = yield* InstanceState.directory const aiGatewayHeaders = { "User-Agent": `opencode/${InstallationVersion} gitlab-ai-provider/${GITLAB_PROVIDER_VERSION} (${os.platform()} ${os.release()}; ${os.arch()})`, @@ -578,10 +591,7 @@ function custom(dep: CustomDep): Record { auth?.type === "api" ? 
{ "PRIVATE-TOKEN": token } : { Authorization: `Bearer ${token}` } log.info("gitlab model discovery starting", { instanceUrl }) - const result = await discoverWorkflowModels( - { instanceUrl, getHeaders }, - { workingDirectory: Instance.directory }, - ) + const result = await discoverWorkflowModels({ instanceUrl, getHeaders }, { workingDirectory: directory }) if (!result.models.length) { log.info("gitlab model discovery skipped: no models found", { @@ -796,91 +806,111 @@ function custom(dep: CustomDep): Record { } } -export const Model = z - .object({ - id: ModelID.zod, - providerID: ProviderID.zod, - api: z.object({ - id: z.string(), - url: z.string(), - npm: z.string(), - }), - name: z.string(), - family: z.string().optional(), - capabilities: z.object({ - temperature: z.boolean(), - reasoning: z.boolean(), - attachment: z.boolean(), - toolcall: z.boolean(), - input: z.object({ - text: z.boolean(), - audio: z.boolean(), - image: z.boolean(), - video: z.boolean(), - pdf: z.boolean(), - }), - output: z.object({ - text: z.boolean(), - audio: z.boolean(), - image: z.boolean(), - video: z.boolean(), - pdf: z.boolean(), - }), - interleaved: z.union([ - z.boolean(), - z.object({ - field: z.enum(["reasoning_content", "reasoning_details"]), - }), - ]), - }), - cost: z.object({ - input: z.number(), - output: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), - experimentalOver200K: z - .object({ - input: z.number(), - output: z.number(), - cache: z.object({ - read: z.number(), - write: z.number(), - }), - }) - .optional(), - }), - limit: z.object({ - context: z.number(), - input: z.number().optional(), - output: z.number(), - }), - status: z.enum(["alpha", "beta", "deprecated", "active"]), - options: z.record(z.string(), z.any()), - headers: z.record(z.string(), z.string()), - release_date: z.string(), - variants: z.record(z.string(), z.record(z.string(), z.any())).optional(), - }) - .meta({ - ref: "Model", - }) -export type Model = z.infer 
+const ProviderApiInfo = Schema.Struct({ + id: Schema.String, + url: Schema.String, + npm: Schema.String, +}) -export const Info = z - .object({ - id: ProviderID.zod, - name: z.string(), - source: z.enum(["env", "config", "custom", "api"]), - env: z.string().array(), - key: z.string().optional(), - options: z.record(z.string(), z.any()), - models: z.record(z.string(), Model), - }) - .meta({ - ref: "Provider", - }) -export type Info = z.infer +const ProviderModalities = Schema.Struct({ + text: Schema.Boolean, + audio: Schema.Boolean, + image: Schema.Boolean, + video: Schema.Boolean, + pdf: Schema.Boolean, +}) + +const ProviderInterleaved = Schema.Union([ + Schema.Boolean, + Schema.Struct({ + field: Schema.Literals(["reasoning_content", "reasoning_details"]), + }), +]) + +const ProviderCapabilities = Schema.Struct({ + temperature: Schema.Boolean, + reasoning: Schema.Boolean, + attachment: Schema.Boolean, + toolcall: Schema.Boolean, + input: ProviderModalities, + output: ProviderModalities, + interleaved: ProviderInterleaved, +}) + +const ProviderCacheCost = Schema.Struct({ + read: Schema.Number, + write: Schema.Number, +}) + +const ProviderCost = Schema.Struct({ + input: Schema.Number, + output: Schema.Number, + cache: ProviderCacheCost, + experimentalOver200K: Schema.optional( + Schema.Struct({ + input: Schema.Number, + output: Schema.Number, + cache: ProviderCacheCost, + }), + ), +}) + +const ProviderLimit = Schema.Struct({ + context: Schema.Number, + input: Schema.optional(Schema.Number), + output: Schema.Number, +}) + +export const Model = Schema.Struct({ + id: ModelID, + providerID: ProviderID, + api: ProviderApiInfo, + name: Schema.String, + family: Schema.optional(Schema.String), + capabilities: ProviderCapabilities, + cost: ProviderCost, + limit: ProviderLimit, + status: Schema.Literals(["alpha", "beta", "deprecated", "active"]), + options: Schema.Record(Schema.String, Schema.Any), + headers: Schema.Record(Schema.String, Schema.String), + release_date: 
Schema.String, + variants: Schema.optional(Schema.Record(Schema.String, Schema.Record(Schema.String, Schema.Any))), +}) + .annotate({ identifier: "Model" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Model = Types.DeepMutable> + +export const Info = Schema.Struct({ + id: ProviderID, + name: Schema.String, + source: Schema.Literals(["env", "config", "custom", "api"]), + env: Schema.Array(Schema.String), + key: Schema.optional(Schema.String), + options: Schema.Record(Schema.String, Schema.Any), + models: Schema.Record(Schema.String, Model), +}) + .annotate({ identifier: "Provider" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type Info = Types.DeepMutable> + +const DefaultModelIDs = Schema.Record(Schema.String, Schema.String) + +export const ListResult = Schema.Struct({ + all: Schema.Array(Info), + default: DefaultModelIDs, + connected: Schema.Array(Schema.String), +}).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type ListResult = Types.DeepMutable> + +export const ConfigProvidersResult = Schema.Struct({ + providers: Schema.Array(Info), + default: DefaultModelIDs, +}).pipe(withStatics((s) => ({ zod: zod(s) }))) +export type ConfigProvidersResult = Types.DeepMutable> + +export function defaultModelIDs }>(providers: Record) { + return mapValues(providers, (item) => sort(Object.values(item.models))[0].id) +} export interface Interface { readonly list: () => Effect.Effect> @@ -928,14 +958,14 @@ function cost(c: ModelsDev.Model["cost"]): Model["cost"] { } function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model): Model { - const m: Model = { + const base: Model = { id: ModelID.make(model.id), providerID: ProviderID.make(provider.id), name: model.name, family: model.family, api: { id: model.id, - url: model.provider?.api ?? provider.api!, + url: model.provider?.api ?? provider.api ?? "", npm: model.provider?.npm ?? provider.npm ?? "@ai-sdk/openai-compatible", }, status: model.status ?? 
"active", @@ -948,10 +978,10 @@ function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model output: model.limit.output, }, capabilities: { - temperature: model.temperature, - reasoning: model.reasoning, - attachment: model.attachment, - toolcall: model.tool_call, + temperature: model.temperature ?? false, + reasoning: model.reasoning ?? false, + attachment: model.attachment ?? false, + toolcall: model.tool_call ?? true, input: { text: model.modalities?.input?.includes("text") ?? false, audio: model.modalities?.input?.includes("audio") ?? false, @@ -968,13 +998,14 @@ function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model }, interleaved: model.interleaved ?? false, }, - release_date: model.release_date, + release_date: model.release_date ?? "", variants: {}, } - m.variants = mapValues(ProviderTransform.variants(m), (v) => v) - - return m + return { + ...base, + variants: mapValues(ProviderTransform.variants(base), (v) => v), + } } export function fromModelsDevProvider(provider: ModelsDev.Provider): Info { @@ -983,17 +1014,22 @@ export function fromModelsDevProvider(provider: ModelsDev.Provider): Info { models[key] = fromModelsDevModel(provider, model) for (const [mode, opts] of Object.entries(model.experimental?.modes ?? 
{})) { const id = `${model.id}-${mode}` - const m = fromModelsDevModel(provider, model) - m.id = ModelID.make(id) - m.name = `${model.name} ${mode[0].toUpperCase()}${mode.slice(1)}` - if (opts.cost) m.cost = mergeDeep(m.cost, cost(opts.cost)) - // convert body params to camelCase for ai sdk compatibility - if (opts.provider?.body) - m.options = Object.fromEntries( - Object.entries(opts.provider.body).map(([k, v]) => [k.replace(/_([a-z])/g, (_, c) => c.toUpperCase()), v]), - ) - if (opts.provider?.headers) m.headers = opts.provider.headers - models[id] = m + const base = fromModelsDevModel(provider, model) + models[id] = { + ...base, + id: ModelID.make(id), + name: `${model.name} ${mode[0].toUpperCase()}${mode.slice(1)}`, + cost: opts.cost ? mergeDeep(base.cost, cost(opts.cost)) : base.cost, + options: opts.provider?.body + ? Object.fromEntries( + Object.entries(opts.provider.body).map(([k, v]) => [ + k.replace(/_([a-z])/g, (_, c) => c.toUpperCase()), + v, + ]), + ) + : base.options, + headers: opts.provider?.headers ?? base.headers, + } } } return { @@ -1104,7 +1140,7 @@ const layer: Layer.Layer< existingModel?.api.npm ?? modelsDev[providerID]?.npm ?? "@ai-sdk/openai-compatible", - url: model.provider?.api ?? provider?.api ?? existingModel?.api.url ?? modelsDev[providerID]?.api, + url: model.provider?.api ?? provider?.api ?? existingModel?.api.url ?? modelsDev[providerID]?.api ?? "", }, status: model.status ?? existingModel?.status ?? 
"active", name, diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 0ebd8bbf59..1b6b0918b1 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -587,6 +587,12 @@ export function variants(model: Provider.Model): Record [effort, { reasoningEffort: effort }])) + } + } + if (adaptiveEfforts) { return Object.fromEntries( adaptiveEfforts.map((effort) => [ @@ -801,7 +807,7 @@ export function options(input: { result["promptCacheKey"] = input.sessionID } - if (input.model.api.npm === "@openrouter/ai-sdk-provider") { + if (input.model.api.npm === "@openrouter/ai-sdk-provider" || input.model.api.npm === "@llmgateway/ai-sdk-provider") { result["usage"] = { include: true, } @@ -938,7 +944,7 @@ export function smallOptions(model: Provider.Model) { } return { thinkingConfig: { thinkingBudget: 0 } } } - if (model.providerID === "openrouter") { + if (model.providerID === "openrouter" || model.providerID === "llmgateway") { if (model.api.id.includes("google")) { return { reasoning: { enabled: false } } } diff --git a/packages/opencode/src/question/index.ts b/packages/opencode/src/question/index.ts index 627d04564d..3b377c9827 100644 --- a/packages/opencode/src/question/index.ts +++ b/packages/opencode/src/question/index.ts @@ -8,222 +8,222 @@ import { Log } from "@/util" import { withStatics } from "@/util/schema" import { QuestionID } from "./schema" -export namespace Question { - const log = Log.create({ service: "question" }) +const log = Log.create({ service: "question" }) - // Schemas +// Schemas - export class Option extends Schema.Class(file: string, fn: () => Effect.Effect) => FileTime.Service.use((svc) => svc.withLock(file, fn)) - -const fail = Effect.fn("FileTimeTest.fail")(function* (self: Effect.Effect) { - const exit = yield* self.pipe(Effect.exit) - if (Exit.isFailure(exit)) { - const err = Cause.squash(exit.cause) - return err instanceof Error ? 
err : new Error(String(err)) - } - throw new Error("expected file time effect to fail") -}) - -describe("file/time", () => { - describe("read() and get()", () => { - it.live("stores read timestamp", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - const before = yield* get(id, file) - expect(before).toBeUndefined() - - yield* read(id, file) - - const after = yield* get(id, file) - expect(after).toBeInstanceOf(Date) - expect(after!.getTime()).toBeGreaterThan(0) - }), - ), - ) - - it.live("tracks separate timestamps per session", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - const one = SessionID.make("ses_00000000000000000000000002") - const two = SessionID.make("ses_00000000000000000000000003") - yield* read(one, file) - yield* read(two, file) - - const first = yield* get(one, file) - const second = yield* get(two, file) - - expect(first).toBeDefined() - expect(second).toBeDefined() - }), - ), - ) - - it.live("updates timestamp on subsequent reads", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - yield* read(id, file) - const first = yield* get(id, file) - - yield* read(id, file) - const second = yield* get(id, file) - - expect(second!.getTime()).toBeGreaterThanOrEqual(first!.getTime()) - }), - ), - ) - - it.live("isolates reads by directory", () => - Effect.gen(function* () { - const one = yield* tmpdirScoped() - const two = yield* tmpdirScoped() - const shared = yield* tmpdirScoped() - const file = path.join(shared, "file.txt") - yield* put(file, "content") - - yield* provideInstance(one)(read(id, file)) - const result = yield* provideInstance(two)(get(id, file)) - expect(result).toBeUndefined() - }), - ) - }) - - describe("assert()", () => { - it.live("passes when file has not been 
modified", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - yield* read(id, file) - yield* check(id, file) - }), - ), - ) - - it.live("throws when file was not read first", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - const err = yield* fail(check(id, file)) - expect(err.message).toContain("You must read file") - }), - ), - ) - - it.live("throws when file was modified after read", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - yield* read(id, file) - yield* put(file, "modified content") - yield* touch(file, 2_000) - - const err = yield* fail(check(id, file)) - expect(err.message).toContain("modified since it was last read") - }), - ), - ) - - it.live("includes timestamps in error message", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - yield* read(id, file) - yield* put(file, "modified") - yield* touch(file, 2_000) - - const err = yield* fail(check(id, file)) - expect(err.message).toContain("Last modification:") - expect(err.message).toContain("Last read:") - }), - ), - ) - }) - - describe("withLock()", () => { - it.live("executes function within lock", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - let hit = false - - yield* lock(file, () => - Effect.sync(() => { - hit = true - return "result" - }), - ) - - expect(hit).toBe(true) - }), - ), - ) - - it.live("returns function result", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - const result = yield* lock(file, () => 
Effect.succeed("success")) - expect(result).toBe("success") - }), - ), - ) - - it.live("serializes concurrent operations on same file", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - const order: number[] = [] - const hold = yield* Deferred.make() - const ready = yield* Deferred.make() - - const one = yield* lock(file, () => - Effect.gen(function* () { - order.push(1) - yield* Deferred.succeed(ready, void 0) - yield* Deferred.await(hold) - order.push(2) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.await(ready) - - const two = yield* lock(file, () => - Effect.sync(() => { - order.push(3) - order.push(4) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.succeed(hold, void 0) - yield* Fiber.join(one) - yield* Fiber.join(two) - - expect(order).toEqual([1, 2, 3, 4]) - }), - ), - ) - - it.live("allows concurrent operations on different files", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const onefile = path.join(dir, "file1.txt") - const twofile = path.join(dir, "file2.txt") - let one = false - let two = false - const hold = yield* Deferred.make() - const ready = yield* Deferred.make() - - const a = yield* lock(onefile, () => - Effect.gen(function* () { - one = true - yield* Deferred.succeed(ready, void 0) - yield* Deferred.await(hold) - expect(two).toBe(true) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.await(ready) - - const b = yield* lock(twofile, () => - Effect.sync(() => { - two = true - }), - ).pipe(Effect.forkScoped) - - yield* Fiber.join(b) - yield* Deferred.succeed(hold, void 0) - yield* Fiber.join(a) - - expect(one).toBe(true) - expect(two).toBe(true) - }), - ), - ) - - it.live("releases lock even if function throws", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - const err = yield* fail(lock(file, () => Effect.die(new Error("Test error")))) - expect(err.message).toContain("Test error") - 
- let hit = false - yield* lock(file, () => - Effect.sync(() => { - hit = true - }), - ) - expect(hit).toBe(true) - }), - ), - ) - }) - - describe("path normalization", () => { - it.live("read with forward slashes, assert with backslashes", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - const forward = file.replaceAll("\\", "/") - yield* read(id, forward) - yield* check(id, file) - }), - ), - ) - - it.live("read with backslashes, assert with forward slashes", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - const forward = file.replaceAll("\\", "/") - yield* read(id, file) - yield* check(id, forward) - }), - ), - ) - - it.live("get returns timestamp regardless of slash direction", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - - const forward = file.replaceAll("\\", "/") - yield* read(id, forward) - - const result = yield* get(id, file) - expect(result).toBeInstanceOf(Date) - }), - ), - ) - - it.live("withLock serializes regardless of slash direction", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - const forward = file.replaceAll("\\", "/") - const order: number[] = [] - const hold = yield* Deferred.make() - const ready = yield* Deferred.make() - - const one = yield* lock(file, () => - Effect.gen(function* () { - order.push(1) - yield* Deferred.succeed(ready, void 0) - yield* Deferred.await(hold) - order.push(2) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.await(ready) - - const two = yield* lock(forward, () => - Effect.sync(() => { - order.push(3) - order.push(4) - }), - ).pipe(Effect.forkScoped) - - yield* Deferred.succeed(hold, void 0) - yield* 
Fiber.join(one) - yield* Fiber.join(two) - - expect(order).toEqual([1, 2, 3, 4]) - }), - ), - ) - }) - - describe("stat() Filesystem.stat pattern", () => { - it.live("reads file modification time via Filesystem.stat()", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "content") - yield* touch(file, 1_000) - - yield* read(id, file) - - const stat = Filesystem.stat(file) - expect(stat?.mtime).toBeInstanceOf(Date) - expect(stat!.mtime.getTime()).toBeGreaterThan(0) - - yield* check(id, file) - }), - ), - ) - - it.live("detects modification via stat mtime", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "file.txt") - yield* put(file, "original") - yield* touch(file, 1_000) - - yield* read(id, file) - - const first = Filesystem.stat(file) - - yield* put(file, "modified") - yield* touch(file, 2_000) - - const second = Filesystem.stat(file) - expect(second!.mtime.getTime()).toBeGreaterThan(first!.mtime.getTime()) - - yield* fail(check(id, file)) - }), - ), - ) - }) -}) diff --git a/packages/opencode/test/format/format.test.ts b/packages/opencode/test/format/format.test.ts index 39826aad16..5530e195b2 100644 --- a/packages/opencode/test/format/format.test.ts +++ b/packages/opencode/test/format/format.test.ts @@ -10,37 +10,55 @@ import * as Formatter from "../../src/format/formatter" const it = testEffect(Layer.mergeAll(Format.defaultLayer, CrossSpawnSpawner.defaultLayer, NodeFileSystem.layer)) describe("Format", () => { - it.live("status() returns built-in formatters when no config overrides", () => + it.live("status() returns empty list when no formatters are configured", () => provideTmpdirInstance(() => Format.Service.use((fmt) => Effect.gen(function* () { - const statuses = yield* fmt.status() - expect(Array.isArray(statuses)).toBe(true) - expect(statuses.length).toBeGreaterThan(0) - - for (const item of statuses) { - expect(typeof 
item.name).toBe("string") - expect(Array.isArray(item.extensions)).toBe(true) - expect(typeof item.enabled).toBe("boolean") - } - - const gofmt = statuses.find((item) => item.name === "gofmt") - expect(gofmt).toBeDefined() - expect(gofmt!.extensions).toContain(".go") + expect(yield* fmt.status()).toEqual([]) }), ), ), ) - it.live("status() returns empty list when formatter is disabled", () => + it.live("status() returns built-in formatters when formatter is true", () => provideTmpdirInstance( () => Format.Service.use((fmt) => Effect.gen(function* () { - expect(yield* fmt.status()).toEqual([]) + const statuses = yield* fmt.status() + const gofmt = statuses.find((item) => item.name === "gofmt") + expect(gofmt).toBeDefined() + expect(gofmt!.extensions).toContain(".go") }), ), - { config: { formatter: false } }, + { + config: { + formatter: true, + }, + }, + ), + ) + + it.live("status() keeps built-in formatters when config object is provided", () => + provideTmpdirInstance( + () => + Format.Service.use((fmt) => + Effect.gen(function* () { + const statuses = yield* fmt.status() + const gofmt = statuses.find((item) => item.name === "gofmt") + const mix = statuses.find((item) => item.name === "mix") + expect(gofmt).toBeDefined() + expect(gofmt!.extensions).toContain(".go") + expect(mix).toBeDefined() + }), + ), + { + config: { + formatter: { + gofmt: {}, + }, + }, + }, ), ) @@ -51,7 +69,9 @@ describe("Format", () => { Effect.gen(function* () { const statuses = yield* fmt.status() const gofmt = statuses.find((item) => item.name === "gofmt") + const mix = statuses.find((item) => item.name === "mix") expect(gofmt).toBeUndefined() + expect(mix).toBeDefined() }), ), { @@ -111,68 +131,81 @@ describe("Format", () => { const a = yield* provideTmpdirInstance(() => Format.Service.use((fmt) => fmt.status()), { config: { formatter: false }, }) - const b = yield* provideTmpdirInstance(() => Format.Service.use((fmt) => fmt.status())) + const b = yield* provideTmpdirInstance(() => 
Format.Service.use((fmt) => fmt.status()), { + config: { + formatter: true, + }, + }) expect(a).toEqual([]) - expect(b.length).toBeGreaterThan(0) + expect(b.find((item) => item.name === "gofmt")).toBeDefined() }), ) it.live("runs enabled checks for matching formatters in parallel", () => - provideTmpdirInstance((path) => - Effect.gen(function* () { - const file = `${path}/test.parallel` - yield* Effect.promise(() => Bun.write(file, "x")) + provideTmpdirInstance( + (path) => + Effect.gen(function* () { + const file = `${path}/test.parallel` + yield* Effect.promise(() => Bun.write(file, "x")) - const one = { - extensions: Formatter.gofmt.extensions, - enabled: Formatter.gofmt.enabled, - } - const two = { - extensions: Formatter.mix.extensions, - enabled: Formatter.mix.enabled, - } + const one = { + extensions: Formatter.gofmt.extensions, + enabled: Formatter.gofmt.enabled, + } + const two = { + extensions: Formatter.mix.extensions, + enabled: Formatter.mix.enabled, + } - let active = 0 - let max = 0 + let active = 0 + let max = 0 - yield* Effect.acquireUseRelease( - Effect.sync(() => { - Formatter.gofmt.extensions = [".parallel"] - Formatter.mix.extensions = [".parallel"] - Formatter.gofmt.enabled = async () => { - active++ - max = Math.max(max, active) - await Bun.sleep(20) - active-- - return ["sh", "-c", "true"] - } - Formatter.mix.enabled = async () => { - active++ - max = Math.max(max, active) - await Bun.sleep(20) - active-- - return ["sh", "-c", "true"] - } - }), - () => - Format.Service.use((fmt) => - Effect.gen(function* () { - yield* fmt.init() - yield* fmt.file(file) - }), - ), - () => + yield* Effect.acquireUseRelease( Effect.sync(() => { - Formatter.gofmt.extensions = one.extensions - Formatter.gofmt.enabled = one.enabled - Formatter.mix.extensions = two.extensions - Formatter.mix.enabled = two.enabled + Formatter.gofmt.extensions = [".parallel"] + Formatter.mix.extensions = [".parallel"] + Formatter.gofmt.enabled = async () => { + active++ + max = 
Math.max(max, active) + await Bun.sleep(20) + active-- + return ["sh", "-c", "true"] + } + Formatter.mix.enabled = async () => { + active++ + max = Math.max(max, active) + await Bun.sleep(20) + active-- + return ["sh", "-c", "true"] + } }), - ) + () => + Format.Service.use((fmt) => + Effect.gen(function* () { + yield* fmt.init() + yield* fmt.file(file) + }), + ), + () => + Effect.sync(() => { + Formatter.gofmt.extensions = one.extensions + Formatter.gofmt.enabled = one.enabled + Formatter.mix.extensions = two.extensions + Formatter.mix.enabled = two.enabled + }), + ) - expect(max).toBe(2) - }), + expect(max).toBe(2) + }), + { + config: { + formatter: { + gofmt: {}, + mix: {}, + }, + }, + }, ), ) diff --git a/packages/opencode/test/lsp/client.test.ts b/packages/opencode/test/lsp/client.test.ts index f124fddf95..d6eaa317f9 100644 --- a/packages/opencode/test/lsp/client.test.ts +++ b/packages/opencode/test/lsp/client.test.ts @@ -31,6 +31,7 @@ describe("LSPClient interop", () => { serverID: "fake", server: handle as unknown as LSPServer.Handle, root: process.cwd(), + directory: process.cwd(), }), }) @@ -55,6 +56,7 @@ describe("LSPClient interop", () => { serverID: "fake", server: handle as unknown as LSPServer.Handle, root: process.cwd(), + directory: process.cwd(), }), }) @@ -79,6 +81,7 @@ describe("LSPClient interop", () => { serverID: "fake", server: handle as unknown as LSPServer.Handle, root: process.cwd(), + directory: process.cwd(), }), }) diff --git a/packages/opencode/test/lsp/index.test.ts b/packages/opencode/test/lsp/index.test.ts index 7419f3bf5c..d138f56e3b 100644 --- a/packages/opencode/test/lsp/index.test.ts +++ b/packages/opencode/test/lsp/index.test.ts @@ -11,15 +11,38 @@ const it = testEffect(Layer.mergeAll(LSP.defaultLayer, CrossSpawnSpawner.default describe("lsp.spawn", () => { it.live("does not spawn builtin LSP for files outside instance", () => + provideTmpdirInstance( + (dir) => + LSP.Service.use((lsp) => + Effect.gen(function* () { + const spy 
= spyOn(LSPServer.Typescript, "spawn").mockResolvedValue(undefined) + + try { + yield* lsp.touchFile(path.join(dir, "..", "outside.ts")) + yield* lsp.hover({ + file: path.join(dir, "..", "hover.ts"), + line: 0, + character: 0, + }) + expect(spy).toHaveBeenCalledTimes(0) + } finally { + spy.mockRestore() + } + }), + ), + { config: { lsp: true } }, + ), + ) + + it.live("does not spawn builtin LSP for files inside instance when LSP is unset", () => provideTmpdirInstance((dir) => LSP.Service.use((lsp) => Effect.gen(function* () { const spy = spyOn(LSPServer.Typescript, "spawn").mockResolvedValue(undefined) try { - yield* lsp.touchFile(path.join(dir, "..", "outside.ts")) yield* lsp.hover({ - file: path.join(dir, "..", "hover.ts"), + file: path.join(dir, "src", "inside.ts"), line: 0, character: 0, }) @@ -32,24 +55,55 @@ describe("lsp.spawn", () => { ), ) - it.live("would spawn builtin LSP for files inside instance", () => - provideTmpdirInstance((dir) => - LSP.Service.use((lsp) => - Effect.gen(function* () { - const spy = spyOn(LSPServer.Typescript, "spawn").mockResolvedValue(undefined) + it.live("would spawn builtin LSP for files inside instance when lsp is true", () => + provideTmpdirInstance( + (dir) => + LSP.Service.use((lsp) => + Effect.gen(function* () { + const spy = spyOn(LSPServer.Typescript, "spawn").mockResolvedValue(undefined) - try { - yield* lsp.hover({ - file: path.join(dir, "src", "inside.ts"), - line: 0, - character: 0, - }) - expect(spy).toHaveBeenCalledTimes(1) - } finally { - spy.mockRestore() - } - }), - ), + try { + yield* lsp.hover({ + file: path.join(dir, "src", "inside.ts"), + line: 0, + character: 0, + }) + expect(spy).toHaveBeenCalledTimes(1) + } finally { + spy.mockRestore() + } + }), + ), + { config: { lsp: true } }, + ), + ) + + it.live("would spawn builtin LSP for files inside instance when config object is provided", () => + provideTmpdirInstance( + (dir) => + LSP.Service.use((lsp) => + Effect.gen(function* () { + const spy = 
spyOn(LSPServer.Typescript, "spawn").mockResolvedValue(undefined) + + try { + yield* lsp.hover({ + file: path.join(dir, "src", "inside.ts"), + line: 0, + character: 0, + }) + expect(spy).toHaveBeenCalledTimes(1) + } finally { + spy.mockRestore() + } + }), + ), + { + config: { + lsp: { + eslint: { disabled: true }, + }, + }, + }, ), ) }) diff --git a/packages/opencode/test/lsp/lifecycle.test.ts b/packages/opencode/test/lsp/lifecycle.test.ts index fe14729736..13f21c93cc 100644 --- a/packages/opencode/test/lsp/lifecycle.test.ts +++ b/packages/opencode/test/lsp/lifecycle.test.ts @@ -46,17 +46,49 @@ describe("LSP service lifecycle", () => { ), ) - it.live("hasClients() returns true for .ts files in instance", () => + it.live("hasClients() returns false for .ts files in instance when LSP is unset", () => provideTmpdirInstance((dir) => LSP.Service.use((lsp) => Effect.gen(function* () { const result = yield* lsp.hasClients(path.join(dir, "test.ts")) - expect(result).toBe(true) + expect(result).toBe(false) }), ), ), ) + it.live("hasClients() returns true for .ts files in instance when lsp is true", () => + provideTmpdirInstance( + (dir) => + LSP.Service.use((lsp) => + Effect.gen(function* () { + const result = yield* lsp.hasClients(path.join(dir, "test.ts")) + expect(result).toBe(true) + }), + ), + { config: { lsp: true } }, + ), + ) + + it.live("hasClients() keeps built-in LSPs when config object is provided", () => + provideTmpdirInstance( + (dir) => + LSP.Service.use((lsp) => + Effect.gen(function* () { + const result = yield* lsp.hasClients(path.join(dir, "test.ts")) + expect(result).toBe(true) + }), + ), + { + config: { + lsp: { + eslint: { disabled: true }, + }, + }, + }, + ), + ) + it.live("hasClients() returns false for files outside instance", () => provideTmpdirInstance((dir) => LSP.Service.use((lsp) => diff --git a/packages/opencode/test/plugin/loader-shared.test.ts b/packages/opencode/test/plugin/loader-shared.test.ts index 5072c1e748..83e9d71b4f 100644 --- 
a/packages/opencode/test/plugin/loader-shared.test.ts +++ b/packages/opencode/test/plugin/loader-shared.test.ts @@ -239,8 +239,8 @@ describe("plugin.loader.shared", () => { }) const add = spyOn(Npm, "add").mockImplementation(async (pkg) => { - if (pkg === "acme-plugin") return { directory: tmp.extra.acme, entrypoint: tmp.extra.acme } - return { directory: tmp.extra.scope, entrypoint: tmp.extra.scope } + if (pkg === "acme-plugin") return { directory: tmp.extra.acme, entrypoint: undefined } + return { directory: tmp.extra.scope, entrypoint: undefined } }) try { @@ -301,7 +301,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -358,7 +358,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -410,7 +410,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -455,7 +455,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -518,7 +518,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const 
install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { await load(tmp.path) @@ -548,7 +548,7 @@ describe("plugin.loader.shared", () => { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: "", entrypoint: "" }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: "", entrypoint: undefined }) try { await load(tmp.path) @@ -927,7 +927,7 @@ export default { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) const missing: string[] = [] try { @@ -996,7 +996,7 @@ export default { }, }) - const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: tmp.extra.mod }) + const install = spyOn(Npm, "add").mockResolvedValue({ directory: tmp.extra.mod, entrypoint: undefined }) try { const loaded = await PluginLoader.loadExternal({ diff --git a/packages/opencode/test/plugin/workspace-adaptor.test.ts b/packages/opencode/test/plugin/workspace-adaptor.test.ts index ff8df7490d..e74522c8be 100644 --- a/packages/opencode/test/plugin/workspace-adaptor.test.ts +++ b/packages/opencode/test/plugin/workspace-adaptor.test.ts @@ -14,7 +14,6 @@ const { Instance } = await import("../../src/project/instance") const experimental = Flag.OPENCODE_EXPERIMENTAL_WORKSPACES -// @ts-expect-error tests override the flag directly Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = true afterEach(async () => { @@ -28,7 +27,6 @@ afterAll(() => { process.env.OPENCODE_DISABLE_DEFAULT_PLUGINS = disableDefault } - // @ts-expect-error restore original test flag value Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = experimental }) diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index a2592286ad..58dc2b0b48 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -62,6 +62,7 @@ 
delete process.env["AWS_PROFILE"] delete process.env["AWS_REGION"] delete process.env["AWS_BEARER_TOKEN_BEDROCK"] delete process.env["OPENROUTER_API_KEY"] +delete process.env["LLM_GATEWAY_API_KEY"] delete process.env["GROQ_API_KEY"] delete process.env["MISTRAL_API_KEY"] delete process.env["PERPLEXITY_API_KEY"] diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts index df8fc4e966..8993020820 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -1916,7 +1916,7 @@ test("mode cost preserves over-200k pricing from base model", () => { }, }, }, - } as ModelsDev.Provider + } as unknown as ModelsDev.Provider const model = Provider.fromModelsDevProvider(provider).models["gpt-5.4-fast"] expect(model.cost.input).toEqual(5) @@ -1934,6 +1934,38 @@ test("mode cost preserves over-200k pricing from base model", () => { }) }) +test("models.dev normalization fills required response fields", () => { + const provider = { + id: "gateway", + name: "Gateway", + env: [], + models: { + "gpt-5.4": { + id: "gpt-5.4", + name: "GPT-5.4", + family: "gpt", + cost: { + input: 2.5, + output: 15, + }, + limit: { + context: 1_050_000, + input: 922_000, + output: 128_000, + }, + }, + }, + } as unknown as ModelsDev.Provider + + const model = Provider.fromModelsDevProvider(provider).models["gpt-5.4"] + expect(model.api.url).toBe("") + expect(model.capabilities.temperature).toBe(false) + expect(model.capabilities.reasoning).toBe(false) + expect(model.capabilities.attachment).toBe(false) + expect(model.capabilities.toolcall).toBe(true) + expect(model.release_date).toBe("") +}) + test("model variants are generated for reasoning models", async () => { await using tmp = await tmpdir({ init: async (dir) => { diff --git a/packages/opencode/test/server/session-messages.test.ts b/packages/opencode/test/server/session-messages.test.ts index 50b7658969..23e8b50145 100644 --- 
a/packages/opencode/test/server/session-messages.test.ts +++ b/packages/opencode/test/server/session-messages.test.ts @@ -165,16 +165,3 @@ describe("session messages endpoint", () => { ) }) }) - -describe("session.prompt_async error handling", () => { - test("prompt_async route has error handler for detached prompt call", async () => { - const src = await Bun.file(new URL("../../src/server/instance/session.ts", import.meta.url)).text() - const start = src.indexOf('"/:sessionID/prompt_async"') - const end = src.indexOf('"/:sessionID/command"', start) - expect(start).toBeGreaterThan(-1) - expect(end).toBeGreaterThan(start) - const route = src.slice(start, end) - expect(route).toContain(".catch(") - expect(route).toContain("Bus.publish(Session.Event.Error") - }) -}) diff --git a/packages/opencode/test/server/trace-attributes.test.ts b/packages/opencode/test/server/trace-attributes.test.ts new file mode 100644 index 0000000000..c6e8005a20 --- /dev/null +++ b/packages/opencode/test/server/trace-attributes.test.ts @@ -0,0 +1,76 @@ +import { describe, expect, test } from "bun:test" +import { paramToAttributeKey, requestAttributes } from "../../src/server/routes/instance/trace" + +function fakeContext(method: string, url: string, params: Record) { + return { + req: { + method, + url, + param: () => params, + }, + } +} + +describe("paramToAttributeKey", () => { + test("converts fooID to foo.id", () => { + expect(paramToAttributeKey("sessionID")).toBe("session.id") + expect(paramToAttributeKey("messageID")).toBe("message.id") + expect(paramToAttributeKey("partID")).toBe("part.id") + expect(paramToAttributeKey("projectID")).toBe("project.id") + expect(paramToAttributeKey("providerID")).toBe("provider.id") + expect(paramToAttributeKey("ptyID")).toBe("pty.id") + expect(paramToAttributeKey("permissionID")).toBe("permission.id") + expect(paramToAttributeKey("requestID")).toBe("request.id") + expect(paramToAttributeKey("workspaceID")).toBe("workspace.id") + }) + + test("namespaces 
non-ID params under opencode.", () => { + expect(paramToAttributeKey("name")).toBe("opencode.name") + expect(paramToAttributeKey("slug")).toBe("opencode.slug") + }) +}) + +describe("requestAttributes", () => { + test("includes http method and path", () => { + const attrs = requestAttributes(fakeContext("GET", "http://localhost/session", {})) + expect(attrs["http.method"]).toBe("GET") + expect(attrs["http.path"]).toBe("/session") + }) + + test("strips query string from path", () => { + const attrs = requestAttributes(fakeContext("GET", "http://localhost/file/search?query=foo&limit=10", {})) + expect(attrs["http.path"]).toBe("/file/search") + }) + + test("emits OTel-style .id for ID-shaped route params", () => { + const attrs = requestAttributes( + fakeContext("GET", "http://localhost/session/ses_abc/message/msg_def/part/prt_ghi", { + sessionID: "ses_abc", + messageID: "msg_def", + partID: "prt_ghi", + }), + ) + expect(attrs["session.id"]).toBe("ses_abc") + expect(attrs["message.id"]).toBe("msg_def") + expect(attrs["part.id"]).toBe("prt_ghi") + // No camelCase leftovers: + expect(attrs["opencode.sessionID"]).toBeUndefined() + expect(attrs["opencode.messageID"]).toBeUndefined() + expect(attrs["opencode.partID"]).toBeUndefined() + }) + + test("produces no param attributes when no params are matched", () => { + const attrs = requestAttributes(fakeContext("POST", "http://localhost/config", {})) + expect(Object.keys(attrs).filter((k) => k !== "http.method" && k !== "http.path")).toEqual([]) + }) + + test("namespaces non-ID params under opencode. (e.g. 
mcp :name)", () => { + const attrs = requestAttributes( + fakeContext("POST", "http://localhost/mcp/exa/connect", { + name: "exa", + }), + ) + expect(attrs["opencode.name"]).toBe("exa") + expect(attrs["name"]).toBeUndefined() + }) +}) diff --git a/packages/opencode/test/session/prompt-effect.test.ts b/packages/opencode/test/session/prompt-effect.test.ts index 121d662e5f..2f59046840 100644 --- a/packages/opencode/test/session/prompt-effect.test.ts +++ b/packages/opencode/test/session/prompt-effect.test.ts @@ -7,7 +7,6 @@ import { Agent as AgentSvc } from "../../src/agent/agent" import { Bus } from "../../src/bus" import { Command } from "../../src/command" import { Config } from "../../src/config" -import { FileTime } from "../../src/file/time" import { LSP } from "../../src/lsp" import { MCP } from "../../src/mcp" import { Permission } from "../../src/permission" @@ -148,16 +147,6 @@ const lsp = Layer.succeed( }), ) -const filetime = Layer.succeed( - FileTime.Service, - FileTime.Service.of({ - read: () => Effect.void, - get: () => Effect.succeed(undefined), - assert: () => Effect.void, - withLock: (_filepath, fn) => fn(), - }), -) - const status = SessionStatus.layer.pipe(Layer.provideMerge(Bus.layer)) const run = SessionRunState.layer.pipe(Layer.provide(status)) const infra = Layer.mergeAll(NodeFileSystem.layer, CrossSpawnSpawner.defaultLayer) @@ -173,7 +162,6 @@ function makeHttp() { Plugin.defaultLayer, Config.defaultLayer, ProviderSvc.defaultLayer, - filetime, lsp, mcp, AppFileSystem.defaultLayer, diff --git a/packages/opencode/test/session/session-entry.test.ts b/packages/opencode/test/session/session-entry.test.ts index 7eba3900d7..dea8da20a0 100644 --- a/packages/opencode/test/session/session-entry.test.ts +++ b/packages/opencode/test/session/session-entry.test.ts @@ -591,7 +591,64 @@ describe("session-entry step", () => { ) }) - test.failing("records synthetic events", () => { + test("routes tool events by callID when tool streams interleave", () => { + 
FastCheck.assert( + FastCheck.property(dict, dict, word, word, text, text, (a, b, titleA, titleB, deltaA, deltaB) => { + const next = run( + [ + SessionEvent.Tool.Input.Started.create({ callID: "a", name: "bash", timestamp: time(1) }), + SessionEvent.Tool.Input.Started.create({ callID: "b", name: "grep", timestamp: time(2) }), + SessionEvent.Tool.Input.Delta.create({ callID: "a", delta: deltaA, timestamp: time(3) }), + SessionEvent.Tool.Input.Delta.create({ callID: "b", delta: deltaB, timestamp: time(4) }), + SessionEvent.Tool.Called.create({ + callID: "a", + tool: "bash", + input: a, + provider: { executed: true }, + timestamp: time(5), + }), + SessionEvent.Tool.Called.create({ + callID: "b", + tool: "grep", + input: b, + provider: { executed: true }, + timestamp: time(6), + }), + SessionEvent.Tool.Success.create({ + callID: "a", + title: titleA, + output: "done-a", + provider: { executed: true }, + timestamp: time(7), + }), + SessionEvent.Tool.Success.create({ + callID: "b", + title: titleB, + output: "done-b", + provider: { executed: true }, + timestamp: time(8), + }), + ], + active(), + ) + + const first = tool(next, "a") + const second = tool(next, "b") + + expect(first?.state.status).toBe("completed") + expect(second?.state.status).toBe("completed") + if (first?.state.status !== "completed" || second?.state.status !== "completed") return + + expect(first.state.input).toEqual(a) + expect(second.state.input).toEqual(b) + expect(first.state.title).toBe(titleA) + expect(second.state.title).toBe(titleB) + }), + { numRuns: 50 }, + ) + }) + + test("records synthetic events", () => { FastCheck.assert( FastCheck.property(word, (body) => { const next = SessionEntry.step(history(), SessionEvent.Synthetic.create({ text: body, timestamp: time(1) })) @@ -604,7 +661,7 @@ describe("session-entry step", () => { ) }) - test.failing("records compaction events", () => { + test("records compaction events", () => { FastCheck.assert( FastCheck.property(FastCheck.boolean(), 
maybe(FastCheck.boolean()), (auto, overflow) => { const next = SessionEntry.step( diff --git a/packages/opencode/test/session/snapshot-tool-race.test.ts b/packages/opencode/test/session/snapshot-tool-race.test.ts index 1f66ccb995..6517547339 100644 --- a/packages/opencode/test/session/snapshot-tool-race.test.ts +++ b/packages/opencode/test/session/snapshot-tool-race.test.ts @@ -33,7 +33,6 @@ import { Agent as AgentSvc } from "../../src/agent/agent" import { Bus } from "../../src/bus" import { Command } from "../../src/command" import { Config } from "../../src/config" -import { FileTime } from "../../src/file/time" import { LSP } from "../../src/lsp" import { MCP } from "../../src/mcp" import { Permission } from "../../src/permission" @@ -102,16 +101,6 @@ const lsp = Layer.succeed( }), ) -const filetime = Layer.succeed( - FileTime.Service, - FileTime.Service.of({ - read: () => Effect.void, - get: () => Effect.succeed(undefined), - assert: () => Effect.void, - withLock: (_filepath, fn) => fn(), - }), -) - const status = SessionStatus.layer.pipe(Layer.provideMerge(Bus.layer)) const run = SessionRunState.layer.pipe(Layer.provide(status)) const infra = Layer.mergeAll(NodeFileSystem.layer, CrossSpawnSpawner.defaultLayer) @@ -128,7 +117,6 @@ function makeHttp() { Plugin.defaultLayer, Config.defaultLayer, ProviderSvc.defaultLayer, - filetime, lsp, mcp, AppFileSystem.defaultLayer, diff --git a/packages/opencode/test/share/share-next.test.ts b/packages/opencode/test/share/share-next.test.ts index 2359f06a31..e217300d09 100644 --- a/packages/opencode/test/share/share-next.test.ts +++ b/packages/opencode/test/share/share-next.test.ts @@ -3,8 +3,8 @@ import { beforeEach, describe, expect } from "bun:test" import { Effect, Exit, Layer, Option } from "effect" import { HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http" -import { AccessToken, AccountID, OrgID, RefreshToken } from "../../src/account" -import { Account } from "../../src/account" +import 
{ AccessToken, AccountID, OrgID, RefreshToken } from "../../src/account/schema" +import { Account } from "../../src/account/account" import { AccountRepo } from "../../src/account/repo" import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner" import { Bus } from "../../src/bus" @@ -72,7 +72,7 @@ const share = (id: SessionID) => Database.use((db) => db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, id)).get()) const seed = (url: string, org?: string) => - AccountRepo.use((repo) => + AccountRepo.Service.use((repo) => repo.persistAccount({ id: AccountID.make("account-1"), email: "user@example.com", diff --git a/packages/opencode/test/sync/index.test.ts b/packages/opencode/test/sync/index.test.ts index 36429c3d84..866bcaa31a 100644 --- a/packages/opencode/test/sync/index.test.ts +++ b/packages/opencode/test/sync/index.test.ts @@ -15,12 +15,10 @@ const original = Flag.OPENCODE_EXPERIMENTAL_WORKSPACES beforeEach(() => { Database.close() - // @ts-expect-error don't do this normally, but it works Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = true }) afterEach(() => { - // @ts-expect-error don't do this normally, but it works Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = original }) diff --git a/packages/opencode/test/tool/edit.test.ts b/packages/opencode/test/tool/edit.test.ts index 2e3dfa8a69..4759b8be36 100644 --- a/packages/opencode/test/tool/edit.test.ts +++ b/packages/opencode/test/tool/edit.test.ts @@ -5,7 +5,6 @@ import { Effect, Layer, ManagedRuntime } from "effect" import { EditTool } from "../../src/tool/edit" import { Instance } from "../../src/project/instance" import { tmpdir } from "../fixture/fixture" -import { FileTime } from "../../src/file/time" import { LSP } from "../../src/lsp" import { AppFileSystem } from "@opencode-ai/shared/filesystem" import { Format } from "../../src/format" @@ -38,7 +37,6 @@ async function touch(file: string, time: number) { const runtime = ManagedRuntime.make( Layer.mergeAll( LSP.defaultLayer, - 
FileTime.defaultLayer, AppFileSystem.defaultLayer, Format.defaultLayer, Bus.layer, @@ -59,9 +57,6 @@ const resolve = () => }), ) -const readFileTime = (sessionID: SessionID, filepath: string) => - runtime.runPromise(FileTime.Service.use((ft) => ft.read(sessionID, filepath))) - const subscribeBus = (def: D, callback: () => unknown) => runtime.runPromise(Bus.Service.use((bus) => bus.subscribeCallback(def, callback))) @@ -173,8 +168,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() const result = await Effect.runPromise( edit.execute( @@ -202,8 +195,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await expect( Effect.runPromise( @@ -254,8 +245,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await expect( Effect.runPromise( @@ -273,65 +262,6 @@ describe("tool.edit", () => { }) }) - test("throws error when file was not read first (FileTime)", async () => { - await using tmp = await tmpdir() - const filepath = path.join(tmp.path, "file.txt") - await fs.writeFile(filepath, "content", "utf-8") - - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const edit = await resolve() - await expect( - Effect.runPromise( - edit.execute( - { - filePath: filepath, - oldString: "content", - newString: "modified", - }, - ctx, - ), - ), - ).rejects.toThrow("You must read file") - }, - }) - }) - - test("throws error when file has been modified since read", async () => { - await using tmp = await tmpdir() - const filepath = path.join(tmp.path, "file.txt") - await fs.writeFile(filepath, "original content", "utf-8") - await touch(filepath, 1_000) - - await Instance.provide({ - directory: tmp.path, 
- fn: async () => { - // Read first - await readFileTime(ctx.sessionID, filepath) - - // Simulate external modification - await fs.writeFile(filepath, "modified externally", "utf-8") - await touch(filepath, 2_000) - - // Try to edit with the new content - const edit = await resolve() - await expect( - Effect.runPromise( - edit.execute( - { - filePath: filepath, - oldString: "modified externally", - newString: "edited", - }, - ctx, - ), - ), - ).rejects.toThrow("modified since it was last read") - }, - }) - }) - test("replaces all occurrences with replaceAll option", async () => { await using tmp = await tmpdir() const filepath = path.join(tmp.path, "file.txt") @@ -340,8 +270,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await Effect.runPromise( edit.execute( @@ -369,8 +297,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const { FileWatcher } = await import("../../src/file/watcher") const updated = await onceBus(FileWatcher.Event.Updated) @@ -406,8 +332,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await Effect.runPromise( edit.execute( @@ -434,8 +358,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() await Effect.runPromise( edit.execute( @@ -487,8 +409,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, dirpath) - const edit = await resolve() await expect( Effect.runPromise( @@ -514,8 +434,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await 
readFileTime(ctx.sessionID, filepath) - const edit = await resolve() const result = await Effect.runPromise( edit.execute( @@ -587,7 +505,6 @@ describe("tool.edit", () => { fn: async () => { const edit = await resolve() const filePath = path.join(tmp.path, "test.txt") - await readFileTime(ctx.sessionID, filePath) await Effect.runPromise( edit.execute( { @@ -730,8 +647,6 @@ describe("tool.edit", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await readFileTime(ctx.sessionID, filepath) - const edit = await resolve() // Two concurrent edits @@ -746,9 +661,6 @@ describe("tool.edit", () => { ), ) - // Need to read again since FileTime tracks per-session - await readFileTime(ctx.sessionID, filepath) - const promise2 = Effect.runPromise( edit.execute( { diff --git a/packages/opencode/test/tool/read.test.ts b/packages/opencode/test/tool/read.test.ts index 3b32c72e05..7456990ad0 100644 --- a/packages/opencode/test/tool/read.test.ts +++ b/packages/opencode/test/tool/read.test.ts @@ -4,7 +4,6 @@ import path from "path" import { Agent } from "../../src/agent/agent" import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner" import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { FileTime } from "../../src/file/time" import { LSP } from "../../src/lsp" import { Permission } from "../../src/permission" import { Instance } from "../../src/project/instance" @@ -16,7 +15,6 @@ import { Tool } from "../../src/tool" import { Filesystem } from "../../src/util" import { provideInstance, tmpdirScoped } from "../fixture/fixture" import { testEffect } from "../lib/effect" -import { Npm } from "@opencode-ai/shared/npm" const FIXTURES_DIR = path.join(import.meta.dir, "fixtures") @@ -40,7 +38,6 @@ const it = testEffect( Agent.defaultLayer, AppFileSystem.defaultLayer, CrossSpawnSpawner.defaultLayer, - FileTime.defaultLayer, Instruction.defaultLayer, LSP.defaultLayer, Truncate.defaultLayer, diff --git 
a/packages/opencode/test/tool/write.test.ts b/packages/opencode/test/tool/write.test.ts index 46bbe2e401..50d3b57527 100644 --- a/packages/opencode/test/tool/write.test.ts +++ b/packages/opencode/test/tool/write.test.ts @@ -6,7 +6,6 @@ import { WriteTool } from "../../src/tool/write" import { Instance } from "../../src/project/instance" import { LSP } from "../../src/lsp" import { AppFileSystem } from "@opencode-ai/shared/filesystem" -import { FileTime } from "../../src/file/time" import { Bus } from "../../src/bus" import { Format } from "../../src/format" import { Truncate } from "../../src/tool" @@ -36,7 +35,6 @@ const it = testEffect( Layer.mergeAll( LSP.defaultLayer, AppFileSystem.defaultLayer, - FileTime.defaultLayer, Bus.layer, Format.defaultLayer, CrossSpawnSpawner.defaultLayer, @@ -58,11 +56,6 @@ const run = Effect.fn("WriteToolTest.run")(function* ( return yield* tool.execute(args, next) }) -const markRead = Effect.fn("WriteToolTest.markRead")(function* (sessionID: string, filepath: string) { - const ft = yield* FileTime.Service - yield* ft.read(sessionID as any, filepath) -}) - describe("tool.write", () => { describe("new file creation", () => { it.live("writes content to new file", () => @@ -110,8 +103,6 @@ describe("tool.write", () => { Effect.gen(function* () { const filepath = path.join(dir, "existing.txt") yield* Effect.promise(() => fs.writeFile(filepath, "old content", "utf-8")) - yield* markRead(ctx.sessionID, filepath) - const result = yield* run({ filePath: filepath, content: "new content" }) expect(result.output).toContain("Wrote file successfully") @@ -128,8 +119,6 @@ describe("tool.write", () => { Effect.gen(function* () { const filepath = path.join(dir, "file.txt") yield* Effect.promise(() => fs.writeFile(filepath, "old", "utf-8")) - yield* markRead(ctx.sessionID, filepath) - const result = yield* run({ filePath: filepath, content: "new" }) expect(result.metadata).toHaveProperty("filepath", filepath) @@ -231,8 +220,6 @@ 
describe("tool.write", () => { const readonlyPath = path.join(dir, "readonly.txt") yield* Effect.promise(() => fs.writeFile(readonlyPath, "test", "utf-8")) yield* Effect.promise(() => fs.chmod(readonlyPath, 0o444)) - yield* markRead(ctx.sessionID, readonlyPath) - const exit = yield* run({ filePath: readonlyPath, content: "new content" }).pipe(Effect.exit) expect(exit._tag).toBe("Failure") }), diff --git a/packages/opencode/test/util/effect-zod.test.ts b/packages/opencode/test/util/effect-zod.test.ts index 7f7249514d..003945b434 100644 --- a/packages/opencode/test/util/effect-zod.test.ts +++ b/packages/opencode/test/util/effect-zod.test.ts @@ -1,8 +1,8 @@ import { describe, expect, test } from "bun:test" -import { Schema } from "effect" +import { Effect, Schema, SchemaGetter } from "effect" import z from "zod" -import { zod, ZodOverride } from "../../src/util/effect-zod" +import { zod, ZodOverride, ZodPreprocess } from "../../src/util/effect-zod" function json(schema: z.ZodTypeAny) { const { $schema: _, ...rest } = z.toJSONSchema(schema) @@ -61,8 +61,32 @@ describe("util.effect-zod", () => { }) }) - test("throws for unsupported tuple schemas", () => { - expect(() => zod(Schema.Tuple([Schema.String, Schema.Number]))).toThrow("unsupported effect schema") + describe("Tuples", () => { + test("fixed-length tuple parses matching array", () => { + const out = zod(Schema.Tuple([Schema.String, Schema.Number])) + expect(out.parse(["a", 1])).toEqual(["a", 1]) + expect(out.safeParse(["a"]).success).toBe(false) + expect(out.safeParse(["a", "b"]).success).toBe(false) + }) + + test("single-element tuple parses a one-element array", () => { + const out = zod(Schema.Tuple([Schema.Boolean])) + expect(out.parse([true])).toEqual([true]) + expect(out.safeParse([true, false]).success).toBe(false) + }) + + test("tuple inside a union picks the right branch", () => { + const out = zod(Schema.Union([Schema.String, Schema.Tuple([Schema.String, Schema.Number])])) + 
expect(out.parse("hello")).toBe("hello") + expect(out.parse(["foo", 42])).toEqual(["foo", 42]) + expect(out.safeParse(["foo"]).success).toBe(false) + }) + + test("plain arrays still work (no element positions)", () => { + const out = zod(Schema.Array(Schema.String)) + expect(out.parse(["a", "b", "c"])).toEqual(["a", "b", "c"]) + expect(out.parse([])).toEqual([]) + }) }) test("string literal unions produce z.enum with enum in JSON Schema", () => { @@ -186,4 +210,660 @@ describe("util.effect-zod", () => { const schema = json(zod(Parent)) as any expect(schema.properties.sessionID).toEqual({ type: "string", pattern: "^ses.*" }) }) + + describe("Schema.check translation", () => { + test("filter returning string triggers refinement with that message", () => { + const isEven = Schema.makeFilter((n: number) => (n % 2 === 0 ? undefined : "expected an even number")) + const schema = zod(Schema.Number.check(isEven)) + + expect(schema.parse(4)).toBe(4) + const result = schema.safeParse(3) + expect(result.success).toBe(false) + expect(result.error!.issues[0].message).toBe("expected an even number") + }) + + test("filter returning false triggers refinement with fallback message", () => { + const nonEmpty = Schema.makeFilter((s: string) => s.length > 0) + const schema = zod(Schema.String.check(nonEmpty)) + + expect(schema.parse("hi")).toBe("hi") + const result = schema.safeParse("") + expect(result.success).toBe(false) + expect(result.error!.issues[0].message).toMatch(/./) + }) + + test("filter returning undefined passes validation", () => { + const alwaysOk = Schema.makeFilter(() => undefined) + const schema = zod(Schema.Number.check(alwaysOk)) + + expect(schema.parse(42)).toBe(42) + }) + + test("annotations.message on the filter is used when filter returns false", () => { + const positive = Schema.makeFilter((n: number) => n > 0, { message: "must be positive" }) + const schema = zod(Schema.Number.check(positive)) + + const result = schema.safeParse(-1) + 
expect(result.success).toBe(false) + expect(result.error!.issues[0].message).toBe("must be positive") + }) + + test("cross-field check on a record flags missing key", () => { + const hasKey = Schema.makeFilter((data: Record) => + "required" in data ? undefined : "missing 'required' key", + ) + const schema = zod(Schema.Record(Schema.String, Schema.Struct({ enabled: Schema.Boolean })).check(hasKey)) + + expect(schema.parse({ required: { enabled: true } })).toEqual({ + required: { enabled: true }, + }) + + const result = schema.safeParse({ other: { enabled: true } }) + expect(result.success).toBe(false) + expect(result.error!.issues[0].message).toBe("missing 'required' key") + }) + }) + + describe("StructWithRest / catchall", () => { + test("struct with a string-keyed record rest parses known AND extra keys", () => { + const schema = zod( + Schema.StructWithRest( + Schema.Struct({ + apiKey: Schema.optional(Schema.String), + baseURL: Schema.optional(Schema.String), + }), + [Schema.Record(Schema.String, Schema.Unknown)], + ), + ) + + // Known fields come through as declared + expect(schema.parse({ apiKey: "sk-x" })).toEqual({ apiKey: "sk-x" }) + + // Extra keys are preserved (catchall) + expect( + schema.parse({ + apiKey: "sk-x", + baseURL: "https://api.example.com", + customField: "anything", + nested: { foo: 1 }, + }), + ).toEqual({ + apiKey: "sk-x", + baseURL: "https://api.example.com", + customField: "anything", + nested: { foo: 1 }, + }) + }) + + test("catchall value type constrains the extras", () => { + const schema = zod( + Schema.StructWithRest( + Schema.Struct({ + count: Schema.Number, + }), + [Schema.Record(Schema.String, Schema.Number)], + ), + ) + + // Known field + numeric extras + expect(schema.parse({ count: 10, a: 1, b: 2 })).toEqual({ count: 10, a: 1, b: 2 }) + + // Non-numeric extra is rejected + expect(schema.safeParse({ count: 10, bad: "not a number" }).success).toBe(false) + }) + + test("JSON schema output marks additionalProperties 
appropriately", () => { + const schema = zod( + Schema.StructWithRest( + Schema.Struct({ + id: Schema.String, + }), + [Schema.Record(Schema.String, Schema.Unknown)], + ), + ) + const shape = json(schema) as { additionalProperties?: unknown } + // Presence of `additionalProperties` (truthy or a schema) signals catchall. + expect(shape.additionalProperties).not.toBe(false) + expect(shape.additionalProperties).toBeDefined() + }) + + test("plain struct without rest still emits additionalProperties unchanged (regression)", () => { + const schema = zod(Schema.Struct({ id: Schema.String })) + expect(schema.parse({ id: "x" })).toEqual({ id: "x" }) + }) + }) + + describe("transforms (Schema.decodeTo)", () => { + test("Number -> pseudo-Duration (seconds) applies the decode function", () => { + // Models the account/account.ts DurationFromSeconds pattern. + const SecondsToMs = Schema.Number.pipe( + Schema.decodeTo(Schema.Number, { + decode: SchemaGetter.transform((n: number) => n * 1000), + encode: SchemaGetter.transform((ms: number) => ms / 1000), + }), + ) + + const schema = zod(SecondsToMs) + expect(schema.parse(3)).toBe(3000) + expect(schema.parse(0)).toBe(0) + }) + + test("String -> Number via parseInt decode", () => { + const ParsedInt = Schema.String.pipe( + Schema.decodeTo(Schema.Number, { + decode: SchemaGetter.transform((s: string) => Number.parseInt(s, 10)), + encode: SchemaGetter.transform((n: number) => String(n)), + }), + ) + + const schema = zod(ParsedInt) + expect(schema.parse("42")).toBe(42) + expect(schema.parse("0")).toBe(0) + }) + + test("transform inside a struct field applies per-field", () => { + const Field = Schema.Number.pipe( + Schema.decodeTo(Schema.Number, { + decode: SchemaGetter.transform((n: number) => n + 1), + encode: SchemaGetter.transform((n: number) => n - 1), + }), + ) + + const schema = zod( + Schema.Struct({ + plain: Schema.Number, + bumped: Field, + }), + ) + + expect(schema.parse({ plain: 5, bumped: 10 })).toEqual({ plain: 5, bumped: 
11 }) + }) + + test("chained decodeTo composes transforms in order", () => { + // String -> Number (parseInt) -> Number (doubled). + // Exercises the encoded() reduce, not just a single link. + const Chained = Schema.String.pipe( + Schema.decodeTo(Schema.Number, { + decode: SchemaGetter.transform((s: string) => Number.parseInt(s, 10)), + encode: SchemaGetter.transform((n: number) => String(n)), + }), + Schema.decodeTo(Schema.Number, { + decode: SchemaGetter.transform((n: number) => n * 2), + encode: SchemaGetter.transform((n: number) => n / 2), + }), + ) + + const schema = zod(Chained) + expect(schema.parse("21")).toBe(42) + expect(schema.parse("0")).toBe(0) + }) + + test("Schema.Class is unaffected by transform walker (returns plain object, not instance)", () => { + // Schema.Class uses Declaration + encoding under the hood to construct + // class instances. The walker must NOT apply that transform, or zod + // parsing would return class instances instead of plain objects. + class Method extends Schema.Class("TxTestMethod")({ + type: Schema.String, + value: Schema.Number, + }) {} + + const schema = zod(Method) + const parsed = schema.parse({ type: "oauth", value: 1 }) + expect(parsed).toEqual({ type: "oauth", value: 1 }) + // Guardrail: ensure we didn't get back a Method instance. + expect(parsed).not.toBeInstanceOf(Method) + }) + }) + + describe("optimizations", () => { + test("walk() memoizes by AST identity — same AST node returns same Zod", () => { + const shared = Schema.Struct({ id: Schema.String, name: Schema.String }) + const left = zod(shared) + const right = zod(shared) + expect(left).toBe(right) + }) + + test("nested reuse of the same AST reuses the cached Zod child", () => { + // Two different parents embed the same inner schema. The inner zod + // child should be identical by reference inside both parents. 
+ class Inner extends Schema.Class("MemoTestInner")({ + value: Schema.String, + }) {} + + class OuterA extends Schema.Class("MemoTestOuterA")({ + inner: Inner, + }) {} + + class OuterB extends Schema.Class("MemoTestOuterB")({ + inner: Inner, + }) {} + + const shapeA = (zod(OuterA) as any).shape ?? (zod(OuterA) as any)._def?.shape?.() + const shapeB = (zod(OuterB) as any).shape ?? (zod(OuterB) as any)._def?.shape?.() + expect(shapeA.inner).toBe(shapeB.inner) + }) + + test("multiple checks run in a single refinement layer (all fire on one value)", () => { + // Three checks attached to the same schema. All three must run and + // report — asserting that no check silently got dropped when we + // flattened into one superRefine. + const positive = Schema.makeFilter((n: number) => (n > 0 ? undefined : "not positive")) + const even = Schema.makeFilter((n: number) => (n % 2 === 0 ? undefined : "not even")) + const under100 = Schema.makeFilter((n: number) => (n < 100 ? undefined : "too big")) + + const schema = zod(Schema.Number.check(positive).check(even).check(under100)) + + const neg = schema.safeParse(-3) + expect(neg.success).toBe(false) + expect(neg.error!.issues.map((i) => i.message)).toEqual(expect.arrayContaining(["not positive", "not even"])) + + const big = schema.safeParse(101) + expect(big.success).toBe(false) + expect(big.error!.issues.map((i) => i.message)).toContain("too big") + + // Passing value satisfies all three + expect(schema.parse(42)).toBe(42) + }) + + test("FilterGroup flattens into the single refinement layer alongside its siblings", () => { + const positive = Schema.makeFilter((n: number) => (n > 0 ? undefined : "not positive")) + const even = Schema.makeFilter((n: number) => (n % 2 === 0 ? undefined : "not even")) + const group = Schema.makeFilterGroup([positive, even]) + const under100 = Schema.makeFilter((n: number) => (n < 100 ? 
undefined : "too big")) + + const schema = zod(Schema.Number.check(group).check(under100)) + + const bad = schema.safeParse(-3) + expect(bad.success).toBe(false) + expect(bad.error!.issues.map((i) => i.message)).toEqual(expect.arrayContaining(["not positive", "not even"])) + }) + }) + + describe("well-known refinement translation", () => { + test("Schema.isInt emits type: integer in JSON Schema", () => { + const schema = zod(Schema.Number.check(Schema.isInt())) + const native = json(z.number().int()) + expect(json(schema)).toEqual(native) + expect(schema.parse(3)).toBe(3) + expect(schema.safeParse(1.5).success).toBe(false) + }) + + test("Schema.isGreaterThan(0) emits exclusiveMinimum: 0", () => { + const schema = zod(Schema.Number.check(Schema.isGreaterThan(0))) + expect((json(schema) as any).exclusiveMinimum).toBe(0) + expect(schema.parse(1)).toBe(1) + expect(schema.safeParse(0).success).toBe(false) + expect(schema.safeParse(-1).success).toBe(false) + }) + + test("Schema.isGreaterThanOrEqualTo(0) emits minimum: 0", () => { + const schema = zod(Schema.Number.check(Schema.isGreaterThanOrEqualTo(0))) + expect((json(schema) as any).minimum).toBe(0) + expect(schema.parse(0)).toBe(0) + expect(schema.safeParse(-1).success).toBe(false) + }) + + test("Schema.isLessThan(10) emits exclusiveMaximum: 10", () => { + const schema = zod(Schema.Number.check(Schema.isLessThan(10))) + expect((json(schema) as any).exclusiveMaximum).toBe(10) + expect(schema.parse(9)).toBe(9) + expect(schema.safeParse(10).success).toBe(false) + }) + + test("Schema.isLessThanOrEqualTo(10) emits maximum: 10", () => { + const schema = zod(Schema.Number.check(Schema.isLessThanOrEqualTo(10))) + expect((json(schema) as any).maximum).toBe(10) + expect(schema.parse(10)).toBe(10) + expect(schema.safeParse(11).success).toBe(false) + }) + + test("Schema.isMultipleOf(5) emits multipleOf: 5", () => { + const schema = zod(Schema.Number.check(Schema.isMultipleOf(5))) + expect((json(schema) as any).multipleOf).toBe(5) 
+ expect(schema.parse(10)).toBe(10) + expect(schema.safeParse(7).success).toBe(false) + }) + + test("Schema.isFinite validates at runtime", () => { + const schema = zod(Schema.Number.check(Schema.isFinite())) + expect(schema.parse(1)).toBe(1) + expect(schema.safeParse(Infinity).success).toBe(false) + expect(schema.safeParse(NaN).success).toBe(false) + }) + + test("chained isInt + isGreaterThan(0) matches z.number().int().positive()", () => { + const schema = zod(Schema.Number.check(Schema.isInt()).check(Schema.isGreaterThan(0))) + const native = json(z.number().int().positive()) + expect(json(schema)).toEqual(native) + expect(schema.parse(3)).toBe(3) + expect(schema.safeParse(0).success).toBe(false) + expect(schema.safeParse(1.5).success).toBe(false) + }) + + test("chained isInt + isGreaterThanOrEqualTo(0) matches z.number().int().min(0)", () => { + const schema = zod(Schema.Number.check(Schema.isInt()).check(Schema.isGreaterThanOrEqualTo(0))) + const native = json(z.number().int().min(0)) + expect(json(schema)).toEqual(native) + expect(schema.parse(0)).toBe(0) + expect(schema.safeParse(-1).success).toBe(false) + }) + + test("Schema.isBetween emits both bounds", () => { + const schema = zod(Schema.Number.check(Schema.isBetween({ minimum: 1, maximum: 10 }))) + const shape = json(schema) as any + expect(shape.minimum).toBe(1) + expect(shape.maximum).toBe(10) + expect(schema.parse(5)).toBe(5) + expect(schema.safeParse(11).success).toBe(false) + expect(schema.safeParse(0).success).toBe(false) + }) + + test("Schema.isBetween with exclusive bounds emits exclusiveMinimum/Maximum", () => { + const schema = zod( + Schema.Number.check( + Schema.isBetween({ minimum: 1, maximum: 10, exclusiveMinimum: true, exclusiveMaximum: true }), + ), + ) + const shape = json(schema) as any + expect(shape.exclusiveMinimum).toBe(1) + expect(shape.exclusiveMaximum).toBe(10) + expect(schema.parse(5)).toBe(5) + expect(schema.safeParse(1).success).toBe(false) + 
expect(schema.safeParse(10).success).toBe(false) + }) + + test("Schema.isInt32 (FilterGroup) produces integer bounds", () => { + const schema = zod(Schema.Number.check(Schema.isInt32())) + const shape = json(schema) as any + expect(shape.type).toBe("integer") + expect(shape.minimum).toBe(-2147483648) + expect(shape.maximum).toBe(2147483647) + expect(schema.parse(42)).toBe(42) + expect(schema.safeParse(1.5).success).toBe(false) + expect(schema.safeParse(2147483648).success).toBe(false) + }) + + test("Schema.isMinLength on string emits minLength", () => { + const schema = zod(Schema.String.check(Schema.isMinLength(3))) + expect((json(schema) as any).minLength).toBe(3) + expect(schema.parse("abc")).toBe("abc") + expect(schema.safeParse("ab").success).toBe(false) + }) + + test("Schema.isMaxLength on string emits maxLength", () => { + const schema = zod(Schema.String.check(Schema.isMaxLength(5))) + expect((json(schema) as any).maxLength).toBe(5) + expect(schema.parse("abcde")).toBe("abcde") + expect(schema.safeParse("abcdef").success).toBe(false) + }) + + test("Schema.isLengthBetween on string emits both bounds", () => { + const schema = zod(Schema.String.check(Schema.isLengthBetween(2, 4))) + const shape = json(schema) as any + expect(shape.minLength).toBe(2) + expect(shape.maxLength).toBe(4) + expect(schema.parse("abc")).toBe("abc") + expect(schema.safeParse("a").success).toBe(false) + expect(schema.safeParse("abcde").success).toBe(false) + }) + + test("Schema.isMinLength on array emits minItems", () => { + const schema = zod(Schema.Array(Schema.String).check(Schema.isMinLength(1))) + expect((json(schema) as any).minItems).toBe(1) + expect(schema.parse(["x"])).toEqual(["x"]) + expect(schema.safeParse([]).success).toBe(false) + }) + + test("Schema.isPattern emits pattern", () => { + const schema = zod(Schema.String.check(Schema.isPattern(/^per/))) + expect((json(schema) as any).pattern).toBe("^per") + expect(schema.parse("per_abc")).toBe("per_abc") + 
expect(schema.safeParse("abc").success).toBe(false) + }) + + test("Schema.isStartsWith matches native zod .startsWith() JSON Schema", () => { + const schema = zod(Schema.String.check(Schema.isStartsWith("per"))) + const native = json(z.string().startsWith("per")) + expect(json(schema)).toEqual(native) + expect(schema.parse("per_abc")).toBe("per_abc") + expect(schema.safeParse("abc").success).toBe(false) + }) + + test("Schema.isEndsWith matches native zod .endsWith() JSON Schema", () => { + const schema = zod(Schema.String.check(Schema.isEndsWith(".json"))) + const native = json(z.string().endsWith(".json")) + expect(json(schema)).toEqual(native) + expect(schema.parse("a.json")).toBe("a.json") + expect(schema.safeParse("a.txt").success).toBe(false) + }) + + test("Schema.isUUID emits format: uuid", () => { + const schema = zod(Schema.String.check(Schema.isUUID())) + expect((json(schema) as any).format).toBe("uuid") + }) + + test("mix of well-known and anonymous filters translates known and reroutes unknown to superRefine", () => { + // isInt is well-known (translates to .int()); the anonymous filter falls + // back to superRefine. + const notSeven = Schema.makeFilter((n: number) => (n !== 7 ? 
undefined : "no sevens allowed")) + const schema = zod(Schema.Number.check(Schema.isInt()).check(notSeven)) + + const shape = json(schema) as any + // Well-known translation is preserved — type is integer, not plain number + expect(shape.type).toBe("integer") + + // Runtime: both constraints fire + expect(schema.parse(3)).toBe(3) + expect(schema.safeParse(1.5).success).toBe(false) + const seven = schema.safeParse(7) + expect(seven.success).toBe(false) + expect(seven.error!.issues[0].message).toBe("no sevens allowed") + }) + + test("inside a struct field, well-known refinements propagate through", () => { + // Mirrors config.ts port: z.number().int().positive().optional() + const Port = Schema.optional(Schema.Number.check(Schema.isInt()).check(Schema.isGreaterThan(0))) + const schema = zod(Schema.Struct({ port: Port })) + const shape = json(schema) as any + expect(shape.properties.port.type).toBe("integer") + expect(shape.properties.port.exclusiveMinimum).toBe(0) + }) + }) + + describe("Schema.optionalWith defaults", () => { + test("parsing undefined returns the default value", () => { + const schema = zod( + Schema.Struct({ + mode: Schema.String.pipe(Schema.optional, Schema.withDecodingDefault(Effect.succeed("ctrl-x"))), + }), + ) + expect(schema.parse({})).toEqual({ mode: "ctrl-x" }) + expect(schema.parse({ mode: undefined })).toEqual({ mode: "ctrl-x" }) + }) + + test("parsing a real value returns that value (default does not fire)", () => { + const schema = zod( + Schema.Struct({ + mode: Schema.String.pipe(Schema.optional, Schema.withDecodingDefault(Effect.succeed("ctrl-x"))), + }), + ) + expect(schema.parse({ mode: "ctrl-y" })).toEqual({ mode: "ctrl-y" }) + }) + + test("default on a number field", () => { + const schema = zod( + Schema.Struct({ + count: Schema.Number.pipe(Schema.optional, Schema.withDecodingDefault(Effect.succeed(42))), + }), + ) + expect(schema.parse({})).toEqual({ count: 42 }) + expect(schema.parse({ count: 7 })).toEqual({ count: 7 }) + }) + + 
test("multiple defaulted fields inside a struct", () => { + const schema = zod( + Schema.Struct({ + leader: Schema.String.pipe(Schema.optional, Schema.withDecodingDefault(Effect.succeed("ctrl-x"))), + quit: Schema.String.pipe(Schema.optional, Schema.withDecodingDefault(Effect.succeed("ctrl-c"))), + inner: Schema.String, + }), + ) + expect(schema.parse({ inner: "hi" })).toEqual({ + leader: "ctrl-x", + quit: "ctrl-c", + inner: "hi", + }) + expect(schema.parse({ leader: "a", quit: "b", inner: "c" })).toEqual({ + leader: "a", + quit: "b", + inner: "c", + }) + }) + + test("JSON Schema output includes the default key", () => { + const schema = zod( + Schema.Struct({ + mode: Schema.String.pipe(Schema.optional, Schema.withDecodingDefault(Effect.succeed("ctrl-x"))), + }), + ) + const shape = json(schema) as any + expect(shape.properties.mode.default).toBe("ctrl-x") + }) + + test("default referencing a computed value resolves when evaluated", () => { + // Simulates `keybinds.ts` style of per-platform defaults: the default is + // produced by an Effect that computes a value at decode time. + const platform = "darwin" + const fallback = platform === "darwin" ? 
"cmd-k" : "ctrl-k" + const schema = zod( + Schema.Struct({ + command_palette: Schema.String.pipe(Schema.optional, Schema.withDecodingDefault(Effect.sync(() => fallback))), + }), + ) + expect(schema.parse({})).toEqual({ command_palette: "cmd-k" }) + const shape = json(schema) as any + expect(shape.properties.command_palette.default).toBe("cmd-k") + }) + + test("plain Schema.optional (no default) still emits .optional() (regression)", () => { + const schema = zod(Schema.Struct({ foo: Schema.optional(Schema.String) })) + expect(schema.parse({})).toEqual({}) + expect(schema.parse({ foo: "hi" })).toEqual({ foo: "hi" }) + }) + }) + + describe("ZodPreprocess annotation", () => { + test("preprocess runs on raw input before the inner schema parses", () => { + // Models the permission.ts __originalKeys pattern: capture the original + // insertion order of a user-provided object BEFORE Schema parsing + // canonicalises the keys. + const preprocess = (val: unknown) => { + if (typeof val === "object" && val !== null && !Array.isArray(val)) { + return { __keys: Object.keys(val), ...(val as Record) } + } + return val + } + const Inner = Schema.Struct({ + __keys: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + a: Schema.optional(Schema.String), + b: Schema.optional(Schema.String), + }).annotate({ [ZodPreprocess]: preprocess }) + + const schema = zod(Inner) + const parsed = schema.parse({ b: "1", a: "2" }) as { + __keys?: string[] + a?: string + b?: string + } + expect(parsed.__keys).toEqual(["b", "a"]) + expect(parsed.a).toBe("2") + expect(parsed.b).toBe("1") + }) + + test("preprocess does not transform already-shaped input", () => { + // When the user passes an object that already has __keys, preprocess + // returns it unchanged because spreading preserves any existing key. 
+ const preprocess = (val: unknown) => { + if (typeof val === "object" && val !== null && !("__keys" in val)) { + return { __keys: Object.keys(val), ...(val as Record) } + } + return val + } + const Inner = Schema.Struct({ + __keys: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + a: Schema.optional(Schema.String), + }).annotate({ [ZodPreprocess]: preprocess }) + + const schema = zod(Inner) + const parsed = schema.parse({ __keys: ["existing"], a: "hi" }) as { + __keys?: string[] + a?: string + } + expect(parsed.__keys).toEqual(["existing"]) + }) + + test("preprocess composes with a union (either object or string)", () => { + // Mirrors permission.ts exactly: input can be either an object (with + // preprocess injecting metadata) or a plain string action. + const Action = Schema.Literals(["ask", "allow", "deny"]) + const Obj = Schema.Struct({ + __keys: Schema.optional(Schema.mutable(Schema.Array(Schema.String))), + read: Schema.optional(Action), + write: Schema.optional(Action), + }) + const preprocess = (val: unknown) => { + if (typeof val === "object" && val !== null && !Array.isArray(val)) { + return { __keys: Object.keys(val), ...(val as Record) } + } + return val + } + const Inner = Schema.Union([Obj, Action]).annotate({ [ZodPreprocess]: preprocess }) + const schema = zod(Inner) + + // String branch — passes through preprocess unchanged + expect(schema.parse("allow")).toBe("allow") + + // Object branch — __keys injected, preserves order + const parsed = schema.parse({ write: "allow", read: "deny" }) as { + __keys?: string[] + read?: string + write?: string + } + expect(parsed.__keys).toEqual(["write", "read"]) + expect(parsed.write).toBe("allow") + expect(parsed.read).toBe("deny") + }) + + test("JSON Schema output comes from the inner schema — preprocess is runtime-only", () => { + const Inner = Schema.Struct({ + a: Schema.optional(Schema.String), + b: Schema.optional(Schema.Number), + }).annotate({ [ZodPreprocess]: (v: unknown) => v }) + const 
shape = json(zod(Inner)) as any + expect(shape.type).toBe("object") + expect(shape.properties.a.type).toBe("string") + expect(shape.properties.b.type).toBe("number") + }) + + test("identifier + description propagate through the preprocess wrapper", () => { + const Inner = Schema.Struct({ + x: Schema.optional(Schema.String), + }).annotate({ + identifier: "WithPreproc", + description: "A schema with preprocess", + [ZodPreprocess]: (v: unknown) => v, + }) + const schema = zod(Inner) + expect(schema.meta()?.ref).toBe("WithPreproc") + expect(schema.meta()?.description).toBe("A schema with preprocess") + }) + + test("preprocess inside a struct field applies only to that field", () => { + const Inner = Schema.String.annotate({ + [ZodPreprocess]: (v: unknown) => (typeof v === "number" ? String(v) : v), + }) + const schema = zod(Schema.Struct({ name: Inner, raw: Schema.Number })) + expect(schema.parse({ name: 42, raw: 7 })).toEqual({ name: "42", raw: 7 }) + }) + }) }) diff --git a/packages/opencode/test/workspace/workspace-restore.test.ts b/packages/opencode/test/workspace/workspace-restore.test.ts index ee9ad059f8..ad6ac2c5fd 100644 --- a/packages/opencode/test/workspace/workspace-restore.test.ts +++ b/packages/opencode/test/workspace/workspace-restore.test.ts @@ -25,14 +25,12 @@ const original = Flag.OPENCODE_EXPERIMENTAL_WORKSPACES beforeEach(() => { Database.close() - // @ts-expect-error test override Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = true }) afterEach(async () => { mock.restore() await Instance.disposeAll() - // @ts-expect-error test override Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = original await resetDatabase() }) @@ -143,9 +141,12 @@ describe("Workspace.sessionRestore", () => { Object.assign( async (input: URL | RequestInfo, init?: BunFetchRequestInit | RequestInit) => { const url = new URL(typeof input === "string" || input instanceof URL ? 
input : input.url) - if (url.pathname !== "/base/sync/replay") { + if (url.pathname === "/base/global/event") { return eventStreamResponse() } + if (url.pathname === "/base/sync/history") { + return Response.json([]) + } const body = JSON.parse(String(init?.body)) posts.push({ path: url.pathname, diff --git a/packages/plugin/package.json b/packages/plugin/package.json index 6f9a0ea1dc..15cd2db6e2 100644 --- a/packages/plugin/package.json +++ b/packages/plugin/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/plugin", - "version": "1.4.7", + "version": "1.4.11", "type": "module", "license": "MIT", "scripts": { @@ -22,8 +22,8 @@ "zod": "catalog:" }, "peerDependencies": { - "@opentui/core": ">=0.1.99", - "@opentui/solid": ">=0.1.99" + "@opentui/core": ">=0.1.100", + "@opentui/solid": ">=0.1.100" }, "peerDependenciesMeta": { "@opentui/core": { @@ -34,8 +34,8 @@ } }, "devDependencies": { - "@opentui/core": "0.1.99", - "@opentui/solid": "0.1.99", + "@opentui/core": "catalog:", + "@opentui/solid": "catalog:", "@tsconfig/node22": "catalog:", "@types/node": "catalog:", "typescript": "catalog:", diff --git a/packages/plugin/src/tool.ts b/packages/plugin/src/tool.ts index b568d03713..3105bf534b 100644 --- a/packages/plugin/src/tool.ts +++ b/packages/plugin/src/tool.ts @@ -27,10 +27,12 @@ type AskInput = { metadata: { [key: string]: any } } +export type ToolResult = string | { output: string; metadata?: { [key: string]: any } } + export function tool(input: { description: string args: Args - execute(args: z.infer>, context: ToolContext): Promise + execute(args: z.infer>, context: ToolContext): Promise }) { return input } diff --git a/packages/plugin/src/tui.ts b/packages/plugin/src/tui.ts index 099cf27580..1c57a71ab3 100644 --- a/packages/plugin/src/tui.ts +++ b/packages/plugin/src/tui.ts @@ -29,7 +29,7 @@ export type TuiRouteCurrent = name: "session" params: { sessionID: string - initialPrompt?: unknown + prompt?: 
unknown } } | { diff --git a/packages/sdk/js/package.json b/packages/sdk/js/package.json index 53a5893143..91d6647449 100644 --- a/packages/sdk/js/package.json +++ b/packages/sdk/js/package.json @@ -1,7 +1,7 @@ { "$schema": "https://json.schemastore.org/package.json", "name": "@opencode-ai/sdk", - "version": "1.4.7", + "version": "1.4.11", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/sdk/js/src/v2/gen/sdk.gen.ts b/packages/sdk/js/src/v2/gen/sdk.gen.ts index d7bf43f506..6248eb8e4d 100644 --- a/packages/sdk/js/src/v2/gen/sdk.gen.ts +++ b/packages/sdk/js/src/v2/gen/sdk.gen.ts @@ -163,6 +163,7 @@ import type { SyncHistoryListResponses, SyncReplayErrors, SyncReplayResponses, + SyncStartResponses, TextPartInput, ToolIdsErrors, ToolIdsResponses, @@ -510,6 +511,430 @@ export class App extends HeyApiClient { } } +export class Adaptor extends HeyApiClient { + /** + * List workspace adaptors + * + * List all available workspace adaptors for the current project. + */ + public list( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/workspace/adaptor", + ...options, + ...params, + }) + } +} + +export class Workspace extends HeyApiClient { + /** + * List workspaces + * + * List all workspaces. + */ + public list( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/workspace", + ...options, + ...params, + }) + } + + /** + * Create workspace + * + * Create a workspace for the current project. 
+ */ + public create( + parameters?: { + directory?: string + workspace?: string + id?: string + type?: string + branch?: string | null + extra?: unknown | null + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + { in: "body", key: "id" }, + { in: "body", key: "type" }, + { in: "body", key: "branch" }, + { in: "body", key: "extra" }, + ], + }, + ], + ) + return (options?.client ?? this.client).post< + ExperimentalWorkspaceCreateResponses, + ExperimentalWorkspaceCreateErrors, + ThrowOnError + >({ + url: "/experimental/workspace", + ...options, + ...params, + headers: { + "Content-Type": "application/json", + ...options?.headers, + ...params.headers, + }, + }) + } + + /** + * Workspace status + * + * Get connection status for workspaces in the current project. + */ + public status( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/workspace/status", + ...options, + ...params, + }) + } + + /** + * Remove workspace + * + * Remove an existing workspace. + */ + public remove( + parameters: { + id: string + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "path", key: "id" }, + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? 
this.client).delete< + ExperimentalWorkspaceRemoveResponses, + ExperimentalWorkspaceRemoveErrors, + ThrowOnError + >({ + url: "/experimental/workspace/{id}", + ...options, + ...params, + }) + } + + /** + * Restore session into workspace + * + * Replay a session's sync events into the target workspace in batches. + */ + public sessionRestore( + parameters: { + id: string + directory?: string + workspace?: string + sessionID?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "path", key: "id" }, + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + { in: "body", key: "sessionID" }, + ], + }, + ], + ) + return (options?.client ?? this.client).post< + ExperimentalWorkspaceSessionRestoreResponses, + ExperimentalWorkspaceSessionRestoreErrors, + ThrowOnError + >({ + url: "/experimental/workspace/{id}/session-restore", + ...options, + ...params, + headers: { + "Content-Type": "application/json", + ...options?.headers, + ...params.headers, + }, + }) + } + + private _adaptor?: Adaptor + get adaptor(): Adaptor { + return (this._adaptor ??= new Adaptor({ client: this.client })) + } +} + +export class Console extends HeyApiClient { + /** + * Get active Console provider metadata + * + * Get the active Console org name and the set of provider IDs managed by that Console org. + */ + public get( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/console", + ...options, + ...params, + }) + } + + /** + * List switchable Console orgs + * + * Get the available Console orgs across logged-in accounts, including the current active org. 
+ */ + public listOrgs( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/console/orgs", + ...options, + ...params, + }) + } + + /** + * Switch active Console org + * + * Persist a new active Console account/org selection for the current local OpenCode state. + */ + public switchOrg( + parameters?: { + directory?: string + workspace?: string + accountID?: string + orgID?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + { in: "body", key: "accountID" }, + { in: "body", key: "orgID" }, + ], + }, + ], + ) + return (options?.client ?? this.client).post({ + url: "/experimental/console/switch", + ...options, + ...params, + headers: { + "Content-Type": "application/json", + ...options?.headers, + ...params.headers, + }, + }) + } +} + +export class Session extends HeyApiClient { + /** + * List sessions + * + * Get a list of all OpenCode sessions across projects, sorted by most recently updated. Archived sessions are excluded by default. + */ + public list( + parameters?: { + directory?: string + workspace?: string + roots?: boolean + start?: number + cursor?: number + search?: string + limit?: number + archived?: boolean + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + { in: "query", key: "roots" }, + { in: "query", key: "start" }, + { in: "query", key: "cursor" }, + { in: "query", key: "search" }, + { in: "query", key: "limit" }, + { in: "query", key: "archived" }, + ], + }, + ], + ) + return (options?.client ?? 
this.client).get({ + url: "/experimental/session", + ...options, + ...params, + }) + } +} + +export class Resource extends HeyApiClient { + /** + * Get MCP resources + * + * Get all available MCP resources from connected servers. Optionally filter by name. + */ + public list( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).get({ + url: "/experimental/resource", + ...options, + ...params, + }) + } +} + +export class Experimental extends HeyApiClient { + private _workspace?: Workspace + get workspace(): Workspace { + return (this._workspace ??= new Workspace({ client: this.client })) + } + + private _console?: Console + get console(): Console { + return (this._console ??= new Console({ client: this.client })) + } + + private _session?: Session + get session(): Session { + return (this._session ??= new Session({ client: this.client })) + } + + private _resource?: Resource + get resource(): Resource { + return (this._resource ??= new Resource({ client: this.client })) + } +} + export class Project extends HeyApiClient { /** * List all projects @@ -972,430 +1397,6 @@ export class Config2 extends HeyApiClient { } } -export class Console extends HeyApiClient { - /** - * Get active Console provider metadata - * - * Get the active Console org name and the set of provider IDs managed by that Console org. - */ - public get( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? 
this.client).get({ - url: "/experimental/console", - ...options, - ...params, - }) - } - - /** - * List switchable Console orgs - * - * Get the available Console orgs across logged-in accounts, including the current active org. - */ - public listOrgs( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? this.client).get({ - url: "/experimental/console/orgs", - ...options, - ...params, - }) - } - - /** - * Switch active Console org - * - * Persist a new active Console account/org selection for the current local OpenCode state. - */ - public switchOrg( - parameters?: { - directory?: string - workspace?: string - accountID?: string - orgID?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - { in: "body", key: "accountID" }, - { in: "body", key: "orgID" }, - ], - }, - ], - ) - return (options?.client ?? this.client).post({ - url: "/experimental/console/switch", - ...options, - ...params, - headers: { - "Content-Type": "application/json", - ...options?.headers, - ...params.headers, - }, - }) - } -} - -export class Adaptor extends HeyApiClient { - /** - * List workspace adaptors - * - * List all available workspace adaptors for the current project. - */ - public list( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? 
this.client).get({ - url: "/experimental/workspace/adaptor", - ...options, - ...params, - }) - } -} - -export class Workspace extends HeyApiClient { - /** - * List workspaces - * - * List all workspaces. - */ - public list( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? this.client).get({ - url: "/experimental/workspace", - ...options, - ...params, - }) - } - - /** - * Create workspace - * - * Create a workspace for the current project. - */ - public create( - parameters?: { - directory?: string - workspace?: string - id?: string - type?: string - branch?: string | null - extra?: unknown | null - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - { in: "body", key: "id" }, - { in: "body", key: "type" }, - { in: "body", key: "branch" }, - { in: "body", key: "extra" }, - ], - }, - ], - ) - return (options?.client ?? this.client).post< - ExperimentalWorkspaceCreateResponses, - ExperimentalWorkspaceCreateErrors, - ThrowOnError - >({ - url: "/experimental/workspace", - ...options, - ...params, - headers: { - "Content-Type": "application/json", - ...options?.headers, - ...params.headers, - }, - }) - } - - /** - * Workspace status - * - * Get connection status for workspaces in the current project. - */ - public status( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? 
this.client).get({ - url: "/experimental/workspace/status", - ...options, - ...params, - }) - } - - /** - * Remove workspace - * - * Remove an existing workspace. - */ - public remove( - parameters: { - id: string - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "path", key: "id" }, - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? this.client).delete< - ExperimentalWorkspaceRemoveResponses, - ExperimentalWorkspaceRemoveErrors, - ThrowOnError - >({ - url: "/experimental/workspace/{id}", - ...options, - ...params, - }) - } - - /** - * Restore session into workspace - * - * Replay a session's sync events into the target workspace in batches. - */ - public sessionRestore( - parameters: { - id: string - directory?: string - workspace?: string - sessionID?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "path", key: "id" }, - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - { in: "body", key: "sessionID" }, - ], - }, - ], - ) - return (options?.client ?? this.client).post< - ExperimentalWorkspaceSessionRestoreResponses, - ExperimentalWorkspaceSessionRestoreErrors, - ThrowOnError - >({ - url: "/experimental/workspace/{id}/session-restore", - ...options, - ...params, - headers: { - "Content-Type": "application/json", - ...options?.headers, - ...params.headers, - }, - }) - } - - private _adaptor?: Adaptor - get adaptor(): Adaptor { - return (this._adaptor ??= new Adaptor({ client: this.client })) - } -} - -export class Session extends HeyApiClient { - /** - * List sessions - * - * Get a list of all OpenCode sessions across projects, sorted by most recently updated. Archived sessions are excluded by default. 
- */ - public list( - parameters?: { - directory?: string - workspace?: string - roots?: boolean - start?: number - cursor?: number - search?: string - limit?: number - archived?: boolean - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - { in: "query", key: "roots" }, - { in: "query", key: "start" }, - { in: "query", key: "cursor" }, - { in: "query", key: "search" }, - { in: "query", key: "limit" }, - { in: "query", key: "archived" }, - ], - }, - ], - ) - return (options?.client ?? this.client).get({ - url: "/experimental/session", - ...options, - ...params, - }) - } -} - -export class Resource extends HeyApiClient { - /** - * Get MCP resources - * - * Get all available MCP resources from connected servers. Optionally filter by name. - */ - public list( - parameters?: { - directory?: string - workspace?: string - }, - options?: Options, - ) { - const params = buildClientParams( - [parameters], - [ - { - args: [ - { in: "query", key: "directory" }, - { in: "query", key: "workspace" }, - ], - }, - ], - ) - return (options?.client ?? 
this.client).get({ - url: "/experimental/resource", - ...options, - ...params, - }) - } -} - -export class Experimental extends HeyApiClient { - private _console?: Console - get console(): Console { - return (this._console ??= new Console({ client: this.client })) - } - - private _workspace?: Workspace - get workspace(): Workspace { - return (this._workspace ??= new Workspace({ client: this.client })) - } - - private _session?: Session - get session(): Session { - return (this._session ??= new Session({ client: this.client })) - } - - private _resource?: Resource - get resource(): Resource { - return (this._resource ??= new Resource({ client: this.client })) - } -} - export class Tool extends HeyApiClient { /** * List tool IDs @@ -3038,7 +3039,7 @@ export class History extends HeyApiClient { }, ], ) - return (options?.client ?? this.client).get({ + return (options?.client ?? this.client).post({ url: "/sync/history", ...options, ...params, @@ -3052,6 +3053,36 @@ export class History extends HeyApiClient { } export class Sync extends HeyApiClient { + /** + * Start workspace sync + * + * Start sync loops for workspaces in the current project that have active sessions. + */ + public start( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? 
this.client).post({ + url: "/sync/start", + ...options, + ...params, + }) + } + /** * Replay sync events * @@ -4314,6 +4345,11 @@ export class OpencodeClient extends HeyApiClient { return (this._app ??= new App({ client: this.client })) } + private _experimental?: Experimental + get experimental(): Experimental { + return (this._experimental ??= new Experimental({ client: this.client })) + } + private _project?: Project get project(): Project { return (this._project ??= new Project({ client: this.client })) @@ -4329,11 +4365,6 @@ export class OpencodeClient extends HeyApiClient { return (this._config ??= new Config2({ client: this.client })) } - private _experimental?: Experimental - get experimental(): Experimental { - return (this._experimental ??= new Experimental({ client: this.client })) - } - private _tool?: Tool get tool(): Tool { return (this._tool ??= new Tool({ client: this.client })) diff --git a/packages/sdk/js/src/v2/gen/types.gen.ts b/packages/sdk/js/src/v2/gen/types.gen.ts index 795c2f264a..72a383a608 100644 --- a/packages/sdk/js/src/v2/gen/types.gen.ts +++ b/packages/sdk/js/src/v2/gen/types.gen.ts @@ -535,7 +535,6 @@ export type EventWorkspaceStatus = { properties: { workspaceID: string status: "connected" | "connecting" | "disconnected" | "error" - error?: string } } @@ -1589,7 +1588,7 @@ export type Config = { } } formatter?: - | false + | boolean | { [key: string]: { disabled?: boolean @@ -1601,7 +1600,7 @@ export type Config = { } } lsp?: - | false + | boolean | { [key: string]: | { @@ -1706,6 +1705,16 @@ export type WellKnownAuth = { export type Auth = OAuth | ApiAuth | WellKnownAuth +export type Workspace = { + id: string + type: string + name: string + branch: string | null + directory: string | null + extra: unknown | null + projectID: string +} + export type NotFoundError = { name: "NotFoundError" data: { @@ -1798,6 +1807,12 @@ export type Provider = { } } +export type ConsoleState = { + consoleManagedProviders: Array + activeOrgName?: 
string + switchableOrgCount: number +} + export type ToolIds = Array export type ToolListItem = { @@ -1808,16 +1823,6 @@ export type ToolListItem = { export type ToolList = Array -export type Workspace = { - id: string - type: string - name: string - branch: string | null - directory: string | null - extra: unknown | null - projectID: string -} - export type Worktree = { name: string branch: string @@ -2394,6 +2399,176 @@ export type AppLogResponses = { export type AppLogResponse = AppLogResponses[keyof AppLogResponses] +export type ExperimentalWorkspaceAdaptorListData = { + body?: never + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace/adaptor" +} + +export type ExperimentalWorkspaceAdaptorListResponses = { + /** + * Workspace adaptors + */ + 200: Array<{ + type: string + name: string + description: string + }> +} + +export type ExperimentalWorkspaceAdaptorListResponse = + ExperimentalWorkspaceAdaptorListResponses[keyof ExperimentalWorkspaceAdaptorListResponses] + +export type ExperimentalWorkspaceListData = { + body?: never + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace" +} + +export type ExperimentalWorkspaceListResponses = { + /** + * Workspaces + */ + 200: Array +} + +export type ExperimentalWorkspaceListResponse = + ExperimentalWorkspaceListResponses[keyof ExperimentalWorkspaceListResponses] + +export type ExperimentalWorkspaceCreateData = { + body?: { + id?: string + type: string + branch: string | null + extra: unknown | null + } + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace" +} + +export type ExperimentalWorkspaceCreateErrors = { + /** + * Bad request + */ + 400: BadRequestError +} + +export type ExperimentalWorkspaceCreateError = + ExperimentalWorkspaceCreateErrors[keyof ExperimentalWorkspaceCreateErrors] + +export type ExperimentalWorkspaceCreateResponses = { + /** + * Workspace 
created + */ + 200: Workspace +} + +export type ExperimentalWorkspaceCreateResponse = + ExperimentalWorkspaceCreateResponses[keyof ExperimentalWorkspaceCreateResponses] + +export type ExperimentalWorkspaceStatusData = { + body?: never + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace/status" +} + +export type ExperimentalWorkspaceStatusResponses = { + /** + * Workspace status + */ + 200: Array<{ + workspaceID: string + status: "connected" | "connecting" | "disconnected" | "error" + }> +} + +export type ExperimentalWorkspaceStatusResponse = + ExperimentalWorkspaceStatusResponses[keyof ExperimentalWorkspaceStatusResponses] + +export type ExperimentalWorkspaceRemoveData = { + body?: never + path: { + id: string + } + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace/{id}" +} + +export type ExperimentalWorkspaceRemoveErrors = { + /** + * Bad request + */ + 400: BadRequestError +} + +export type ExperimentalWorkspaceRemoveError = + ExperimentalWorkspaceRemoveErrors[keyof ExperimentalWorkspaceRemoveErrors] + +export type ExperimentalWorkspaceRemoveResponses = { + /** + * Workspace removed + */ + 200: Workspace +} + +export type ExperimentalWorkspaceRemoveResponse = + ExperimentalWorkspaceRemoveResponses[keyof ExperimentalWorkspaceRemoveResponses] + +export type ExperimentalWorkspaceSessionRestoreData = { + body?: { + sessionID: string + } + path: { + id: string + } + query?: { + directory?: string + workspace?: string + } + url: "/experimental/workspace/{id}/session-restore" +} + +export type ExperimentalWorkspaceSessionRestoreErrors = { + /** + * Bad request + */ + 400: BadRequestError +} + +export type ExperimentalWorkspaceSessionRestoreError = + ExperimentalWorkspaceSessionRestoreErrors[keyof ExperimentalWorkspaceSessionRestoreErrors] + +export type ExperimentalWorkspaceSessionRestoreResponses = { + /** + * Session replay started + */ + 200: { + total: number + } +} + +export 
type ExperimentalWorkspaceSessionRestoreResponse = + ExperimentalWorkspaceSessionRestoreResponses[keyof ExperimentalWorkspaceSessionRestoreResponses] + export type ProjectListData = { body?: never path?: never @@ -2764,11 +2939,7 @@ export type ExperimentalConsoleGetResponses = { /** * Active Console provider metadata */ - 200: { - consoleManagedProviders: Array - activeOrgName?: string - switchableOrgCount: number - } + 200: ConsoleState } export type ExperimentalConsoleGetResponse = ExperimentalConsoleGetResponses[keyof ExperimentalConsoleGetResponses] @@ -2883,177 +3054,6 @@ export type ToolListResponses = { export type ToolListResponse = ToolListResponses[keyof ToolListResponses] -export type ExperimentalWorkspaceAdaptorListData = { - body?: never - path?: never - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace/adaptor" -} - -export type ExperimentalWorkspaceAdaptorListResponses = { - /** - * Workspace adaptors - */ - 200: Array<{ - type: string - name: string - description: string - }> -} - -export type ExperimentalWorkspaceAdaptorListResponse = - ExperimentalWorkspaceAdaptorListResponses[keyof ExperimentalWorkspaceAdaptorListResponses] - -export type ExperimentalWorkspaceListData = { - body?: never - path?: never - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace" -} - -export type ExperimentalWorkspaceListResponses = { - /** - * Workspaces - */ - 200: Array -} - -export type ExperimentalWorkspaceListResponse = - ExperimentalWorkspaceListResponses[keyof ExperimentalWorkspaceListResponses] - -export type ExperimentalWorkspaceCreateData = { - body?: { - id?: string - type: string - branch: string | null - extra: unknown | null - } - path?: never - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace" -} - -export type ExperimentalWorkspaceCreateErrors = { - /** - * Bad request - */ - 400: BadRequestError -} - -export type 
ExperimentalWorkspaceCreateError = - ExperimentalWorkspaceCreateErrors[keyof ExperimentalWorkspaceCreateErrors] - -export type ExperimentalWorkspaceCreateResponses = { - /** - * Workspace created - */ - 200: Workspace -} - -export type ExperimentalWorkspaceCreateResponse = - ExperimentalWorkspaceCreateResponses[keyof ExperimentalWorkspaceCreateResponses] - -export type ExperimentalWorkspaceStatusData = { - body?: never - path?: never - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace/status" -} - -export type ExperimentalWorkspaceStatusResponses = { - /** - * Workspace status - */ - 200: Array<{ - workspaceID: string - status: "connected" | "connecting" | "disconnected" | "error" - error?: string - }> -} - -export type ExperimentalWorkspaceStatusResponse = - ExperimentalWorkspaceStatusResponses[keyof ExperimentalWorkspaceStatusResponses] - -export type ExperimentalWorkspaceRemoveData = { - body?: never - path: { - id: string - } - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace/{id}" -} - -export type ExperimentalWorkspaceRemoveErrors = { - /** - * Bad request - */ - 400: BadRequestError -} - -export type ExperimentalWorkspaceRemoveError = - ExperimentalWorkspaceRemoveErrors[keyof ExperimentalWorkspaceRemoveErrors] - -export type ExperimentalWorkspaceRemoveResponses = { - /** - * Workspace removed - */ - 200: Workspace -} - -export type ExperimentalWorkspaceRemoveResponse = - ExperimentalWorkspaceRemoveResponses[keyof ExperimentalWorkspaceRemoveResponses] - -export type ExperimentalWorkspaceSessionRestoreData = { - body?: { - sessionID: string - } - path: { - id: string - } - query?: { - directory?: string - workspace?: string - } - url: "/experimental/workspace/{id}/session-restore" -} - -export type ExperimentalWorkspaceSessionRestoreErrors = { - /** - * Bad request - */ - 400: BadRequestError -} - -export type ExperimentalWorkspaceSessionRestoreError = - 
ExperimentalWorkspaceSessionRestoreErrors[keyof ExperimentalWorkspaceSessionRestoreErrors] - -export type ExperimentalWorkspaceSessionRestoreResponses = { - /** - * Session replay started - */ - 200: { - total: number - } -} - -export type ExperimentalWorkspaceSessionRestoreResponse = - ExperimentalWorkspaceSessionRestoreResponses[keyof ExperimentalWorkspaceSessionRestoreResponses] - export type WorktreeRemoveData = { body?: WorktreeRemoveInput path?: never @@ -4504,6 +4504,25 @@ export type ProviderOauthCallbackResponses = { export type ProviderOauthCallbackResponse = ProviderOauthCallbackResponses[keyof ProviderOauthCallbackResponses] +export type SyncStartData = { + body?: never + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/sync/start" +} + +export type SyncStartResponses = { + /** + * Workspace sync started + */ + 200: boolean +} + +export type SyncStartResponse = SyncStartResponses[keyof SyncStartResponses] + export type SyncReplayData = { body?: { directory: string diff --git a/packages/sdk/openapi.json b/packages/sdk/openapi.json index d11f9ad73f..b97d596b93 100644 --- a/packages/sdk/openapi.json +++ b/packages/sdk/openapi.json @@ -415,6 +415,391 @@ ] } }, + "/experimental/workspace/adaptor": { + "get": { + "operationId": "experimental.workspace.adaptor.list", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "List workspace adaptors", + "description": "List all available workspace adaptors for the current project.", + "responses": { + "200": { + "description": "Workspace adaptors", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + } + }, + "required": ["type", "name", "description"] + } 
+ } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.adaptor.list({\n ...\n})" + } + ] + } + }, + "/experimental/workspace": { + "post": { + "operationId": "experimental.workspace.create", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "Create workspace", + "description": "Create a workspace for the current project.", + "responses": { + "200": { + "description": "Workspace created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Workspace" + } + } + } + }, + "400": { + "description": "Bad request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BadRequestError" + } + } + } + } + }, + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "id": { + "type": "string", + "pattern": "^wrk.*" + }, + "type": { + "type": "string" + }, + "branch": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "extra": { + "anyOf": [ + {}, + { + "type": "null" + } + ] + } + }, + "required": ["type", "branch", "extra"] + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.create({\n ...\n})" + } + ] + }, + "get": { + "operationId": "experimental.workspace.list", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "List workspaces", + "description": "List all workspaces.", + "responses": { + "200": { + "description": 
"Workspaces", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Workspace" + } + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.list({\n ...\n})" + } + ] + } + }, + "/experimental/workspace/status": { + "get": { + "operationId": "experimental.workspace.status", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "Workspace status", + "description": "Get connection status for workspaces in the current project.", + "responses": { + "200": { + "description": "Workspace status", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "object", + "properties": { + "workspaceID": { + "type": "string", + "pattern": "^wrk.*" + }, + "status": { + "type": "string", + "enum": ["connected", "connecting", "disconnected", "error"] + } + }, + "required": ["workspaceID", "status"] + } + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.status({\n ...\n})" + } + ] + } + }, + "/experimental/workspace/{id}": { + "delete": { + "operationId": "experimental.workspace.remove", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + }, + { + "in": "path", + "name": "id", + "schema": { + "type": "string", + "pattern": "^wrk.*" + }, + "required": true + } + ], + "summary": "Remove workspace", + "description": "Remove an existing workspace.", + "responses": { + "200": { + "description": 
"Workspace removed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Workspace" + } + } + } + }, + "400": { + "description": "Bad request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BadRequestError" + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.remove({\n ...\n})" + } + ] + } + }, + "/experimental/workspace/{id}/session-restore": { + "post": { + "operationId": "experimental.workspace.sessionRestore", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + }, + { + "in": "path", + "name": "id", + "schema": { + "type": "string", + "pattern": "^wrk.*" + }, + "required": true + } + ], + "summary": "Restore session into workspace", + "description": "Replay a session's sync events into the target workspace in batches.", + "responses": { + "200": { + "description": "Session replay started", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "minimum": 0, + "maximum": 9007199254740991 + } + }, + "required": ["total"] + } + } + } + }, + "400": { + "description": "Bad request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BadRequestError" + } + } + } + } + }, + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "sessionID": { + "type": "string", + "pattern": "^ses.*" + } + }, + "required": ["sessionID"] + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.sessionRestore({\n ...\n})" + } 
+ ] + } + }, "/project": { "get": { "operationId": "project.list", @@ -1222,24 +1607,7 @@ "content": { "application/json": { "schema": { - "type": "object", - "properties": { - "consoleManagedProviders": { - "type": "array", - "items": { - "type": "string" - } - }, - "activeOrgName": { - "type": "string" - }, - "switchableOrgCount": { - "type": "integer", - "minimum": 0, - "maximum": 9007199254740991 - } - }, - "required": ["consoleManagedProviders", "switchableOrgCount"] + "$ref": "#/components/schemas/ConsoleState" } } } @@ -1501,394 +1869,6 @@ ] } }, - "/experimental/workspace/adaptor": { - "get": { - "operationId": "experimental.workspace.adaptor.list", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - } - ], - "summary": "List workspace adaptors", - "description": "List all available workspace adaptors for the current project.", - "responses": { - "200": { - "description": "Workspace adaptors", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "description": { - "type": "string" - } - }, - "required": ["type", "name", "description"] - } - } - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.adaptor.list({\n ...\n})" - } - ] - } - }, - "/experimental/workspace": { - "post": { - "operationId": "experimental.workspace.create", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - } - ], - "summary": "Create workspace", - "description": "Create a workspace for the current project.", - "responses": { - 
"200": { - "description": "Workspace created", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Workspace" - } - } - } - }, - "400": { - "description": "Bad request", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BadRequestError" - } - } - } - } - }, - "requestBody": { - "content": { - "application/json": { - "schema": { - "type": "object", - "properties": { - "id": { - "type": "string", - "pattern": "^wrk.*" - }, - "type": { - "type": "string" - }, - "branch": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "extra": { - "anyOf": [ - {}, - { - "type": "null" - } - ] - } - }, - "required": ["type", "branch", "extra"] - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.create({\n ...\n})" - } - ] - }, - "get": { - "operationId": "experimental.workspace.list", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - } - ], - "summary": "List workspaces", - "description": "List all workspaces.", - "responses": { - "200": { - "description": "Workspaces", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Workspace" - } - } - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.list({\n ...\n})" - } - ] - } - }, - "/experimental/workspace/status": { - "get": { - "operationId": "experimental.workspace.status", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": 
"string" - } - } - ], - "summary": "Workspace status", - "description": "Get connection status for workspaces in the current project.", - "responses": { - "200": { - "description": "Workspace status", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "type": "object", - "properties": { - "workspaceID": { - "type": "string", - "pattern": "^wrk.*" - }, - "status": { - "type": "string", - "enum": ["connected", "connecting", "disconnected", "error"] - }, - "error": { - "type": "string" - } - }, - "required": ["workspaceID", "status"] - } - } - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.status({\n ...\n})" - } - ] - } - }, - "/experimental/workspace/{id}": { - "delete": { - "operationId": "experimental.workspace.remove", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - }, - { - "in": "path", - "name": "id", - "schema": { - "type": "string", - "pattern": "^wrk.*" - }, - "required": true - } - ], - "summary": "Remove workspace", - "description": "Remove an existing workspace.", - "responses": { - "200": { - "description": "Workspace removed", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Workspace" - } - } - } - }, - "400": { - "description": "Bad request", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BadRequestError" - } - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.remove({\n ...\n})" - } - ] - } - }, - "/experimental/workspace/{id}/session-restore": { - "post": { - "operationId": 
"experimental.workspace.sessionRestore", - "parameters": [ - { - "in": "query", - "name": "directory", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "workspace", - "schema": { - "type": "string" - } - }, - { - "in": "path", - "name": "id", - "schema": { - "type": "string", - "pattern": "^wrk.*" - }, - "required": true - } - ], - "summary": "Restore session into workspace", - "description": "Replay a session's sync events into the target workspace in batches.", - "responses": { - "200": { - "description": "Session replay started", - "content": { - "application/json": { - "schema": { - "type": "object", - "properties": { - "total": { - "type": "integer", - "minimum": 0, - "maximum": 9007199254740991 - } - }, - "required": ["total"] - } - } - } - }, - "400": { - "description": "Bad request", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/BadRequestError" - } - } - } - } - }, - "requestBody": { - "content": { - "application/json": { - "schema": { - "type": "object", - "properties": { - "sessionID": { - "type": "string", - "pattern": "^ses.*" - } - }, - "required": ["sessionID"] - } - } - } - }, - "x-codeSamples": [ - { - "lang": "js", - "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.experimental.workspace.sessionRestore({\n ...\n})" - } - ] - } - }, "/experimental/worktree": { "post": { "operationId": "worktree.create", @@ -5227,6 +5207,47 @@ ] } }, + "/sync/start": { + "post": { + "operationId": "sync.start", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "Start workspace sync", + "description": "Start sync loops for workspaces in the current project that have active sessions.", + "responses": { + "200": { + "description": "Workspace sync started", + "content": { + 
"application/json": { + "schema": { + "type": "boolean" + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.sync.start({\n ...\n})" + } + ] + } + }, "/sync/replay": { "post": { "operationId": "sync.replay", @@ -5331,7 +5352,7 @@ } }, "/sync/history": { - "get": { + "post": { "operationId": "sync.history.list", "parameters": [ { @@ -8852,9 +8873,6 @@ "status": { "type": "string", "enum": ["connected", "connecting", "disconnected", "error"] - }, - "error": { - "type": "string" } }, "required": ["workspaceID", "status"] @@ -10931,8 +10949,7 @@ "type": "string" } } - }, - "additionalProperties": false + } }, "PermissionActionConfig": { "type": "string", @@ -11162,13 +11179,11 @@ "description": "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", "anyOf": [ { - "description": "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", "type": "integer", "exclusiveMinimum": 0, "maximum": 9007199254740991 }, { - "description": "Disable timeout for this provider entirely.", "type": "boolean", "const": false } @@ -11229,8 +11244,7 @@ "enum": ["reasoning_content", "reasoning_details"] } }, - "required": ["field"], - "additionalProperties": false + "required": ["field"] } ] }, @@ -11359,8 +11373,7 @@ } } } - }, - "additionalProperties": false + } }, "McpLocalConfig": { "type": "object", @@ -11393,13 +11406,10 @@ }, "timeout": { "description": "Timeout in ms for MCP server requests. 
Defaults to 5000 (5 seconds) if not specified.", - "type": "integer", - "exclusiveMinimum": 0, - "maximum": 9007199254740991 + "type": "number" } }, - "required": ["type", "command"], - "additionalProperties": false + "required": ["type", "command"] }, "McpOAuthConfig": { "type": "object", @@ -11420,8 +11430,7 @@ "description": "OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback).", "type": "string" } - }, - "additionalProperties": false + } }, "McpRemoteConfig": { "type": "object", @@ -11463,13 +11472,10 @@ }, "timeout": { "description": "Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified.", - "type": "integer", - "exclusiveMinimum": 0, - "maximum": 9007199254740991 + "type": "number" } }, - "required": ["type", "url"], - "additionalProperties": false + "required": ["type", "url"] }, "LayoutConfig": { "description": "@deprecated Always uses stretch layout.", @@ -11717,8 +11723,7 @@ "formatter": { "anyOf": [ { - "type": "boolean", - "const": false + "type": "boolean" }, { "type": "object", @@ -11760,8 +11765,7 @@ "lsp": { "anyOf": [ { - "type": "boolean", - "const": false + "type": "boolean" }, { "type": "object", @@ -12005,6 +12009,53 @@ } ] }, + "Workspace": { + "type": "object", + "properties": { + "id": { + "type": "string", + "pattern": "^wrk.*" + }, + "type": { + "type": "string" + }, + "name": { + "type": "string" + }, + "branch": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "directory": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "extra": { + "anyOf": [ + {}, + { + "type": "null" + } + ] + }, + "projectID": { + "type": "string" + } + }, + "required": ["id", "type", "name", "branch", "directory", "extra", "projectID"] + }, "NotFoundError": { "type": "object", "properties": { @@ -12286,6 +12337,24 @@ }, "required": ["id", "name", "source", "env", "options", "models"] }, + "ConsoleState": { + "type": "object", + "properties": { + 
"consoleManagedProviders": { + "type": "array", + "items": { + "type": "string" + } + }, + "activeOrgName": { + "type": "string" + }, + "switchableOrgCount": { + "type": "number" + } + }, + "required": ["consoleManagedProviders", "switchableOrgCount"] + }, "ToolIDs": { "type": "array", "items": { @@ -12311,53 +12380,6 @@ "$ref": "#/components/schemas/ToolListItem" } }, - "Workspace": { - "type": "object", - "properties": { - "id": { - "type": "string", - "pattern": "^wrk.*" - }, - "type": { - "type": "string" - }, - "name": { - "type": "string" - }, - "branch": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "directory": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - }, - "extra": { - "anyOf": [ - {}, - { - "type": "null" - } - ] - }, - "projectID": { - "type": "string" - } - }, - "required": ["id", "type", "name", "branch", "directory", "extra", "projectID"] - }, "Worktree": { "type": "object", "properties": { diff --git a/packages/shared/package.json b/packages/shared/package.json index 9dec6bdb6c..a8cd62886b 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "$schema": "https://json.schemastore.org/package.json", - "version": "1.4.7", + "version": "1.4.11", "name": "@opencode-ai/shared", "type": "module", "license": "MIT", diff --git a/packages/shared/src/npm.ts b/packages/shared/src/npm.ts deleted file mode 100644 index 865e827b31..0000000000 --- a/packages/shared/src/npm.ts +++ /dev/null @@ -1,249 +0,0 @@ -import path from "path" -import semver from "semver" -import { Effect, Schema, Context, Layer, Option, FileSystem } from "effect" -import { NodeFileSystem } from "@effect/platform-node" -import { AppFileSystem } from "./filesystem" -import { Global } from "./global" -import { EffectFlock } from "./util/effect-flock" - -export namespace Npm { - export class InstallFailedError extends Schema.TaggedErrorClass()("NpmInstallFailedError", { - add: 
Schema.Array(Schema.String).pipe(Schema.optional), - dir: Schema.String, - cause: Schema.optional(Schema.Defect), - }) {} - - export interface EntryPoint { - readonly directory: string - readonly entrypoint: Option.Option - } - - export interface Interface { - readonly add: (pkg: string) => Effect.Effect - readonly install: ( - dir: string, - input?: { add: string[] }, - ) => Effect.Effect - readonly outdated: (pkg: string, cachedVersion: string) => Effect.Effect - readonly which: (pkg: string) => Effect.Effect> - } - - export class Service extends Context.Service()("@opencode/Npm") {} - - const illegal = process.platform === "win32" ? new Set(["<", ">", ":", '"', "|", "?", "*"]) : undefined - - export function sanitize(pkg: string) { - if (!illegal) return pkg - return Array.from(pkg, (char) => (illegal.has(char) || char.charCodeAt(0) < 32 ? "_" : char)).join("") - } - - const resolveEntryPoint = (name: string, dir: string): EntryPoint => { - let entrypoint: Option.Option - try { - const resolved = typeof Bun !== "undefined" ? 
import.meta.resolve(name, dir) : import.meta.resolve(dir) - entrypoint = Option.some(resolved) - } catch { - entrypoint = Option.none() - } - return { - directory: dir, - entrypoint, - } - } - - interface ArboristNode { - name: string - path: string - } - - interface ArboristTree { - edgesOut: Map - } - - const reify = (input: { dir: string; add?: string[] }) => - Effect.gen(function* () { - const { Arborist } = yield* Effect.promise(() => import("@npmcli/arborist")) - const arborist = new Arborist({ - path: input.dir, - binLinks: true, - progress: false, - savePrefix: "", - ignoreScripts: true, - }) - return yield* Effect.tryPromise({ - try: () => - arborist.reify({ - add: input?.add || [], - save: true, - saveType: "prod", - }), - catch: (cause) => - new InstallFailedError({ - cause, - add: input?.add, - dir: input.dir, - }), - }) as Effect.Effect - }).pipe( - Effect.withSpan("Npm.reify", { - attributes: input, - }), - ) - - export const layer = Layer.effect( - Service, - Effect.gen(function* () { - const afs = yield* AppFileSystem.Service - const global = yield* Global.Service - const fs = yield* FileSystem.FileSystem - const flock = yield* EffectFlock.Service - const directory = (pkg: string) => path.join(global.cache, "packages", sanitize(pkg)) - - const outdated = Effect.fn("Npm.outdated")(function* (pkg: string, cachedVersion: string) { - const response = yield* Effect.tryPromise({ - try: () => fetch(`https://registry.npmjs.org/${pkg}`), - catch: () => undefined, - }).pipe(Effect.orElseSucceed(() => undefined)) - - if (!response || !response.ok) { - return false - } - - const data = yield* Effect.tryPromise({ - try: () => response.json() as Promise<{ "dist-tags"?: { latest?: string } }>, - catch: () => undefined, - }).pipe(Effect.orElseSucceed(() => undefined)) - - const latestVersion = data?.["dist-tags"]?.latest - if (!latestVersion) { - return false - } - - const range = /[\s^~*xX<>|=]/.test(cachedVersion) - if (range) return 
!semver.satisfies(latestVersion, cachedVersion) - - return semver.lt(cachedVersion, latestVersion) - }) - - const add = Effect.fn("Npm.add")(function* (pkg: string) { - const dir = directory(pkg) - yield* flock.acquire(`npm-install:${dir}`) - - const tree = yield* reify({ dir, add: [pkg] }) - const first = tree.edgesOut.values().next().value?.to - if (!first) return yield* new InstallFailedError({ add: [pkg], dir }) - return resolveEntryPoint(first.name, first.path) - }, Effect.scoped) - - const install = Effect.fn("Npm.install")(function* (dir: string, input?: { add: string[] }) { - const canWrite = yield* afs.access(dir, { writable: true }).pipe( - Effect.as(true), - Effect.orElseSucceed(() => false), - ) - if (!canWrite) return - - yield* flock.acquire(`npm-install:${dir}`) - - yield* Effect.gen(function* () { - const nodeModulesExists = yield* afs.existsSafe(path.join(dir, "node_modules")) - if (!nodeModulesExists) { - yield* reify({ add: input?.add, dir }) - return - } - }).pipe(Effect.withSpan("Npm.checkNodeModules")) - - yield* Effect.gen(function* () { - const pkg = yield* afs.readJson(path.join(dir, "package.json")).pipe(Effect.orElseSucceed(() => ({}))) - const lock = yield* afs.readJson(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => ({}))) - - const pkgAny = pkg as any - const lockAny = lock as any - const declared = new Set([ - ...Object.keys(pkgAny?.dependencies || {}), - ...Object.keys(pkgAny?.devDependencies || {}), - ...Object.keys(pkgAny?.peerDependencies || {}), - ...Object.keys(pkgAny?.optionalDependencies || {}), - ...(input?.add || []), - ]) - - const root = lockAny?.packages?.[""] || {} - const locked = new Set([ - ...Object.keys(root?.dependencies || {}), - ...Object.keys(root?.devDependencies || {}), - ...Object.keys(root?.peerDependencies || {}), - ...Object.keys(root?.optionalDependencies || {}), - ]) - - for (const name of declared) { - if (!locked.has(name)) { - yield* reify({ dir, add: input?.add }) - return - } - 
} - }).pipe(Effect.withSpan("Npm.checkDirty")) - - return - }, Effect.scoped) - - const which = Effect.fn("Npm.which")(function* (pkg: string) { - const dir = directory(pkg) - const binDir = path.join(dir, "node_modules", ".bin") - - const pick = Effect.fnUntraced(function* () { - const files = yield* fs.readDirectory(binDir).pipe(Effect.catch(() => Effect.succeed([] as string[]))) - - if (files.length === 0) return Option.none() - if (files.length === 1) return Option.some(files[0]) - - const pkgJson = yield* afs.readJson(path.join(dir, "node_modules", pkg, "package.json")).pipe(Effect.option) - - if (Option.isSome(pkgJson)) { - const parsed = pkgJson.value as { bin?: string | Record } - if (parsed?.bin) { - const unscoped = pkg.startsWith("@") ? pkg.split("/")[1] : pkg - const bin = parsed.bin - if (typeof bin === "string") return Option.some(unscoped) - const keys = Object.keys(bin) - if (keys.length === 1) return Option.some(keys[0]) - return bin[unscoped] ? Option.some(unscoped) : Option.some(keys[0]) - } - } - - return Option.some(files[0]) - }) - - return yield* Effect.gen(function* () { - const bin = yield* pick() - if (Option.isSome(bin)) { - return Option.some(path.join(binDir, bin.value)) - } - - yield* fs.remove(path.join(dir, "package-lock.json")).pipe(Effect.orElseSucceed(() => {})) - - yield* add(pkg) - - const resolved = yield* pick() - if (Option.isNone(resolved)) return Option.none() - return Option.some(path.join(binDir, resolved.value)) - }).pipe( - Effect.scoped, - Effect.orElseSucceed(() => Option.none()), - ) - }) - - return Service.of({ - add, - install, - outdated, - which, - }) - }), - ) - - export const defaultLayer = layer.pipe( - Layer.provide(EffectFlock.layer), - Layer.provide(AppFileSystem.layer), - Layer.provide(Global.layer), - Layer.provide(NodeFileSystem.layer), - ) -} diff --git a/packages/shared/src/util/effect-flock.ts b/packages/shared/src/util/effect-flock.ts index 3e00afc9e4..16bcf091b4 100644 --- 
a/packages/shared/src/util/effect-flock.ts +++ b/packages/shared/src/util/effect-flock.ts @@ -165,55 +165,60 @@ export namespace EffectFlock { type Handle = { token: string; metaPath: string; heartbeatPath: string; lockDir: string } - const tryAcquireLockDir = Effect.fn("EffectFlock.tryAcquire")(function* (lockDir: string) { - const token = randomUUID() - const metaPath = path.join(lockDir, "meta.json") - const heartbeatPath = path.join(lockDir, "heartbeat") + const tryAcquireLockDir = (lockDir: string, key: string) => + Effect.gen(function* () { + const token = randomUUID() + const metaPath = path.join(lockDir, "meta.json") + const heartbeatPath = path.join(lockDir, "heartbeat") - // Atomic mkdir — the POSIX lock primitive - const created = yield* atomicMkdir(lockDir) + // Atomic mkdir — the POSIX lock primitive + const created = yield* atomicMkdir(lockDir) - if (!created) { - if (!(yield* isStale(lockDir, heartbeatPath, metaPath))) return yield* new NotAcquired() + if (!created) { + if (!(yield* isStale(lockDir, heartbeatPath, metaPath))) return yield* new NotAcquired() - // Stale — race for breaker ownership - const breakerPath = lockDir + ".breaker" + // Stale — race for breaker ownership + const breakerPath = lockDir + ".breaker" - const claimed = yield* fs.makeDirectory(breakerPath, { mode: 0o700 }).pipe( - Effect.as(true), - Effect.catchIf( - (e) => e.reason._tag === "AlreadyExists", - () => cleanStaleBreaker(breakerPath), - ), - Effect.catchIf(isPathGone, () => Effect.succeed(false)), - Effect.orDie, - ) + const claimed = yield* fs.makeDirectory(breakerPath, { mode: 0o700 }).pipe( + Effect.as(true), + Effect.catchIf( + (e) => e.reason._tag === "AlreadyExists", + () => cleanStaleBreaker(breakerPath), + ), + Effect.catchIf(isPathGone, () => Effect.succeed(false)), + Effect.orDie, + ) - if (!claimed) return yield* new NotAcquired() + if (!claimed) return yield* new NotAcquired() - // We own the breaker — double-check staleness, nuke, recreate - const recreated 
= yield* Effect.gen(function* () { - if (!(yield* isStale(lockDir, heartbeatPath, metaPath))) return false - yield* forceRemove(lockDir) - return yield* atomicMkdir(lockDir) - }).pipe(Effect.ensuring(forceRemove(breakerPath))) + // We own the breaker — double-check staleness, nuke, recreate + const recreated = yield* Effect.gen(function* () { + if (!(yield* isStale(lockDir, heartbeatPath, metaPath))) return false + yield* forceRemove(lockDir) + return yield* atomicMkdir(lockDir) + }).pipe(Effect.ensuring(forceRemove(breakerPath))) - if (!recreated) return yield* new NotAcquired() - } + if (!recreated) return yield* new NotAcquired() + } - // We own the lock dir — write heartbeat + meta with exclusive create - yield* exclusiveWrite(heartbeatPath, "", lockDir, "heartbeat already existed") + // We own the lock dir — write heartbeat + meta with exclusive create + yield* exclusiveWrite(heartbeatPath, "", lockDir, "heartbeat already existed") - const metaJson = encodeMeta({ token, pid: process.pid, hostname, createdAt: new Date().toISOString() }) - yield* exclusiveWrite(metaPath, metaJson, lockDir, "meta.json already existed") + const metaJson = encodeMeta({ token, pid: process.pid, hostname, createdAt: new Date().toISOString() }) + yield* exclusiveWrite(metaPath, metaJson, lockDir, "meta.json already existed") - return { token, metaPath, heartbeatPath, lockDir } satisfies Handle - }) + return { token, metaPath, heartbeatPath, lockDir } satisfies Handle + }).pipe( + Effect.withSpan("EffectFlock.tryAcquire", { + attributes: { key }, + }), + ) // -- retry wrapper (preserves Handle type) -- const acquireHandle = (lockfile: string, key: string): Effect.Effect => - tryAcquireLockDir(lockfile).pipe( + tryAcquireLockDir(lockfile, key).pipe( Effect.retry({ while: (err) => err._tag === "NotAcquired", schedule: retrySchedule, diff --git a/packages/shared/sst-env.d.ts b/packages/shared/sst-env.d.ts new file mode 100644 index 0000000000..64441936d7 --- /dev/null +++ 
b/packages/shared/sst-env.d.ts @@ -0,0 +1,10 @@ +/* This file is auto-generated by SST. Do not edit. */ +/* tslint:disable */ +/* eslint-disable */ +/* deno-fmt-ignore-file */ +/* biome-ignore-all lint: auto-generated */ + +/// + +import "sst" +export {} \ No newline at end of file diff --git a/packages/shared/test/npm.test.ts b/packages/shared/test/npm.test.ts deleted file mode 100644 index 4443d2985c..0000000000 --- a/packages/shared/test/npm.test.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { describe, expect, test } from "bun:test" -import { Npm } from "@opencode-ai/shared/npm" - -const win = process.platform === "win32" - -describe("Npm.sanitize", () => { - test("keeps normal scoped package specs unchanged", () => { - expect(Npm.sanitize("@opencode/acme")).toBe("@opencode/acme") - expect(Npm.sanitize("@opencode/acme@1.0.0")).toBe("@opencode/acme@1.0.0") - expect(Npm.sanitize("prettier")).toBe("prettier") - }) - - test("handles git https specs", () => { - const spec = "acme@git+https://github.com/opencode/acme.git" - const expected = win ? 
"acme@git+https_//github.com/opencode/acme.git" : spec - expect(Npm.sanitize(spec)).toBe(expected) - }) -}) diff --git a/packages/slack/package.json b/packages/slack/package.json index a23500241e..8ca990ba58 100644 --- a/packages/slack/package.json +++ b/packages/slack/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/slack", - "version": "1.4.7", + "version": "1.4.11", "type": "module", "license": "MIT", "scripts": { diff --git a/packages/ui/package.json b/packages/ui/package.json index cd559041cc..98cb928b7b 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "@opencode-ai/ui", - "version": "1.4.7", + "version": "1.4.11", "type": "module", "license": "MIT", "exports": { diff --git a/packages/ui/src/components/provider-icons/types.ts b/packages/ui/src/components/provider-icons/types.ts index f9ddfdf0e9..5a97287509 100644 --- a/packages/ui/src/components/provider-icons/types.ts +++ b/packages/ui/src/components/provider-icons/types.ts @@ -32,6 +32,7 @@ export const iconNames = [ "perplexity", "ovhcloud", "openrouter", + "llmgateway", "opencode", "opencode-go", "openai", diff --git a/packages/web/package.json b/packages/web/package.json index a53ef51932..194f44ec03 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -2,7 +2,7 @@ "name": "@opencode-ai/web", "type": "module", "license": "MIT", - "version": "1.4.7", + "version": "1.4.11", "scripts": { "dev": "astro dev", "dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev", diff --git a/packages/web/src/content/docs/ar/cli.mdx b/packages/web/src/content/docs/ar/cli.mdx index 826ea43040..ab2c12fb20 100644 --- a/packages/web/src/content/docs/ar/cli.mdx +++ b/packages/web/src/content/docs/ar/cli.mdx @@ -573,7 +573,6 @@ opencode upgrade v0.1.48 | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | تعطيل تحميل `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | تعطيل جلب النماذج من مصادر بعيدة | | `OPENCODE_FAKE_VCS` | string | مزود VCS 
وهمي لأغراض الاختبار | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | تعطيل التحقق من وقت الملف لتحسين الأداء | | `OPENCODE_CLIENT` | string | معرّف العميل (الافتراضي `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | تفعيل أدوات بحث الويب من Exa | | `OPENCODE_SERVER_PASSWORD` | string | تفعيل المصادقة الأساسية لخادمي `serve`/`web` | diff --git a/packages/web/src/content/docs/bs/cli.mdx b/packages/web/src/content/docs/bs/cli.mdx index 979066acbc..118b81ba4e 100644 --- a/packages/web/src/content/docs/bs/cli.mdx +++ b/packages/web/src/content/docs/bs/cli.mdx @@ -571,7 +571,6 @@ OpenCode se može konfigurirati pomoću varijabli okruženja. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Onemogući učitavanje `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Onemogući dohvaćanje modela iz udaljenih izvora | | `OPENCODE_FAKE_VCS` | string | Lažni VCS provajder za potrebe testiranja | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Onemogući provjeru vremena datoteke radi optimizacije | | `OPENCODE_CLIENT` | string | Identifikator klijenta (zadano na `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Omogući Exa alate za web pretraživanje | | `OPENCODE_SERVER_PASSWORD` | string | Omogući osnovnu autentifikaciju za `serve`/`web` | diff --git a/packages/web/src/content/docs/cli.mdx b/packages/web/src/content/docs/cli.mdx index 579038ad03..786b9d3d94 100644 --- a/packages/web/src/content/docs/cli.mdx +++ b/packages/web/src/content/docs/cli.mdx @@ -575,7 +575,6 @@ OpenCode can be configured using environment variables. 
| `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Disable fetching models from remote sources | | `OPENCODE_DISABLE_MOUSE` | boolean | Disable mouse capture in the TUI | | `OPENCODE_FAKE_VCS` | string | Fake VCS provider for testing purposes | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Disable file time checking for optimization | | `OPENCODE_CLIENT` | string | Client identifier (defaults to `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Enable Exa web search tools | | `OPENCODE_SERVER_PASSWORD` | string | Enable basic auth for `serve`/`web` | diff --git a/packages/web/src/content/docs/da/cli.mdx b/packages/web/src/content/docs/da/cli.mdx index 40c6645e67..45c4f08e3f 100644 --- a/packages/web/src/content/docs/da/cli.mdx +++ b/packages/web/src/content/docs/da/cli.mdx @@ -574,7 +574,6 @@ OpenCode kan konfigureres ved hjælp af miljøvariabler. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Deaktiver indlæsning af `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Deaktivering af modeller fra eksterne kilder | | `OPENCODE_FAKE_VCS` | string | Falsk VCS-udbyder til testformål | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Deaktiver filtidskontrol for optimering | | `OPENCODE_CLIENT` | string | Klient-id (standard til `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Aktiver Exa-websøgeværktøjer | | `OPENCODE_SERVER_PASSWORD` | string | Aktiver grundlæggende godkendelse for `serve`/`web` | diff --git a/packages/web/src/content/docs/de/cli.mdx b/packages/web/src/content/docs/de/cli.mdx index cb1b974e10..43a1189d60 100644 --- a/packages/web/src/content/docs/de/cli.mdx +++ b/packages/web/src/content/docs/de/cli.mdx @@ -573,7 +573,6 @@ OpenCode kann mithilfe von Umgebungsvariablen konfiguriert werden. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolescher Wert | Deaktivieren Sie das Laden von `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolescher Wert | Deaktivieren Sie das Abrufen von Modellen aus Remote-Quellen | | `OPENCODE_FAKE_VCS` | Zeichenfolge | Gefälschter VCS-Anbieter zu Testzwecken | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolescher Wert | Dateizeitprüfung zur Optimierung deaktivieren | | `OPENCODE_CLIENT` | Zeichenfolge | Client-ID (standardmäßig `cli`) | | `OPENCODE_ENABLE_EXA` | boolescher Wert | Exa-Websuchtools aktivieren | | `OPENCODE_SERVER_PASSWORD` | Zeichenfolge | Aktivieren Sie die Basisauthentifizierung für `serve`/`web` | diff --git a/packages/web/src/content/docs/es/cli.mdx b/packages/web/src/content/docs/es/cli.mdx index 658be27084..5c86474a61 100644 --- a/packages/web/src/content/docs/es/cli.mdx +++ b/packages/web/src/content/docs/es/cli.mdx @@ -573,7 +573,6 @@ OpenCode se puede configurar mediante variables de entorno. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | booleano | Deshabilitar la carga `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | booleano | Deshabilitar la recuperación de modelos desde fuentes remotas | | `OPENCODE_FAKE_VCS` | cadena | Proveedor de VCS falso para fines de prueba | -| `OPENCODE_DISABLE_FILETIME_CHECK` | booleano | Deshabilite la verificación del tiempo del archivo para optimizarlo | | `OPENCODE_CLIENT` | cadena | Identificador de cliente (por defecto `cli`) | | `OPENCODE_ENABLE_EXA` | booleano | Habilitar las herramientas de búsqueda web de Exa | | `OPENCODE_SERVER_PASSWORD` | cadena | Habilite la autenticación básica para `serve`/`web` | diff --git a/packages/web/src/content/docs/fr/cli.mdx b/packages/web/src/content/docs/fr/cli.mdx index 2c763618e4..cffa748ad2 100644 --- a/packages/web/src/content/docs/fr/cli.mdx +++ b/packages/web/src/content/docs/fr/cli.mdx @@ -574,7 +574,6 @@ OpenCode peut être configuré à l'aide de variables d'environnement. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | booléen | Désactiver le chargement de `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | booléen | Désactiver la récupération de modèles à partir de sources distantes | | `OPENCODE_FAKE_VCS` | chaîne | Faux fournisseur VCS à des fins de test | -| `OPENCODE_DISABLE_FILETIME_CHECK` | booléen | Désactiver la vérification de l'heure des fichiers pour l'optimisation | | `OPENCODE_CLIENT` | chaîne | Identifiant du client (par défaut `cli`) | | `OPENCODE_ENABLE_EXA` | booléen | Activer les outils de recherche Web Exa | | `OPENCODE_SERVER_PASSWORD` | chaîne | Activer l'authentification de base pour `serve`/`web` | diff --git a/packages/web/src/content/docs/it/cli.mdx b/packages/web/src/content/docs/it/cli.mdx index 46d7da1495..952dfba090 100644 --- a/packages/web/src/content/docs/it/cli.mdx +++ b/packages/web/src/content/docs/it/cli.mdx @@ -574,7 +574,6 @@ OpenCode può essere configurato tramite variabili d'ambiente. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Disabilita caricamento di `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Disabilita fetch dei modelli da fonti remote | | `OPENCODE_FAKE_VCS` | string | Provider VCS finto per scopi di test | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Disabilita controllo file time per ottimizzazione | | `OPENCODE_CLIENT` | string | Identificatore client (default `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Abilita gli strumenti di web search Exa | | `OPENCODE_SERVER_PASSWORD` | string | Abilita basic auth per `serve`/`web` | diff --git a/packages/web/src/content/docs/ja/cli.mdx b/packages/web/src/content/docs/ja/cli.mdx index f690c7d7e9..82a8852ea5 100644 --- a/packages/web/src/content/docs/ja/cli.mdx +++ b/packages/web/src/content/docs/ja/cli.mdx @@ -573,7 +573,6 @@ OpenCode は環境変数を使用して構成できます。 | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | ブール値 | `.claude/skills` のロードを無効にする | | `OPENCODE_DISABLE_MODELS_FETCH` | ブール値 | リモートソースからのモデルの取得を無効にする | | 
`OPENCODE_FAKE_VCS` | 文字列 | テスト目的の偽の VCS プロバイダー | -| `OPENCODE_DISABLE_FILETIME_CHECK` | ブール値 | 最適化のためにファイル時間チェックを無効にする | | `OPENCODE_CLIENT` | 文字列 | クライアント識別子 (デフォルトは `cli`) | | `OPENCODE_ENABLE_EXA` | ブール値 | Exa Web 検索ツールを有効にする | | `OPENCODE_SERVER_PASSWORD` | 文字列 | `serve`/`web` の基本認証を有効にする | diff --git a/packages/web/src/content/docs/ko/cli.mdx b/packages/web/src/content/docs/ko/cli.mdx index 0562ab8afd..b0ce10567e 100644 --- a/packages/web/src/content/docs/ko/cli.mdx +++ b/packages/web/src/content/docs/ko/cli.mdx @@ -573,7 +573,6 @@ OpenCode는 환경 변수로도 구성할 수 있습니다. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | `.claude/skills` 로드 비활성화 | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | 원격 소스에서 모델 목록 가져오기 비활성화 | | `OPENCODE_FAKE_VCS` | string | 테스트용 가짜 VCS provider | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | 최적화를 위한 파일 시간 검사 비활성화 | | `OPENCODE_CLIENT` | string | 클라이언트 식별자(기본값: `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Exa 웹 검색 도구 활성화 | | `OPENCODE_SERVER_PASSWORD` | string | `serve`/`web` 기본 인증 활성화 | diff --git a/packages/web/src/content/docs/nb/cli.mdx b/packages/web/src/content/docs/nb/cli.mdx index 8b6d283e10..8312a1a7c5 100644 --- a/packages/web/src/content/docs/nb/cli.mdx +++ b/packages/web/src/content/docs/nb/cli.mdx @@ -574,7 +574,6 @@ OpenCode kan konfigureres ved hjelp av miljøvariabler. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolsk | Deaktiver innlasting av `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolsk | Deaktiver henting av modeller fra eksterne kilder | | `OPENCODE_FAKE_VCS` | streng | Falsk VCS-leverandør for testformål | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolsk | Deaktiver filtidskontroll for optimalisering | | `OPENCODE_CLIENT` | streng | Klientidentifikator (standard til `cli`) | | `OPENCODE_ENABLE_EXA` | boolsk | Aktiver Exa-nettsøkeverktøy | | `OPENCODE_SERVER_PASSWORD` | streng | Aktiver grunnleggende autentisering for `serve`/`web` | diff --git a/packages/web/src/content/docs/pl/cli.mdx b/packages/web/src/content/docs/pl/cli.mdx index 6cdc67a48f..e175870cbf 100644 --- a/packages/web/src/content/docs/pl/cli.mdx +++ b/packages/web/src/content/docs/pl/cli.mdx @@ -553,33 +553,32 @@ Interfejs CLI OpenCode przyjmuje następujące flagi globalne dla każdego polec OpenCode można skonfigurować za pomocą zmiennych środowiskowych. -| Zmienna | Typ | Opis | -| ------------------------------------- | ------- | ----------------------------------------------------------- | -| `OPENCODE_AUTO_SHARE` | boolean | Automatycznie udostępniaj sesje | -| `OPENCODE_GIT_BASH_PATH` | string | Ścieżka do pliku wykonywalnego Git Bash w systemie Windows | -| `OPENCODE_CONFIG` | string | Ścieżka do pliku konfiguracyjnego | -| `OPENCODE_TUI_CONFIG` | string | Ścieżka do pliku konfiguracyjnego TUI | -| `OPENCODE_CONFIG_DIR` | string | Ścieżka do katalogu konfiguracyjnego | -| `OPENCODE_CONFIG_CONTENT` | string | Treść konfiguracji JSON (inline) | -| `OPENCODE_DISABLE_AUTOUPDATE` | boolean | Wyłącz automatyczne sprawdzanie aktualizacji | -| `OPENCODE_DISABLE_PRUNE` | boolean | Wyłącz czyszczenie starych wyników (pruning) | -| `OPENCODE_DISABLE_TERMINAL_TITLE` | boolean | Wyłącz automatyczne ustawianie tytułu terminala | -| `OPENCODE_PERMISSION` | string | Konfiguracja uprawnień w JSON (inline) | -| `OPENCODE_DISABLE_DEFAULT_PLUGINS` | boolean | 
Wyłącz domyślne wtyczki | -| `OPENCODE_DISABLE_LSP_DOWNLOAD` | boolean | Wyłącz automatyczne pobieranie serwerów LSP | -| `OPENCODE_ENABLE_EXPERIMENTAL_MODELS` | boolean | Włącz modele eksperymentalne | -| `OPENCODE_DISABLE_AUTOCOMPACT` | boolean | Wyłącz automatyczne kompaktowanie kontekstu | -| `OPENCODE_DISABLE_CLAUDE_CODE` | boolean | Wyłącz integrację z `.claude` (prompt + skills) | -| `OPENCODE_DISABLE_CLAUDE_CODE_PROMPT` | boolean | Wyłącz czytanie `~/.claude/CLAUDE.md` | -| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Wyłącz ładowanie `.claude/skills` | -| `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Wyłącz pobieranie modeli ze źródeł zewnętrznych | -| `OPENCODE_FAKE_VCS` | string | Fałszywy dostawca VCS do celów testowych | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Wyłącz sprawdzanie czasu modyfikacji plików (optymalizacja) | -| `OPENCODE_CLIENT` | string | Identyfikator klienta (domyślnie `cli`) | -| `OPENCODE_ENABLE_EXA` | boolean | Włącz narzędzie wyszukiwania internetowego Exa | -| `OPENCODE_SERVER_PASSWORD` | string | Włącz uwierzytelnianie podstawowe dla `serve`/`web` | -| `OPENCODE_SERVER_USERNAME` | string | Nazwa użytkownika do autoryzacji (domyślnie `opencode`) | -| `OPENCODE_MODELS_URL` | string | Niestandardowy adres URL do pobierania konfiguracji modeli | +| Zmienna | Typ | Opis | +| ------------------------------------- | ------- | ---------------------------------------------------------- | +| `OPENCODE_AUTO_SHARE` | boolean | Automatycznie udostępniaj sesje | +| `OPENCODE_GIT_BASH_PATH` | string | Ścieżka do pliku wykonywalnego Git Bash w systemie Windows | +| `OPENCODE_CONFIG` | string | Ścieżka do pliku konfiguracyjnego | +| `OPENCODE_TUI_CONFIG` | string | Ścieżka do pliku konfiguracyjnego TUI | +| `OPENCODE_CONFIG_DIR` | string | Ścieżka do katalogu konfiguracyjnego | +| `OPENCODE_CONFIG_CONTENT` | string | Treść konfiguracji JSON (inline) | +| `OPENCODE_DISABLE_AUTOUPDATE` | boolean | Wyłącz automatyczne sprawdzanie 
aktualizacji | +| `OPENCODE_DISABLE_PRUNE` | boolean | Wyłącz czyszczenie starych wyników (pruning) | +| `OPENCODE_DISABLE_TERMINAL_TITLE` | boolean | Wyłącz automatyczne ustawianie tytułu terminala | +| `OPENCODE_PERMISSION` | string | Konfiguracja uprawnień w JSON (inline) | +| `OPENCODE_DISABLE_DEFAULT_PLUGINS` | boolean | Wyłącz domyślne wtyczki | +| `OPENCODE_DISABLE_LSP_DOWNLOAD` | boolean | Wyłącz automatyczne pobieranie serwerów LSP | +| `OPENCODE_ENABLE_EXPERIMENTAL_MODELS` | boolean | Włącz modele eksperymentalne | +| `OPENCODE_DISABLE_AUTOCOMPACT` | boolean | Wyłącz automatyczne kompaktowanie kontekstu | +| `OPENCODE_DISABLE_CLAUDE_CODE` | boolean | Wyłącz integrację z `.claude` (prompt + skills) | +| `OPENCODE_DISABLE_CLAUDE_CODE_PROMPT` | boolean | Wyłącz czytanie `~/.claude/CLAUDE.md` | +| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Wyłącz ładowanie `.claude/skills` | +| `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Wyłącz pobieranie modeli ze źródeł zewnętrznych | +| `OPENCODE_FAKE_VCS` | string | Fałszywy dostawca VCS do celów testowych | +| `OPENCODE_CLIENT` | string | Identyfikator klienta (domyślnie `cli`) | +| `OPENCODE_ENABLE_EXA` | boolean | Włącz narzędzie wyszukiwania internetowego Exa | +| `OPENCODE_SERVER_PASSWORD` | string | Włącz uwierzytelnianie podstawowe dla `serve`/`web` | +| `OPENCODE_SERVER_USERNAME` | string | Nazwa użytkownika do autoryzacji (domyślnie `opencode`) | +| `OPENCODE_MODELS_URL` | string | Niestandardowy adres URL do pobierania konfiguracji modeli | --- diff --git a/packages/web/src/content/docs/providers.mdx b/packages/web/src/content/docs/providers.mdx index bd7e10f928..bad9e1ebbc 100644 --- a/packages/web/src/content/docs/providers.mdx +++ b/packages/web/src/content/docs/providers.mdx @@ -1316,6 +1316,33 @@ To use Kimi K2 from Moonshot AI: --- +### Mistral AI + +1. Head over to the [Mistral AI console](https://console.mistral.ai/), create an account, and generate an API key. + +2. 
Run the `/connect` command and search for **Mistral AI**. + + ```txt + /connect + ``` + +3. Enter your Mistral API key. + + ```txt + ┌ API key + │ + │ + └ enter + ``` + +4. Run the `/models` command to select a model like _Mistral Medium_. + + ```txt + /models + ``` + +--- + ### Nebius Token Factory 1. Head over to the [Nebius Token Factory console](https://tokenfactory.nebius.com/), create an account, and click **Add Key**. @@ -1550,6 +1577,74 @@ OpenCode Zen is a list of tested and verified models provided by the OpenCode te --- +### LLM Gateway + +1. Head over to the [LLM Gateway dashboard](https://llmgateway.io/dashboard), click **Create API Key**, and copy the key. + +2. Run the `/connect` command and search for LLM Gateway. + + ```txt + /connect + ``` + +3. Enter the API key for the provider. + + ```txt + ┌ API key + │ + │ + └ enter + ``` + +4. Many LLM Gateway models are preloaded by default, run the `/models` command to select the one you want. + + ```txt + /models + ``` + + You can also add additional models through your opencode config. + + ```json title="opencode.json" {6} + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "llmgateway": { + "models": { + "somecoolnewmodel": {} + } + } + } + } + ``` + +5. You can also customize them through your opencode config. Here's an example of specifying a provider + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "llmgateway": { + "models": { + "glm-4.7": { + "name": "GLM 4.7" + }, + "gpt-5.2": { + "name": "GPT-5.2" + }, + "gemini-2.5-pro": { + "name": "Gemini 2.5 Pro" + }, + "claude-3-5-sonnet-20241022": { + "name": "Claude 3.5 Sonnet" + } + } + } + } + } + ``` + +--- + ### SAP AI Core SAP AI Core provides access to 40+ models from OpenAI, Anthropic, Google, Amazon, Meta, Mistral, and AI21 through a unified platform. 
diff --git a/packages/web/src/content/docs/pt-br/cli.mdx b/packages/web/src/content/docs/pt-br/cli.mdx index 32c50d7c0a..78190b3c5d 100644 --- a/packages/web/src/content/docs/pt-br/cli.mdx +++ b/packages/web/src/content/docs/pt-br/cli.mdx @@ -573,7 +573,6 @@ O opencode pode ser configurado usando variáveis de ambiente. | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | Desabilitar carregamento de `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Desabilitar busca de modelos de fontes remotas | | `OPENCODE_FAKE_VCS` | string | Provedor VCS falso para fins de teste | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Desabilitar verificação de tempo de arquivo para otimização | | `OPENCODE_CLIENT` | string | Identificador do cliente (padrão é `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Habilitar ferramentas de busca web Exa | | `OPENCODE_SERVER_PASSWORD` | string | Habilitar autenticação básica para `serve`/`web` | diff --git a/packages/web/src/content/docs/ru/cli.mdx b/packages/web/src/content/docs/ru/cli.mdx index a98111530f..f5aeee256f 100644 --- a/packages/web/src/content/docs/ru/cli.mdx +++ b/packages/web/src/content/docs/ru/cli.mdx @@ -574,7 +574,6 @@ opencode можно настроить с помощью переменных с | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | логическое значение | Отключить загрузку `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | логическое значение | Отключить получение моделей из удаленных источников | | `OPENCODE_FAKE_VCS` | строка | Поддельный поставщик VCS для целей тестирования | -| `OPENCODE_DISABLE_FILETIME_CHECK` | логическое значение | Отключить проверку времени файла для оптимизации | | `OPENCODE_CLIENT` | строка | Идентификатор клиента (по умолчанию `cli`) | | `OPENCODE_ENABLE_EXA` | логическое значение | Включить инструменты веб-поиска Exa | | `OPENCODE_SERVER_PASSWORD` | строка | Включить базовую аутентификацию для `serve`/`web` | diff --git a/packages/web/src/content/docs/th/cli.mdx b/packages/web/src/content/docs/th/cli.mdx 
index 2f75a96a7e..4b2db9d988 100644 --- a/packages/web/src/content/docs/th/cli.mdx +++ b/packages/web/src/content/docs/th/cli.mdx @@ -575,7 +575,6 @@ OpenCode สามารถกำหนดค่าโดยใช้ตัว | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | Boolean | ปิดใช้งานการนำเข้า `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | Boolean | ปิดใช้งานการดึงรายการโมเดลจากระยะไกล | | `OPENCODE_FAKE_VCS` | String | เปิดใช้งาน VCS จำลองสำหรับการทดสอบ | -| `OPENCODE_DISABLE_FILETIME_CHECK` | Boolean | ปิดใช้งานการตรวจสอบเวลาแก้ไขไฟล์ | | `OPENCODE_CLIENT` | String | ตัวระบุไคลเอนต์ (ค่าเริ่มต้นคือ `cli`) | | `OPENCODE_ENABLE_EXA` | Boolean | เปิดใช้งานการใช้ Exa แทน ls หากมี | | `OPENCODE_SERVER_PASSWORD` | String | รหัสผ่านสำหรับการตรวจสอบสิทธิ์พื้นฐาน `serve`/`web` | diff --git a/packages/web/src/content/docs/tr/cli.mdx b/packages/web/src/content/docs/tr/cli.mdx index 41600b5bf0..75ecca9926 100644 --- a/packages/web/src/content/docs/tr/cli.mdx +++ b/packages/web/src/content/docs/tr/cli.mdx @@ -574,7 +574,6 @@ opencode ortam değişkenleri kullanılarak yapılandırılabilir. 
| `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | `.claude/skills` yüklemesini devre dışı bırak | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | Uzak kaynaklardan model getirmeyi devre dışı bırakın | | `OPENCODE_FAKE_VCS` | string | Test amaçlı sahte VCS sağlayıcısı | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | Optimizasyon için dosya süresi kontrolünü devre dışı bırakın | | `OPENCODE_CLIENT` | string | Client kimliği (varsayılan: `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | Exa web arama araçlarını etkinleştir | | `OPENCODE_SERVER_PASSWORD` | string | `serve`/`web` için temel kimlik doğrulamayı etkinleştirin | diff --git a/packages/web/src/content/docs/zh-cn/cli.mdx b/packages/web/src/content/docs/zh-cn/cli.mdx index 0c54d3d7b1..c0cff134a5 100644 --- a/packages/web/src/content/docs/zh-cn/cli.mdx +++ b/packages/web/src/content/docs/zh-cn/cli.mdx @@ -574,7 +574,6 @@ OpenCode 可以通过环境变量进行配置。 | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | 禁用加载 `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | 禁用从远程源获取模型 | | `OPENCODE_FAKE_VCS` | string | 用于测试目的的模拟 VCS 提供商 | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | 禁用文件时间检查优化 | | `OPENCODE_CLIENT` | string | 客户端标识符(默认为 `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | 启用 Exa 网络搜索工具 | | `OPENCODE_SERVER_PASSWORD` | string | 为 `serve`/`web` 启用基本认证 | diff --git a/packages/web/src/content/docs/zh-tw/cli.mdx b/packages/web/src/content/docs/zh-tw/cli.mdx index 5de2b96375..4df9d13fdd 100644 --- a/packages/web/src/content/docs/zh-tw/cli.mdx +++ b/packages/web/src/content/docs/zh-tw/cli.mdx @@ -574,7 +574,6 @@ OpenCode 可以透過環境變數進行設定。 | `OPENCODE_DISABLE_CLAUDE_CODE_SKILLS` | boolean | 停用載入 `.claude/skills` | | `OPENCODE_DISABLE_MODELS_FETCH` | boolean | 停用從遠端來源擷取模型 | | `OPENCODE_FAKE_VCS` | string | 用於測試目的的模擬 VCS 供應商 | -| `OPENCODE_DISABLE_FILETIME_CHECK` | boolean | 停用檔案時間檢查最佳化 | | `OPENCODE_CLIENT` | string | 用戶端識別碼(預設為 `cli`) | | `OPENCODE_ENABLE_EXA` | boolean | 啟用 Exa 網路搜尋工具 | | `OPENCODE_SERVER_PASSWORD` | string 
| 為 `serve`/`web` 啟用基本認證 | diff --git a/sdks/vscode/package.json b/sdks/vscode/package.json index c499f679fe..f52135c206 100644 --- a/sdks/vscode/package.json +++ b/sdks/vscode/package.json @@ -2,7 +2,7 @@ "name": "opencode", "displayName": "opencode", "description": "opencode for VS Code", - "version": "1.4.7", + "version": "1.4.11", "publisher": "sst-dev", "repository": { "type": "git",