mirror of
https://fastgit.cc/https://github.com/anomalyco/opencode
synced 2026-04-21 13:21:17 +08:00
Merge branch 'dev' into nxl/improve-compaction-strategy
This commit is contained in:
@@ -14,6 +14,7 @@
|
||||
- Use Bun APIs when possible, like `Bun.file()`
|
||||
- Rely on type inference when possible; avoid explicit type annotations or interfaces unless necessary for exports or clarity
|
||||
- Prefer functional array methods (flatMap, filter, map) over for loops; use type guards on filter to maintain type inference downstream
|
||||
- In `src/config`, follow the existing self-export pattern at the top of the file (for example `export * as ConfigAgent from "./agent"`) when adding a new config module.
|
||||
|
||||
Reduce total variable count by inlining when a value is only used once.
|
||||
|
||||
|
||||
61
bun.lock
61
bun.lock
@@ -29,7 +29,7 @@
|
||||
},
|
||||
"packages/app": {
|
||||
"name": "@opencode-ai/app",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
@@ -83,7 +83,7 @@
|
||||
},
|
||||
"packages/console/app": {
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@cloudflare/vite-plugin": "1.15.2",
|
||||
"@ibm/plex": "6.4.1",
|
||||
@@ -117,7 +117,7 @@
|
||||
},
|
||||
"packages/console/core": {
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-sts": "3.782.0",
|
||||
"@jsx-email/render": "1.1.1",
|
||||
@@ -144,7 +144,7 @@
|
||||
},
|
||||
"packages/console/function": {
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@ai-sdk/anthropic": "3.0.64",
|
||||
"@ai-sdk/openai": "3.0.48",
|
||||
@@ -168,7 +168,7 @@
|
||||
},
|
||||
"packages/console/mail": {
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
@@ -192,7 +192,7 @@
|
||||
},
|
||||
"packages/desktop": {
|
||||
"name": "@opencode-ai/desktop",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@opencode-ai/app": "workspace:*",
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
@@ -225,7 +225,7 @@
|
||||
},
|
||||
"packages/desktop-electron": {
|
||||
"name": "@opencode-ai/desktop-electron",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"effect": "catalog:",
|
||||
"electron-context-menu": "4.1.2",
|
||||
@@ -268,7 +268,7 @@
|
||||
},
|
||||
"packages/enterprise": {
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@opencode-ai/shared": "workspace:*",
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
@@ -297,7 +297,7 @@
|
||||
},
|
||||
"packages/function": {
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@octokit/auth-app": "8.0.1",
|
||||
"@octokit/rest": "catalog:",
|
||||
@@ -313,7 +313,7 @@
|
||||
},
|
||||
"packages/opencode": {
|
||||
"name": "opencode",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"bin": {
|
||||
"opencode": "./bin/opencode",
|
||||
},
|
||||
@@ -322,15 +322,15 @@
|
||||
"@actions/github": "6.0.1",
|
||||
"@agentclientprotocol/sdk": "0.16.1",
|
||||
"@ai-sdk/alibaba": "1.0.17",
|
||||
"@ai-sdk/amazon-bedrock": "4.0.93",
|
||||
"@ai-sdk/anthropic": "3.0.67",
|
||||
"@ai-sdk/amazon-bedrock": "4.0.94",
|
||||
"@ai-sdk/anthropic": "3.0.70",
|
||||
"@ai-sdk/azure": "3.0.49",
|
||||
"@ai-sdk/cerebras": "2.0.41",
|
||||
"@ai-sdk/cohere": "3.0.27",
|
||||
"@ai-sdk/deepinfra": "2.0.41",
|
||||
"@ai-sdk/gateway": "3.0.97",
|
||||
"@ai-sdk/gateway": "3.0.102",
|
||||
"@ai-sdk/google": "3.0.63",
|
||||
"@ai-sdk/google-vertex": "4.0.109",
|
||||
"@ai-sdk/google-vertex": "4.0.111",
|
||||
"@ai-sdk/groq": "3.0.31",
|
||||
"@ai-sdk/mistral": "3.0.27",
|
||||
"@ai-sdk/openai": "3.0.53",
|
||||
@@ -458,7 +458,7 @@
|
||||
},
|
||||
"packages/plugin": {
|
||||
"name": "@opencode-ai/plugin",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"effect": "catalog:",
|
||||
@@ -493,7 +493,7 @@
|
||||
},
|
||||
"packages/sdk/js": {
|
||||
"name": "@opencode-ai/sdk",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"cross-spawn": "catalog:",
|
||||
},
|
||||
@@ -508,7 +508,7 @@
|
||||
},
|
||||
"packages/shared": {
|
||||
"name": "@opencode-ai/shared",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"bin": {
|
||||
"opencode": "./bin/opencode",
|
||||
},
|
||||
@@ -516,6 +516,7 @@
|
||||
"@effect/platform-node": "catalog:",
|
||||
"@npmcli/arborist": "catalog:",
|
||||
"effect": "catalog:",
|
||||
"glob": "13.0.5",
|
||||
"mime-types": "3.0.2",
|
||||
"minimatch": "10.2.5",
|
||||
"semver": "catalog:",
|
||||
@@ -531,7 +532,7 @@
|
||||
},
|
||||
"packages/slack": {
|
||||
"name": "@opencode-ai/slack",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@slack/bolt": "^3.17.1",
|
||||
@@ -566,7 +567,7 @@
|
||||
},
|
||||
"packages/ui": {
|
||||
"name": "@opencode-ai/ui",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
@@ -615,7 +616,7 @@
|
||||
},
|
||||
"packages/web": {
|
||||
"name": "@opencode-ai/web",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@astrojs/cloudflare": "12.6.3",
|
||||
"@astrojs/markdown-remark": "6.3.1",
|
||||
@@ -737,7 +738,7 @@
|
||||
|
||||
"@ai-sdk/alibaba": ["@ai-sdk/alibaba@1.0.17", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZbE+U5bWz2JBc5DERLowx5+TKbjGBE93LqKZAWvuEn7HOSQMraxFMZuc0ST335QZJAyfBOzh7m1mPQ+y7EaaoA=="],
|
||||
|
||||
"@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.93", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hcXDU8QDwpAzLVTuY932TQVlIij9+iaVTxc5mPGY6yb//JMAAC5hMVhg93IrxlrxWLvMgjezNgoZGwquR+SGnw=="],
|
||||
"@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.94", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XKE7wAjXejsIfNQvn3onvGUByhGHVM6W+xlL+1DAQLmjEb+ue4sOJIRehJ96rEvTXVVHRVyA6bSXx7ayxXfn5A=="],
|
||||
|
||||
"@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-rwLi/Rsuj2pYniQXIrvClHvXDzgM4UQHHnvHTWEF14efnlKclG/1ghpNC+adsRujAbCTr6gRsSbDE2vEqriV7g=="],
|
||||
|
||||
@@ -757,11 +758,11 @@
|
||||
|
||||
"@ai-sdk/fireworks": ["@ai-sdk/fireworks@2.0.46", "", { "dependencies": { "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XRKR0zgRyegdmtK5CDUEjlyRp0Fo+XVCdoG+301U1SGtgRIAYG3ObVtgzVJBVpJdHFSLHuYeLTnNiQoUxD7+FQ=="],
|
||||
|
||||
"@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.97", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ERHmVGX30YKTwxObuHQzNqoOf8Nb5WwYMDBn34e3TGGVn0vLEXwMimo7uRVTbhhi4gfu9WtwYTE4x1+csZok1w=="],
|
||||
"@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.102", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-GrwDpaYJiVafrsA1MTbZtXPcQUI67g5AXiJo7Y1F8b+w+SiYHLk3ZIn1YmpQVoVAh2bjvxjj+Vo0AvfskuGH4g=="],
|
||||
|
||||
"@ai-sdk/google": ["@ai-sdk/google@3.0.63", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-RfOZWVMYSPu2sPRfGajrauWAZ9BSaRopSn+AszkKWQ1MFj8nhaXvCqRHB5pBQUaHTfZKagvOmMpNfa/s3gPLgQ=="],
|
||||
|
||||
"@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.109", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/google": "3.0.63", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-QzQ+DgOoSYlkU4mK0H+iaCaW1bl5zOimH9X2E2oylcVyUtAdCuduQ959Uw1ygW3l09J2K/ceEDtK8OUPHyOA7g=="],
|
||||
"@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@4.0.111", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.70", "@ai-sdk/google": "3.0.64", "@ai-sdk/openai-compatible": "2.0.41", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-5gILpAWWI5idfal/MfoH3tlQeSnOJ9jfL8JB8m2fdc3ue/9xoXkYDpXpDL/nyJImFjMCi6eR0Fpvlo/IKEWDIg=="],
|
||||
|
||||
"@ai-sdk/groq": ["@ai-sdk/groq@3.0.31", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XbbugpnFmXGu2TlXiq8KUJskP6/VVbuFcnFIGDzDIB/Chg6XHsNnqrTF80Zxkh0Pd3+NvbM+2Uqrtsndk6bDAg=="],
|
||||
|
||||
@@ -5151,7 +5152,11 @@
|
||||
|
||||
"@ai-sdk/alibaba/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="],
|
||||
|
||||
"@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="],
|
||||
"@ai-sdk/amazon-bedrock/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="],
|
||||
|
||||
"@ai-sdk/amazon-bedrock/@smithy/eventstream-codec": ["@smithy/eventstream-codec@4.2.13", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@smithy/types": "^4.14.0", "@smithy/util-hex-encoding": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-vYahwBAtRaAcFbOmE9aLr12z7RiHYDSLcnogSdxfm7kKfsNa3wH+NU5r7vTeB5rKvLsWyPjVX8iH94brP7umiQ=="],
|
||||
|
||||
"@ai-sdk/amazon-bedrock/@smithy/util-utf8": ["@smithy/util-utf8@4.2.2", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw=="],
|
||||
|
||||
"@ai-sdk/anthropic/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.21", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-MtFUYI1/8mgDvRmaBDjbLJPFFrMG777AvSgyIFQtZHIMzm88R/12vYBBpnk7pfiWLFE1DSZzY4WDYzGbKAcmiw=="],
|
||||
|
||||
@@ -5165,7 +5170,9 @@
|
||||
|
||||
"@ai-sdk/fireworks/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="],
|
||||
|
||||
"@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="],
|
||||
"@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="],
|
||||
|
||||
"@ai-sdk/google-vertex/@ai-sdk/google": ["@ai-sdk/google@3.0.64", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-CbR82EgGPNrj/6q0HtclwuCqe0/pDShyv3nWDP/A9DroujzWXnLMlUJVrgPOsg4b40zQCwwVs2XSKCxvt/4QaA=="],
|
||||
|
||||
"@ai-sdk/google-vertex/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@2.0.41", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kNAGINk71AlOXx10Dq/PXw4t/9XjdK8uxfpVElRwtSFMdeSiLVt58p9TPx4/FJD+hxZuVhvxYj9r42osxWq79g=="],
|
||||
|
||||
@@ -5683,6 +5690,8 @@
|
||||
|
||||
"ai/@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.95", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-ZmUNNbZl3V42xwQzPaNUi+s8eqR2lnrxf0bvB6YbLXpLjHYv0k2Y78t12cNOfY0bxGeuVVTLyk856uLuQIuXEQ=="],
|
||||
|
||||
"ai-gateway-provider/@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@4.0.93", "", { "dependencies": { "@ai-sdk/anthropic": "3.0.69", "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hcXDU8QDwpAzLVTuY932TQVlIij9+iaVTxc5mPGY6yb//JMAAC5hMVhg93IrxlrxWLvMgjezNgoZGwquR+SGnw=="],
|
||||
|
||||
"ai-gateway-provider/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.69", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LshR7X3pFugY0o41G2VKTmg1XoGpSl7uoYWfzk6zjVZLhCfeFiwgpOga+eTV4XY1VVpZwKVqRnkDbIL7K2eH5g=="],
|
||||
|
||||
"ai-gateway-provider/@ai-sdk/google": ["@ai-sdk/google@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.21" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-uz8tIlkDgQJG9Js2Wh9JHzd4kI9+hYJqf9XXJLx60vyN5mRIqhr49iwR5zGP5Gl8odp2PeR3Gh2k+5bh3Z1HHw=="],
|
||||
@@ -5899,7 +5908,7 @@
|
||||
|
||||
"nypm/tinyexec": ["tinyexec@1.1.1", "", {}, "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg=="],
|
||||
|
||||
"opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.67", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-FFX4P5Fd6lcQJc2OLngZQkbbJHa0IDDZi087Edb8qRZx6h90krtM61ArbMUL8us/7ZUwojCXnyJ/wQ2Eflx2jQ=="],
|
||||
"opencode/@ai-sdk/anthropic": ["@ai-sdk/anthropic@3.0.70", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-hubTFcfnG3NbrlcDW0tU2fsZhRy/7dF5GCymu4DzBQUYliy2lb7tCeeMhDtFBaYa01qSBHRjkwGnsAdUtDPCwA=="],
|
||||
|
||||
"opencode/@ai-sdk/openai": ["@ai-sdk/openai@3.0.53", "", { "dependencies": { "@ai-sdk/provider": "3.0.8", "@ai-sdk/provider-utils": "4.0.23" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-Wld+Rbc05KaUn08uBt06eEuwcgalcIFtIl32Yp+GxuZXUQwOb6YeAuq+C6da4ch6BurFoqEaLemJVwjBb7x+PQ=="],
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"nodeModules": {
|
||||
"x86_64-linux": "sha256-NJAK+cPjwn+2ojDLyyDmBQyx2pD+rILetp7VCylgjek=",
|
||||
"aarch64-linux": "sha256-q8NTtFQJoyM7TTvErGA6RtmUscxoZKD/mj9N6S5YhkA=",
|
||||
"aarch64-darwin": "sha256-/ccoSZNLef6j9j14HzpVqhKCR+czM3mhPKPH51mHO24=",
|
||||
"x86_64-darwin": "sha256-6Pd10sMHL/5ZoWNvGPwPn4/AIs1TKjt/3gFyrVpBaE0="
|
||||
"x86_64-linux": "sha256-tYAb5Mo39UW1VEejYuo0jW0jzH2OyY/HrqgiZL3rmjY=",
|
||||
"aarch64-linux": "sha256-3zGKV5UwokXpmY0nT1mry3IhNf2EQYLKT7ac+/trmQA=",
|
||||
"aarch64-darwin": "sha256-oKXAut7eu/eW5a43OT8+aFuH1F1tuIldTs+7PUXSCv4=",
|
||||
"x86_64-darwin": "sha256-Az+9X1scOEhw3aOO8laKJoZjiuz3qlLTIk1bx25P/z4="
|
||||
}
|
||||
}
|
||||
|
||||
@@ -55,6 +55,7 @@ stdenvNoCC.mkDerivation {
|
||||
--filter './packages/opencode' \
|
||||
--filter './packages/desktop' \
|
||||
--filter './packages/app' \
|
||||
--filter './packages/shared' \
|
||||
--frozen-lockfile \
|
||||
--ignore-scripts \
|
||||
--no-progress
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/app",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"description": "",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
|
||||
@@ -8,7 +8,7 @@ import { Spinner } from "@opencode-ai/ui/spinner"
|
||||
import { showToast } from "@opencode-ai/ui/toast"
|
||||
import { Tooltip, TooltipKeybind } from "@opencode-ai/ui/tooltip"
|
||||
import { getFilename } from "@opencode-ai/shared/util/path"
|
||||
import { createEffect, createMemo, For, Show } from "solid-js"
|
||||
import { createEffect, createMemo, createSignal, For, onMount, Show } from "solid-js"
|
||||
import { createStore } from "solid-js/store"
|
||||
import { Portal } from "solid-js/web"
|
||||
import { useCommand } from "@/context/command"
|
||||
@@ -16,6 +16,7 @@ import { useLanguage } from "@/context/language"
|
||||
import { useLayout } from "@/context/layout"
|
||||
import { usePlatform } from "@/context/platform"
|
||||
import { useServer } from "@/context/server"
|
||||
import { useSettings } from "@/context/settings"
|
||||
import { useSync } from "@/context/sync"
|
||||
import { useTerminal } from "@/context/terminal"
|
||||
import { focusTerminalById } from "@/pages/session/helpers"
|
||||
@@ -134,6 +135,7 @@ export function SessionHeader() {
|
||||
const server = useServer()
|
||||
const platform = usePlatform()
|
||||
const language = useLanguage()
|
||||
const settings = useSettings()
|
||||
const sync = useSync()
|
||||
const terminal = useTerminal()
|
||||
const { params, view } = useSessionLayout()
|
||||
@@ -151,6 +153,11 @@ export function SessionHeader() {
|
||||
})
|
||||
const hotkey = createMemo(() => command.keybind("file.open"))
|
||||
const os = createMemo(() => detectOS(platform))
|
||||
const isDesktopBeta = platform.platform === "desktop" && import.meta.env.VITE_OPENCODE_CHANNEL === "beta"
|
||||
const search = createMemo(() => !isDesktopBeta || settings.general.showSearch())
|
||||
const tree = createMemo(() => !isDesktopBeta || settings.general.showFileTree())
|
||||
const term = createMemo(() => !isDesktopBeta || settings.general.showTerminal())
|
||||
const status = createMemo(() => !isDesktopBeta || settings.general.showStatus())
|
||||
|
||||
const [exists, setExists] = createStore<Partial<Record<OpenApp, boolean>>>({
|
||||
finder: true,
|
||||
@@ -262,12 +269,16 @@ export function SessionHeader() {
|
||||
.catch((err: unknown) => showRequestError(language, err))
|
||||
}
|
||||
|
||||
const centerMount = createMemo(() => document.getElementById("opencode-titlebar-center"))
|
||||
const rightMount = createMemo(() => document.getElementById("opencode-titlebar-right"))
|
||||
const [centerMount, setCenterMount] = createSignal<HTMLElement | null>(null)
|
||||
const [rightMount, setRightMount] = createSignal<HTMLElement | null>(null)
|
||||
onMount(() => {
|
||||
setCenterMount(document.getElementById("opencode-titlebar-center"))
|
||||
setRightMount(document.getElementById("opencode-titlebar-right"))
|
||||
})
|
||||
|
||||
return (
|
||||
<>
|
||||
<Show when={centerMount()}>
|
||||
<Show when={search() && centerMount()}>
|
||||
{(mount) => (
|
||||
<Portal mount={mount()}>
|
||||
<Button
|
||||
@@ -415,24 +426,28 @@ export function SessionHeader() {
|
||||
</div>
|
||||
</Show>
|
||||
<div class="flex items-center gap-1">
|
||||
<Tooltip placement="bottom" value={language.t("status.popover.trigger")}>
|
||||
<StatusPopover />
|
||||
</Tooltip>
|
||||
<TooltipKeybind
|
||||
title={language.t("command.terminal.toggle")}
|
||||
keybind={command.keybind("terminal.toggle")}
|
||||
>
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="group/terminal-toggle titlebar-icon w-8 h-6 p-0 box-border shrink-0"
|
||||
onClick={toggleTerminal}
|
||||
aria-label={language.t("command.terminal.toggle")}
|
||||
aria-expanded={view().terminal.opened()}
|
||||
aria-controls="terminal-panel"
|
||||
<Show when={status()}>
|
||||
<Tooltip placement="bottom" value={language.t("status.popover.trigger")}>
|
||||
<StatusPopover />
|
||||
</Tooltip>
|
||||
</Show>
|
||||
<Show when={term()}>
|
||||
<TooltipKeybind
|
||||
title={language.t("command.terminal.toggle")}
|
||||
keybind={command.keybind("terminal.toggle")}
|
||||
>
|
||||
<Icon size="small" name={view().terminal.opened() ? "terminal-active" : "terminal"} />
|
||||
</Button>
|
||||
</TooltipKeybind>
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="group/terminal-toggle titlebar-icon w-8 h-6 p-0 box-border shrink-0"
|
||||
onClick={toggleTerminal}
|
||||
aria-label={language.t("command.terminal.toggle")}
|
||||
aria-expanded={view().terminal.opened()}
|
||||
aria-controls="terminal-panel"
|
||||
>
|
||||
<Icon size="small" name={view().terminal.opened() ? "terminal-active" : "terminal"} />
|
||||
</Button>
|
||||
</TooltipKeybind>
|
||||
</Show>
|
||||
|
||||
<div class="hidden md:flex items-center gap-1 shrink-0">
|
||||
<TooltipKeybind
|
||||
@@ -451,30 +466,32 @@ export function SessionHeader() {
|
||||
</Button>
|
||||
</TooltipKeybind>
|
||||
|
||||
<TooltipKeybind
|
||||
title={language.t("command.fileTree.toggle")}
|
||||
keybind={command.keybind("fileTree.toggle")}
|
||||
>
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="titlebar-icon w-8 h-6 p-0 box-border"
|
||||
onClick={() => layout.fileTree.toggle()}
|
||||
aria-label={language.t("command.fileTree.toggle")}
|
||||
aria-expanded={layout.fileTree.opened()}
|
||||
aria-controls="file-tree-panel"
|
||||
<Show when={tree()}>
|
||||
<TooltipKeybind
|
||||
title={language.t("command.fileTree.toggle")}
|
||||
keybind={command.keybind("fileTree.toggle")}
|
||||
>
|
||||
<div class="relative flex items-center justify-center size-4">
|
||||
<Icon
|
||||
size="small"
|
||||
name={layout.fileTree.opened() ? "file-tree-active" : "file-tree"}
|
||||
classList={{
|
||||
"text-icon-strong": layout.fileTree.opened(),
|
||||
"text-icon-weak": !layout.fileTree.opened(),
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</Button>
|
||||
</TooltipKeybind>
|
||||
<Button
|
||||
variant="ghost"
|
||||
class="titlebar-icon w-8 h-6 p-0 box-border"
|
||||
onClick={() => layout.fileTree.toggle()}
|
||||
aria-label={language.t("command.fileTree.toggle")}
|
||||
aria-expanded={layout.fileTree.opened()}
|
||||
aria-controls="file-tree-panel"
|
||||
>
|
||||
<div class="relative flex items-center justify-center size-4">
|
||||
<Icon
|
||||
size="small"
|
||||
name={layout.fileTree.opened() ? "file-tree-active" : "file-tree"}
|
||||
classList={{
|
||||
"text-icon-strong": layout.fileTree.opened(),
|
||||
"text-icon-weak": !layout.fileTree.opened(),
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</Button>
|
||||
</TooltipKeybind>
|
||||
</Show>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -106,6 +106,7 @@ export const SettingsGeneral: Component = () => {
|
||||
|
||||
permission.disableAutoAccept(params.id, value)
|
||||
}
|
||||
const desktop = createMemo(() => platform.platform === "desktop")
|
||||
|
||||
const check = () => {
|
||||
if (!platform.checkUpdate) return
|
||||
@@ -279,6 +280,74 @@ export const SettingsGeneral: Component = () => {
|
||||
</div>
|
||||
)
|
||||
|
||||
const AdvancedSection = () => (
|
||||
<div class="flex flex-col gap-1">
|
||||
<h3 class="text-14-medium text-text-strong pb-2">{language.t("settings.general.section.advanced")}</h3>
|
||||
|
||||
<SettingsList>
|
||||
<SettingsRow
|
||||
title={language.t("settings.general.row.showFileTree.title")}
|
||||
description={language.t("settings.general.row.showFileTree.description")}
|
||||
>
|
||||
<div data-action="settings-show-file-tree">
|
||||
<Switch
|
||||
checked={settings.general.showFileTree()}
|
||||
onChange={(checked) => settings.general.setShowFileTree(checked)}
|
||||
/>
|
||||
</div>
|
||||
</SettingsRow>
|
||||
|
||||
<SettingsRow
|
||||
title={language.t("settings.general.row.showNavigation.title")}
|
||||
description={language.t("settings.general.row.showNavigation.description")}
|
||||
>
|
||||
<div data-action="settings-show-navigation">
|
||||
<Switch
|
||||
checked={settings.general.showNavigation()}
|
||||
onChange={(checked) => settings.general.setShowNavigation(checked)}
|
||||
/>
|
||||
</div>
|
||||
</SettingsRow>
|
||||
|
||||
<SettingsRow
|
||||
title={language.t("settings.general.row.showSearch.title")}
|
||||
description={language.t("settings.general.row.showSearch.description")}
|
||||
>
|
||||
<div data-action="settings-show-search">
|
||||
<Switch
|
||||
checked={settings.general.showSearch()}
|
||||
onChange={(checked) => settings.general.setShowSearch(checked)}
|
||||
/>
|
||||
</div>
|
||||
</SettingsRow>
|
||||
|
||||
<SettingsRow
|
||||
title={language.t("settings.general.row.showTerminal.title")}
|
||||
description={language.t("settings.general.row.showTerminal.description")}
|
||||
>
|
||||
<div data-action="settings-show-terminal">
|
||||
<Switch
|
||||
checked={settings.general.showTerminal()}
|
||||
onChange={(checked) => settings.general.setShowTerminal(checked)}
|
||||
/>
|
||||
</div>
|
||||
</SettingsRow>
|
||||
|
||||
<SettingsRow
|
||||
title={language.t("settings.general.row.showStatus.title")}
|
||||
description={language.t("settings.general.row.showStatus.description")}
|
||||
>
|
||||
<div data-action="settings-show-status">
|
||||
<Switch
|
||||
checked={settings.general.showStatus()}
|
||||
onChange={(checked) => settings.general.setShowStatus(checked)}
|
||||
/>
|
||||
</div>
|
||||
</SettingsRow>
|
||||
</SettingsList>
|
||||
</div>
|
||||
)
|
||||
|
||||
const AppearanceSection = () => (
|
||||
<div class="flex flex-col gap-1">
|
||||
<h3 class="text-14-medium text-text-strong pb-2">{language.t("settings.general.section.appearance")}</h3>
|
||||
@@ -527,6 +596,7 @@ export const SettingsGeneral: Component = () => {
|
||||
</div>
|
||||
)
|
||||
|
||||
console.log(import.meta.env)
|
||||
return (
|
||||
<div class="flex flex-col h-full overflow-y-auto no-scrollbar px-4 pb-10 sm:px-10 sm:pb-10">
|
||||
<div class="sticky top-0 z-10 bg-[linear-gradient(to_bottom,var(--surface-stronger-non-alpha)_calc(100%_-_24px),transparent)]">
|
||||
@@ -609,6 +679,10 @@ export const SettingsGeneral: Component = () => {
|
||||
)
|
||||
}}
|
||||
</Show>
|
||||
|
||||
<Show when={desktop() && import.meta.env.VITE_OPENCODE_CHANNEL === "beta"}>
|
||||
<AdvancedSection />
|
||||
</Show>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -11,6 +11,7 @@ import { useLayout } from "@/context/layout"
|
||||
import { usePlatform } from "@/context/platform"
|
||||
import { useCommand } from "@/context/command"
|
||||
import { useLanguage } from "@/context/language"
|
||||
import { useSettings } from "@/context/settings"
|
||||
import { applyPath, backPath, forwardPath } from "./titlebar-history"
|
||||
|
||||
type TauriDesktopWindow = {
|
||||
@@ -40,6 +41,7 @@ export function Titlebar() {
|
||||
const platform = usePlatform()
|
||||
const command = useCommand()
|
||||
const language = useLanguage()
|
||||
const settings = useSettings()
|
||||
const theme = useTheme()
|
||||
const navigate = useNavigate()
|
||||
const location = useLocation()
|
||||
@@ -78,6 +80,7 @@ export function Titlebar() {
|
||||
const canBack = createMemo(() => history.index > 0)
|
||||
const canForward = createMemo(() => history.index < history.stack.length - 1)
|
||||
const hasProjects = createMemo(() => layout.projects.list().length > 0)
|
||||
const nav = createMemo(() => import.meta.env.VITE_OPENCODE_CHANNEL !== "beta" || settings.general.showNavigation())
|
||||
|
||||
const back = () => {
|
||||
const next = backPath(history)
|
||||
@@ -255,13 +258,12 @@ export function Titlebar() {
|
||||
<div
|
||||
class="flex items-center shrink-0"
|
||||
classList={{
|
||||
"translate-x-0": !layout.sidebar.opened(),
|
||||
"-translate-x-[36px]": layout.sidebar.opened(),
|
||||
"-translate-x-[36px]": layout.sidebar.opened() && !!params.dir,
|
||||
"duration-180 ease-out": !layout.sidebar.opened(),
|
||||
"duration-180 ease-in": layout.sidebar.opened(),
|
||||
}}
|
||||
>
|
||||
<Show when={hasProjects()}>
|
||||
<Show when={hasProjects() && nav()}>
|
||||
<div class="flex items-center gap-0 transition-transform">
|
||||
<Tooltip placement="bottom" value={language.t("common.goBack")} openDelay={2000}>
|
||||
<Button
|
||||
|
||||
@@ -23,6 +23,11 @@ export interface Settings {
|
||||
autoSave: boolean
|
||||
releaseNotes: boolean
|
||||
followup: "queue" | "steer"
|
||||
showFileTree: boolean
|
||||
showNavigation: boolean
|
||||
showSearch: boolean
|
||||
showStatus: boolean
|
||||
showTerminal: boolean
|
||||
showReasoningSummaries: boolean
|
||||
shellToolPartsExpanded: boolean
|
||||
editToolPartsExpanded: boolean
|
||||
@@ -89,6 +94,11 @@ const defaultSettings: Settings = {
|
||||
autoSave: true,
|
||||
releaseNotes: true,
|
||||
followup: "steer",
|
||||
showFileTree: false,
|
||||
showNavigation: false,
|
||||
showSearch: false,
|
||||
showStatus: false,
|
||||
showTerminal: false,
|
||||
showReasoningSummaries: false,
|
||||
shellToolPartsExpanded: false,
|
||||
editToolPartsExpanded: false,
|
||||
@@ -162,6 +172,26 @@ export const { use: useSettings, provider: SettingsProvider } = createSimpleCont
|
||||
setFollowup(value: "queue" | "steer") {
|
||||
setStore("general", "followup", value === "queue" ? "steer" : value)
|
||||
},
|
||||
showFileTree: withFallback(() => store.general?.showFileTree, defaultSettings.general.showFileTree),
|
||||
setShowFileTree(value: boolean) {
|
||||
setStore("general", "showFileTree", value)
|
||||
},
|
||||
showNavigation: withFallback(() => store.general?.showNavigation, defaultSettings.general.showNavigation),
|
||||
setShowNavigation(value: boolean) {
|
||||
setStore("general", "showNavigation", value)
|
||||
},
|
||||
showSearch: withFallback(() => store.general?.showSearch, defaultSettings.general.showSearch),
|
||||
setShowSearch(value: boolean) {
|
||||
setStore("general", "showSearch", value)
|
||||
},
|
||||
showStatus: withFallback(() => store.general?.showStatus, defaultSettings.general.showStatus),
|
||||
setShowStatus(value: boolean) {
|
||||
setStore("general", "showStatus", value)
|
||||
},
|
||||
showTerminal: withFallback(() => store.general?.showTerminal, defaultSettings.general.showTerminal),
|
||||
setShowTerminal(value: boolean) {
|
||||
setStore("general", "showTerminal", value)
|
||||
},
|
||||
showReasoningSummaries: withFallback(
|
||||
() => store.general?.showReasoningSummaries,
|
||||
defaultSettings.general.showReasoningSummaries,
|
||||
|
||||
6
packages/app/src/env.d.ts
vendored
6
packages/app/src/env.d.ts
vendored
@@ -1,16 +1,14 @@
|
||||
import "solid-js"
|
||||
|
||||
interface ImportMetaEnv {
|
||||
readonly VITE_OPENCODE_SERVER_HOST: string
|
||||
readonly VITE_OPENCODE_SERVER_PORT: string
|
||||
readonly OPENCODE_CHANNEL?: "dev" | "beta" | "prod"
|
||||
readonly VITE_OPENCODE_CHANNEL?: "dev" | "beta" | "prod"
|
||||
}
|
||||
|
||||
interface ImportMeta {
|
||||
readonly env: ImportMetaEnv
|
||||
}
|
||||
|
||||
declare module "solid-js" {
|
||||
export declare module "solid-js" {
|
||||
namespace JSX {
|
||||
interface Directives {
|
||||
sortable: true
|
||||
|
||||
@@ -719,6 +719,7 @@ export const dict = {
|
||||
"settings.desktop.wsl.description": "Run the OpenCode server inside WSL on Windows.",
|
||||
|
||||
"settings.general.section.appearance": "Appearance",
|
||||
"settings.general.section.advanced": "Advanced",
|
||||
"settings.general.section.notifications": "System notifications",
|
||||
"settings.general.section.updates": "Updates",
|
||||
"settings.general.section.sounds": "Sound effects",
|
||||
@@ -741,6 +742,16 @@ export const dict = {
|
||||
"settings.general.row.followup.description": "Choose whether follow-up prompts steer immediately or wait in a queue",
|
||||
"settings.general.row.followup.option.queue": "Queue",
|
||||
"settings.general.row.followup.option.steer": "Steer",
|
||||
"settings.general.row.showFileTree.title": "File tree",
|
||||
"settings.general.row.showFileTree.description": "Show the file tree toggle and panel in desktop sessions",
|
||||
"settings.general.row.showNavigation.title": "Navigation controls",
|
||||
"settings.general.row.showNavigation.description": "Show the back and forward buttons in the desktop title bar",
|
||||
"settings.general.row.showSearch.title": "Command palette",
|
||||
"settings.general.row.showSearch.description": "Show the search and command palette button in the desktop title bar",
|
||||
"settings.general.row.showTerminal.title": "Terminal",
|
||||
"settings.general.row.showTerminal.description": "Show the terminal button in the desktop title bar",
|
||||
"settings.general.row.showStatus.title": "Server status",
|
||||
"settings.general.row.showStatus.description": "Show the server status button in the desktop title bar",
|
||||
"settings.general.row.reasoningSummaries.title": "Show reasoning summaries",
|
||||
"settings.general.row.reasoningSummaries.description": "Display model reasoning summaries in the timeline",
|
||||
"settings.general.row.shellToolPartsExpanded.title": "Expand shell tool parts",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { Project, UserMessage, VcsFileDiff } from "@opencode-ai/sdk/v2"
|
||||
import type { Project, UserMessage } from "@opencode-ai/sdk/v2"
|
||||
import { useDialog } from "@opencode-ai/ui/context/dialog"
|
||||
import { useMutation } from "@tanstack/solid-query"
|
||||
import { createQuery, skipToken, useMutation, useQueryClient } from "@tanstack/solid-query"
|
||||
import {
|
||||
batch,
|
||||
onCleanup,
|
||||
@@ -324,6 +324,7 @@ export default function Page() {
|
||||
const local = useLocal()
|
||||
const file = useFile()
|
||||
const sync = useSync()
|
||||
const queryClient = useQueryClient()
|
||||
const dialog = useDialog()
|
||||
const language = useLanguage()
|
||||
const sdk = useSDK()
|
||||
@@ -518,26 +519,6 @@ export default function Page() {
|
||||
deferRender: false,
|
||||
})
|
||||
|
||||
const [vcs, setVcs] = createStore<{
|
||||
diff: {
|
||||
git: VcsFileDiff[]
|
||||
branch: VcsFileDiff[]
|
||||
}
|
||||
ready: {
|
||||
git: boolean
|
||||
branch: boolean
|
||||
}
|
||||
}>({
|
||||
diff: {
|
||||
git: [] as VcsFileDiff[],
|
||||
branch: [] as VcsFileDiff[],
|
||||
},
|
||||
ready: {
|
||||
git: false,
|
||||
branch: false,
|
||||
},
|
||||
})
|
||||
|
||||
const [followup, setFollowup] = persisted(
|
||||
Persist.workspace(sdk.directory, "followup", ["followup.v1"]),
|
||||
createStore<{
|
||||
@@ -571,68 +552,6 @@ export default function Page() {
|
||||
let todoTimer: number | undefined
|
||||
let diffFrame: number | undefined
|
||||
let diffTimer: number | undefined
|
||||
const vcsTask = new Map<VcsMode, Promise<void>>()
|
||||
const vcsRun = new Map<VcsMode, number>()
|
||||
|
||||
const bumpVcs = (mode: VcsMode) => {
|
||||
const next = (vcsRun.get(mode) ?? 0) + 1
|
||||
vcsRun.set(mode, next)
|
||||
return next
|
||||
}
|
||||
|
||||
const resetVcs = (mode?: VcsMode) => {
|
||||
const list = mode ? [mode] : (["git", "branch"] as const)
|
||||
list.forEach((item) => {
|
||||
bumpVcs(item)
|
||||
vcsTask.delete(item)
|
||||
setVcs("diff", item, [])
|
||||
setVcs("ready", item, false)
|
||||
})
|
||||
}
|
||||
|
||||
const loadVcs = (mode: VcsMode, force = false) => {
|
||||
if (sync.project?.vcs !== "git") return Promise.resolve()
|
||||
if (!force && vcs.ready[mode]) return Promise.resolve()
|
||||
|
||||
if (force) {
|
||||
if (vcsTask.has(mode)) bumpVcs(mode)
|
||||
vcsTask.delete(mode)
|
||||
setVcs("ready", mode, false)
|
||||
}
|
||||
|
||||
const current = vcsTask.get(mode)
|
||||
if (current) return current
|
||||
|
||||
const run = bumpVcs(mode)
|
||||
|
||||
const task = sdk.client.vcs
|
||||
.diff({ mode })
|
||||
.then((result) => {
|
||||
if (vcsRun.get(mode) !== run) return
|
||||
setVcs("diff", mode, list(result.data))
|
||||
setVcs("ready", mode, true)
|
||||
})
|
||||
.catch((error) => {
|
||||
if (vcsRun.get(mode) !== run) return
|
||||
console.debug("[session-review] failed to load vcs diff", { mode, error })
|
||||
setVcs("diff", mode, [])
|
||||
setVcs("ready", mode, true)
|
||||
})
|
||||
.finally(() => {
|
||||
if (vcsTask.get(mode) === task) vcsTask.delete(mode)
|
||||
})
|
||||
|
||||
vcsTask.set(mode, task)
|
||||
return task
|
||||
}
|
||||
|
||||
const refreshVcs = () => {
|
||||
resetVcs()
|
||||
const mode = untrack(vcsMode)
|
||||
if (!mode) return
|
||||
if (!untrack(wantsReview)) return
|
||||
void loadVcs(mode, true)
|
||||
}
|
||||
|
||||
createComputed((prev) => {
|
||||
const open = desktopReviewOpen()
|
||||
@@ -663,21 +582,52 @@ export default function Page() {
|
||||
list.push("turn")
|
||||
return list
|
||||
})
|
||||
const mobileChanges = createMemo(() => !isDesktop() && store.mobileTab === "changes")
|
||||
const wantsReview = createMemo(() =>
|
||||
isDesktop()
|
||||
? desktopFileTreeOpen() || (desktopReviewOpen() && activeTab() === "review")
|
||||
: store.mobileTab === "changes",
|
||||
)
|
||||
const vcsMode = createMemo<VcsMode | undefined>(() => {
|
||||
if (store.changes === "git" || store.changes === "branch") return store.changes
|
||||
})
|
||||
const reviewDiffs = createMemo(() => {
|
||||
if (store.changes === "git") return list(vcs.diff.git)
|
||||
if (store.changes === "branch") return list(vcs.diff.branch)
|
||||
const vcsKey = createMemo(
|
||||
() => ["session-vcs", sdk.directory, sync.data.vcs?.branch ?? "", sync.data.vcs?.default_branch ?? ""] as const,
|
||||
)
|
||||
const vcsQuery = createQuery(() => {
|
||||
const mode = vcsMode()
|
||||
const enabled = wantsReview() && sync.project?.vcs === "git"
|
||||
|
||||
return {
|
||||
queryKey: [...vcsKey(), mode] as const,
|
||||
enabled,
|
||||
staleTime: Number.POSITIVE_INFINITY,
|
||||
gcTime: 60 * 1000,
|
||||
queryFn: mode
|
||||
? () =>
|
||||
sdk.client.vcs
|
||||
.diff({ mode })
|
||||
.then((result) => list(result.data))
|
||||
.catch((error) => {
|
||||
console.debug("[session-review] failed to load vcs diff", { mode, error })
|
||||
return []
|
||||
})
|
||||
: skipToken,
|
||||
}
|
||||
})
|
||||
const refreshVcs = () => void queryClient.invalidateQueries({ queryKey: vcsKey() })
|
||||
const reviewDiffs = () => {
|
||||
if (store.changes === "git" || store.changes === "branch")
|
||||
// avoids suspense
|
||||
return vcsQuery.isFetched ? (vcsQuery.data ?? []) : []
|
||||
return turnDiffs()
|
||||
})
|
||||
const reviewCount = createMemo(() => reviewDiffs().length)
|
||||
const hasReview = createMemo(() => reviewCount() > 0)
|
||||
const reviewReady = createMemo(() => {
|
||||
if (store.changes === "git") return vcs.ready.git
|
||||
if (store.changes === "branch") return vcs.ready.branch
|
||||
}
|
||||
const reviewCount = () => reviewDiffs().length
|
||||
const hasReview = () => reviewCount() > 0
|
||||
const reviewReady = () => {
|
||||
if (store.changes === "git" || store.changes === "branch") return !vcsQuery.isPending
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
const newSessionWorktree = createMemo(() => {
|
||||
if (store.newSessionWorktree === "create") return "create"
|
||||
@@ -897,27 +847,6 @@ export default function Page() {
|
||||
),
|
||||
)
|
||||
|
||||
createEffect(
|
||||
on(
|
||||
() => sdk.directory,
|
||||
() => {
|
||||
resetVcs()
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
|
||||
createEffect(
|
||||
on(
|
||||
() => [sync.data.vcs?.branch, sync.data.vcs?.default_branch] as const,
|
||||
(next, prev) => {
|
||||
if (prev === undefined || same(next, prev)) return
|
||||
refreshVcs()
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
)
|
||||
|
||||
const stopVcs = sdk.event.listen((evt) => {
|
||||
if (evt.details.type !== "file.watcher.updated") return
|
||||
const props =
|
||||
@@ -1051,13 +980,6 @@ export default function Page() {
|
||||
}
|
||||
}
|
||||
|
||||
const mobileChanges = createMemo(() => !isDesktop() && store.mobileTab === "changes")
|
||||
const wantsReview = createMemo(() =>
|
||||
isDesktop()
|
||||
? desktopFileTreeOpen() || (desktopReviewOpen() && activeTab() === "review")
|
||||
: store.mobileTab === "changes",
|
||||
)
|
||||
|
||||
createEffect(() => {
|
||||
const list = changesOptions()
|
||||
if (list.includes(store.changes)) return
|
||||
@@ -1066,22 +988,12 @@ export default function Page() {
|
||||
setStore("changes", next)
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
const mode = vcsMode()
|
||||
if (!mode) return
|
||||
if (!wantsReview()) return
|
||||
void loadVcs(mode)
|
||||
})
|
||||
|
||||
createEffect(
|
||||
on(
|
||||
() => sync.data.session_status[params.id ?? ""]?.type,
|
||||
(next, prev) => {
|
||||
const mode = vcsMode()
|
||||
if (!mode) return
|
||||
if (!wantsReview()) return
|
||||
if (next !== "idle" || prev === undefined || prev === "idle") return
|
||||
void loadVcs(mode, true)
|
||||
refreshVcs()
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
|
||||
@@ -19,6 +19,9 @@ import { useCommand } from "@/context/command"
|
||||
import { useFile, type SelectedLineRange } from "@/context/file"
|
||||
import { useLanguage } from "@/context/language"
|
||||
import { useLayout } from "@/context/layout"
|
||||
import { usePlatform } from "@/context/platform"
|
||||
import { useSettings } from "@/context/settings"
|
||||
import { useSync } from "@/context/sync"
|
||||
import { createFileTabListSync } from "@/pages/session/file-tab-scroll"
|
||||
import { FileTabContent } from "@/pages/session/file-tabs"
|
||||
import { createOpenSessionFileTab, createSessionTabs, getTabReorderIndex, type Sizing } from "@/pages/session/helpers"
|
||||
@@ -39,6 +42,9 @@ export function SessionSidePanel(props: {
|
||||
size: Sizing
|
||||
}) {
|
||||
const layout = useLayout()
|
||||
const platform = usePlatform()
|
||||
const settings = useSettings()
|
||||
const sync = useSync()
|
||||
const file = useFile()
|
||||
const language = useLanguage()
|
||||
const command = useCommand()
|
||||
@@ -46,9 +52,10 @@ export function SessionSidePanel(props: {
|
||||
const { sessionKey, tabs, view } = useSessionLayout()
|
||||
|
||||
const isDesktop = createMediaQuery("(min-width: 768px)")
|
||||
const shown = createMemo(() => platform.platform !== "desktop" || settings.general.showFileTree())
|
||||
|
||||
const reviewOpen = createMemo(() => isDesktop() && view().reviewPanel.opened())
|
||||
const fileOpen = createMemo(() => isDesktop() && layout.fileTree.opened())
|
||||
const fileOpen = createMemo(() => isDesktop() && shown() && layout.fileTree.opened())
|
||||
const open = createMemo(() => reviewOpen() || fileOpen())
|
||||
const reviewTab = createMemo(() => isDesktop())
|
||||
const panelWidth = createMemo(() => {
|
||||
@@ -341,98 +348,99 @@ export function SessionSidePanel(props: {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
id="file-tree-panel"
|
||||
aria-hidden={!fileOpen()}
|
||||
inert={!fileOpen()}
|
||||
class="relative min-w-0 h-full shrink-0 overflow-hidden"
|
||||
classList={{
|
||||
"pointer-events-none": !fileOpen(),
|
||||
"transition-[width] duration-200 ease-[cubic-bezier(0.22,1,0.36,1)] will-change-[width] motion-reduce:transition-none":
|
||||
!props.size.active(),
|
||||
}}
|
||||
style={{ width: treeWidth() }}
|
||||
>
|
||||
<Show when={shown()}>
|
||||
<div
|
||||
class="h-full flex flex-col overflow-hidden group/filetree"
|
||||
classList={{ "border-l border-border-weaker-base": reviewOpen() }}
|
||||
id="file-tree-panel"
|
||||
aria-hidden={!fileOpen()}
|
||||
inert={!fileOpen()}
|
||||
class="relative min-w-0 h-full shrink-0 overflow-hidden"
|
||||
classList={{
|
||||
"pointer-events-none": !fileOpen(),
|
||||
"transition-[width] duration-200 ease-[cubic-bezier(0.22,1,0.36,1)] will-change-[width] motion-reduce:transition-none":
|
||||
!props.size.active(),
|
||||
}}
|
||||
style={{ width: treeWidth() }}
|
||||
>
|
||||
<Tabs
|
||||
variant="pill"
|
||||
value={fileTreeTab()}
|
||||
onChange={setFileTreeTabValue}
|
||||
class="h-full"
|
||||
data-scope="filetree"
|
||||
<div
|
||||
class="h-full flex flex-col overflow-hidden group/filetree"
|
||||
classList={{ "border-l border-border-weaker-base": reviewOpen() }}
|
||||
>
|
||||
<Tabs.List>
|
||||
<Tabs.Trigger value="changes" class="flex-1" classes={{ button: "w-full" }}>
|
||||
{props.reviewCount()}{" "}
|
||||
{language.t(
|
||||
props.reviewCount() === 1 ? "session.review.change.one" : "session.review.change.other",
|
||||
)}
|
||||
</Tabs.Trigger>
|
||||
<Tabs.Trigger value="all" class="flex-1" classes={{ button: "w-full" }}>
|
||||
{language.t("session.files.all")}
|
||||
</Tabs.Trigger>
|
||||
</Tabs.List>
|
||||
<Tabs.Content value="changes" class="bg-background-stronger px-3 py-0">
|
||||
<Switch>
|
||||
<Match when={props.hasReview() || !props.diffsReady()}>
|
||||
<Show
|
||||
when={props.diffsReady()}
|
||||
fallback={
|
||||
<div class="px-2 py-2 text-12-regular text-text-weak">
|
||||
{language.t("common.loading")}
|
||||
{language.t("common.loading.ellipsis")}
|
||||
</div>
|
||||
}
|
||||
>
|
||||
<Tabs
|
||||
variant="pill"
|
||||
value={fileTreeTab()}
|
||||
onChange={setFileTreeTabValue}
|
||||
class="h-full"
|
||||
data-scope="filetree"
|
||||
>
|
||||
<Tabs.List>
|
||||
<Tabs.Trigger value="changes" class="flex-1" classes={{ button: "w-full" }}>
|
||||
{props.reviewCount()}{" "}
|
||||
{language.t(
|
||||
props.reviewCount() === 1 ? "session.review.change.one" : "session.review.change.other",
|
||||
)}
|
||||
</Tabs.Trigger>
|
||||
<Tabs.Trigger value="all" class="flex-1" classes={{ button: "w-full" }}>
|
||||
{language.t("session.files.all")}
|
||||
</Tabs.Trigger>
|
||||
</Tabs.List>
|
||||
<Tabs.Content value="changes" class="bg-background-stronger px-3 py-0">
|
||||
<Switch>
|
||||
<Match when={props.hasReview() || !props.diffsReady()}>
|
||||
<Show
|
||||
when={props.diffsReady()}
|
||||
fallback={
|
||||
<div class="px-2 py-2 text-12-regular text-text-weak">
|
||||
{language.t("common.loading")}
|
||||
{language.t("common.loading.ellipsis")}
|
||||
</div>
|
||||
}
|
||||
>
|
||||
<FileTree
|
||||
path=""
|
||||
class="pt-3"
|
||||
allowed={diffFiles()}
|
||||
kinds={kinds()}
|
||||
draggable={false}
|
||||
active={props.activeDiff}
|
||||
onFileClick={(node) => props.focusReviewDiff(node.path)}
|
||||
/>
|
||||
</Show>
|
||||
</Match>
|
||||
</Switch>
|
||||
</Tabs.Content>
|
||||
<Tabs.Content value="all" class="bg-background-stronger px-3 py-0">
|
||||
<Switch>
|
||||
<Match when={nofiles()}>{empty(language.t("session.files.empty"))}</Match>
|
||||
<Match when={true}>
|
||||
<FileTree
|
||||
path=""
|
||||
class="pt-3"
|
||||
allowed={diffFiles()}
|
||||
modified={diffFiles()}
|
||||
kinds={kinds()}
|
||||
draggable={false}
|
||||
active={props.activeDiff}
|
||||
onFileClick={(node) => props.focusReviewDiff(node.path)}
|
||||
onFileClick={(node) => openTab(file.tab(node.path))}
|
||||
/>
|
||||
</Show>
|
||||
</Match>
|
||||
<Match when={true}>{empty(props.empty())}</Match>
|
||||
</Switch>
|
||||
</Tabs.Content>
|
||||
<Tabs.Content value="all" class="bg-background-stronger px-3 py-0">
|
||||
<Switch>
|
||||
<Match when={nofiles()}>{empty(language.t("session.files.empty"))}</Match>
|
||||
<Match when={true}>
|
||||
<FileTree
|
||||
path=""
|
||||
class="pt-3"
|
||||
modified={diffFiles()}
|
||||
kinds={kinds()}
|
||||
onFileClick={(node) => openTab(file.tab(node.path))}
|
||||
/>
|
||||
</Match>
|
||||
</Switch>
|
||||
</Tabs.Content>
|
||||
</Tabs>
|
||||
</div>
|
||||
<Show when={fileOpen()}>
|
||||
<div onPointerDown={() => props.size.start()}>
|
||||
<ResizeHandle
|
||||
direction="horizontal"
|
||||
edge="start"
|
||||
size={layout.fileTree.width()}
|
||||
min={200}
|
||||
max={480}
|
||||
onResize={(width) => {
|
||||
props.size.touch()
|
||||
layout.fileTree.resize(width)
|
||||
}}
|
||||
/>
|
||||
</Match>
|
||||
</Switch>
|
||||
</Tabs.Content>
|
||||
</Tabs>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
<Show when={fileOpen()}>
|
||||
<div onPointerDown={() => props.size.start()}>
|
||||
<ResizeHandle
|
||||
direction="horizontal"
|
||||
edge="start"
|
||||
size={layout.fileTree.width()}
|
||||
min={200}
|
||||
max={480}
|
||||
onResize={(width) => {
|
||||
props.size.touch()
|
||||
layout.fileTree.resize(width)
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
</aside>
|
||||
</Show>
|
||||
|
||||
@@ -7,8 +7,10 @@ import { useLanguage } from "@/context/language"
|
||||
import { useLayout } from "@/context/layout"
|
||||
import { useLocal } from "@/context/local"
|
||||
import { usePermission } from "@/context/permission"
|
||||
import { usePlatform } from "@/context/platform"
|
||||
import { usePrompt } from "@/context/prompt"
|
||||
import { useSDK } from "@/context/sdk"
|
||||
import { useSettings } from "@/context/settings"
|
||||
import { useSync } from "@/context/sync"
|
||||
import { useTerminal } from "@/context/terminal"
|
||||
import { showToast } from "@opencode-ai/ui/toast"
|
||||
@@ -39,8 +41,10 @@ export const useSessionCommands = (actions: SessionCommandContext) => {
|
||||
const language = useLanguage()
|
||||
const local = useLocal()
|
||||
const permission = usePermission()
|
||||
const platform = usePlatform()
|
||||
const prompt = usePrompt()
|
||||
const sdk = useSDK()
|
||||
const settings = useSettings()
|
||||
const sync = useSync()
|
||||
const terminal = useTerminal()
|
||||
const layout = useLayout()
|
||||
@@ -66,6 +70,7 @@ export const useSessionCommands = (actions: SessionCommandContext) => {
|
||||
})
|
||||
const activeFileTab = tabState.activeFileTab
|
||||
const closableTab = tabState.closableTab
|
||||
const shown = () => platform.platform !== "desktop" || settings.general.showFileTree()
|
||||
|
||||
const idle = { type: "idle" as const }
|
||||
const status = () => sync.data.session_status[params.id ?? ""] ?? idle
|
||||
@@ -457,12 +462,16 @@ export const useSessionCommands = (actions: SessionCommandContext) => {
|
||||
keybind: "mod+shift+r",
|
||||
onSelect: () => view().reviewPanel.toggle(),
|
||||
}),
|
||||
viewCommand({
|
||||
id: "fileTree.toggle",
|
||||
title: language.t("command.fileTree.toggle"),
|
||||
keybind: "mod+\\",
|
||||
onSelect: () => layout.fileTree.toggle(),
|
||||
}),
|
||||
...(shown()
|
||||
? [
|
||||
viewCommand({
|
||||
id: "fileTree.toggle",
|
||||
title: language.t("command.fileTree.toggle"),
|
||||
keybind: "mod+\\",
|
||||
onSelect: () => layout.fileTree.toggle(),
|
||||
}),
|
||||
]
|
||||
: []),
|
||||
viewCommand({
|
||||
id: "input.focus",
|
||||
title: language.t("command.input.focus"),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@opencode-ai/desktop-electron",
|
||||
"private": true,
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"homepage": "https://opencode.ai",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@opencode-ai/desktop",
|
||||
"private": true,
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
id = "opencode"
|
||||
name = "OpenCode"
|
||||
description = "The open source coding agent."
|
||||
version = "1.4.6"
|
||||
version = "1.4.7"
|
||||
schema_version = 1
|
||||
authors = ["Anomaly"]
|
||||
repository = "https://github.com/anomalyco/opencode"
|
||||
@@ -11,26 +11,26 @@ name = "OpenCode"
|
||||
icon = "./icons/opencode.svg"
|
||||
|
||||
[agent_servers.opencode.targets.darwin-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-darwin-arm64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-arm64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.darwin-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-darwin-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-darwin-x64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-linux-arm64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-arm64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-linux-x64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-linux-x64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.windows-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.6/opencode-windows-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.4.7/opencode-windows-x64.zip"
|
||||
cmd = "./opencode.exe"
|
||||
args = ["acp"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -9,6 +9,63 @@
|
||||
- **Output**: creates `migration/<timestamp>_<slug>/migration.sql` and `snapshot.json`.
|
||||
- **Tests**: migration tests should read the per-folder layout (no `_journal.json`).
|
||||
|
||||
# Module shape
|
||||
|
||||
Do not use `export namespace Foo { ... }` for module organization. It is not
|
||||
standard ESM, it prevents tree-shaking, and it breaks Node's native TypeScript
|
||||
runner. Use flat top-level exports combined with a self-reexport at the bottom
|
||||
of the file:
|
||||
|
||||
```ts
|
||||
// src/foo/foo.ts
|
||||
export interface Interface { ... }
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Foo") {}
|
||||
export const layer = Layer.effect(Service, ...)
|
||||
export const defaultLayer = layer.pipe(...)
|
||||
|
||||
export * as Foo from "./foo"
|
||||
```
|
||||
|
||||
Consumers import the namespace projection:
|
||||
|
||||
```ts
|
||||
import { Foo } from "@/foo/foo"
|
||||
|
||||
yield * Foo.Service
|
||||
Foo.layer
|
||||
Foo.defaultLayer
|
||||
```
|
||||
|
||||
Namespace-private helpers stay as non-exported top-level declarations in the
|
||||
same file — they remain inaccessible to consumers (they are not projected by
|
||||
`export * as`) but are usable by the file's own code.
|
||||
|
||||
## When the file is an `index.ts`
|
||||
|
||||
If the module is `foo/index.ts` (single-namespace directory), use `"."` for
|
||||
the self-reexport source rather than `"./index"`:
|
||||
|
||||
```ts
|
||||
// src/foo/index.ts
|
||||
export const thing = ...
|
||||
|
||||
export * as Foo from "."
|
||||
```
|
||||
|
||||
## Multi-sibling directories
|
||||
|
||||
For directories with several independent modules (e.g. `src/session/`,
|
||||
`src/config/`), keep each sibling as its own file with its own self-reexport,
|
||||
and do not add a barrel `index.ts`. Consumers import the specific sibling:
|
||||
|
||||
```ts
|
||||
import { SessionRetry } from "@/session/retry"
|
||||
import { SessionStatus } from "@/session/status"
|
||||
```
|
||||
|
||||
Barrels in multi-sibling directories force every import through the barrel to
|
||||
evaluate every sibling, which defeats tree-shaking and slows module load.
|
||||
|
||||
# opencode Effect rules
|
||||
|
||||
Use these rules when writing or migrating Effect code.
|
||||
@@ -23,6 +80,10 @@ See `specs/effect/migration.md` for the compact pattern reference and examples.
|
||||
- Use `Effect.callback` for callback-based APIs.
|
||||
- Prefer `DateTime.nowAsDate` over `new Date(yield* Clock.currentTimeMillis)` when you need a `Date`.
|
||||
|
||||
## Module conventions
|
||||
|
||||
- In `src/config`, follow the existing self-export pattern at the top of the file (for example `export * as ConfigAgent from "./agent"`) when adding a new config module.
|
||||
|
||||
## Schemas and errors
|
||||
|
||||
- Use `Schema.Class` for multi-field data.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"version": "1.4.6",
|
||||
"version": "1.4.7",
|
||||
"name": "opencode",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
@@ -79,15 +79,15 @@
|
||||
"@actions/github": "6.0.1",
|
||||
"@agentclientprotocol/sdk": "0.16.1",
|
||||
"@ai-sdk/alibaba": "1.0.17",
|
||||
"@ai-sdk/amazon-bedrock": "4.0.93",
|
||||
"@ai-sdk/anthropic": "3.0.67",
|
||||
"@ai-sdk/amazon-bedrock": "4.0.94",
|
||||
"@ai-sdk/anthropic": "3.0.70",
|
||||
"@ai-sdk/azure": "3.0.49",
|
||||
"@ai-sdk/cerebras": "2.0.41",
|
||||
"@ai-sdk/cohere": "3.0.27",
|
||||
"@ai-sdk/deepinfra": "2.0.41",
|
||||
"@ai-sdk/gateway": "3.0.97",
|
||||
"@ai-sdk/gateway": "3.0.102",
|
||||
"@ai-sdk/google": "3.0.63",
|
||||
"@ai-sdk/google-vertex": "4.0.109",
|
||||
"@ai-sdk/google-vertex": "4.0.111",
|
||||
"@ai-sdk/groq": "3.0.31",
|
||||
"@ai-sdk/mistral": "3.0.27",
|
||||
"@ai-sdk/openai": "3.0.53",
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import path from "path"
|
||||
const toDynamicallyImport = path.join(process.cwd(), process.argv[2])
|
||||
await import(toDynamicallyImport)
|
||||
@@ -1,305 +0,0 @@
|
||||
#!/usr/bin/env bun
|
||||
/**
|
||||
* Unwrap a TypeScript `export namespace` into flat exports + barrel.
|
||||
*
|
||||
* Usage:
|
||||
* bun script/unwrap-namespace.ts src/bus/index.ts
|
||||
* bun script/unwrap-namespace.ts src/bus/index.ts --dry-run
|
||||
* bun script/unwrap-namespace.ts src/pty/index.ts --name service # avoid collision with pty.ts
|
||||
*
|
||||
* What it does:
|
||||
* 1. Reads the file and finds the `export namespace Foo { ... }` block
|
||||
* (uses ast-grep for accurate AST-based boundary detection)
|
||||
* 2. Removes the namespace wrapper and dedents the body
|
||||
* 3. Fixes self-references (e.g. Config.PermissionAction → PermissionAction)
|
||||
* 4. If the file is index.ts, renames it to <lowercase-name>.ts
|
||||
* 5. Creates/updates index.ts with `export * as Foo from "./<file>"`
|
||||
* 6. Rewrites import paths across src/, test/, and script/
|
||||
* 7. Fixes sibling imports within the same directory
|
||||
*
|
||||
* Requires: ast-grep (`brew install ast-grep` or `cargo install ast-grep`)
|
||||
*/
|
||||
|
||||
import path from "path"
|
||||
import fs from "fs"
|
||||
|
||||
const args = process.argv.slice(2)
|
||||
const dryRun = args.includes("--dry-run")
|
||||
const nameFlag = args.find((a, i) => args[i - 1] === "--name")
|
||||
const filePath = args.find((a) => !a.startsWith("--") && args[args.indexOf(a) - 1] !== "--name")
|
||||
|
||||
if (!filePath) {
|
||||
console.error("Usage: bun script/unwrap-namespace.ts <file> [--dry-run] [--name <impl-name>]")
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const absPath = path.resolve(filePath)
|
||||
if (!fs.existsSync(absPath)) {
|
||||
console.error(`File not found: ${absPath}`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const src = fs.readFileSync(absPath, "utf-8")
|
||||
const lines = src.split("\n")
|
||||
|
||||
// Use ast-grep to find the namespace boundaries accurately.
|
||||
// This avoids false matches from braces in strings, templates, comments, etc.
|
||||
const astResult = Bun.spawnSync(
|
||||
["ast-grep", "run", "--pattern", "export namespace $NAME { $$$BODY }", "--lang", "typescript", "--json", absPath],
|
||||
{ stdout: "pipe", stderr: "pipe" },
|
||||
)
|
||||
|
||||
if (astResult.exitCode !== 0) {
|
||||
console.error("ast-grep failed:", astResult.stderr.toString())
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const matches = JSON.parse(astResult.stdout.toString()) as Array<{
|
||||
text: string
|
||||
range: { start: { line: number; column: number }; end: { line: number; column: number } }
|
||||
metaVariables: { single: Record<string, { text: string }>; multi: Record<string, Array<{ text: string }>> }
|
||||
}>
|
||||
|
||||
if (matches.length === 0) {
|
||||
console.error("No `export namespace Foo { ... }` found in file")
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (matches.length > 1) {
|
||||
console.error(`Found ${matches.length} namespaces — this script handles one at a time`)
|
||||
console.error("Namespaces found:")
|
||||
for (const m of matches) console.error(` ${m.metaVariables.single.NAME.text} (line ${m.range.start.line + 1})`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const match = matches[0]
|
||||
const nsName = match.metaVariables.single.NAME.text
|
||||
const nsLine = match.range.start.line // 0-indexed
|
||||
const closeLine = match.range.end.line // 0-indexed, the line with closing `}`
|
||||
|
||||
console.log(`Found: export namespace ${nsName} { ... }`)
|
||||
console.log(` Lines ${nsLine + 1}–${closeLine + 1} (${closeLine - nsLine + 1} lines)`)
|
||||
|
||||
// Build the new file content:
|
||||
// 1. Everything before the namespace declaration (imports, etc.)
|
||||
// 2. The namespace body, dedented by one level (2 spaces)
|
||||
// 3. Everything after the closing brace (rare, but possible)
|
||||
const before = lines.slice(0, nsLine)
|
||||
const body = lines.slice(nsLine + 1, closeLine)
|
||||
const after = lines.slice(closeLine + 1)
|
||||
|
||||
// Dedent: remove exactly 2 leading spaces from each line
|
||||
const dedented = body.map((line) => {
|
||||
if (line === "") return ""
|
||||
if (line.startsWith(" ")) return line.slice(2)
|
||||
return line
|
||||
})
|
||||
|
||||
let newContent = [...before, ...dedented, ...after].join("\n")
|
||||
|
||||
// --- Fix self-references ---
|
||||
// After unwrapping, references like `Config.PermissionAction` inside the same file
|
||||
// need to become just `PermissionAction`. Only fix code positions, not strings.
|
||||
const exportedNames = new Set<string>()
|
||||
const exportRegex = /export\s+(?:const|function|class|interface|type|enum|abstract\s+class)\s+(\w+)/g
|
||||
for (const line of dedented) {
|
||||
for (const m of line.matchAll(exportRegex)) exportedNames.add(m[1])
|
||||
}
|
||||
const reExportRegex = /export\s*\{\s*([^}]+)\}/g
|
||||
for (const line of dedented) {
|
||||
for (const m of line.matchAll(reExportRegex)) {
|
||||
for (const name of m[1].split(",")) {
|
||||
const trimmed = name
|
||||
.trim()
|
||||
.split(/\s+as\s+/)
|
||||
.pop()!
|
||||
.trim()
|
||||
if (trimmed) exportedNames.add(trimmed)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let selfRefCount = 0
|
||||
if (exportedNames.size > 0) {
|
||||
const fixedLines = newContent.split("\n").map((line) => {
|
||||
// Split line into string-literal and code segments to avoid replacing inside strings
|
||||
const segments: Array<{ text: string; isString: boolean }> = []
|
||||
let i = 0
|
||||
let current = ""
|
||||
let inString: string | null = null
|
||||
|
||||
while (i < line.length) {
|
||||
const ch = line[i]
|
||||
if (inString) {
|
||||
current += ch
|
||||
if (ch === "\\" && i + 1 < line.length) {
|
||||
current += line[i + 1]
|
||||
i += 2
|
||||
continue
|
||||
}
|
||||
if (ch === inString) {
|
||||
segments.push({ text: current, isString: true })
|
||||
current = ""
|
||||
inString = null
|
||||
}
|
||||
i++
|
||||
continue
|
||||
}
|
||||
if (ch === '"' || ch === "'" || ch === "`") {
|
||||
if (current) segments.push({ text: current, isString: false })
|
||||
current = ch
|
||||
inString = ch
|
||||
i++
|
||||
continue
|
||||
}
|
||||
if (ch === "/" && i + 1 < line.length && line[i + 1] === "/") {
|
||||
current += line.slice(i)
|
||||
segments.push({ text: current, isString: true })
|
||||
current = ""
|
||||
i = line.length
|
||||
continue
|
||||
}
|
||||
current += ch
|
||||
i++
|
||||
}
|
||||
if (current) segments.push({ text: current, isString: !!inString })
|
||||
|
||||
return segments
|
||||
.map((seg) => {
|
||||
if (seg.isString) return seg.text
|
||||
let result = seg.text
|
||||
for (const name of exportedNames) {
|
||||
const pattern = `${nsName}.${name}`
|
||||
while (result.includes(pattern)) {
|
||||
const idx = result.indexOf(pattern)
|
||||
const charBefore = idx > 0 ? result[idx - 1] : " "
|
||||
const charAfter = idx + pattern.length < result.length ? result[idx + pattern.length] : " "
|
||||
if (/\w/.test(charBefore) || /\w/.test(charAfter)) break
|
||||
result = result.slice(0, idx) + name + result.slice(idx + pattern.length)
|
||||
selfRefCount++
|
||||
}
|
||||
}
|
||||
return result
|
||||
})
|
||||
.join("")
|
||||
})
|
||||
newContent = fixedLines.join("\n")
|
||||
}
|
||||
|
||||
// Figure out file naming
|
||||
const dir = path.dirname(absPath)
|
||||
const basename = path.basename(absPath, ".ts")
|
||||
const isIndex = basename === "index"
|
||||
const implName = nameFlag ?? (isIndex ? nsName.replace(/([a-z])([A-Z])/g, "$1-$2").toLowerCase() : basename)
|
||||
const implFile = path.join(dir, `${implName}.ts`)
|
||||
const indexFile = path.join(dir, "index.ts")
|
||||
const barrelLine = `export * as ${nsName} from "./${implName}"\n`
|
||||
|
||||
console.log("")
|
||||
if (isIndex) {
|
||||
console.log(`Plan: rename ${basename}.ts → ${implName}.ts, create new index.ts barrel`)
|
||||
} else {
|
||||
console.log(`Plan: rewrite ${basename}.ts in place, create index.ts barrel`)
|
||||
}
|
||||
if (selfRefCount > 0) console.log(`Fixed ${selfRefCount} self-reference(s) (${nsName}.X → X)`)
|
||||
console.log("")
|
||||
|
||||
if (dryRun) {
|
||||
console.log("--- DRY RUN ---")
|
||||
console.log("")
|
||||
console.log(`=== ${implName}.ts (first 30 lines) ===`)
|
||||
newContent
|
||||
.split("\n")
|
||||
.slice(0, 30)
|
||||
.forEach((l, i) => console.log(` ${i + 1}: ${l}`))
|
||||
console.log(" ...")
|
||||
console.log("")
|
||||
console.log(`=== index.ts ===`)
|
||||
console.log(` ${barrelLine.trim()}`)
|
||||
console.log("")
|
||||
if (!isIndex) {
|
||||
const relDir = path.relative(path.resolve("src"), dir)
|
||||
console.log(`=== Import rewrites (would apply) ===`)
|
||||
console.log(` ${relDir}/${basename}" → ${relDir}" across src/, test/, script/`)
|
||||
} else {
|
||||
console.log("No import rewrites needed (was index.ts)")
|
||||
}
|
||||
} else {
|
||||
if (isIndex) {
|
||||
fs.writeFileSync(implFile, newContent)
|
||||
fs.writeFileSync(indexFile, barrelLine)
|
||||
console.log(`Wrote ${implName}.ts (${newContent.split("\n").length} lines)`)
|
||||
console.log(`Wrote index.ts (barrel)`)
|
||||
} else {
|
||||
fs.writeFileSync(absPath, newContent)
|
||||
if (fs.existsSync(indexFile)) {
|
||||
const existing = fs.readFileSync(indexFile, "utf-8")
|
||||
if (!existing.includes(`export * as ${nsName}`)) {
|
||||
fs.appendFileSync(indexFile, barrelLine)
|
||||
console.log(`Appended to existing index.ts`)
|
||||
} else {
|
||||
console.log(`index.ts already has ${nsName} export`)
|
||||
}
|
||||
} else {
|
||||
fs.writeFileSync(indexFile, barrelLine)
|
||||
console.log(`Wrote index.ts (barrel)`)
|
||||
}
|
||||
console.log(`Rewrote ${basename}.ts (${newContent.split("\n").length} lines)`)
|
||||
}
|
||||
|
||||
// --- Rewrite import paths across src/, test/, script/ ---
|
||||
const relDir = path.relative(path.resolve("src"), dir)
|
||||
if (!isIndex) {
|
||||
const oldTail = `${relDir}/${basename}`
|
||||
const searchDirs = ["src", "test", "script"].filter((d) => fs.existsSync(d))
|
||||
const rgResult = Bun.spawnSync(["rg", "-l", `from.*${oldTail}"`, ...searchDirs], {
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
})
|
||||
const filesToRewrite = rgResult.stdout
|
||||
.toString()
|
||||
.trim()
|
||||
.split("\n")
|
||||
.filter((f) => f.length > 0)
|
||||
|
||||
if (filesToRewrite.length > 0) {
|
||||
console.log(`\nRewriting imports in ${filesToRewrite.length} file(s)...`)
|
||||
for (const file of filesToRewrite) {
|
||||
const content = fs.readFileSync(file, "utf-8")
|
||||
fs.writeFileSync(file, content.replaceAll(`${oldTail}"`, `${relDir}"`))
|
||||
}
|
||||
console.log(` Done: ${oldTail}" → ${relDir}"`)
|
||||
} else {
|
||||
console.log("\nNo import rewrites needed")
|
||||
}
|
||||
} else {
|
||||
console.log("\nNo import rewrites needed (was index.ts)")
|
||||
}
|
||||
|
||||
// --- Fix sibling imports within the same directory ---
|
||||
const siblingFiles = fs.readdirSync(dir).filter((f) => {
|
||||
if (!f.endsWith(".ts")) return false
|
||||
if (f === "index.ts" || f === `${implName}.ts`) return false
|
||||
return true
|
||||
})
|
||||
|
||||
let siblingFixCount = 0
|
||||
for (const sibFile of siblingFiles) {
|
||||
const sibPath = path.join(dir, sibFile)
|
||||
const content = fs.readFileSync(sibPath, "utf-8")
|
||||
const pattern = new RegExp(`from\\s+["']\\./${basename}["']`, "g")
|
||||
if (pattern.test(content)) {
|
||||
fs.writeFileSync(sibPath, content.replace(pattern, `from "."`))
|
||||
siblingFixCount++
|
||||
}
|
||||
}
|
||||
if (siblingFixCount > 0) {
|
||||
console.log(`Fixed ${siblingFixCount} sibling import(s) in ${path.basename(dir)}/ (./${basename} → .)`)
|
||||
}
|
||||
}
|
||||
|
||||
console.log("")
|
||||
console.log("=== Verify ===")
|
||||
console.log("")
|
||||
console.log("bunx --bun tsgo --noEmit # typecheck")
|
||||
console.log("bun run test # run tests")
|
||||
@@ -1,499 +0,0 @@
|
||||
# Namespace → flat export migration
|
||||
|
||||
Migrate `export namespace` to the `export * as` / flat-export pattern used by
|
||||
effect-smol. Primary goal: tree-shakeability. Secondary: consistency with Effect
|
||||
conventions, LLM-friendliness for future migrations.
|
||||
|
||||
## What changes and what doesn't
|
||||
|
||||
The **consumer API stays the same**. You still write `Provider.ModelNotFoundError`,
|
||||
`Config.JsonError`, `Bus.publish`, etc. The namespace ergonomics are preserved.
|
||||
|
||||
What changes is **how** the namespace is constructed — the TypeScript
|
||||
`export namespace` keyword is replaced by `export * as` in a barrel file. This
|
||||
is a mechanical change: unwrap the namespace body into flat exports, add a
|
||||
one-line barrel. Consumers that import `{ Provider }` don't notice.
|
||||
|
||||
Import paths actually get **nicer**. Today most consumers import from the
|
||||
explicit file (`"../provider/provider"`). After the migration, each module has a
|
||||
barrel `index.ts`, so imports become `"../provider"` or `"@/provider"`:
|
||||
|
||||
```ts
|
||||
// BEFORE — points at the file directly
|
||||
import { Provider } from "../provider/provider"
|
||||
|
||||
// AFTER — resolves to provider/index.ts, same Provider namespace
|
||||
import { Provider } from "../provider"
|
||||
```
|
||||
|
||||
## Why this matters right now
|
||||
|
||||
The CLI binary startup time (TOI) is too slow. Profiling shows we're loading
|
||||
massive dependency graphs that are never actually used at runtime — because
|
||||
bundlers cannot tree-shake TypeScript `export namespace` bodies.
|
||||
|
||||
### The problem in one sentence
|
||||
|
||||
`cli/error.ts` needs 6 lightweight `.isInstance()` checks on error classes, but
|
||||
importing `{ Provider }` from `provider.ts` forces the bundler to include **all
|
||||
20+ `@ai-sdk/*` packages**, `@aws-sdk/credential-providers`,
|
||||
`google-auth-library`, and every other top-level import in that 1709-line file.
|
||||
|
||||
### Why `export namespace` defeats tree-shaking
|
||||
|
||||
TypeScript compiles `export namespace Foo { ... }` to an IIFE:
|
||||
|
||||
```js
|
||||
// TypeScript output
|
||||
export var Provider;
|
||||
(function (Provider) {
|
||||
Provider.ModelNotFoundError = NamedError.create(...)
|
||||
// ... 1600 more lines of assignments ...
|
||||
})(Provider || (Provider = {}))
|
||||
```
|
||||
|
||||
This is **opaque to static analysis**. The bundler sees one big function call
|
||||
whose return value populates an object. It cannot determine which properties are
|
||||
used downstream, so it keeps everything. Every `import` statement at the top of
|
||||
`provider.ts` executes unconditionally — that's 20+ AI SDK packages loaded into
|
||||
memory just so the CLI can check `Provider.ModelNotFoundError.isInstance(x)`.
|
||||
|
||||
### What `export * as` does differently
|
||||
|
||||
`export * as Provider from "./provider"` compiles to a static re-export. The
|
||||
bundler knows the exact shape of `Provider` at compile time — it's the named
|
||||
export list of `./provider.ts`. When it sees `Provider.ModelNotFoundError` used
|
||||
but `Provider.layer` unused, it can trace that `ModelNotFoundError` doesn't
|
||||
reference `createAnthropic` or any AI SDK import, and drop them. The namespace
|
||||
object still exists at runtime — same API — but the bundler can see inside it.
|
||||
|
||||
### Concrete impact
|
||||
|
||||
The worst import chain in the codebase:
|
||||
|
||||
```
|
||||
src/index.ts (entry point)
|
||||
└── FormatError from src/cli/error.ts
|
||||
├── { Provider } from provider/provider.ts (1709 lines)
|
||||
│ ├── 20+ @ai-sdk/* packages
|
||||
│ ├── @aws-sdk/credential-providers
|
||||
│ ├── google-auth-library
|
||||
│ ├── gitlab-ai-provider, venice-ai-sdk-provider
|
||||
│ └── fuzzysort, remeda, etc.
|
||||
├── { Config } from config/config.ts (1663 lines)
|
||||
│ ├── jsonc-parser
|
||||
│ ├── LSPServer (all server definitions)
|
||||
│ └── Plugin, Auth, Env, Account, etc.
|
||||
└── { MCP } from mcp/index.ts (930 lines)
|
||||
├── @modelcontextprotocol/sdk (3 transports)
|
||||
└── open (browser launcher)
|
||||
```
|
||||
|
||||
All of this gets pulled in to check `.isInstance()` on 6 error classes — code
|
||||
that needs maybe 200 bytes total. This inflates the binary, increases startup
|
||||
memory, and slows down initial module evaluation.
|
||||
|
||||
### Why this also hurts memory
|
||||
|
||||
Every module-level import is eagerly evaluated. Even with Bun's fast module
|
||||
loader, evaluating 20+ AI SDK factory functions, the AWS credential chain, and
|
||||
Google's auth library allocates objects, closures, and prototype chains that
|
||||
persist for the lifetime of the process. Most CLI commands never use a provider
|
||||
at all.
|
||||
|
||||
## What effect-smol does
|
||||
|
||||
effect-smol achieves tree-shakeable namespaced APIs via four structural choices.
|
||||
|
||||
### 1. Each module is a separate file with flat named exports
|
||||
|
||||
```ts
|
||||
// Effect.ts — no namespace wrapper, just flat exports
|
||||
export const gen: { ... } = internal.gen
|
||||
export const fail: <E>(error: E) => Effect<never, E> = internal.fail
|
||||
export const succeed: <A>(value: A) => Effect<A> = internal.succeed
|
||||
// ... 230+ individual named exports
|
||||
```
|
||||
|
||||
### 2. Barrel file uses `export * as` (not `export namespace`)
|
||||
|
||||
```ts
|
||||
// index.ts
|
||||
export * as Effect from "./Effect.ts"
|
||||
export * as Schema from "./Schema.ts"
|
||||
export * as Stream from "./Stream.ts"
|
||||
// ~134 modules
|
||||
```
|
||||
|
||||
This creates a namespace-like API (`Effect.gen`, `Schema.parse`) but the
|
||||
bundler knows the **exact shape** at compile time — it's the static export list
|
||||
of that file. It can trace property accesses (`Effect.gen` → keep `gen`,
|
||||
drop `timeout` if unused). With `export namespace`, the IIFE is opaque and
|
||||
nothing can be dropped.
|
||||
|
||||
### 3. `sideEffects: []` and deep imports
|
||||
|
||||
```jsonc
|
||||
// package.json
|
||||
{ "sideEffects": [] }
|
||||
```
|
||||
|
||||
Plus `"./*": "./src/*.ts"` in the exports map, enabling
|
||||
`import * as Effect from "effect/Effect"` to bypass the barrel entirely.
|
||||
|
||||
### 4. Errors as flat exports, not class declarations
|
||||
|
||||
```ts
|
||||
// Cause.ts
|
||||
export const NoSuchElementErrorTypeId = core.NoSuchElementErrorTypeId
|
||||
export interface NoSuchElementError extends YieldableError { ... }
|
||||
export const NoSuchElementError: new(msg?: string) => NoSuchElementError = core.NoSuchElementError
|
||||
export const isNoSuchElementError: (u: unknown) => u is NoSuchElementError = core.isNoSuchElementError
|
||||
```
|
||||
|
||||
Each error is 4 independent exports: TypeId, interface, constructor (as const),
|
||||
type guard. All individually shakeable.
|
||||
|
||||
## The plan
|
||||
|
||||
The core migration is **Phase 1** — convert `export namespace` to
|
||||
`export * as`. Once that's done, the bundler can tree-shake individual exports
|
||||
within each module. You do NOT need to break things into subfiles for
|
||||
tree-shaking to work — the bundler traces which exports you actually access on
|
||||
the namespace object and drops the rest, including their transitive imports.
|
||||
|
||||
Splitting errors/schemas into separate files (Phase 0) is optional — it's a
|
||||
lower-risk warmup step that can be done before or after the main conversion, and
|
||||
it provides extra resilience against bundler edge cases. But the big win comes
|
||||
from Phase 1.
|
||||
|
||||
### Phase 0 (optional): Pre-split errors into subfiles
|
||||
|
||||
This is a low-risk warmup that provides immediate benefit even before the full
|
||||
`export * as` conversion. It's optional because Phase 1 alone is sufficient for
|
||||
tree-shaking. But it's a good starting point if you want incremental progress:
|
||||
|
||||
**For each namespace that defines errors** (15 files, ~30 error classes total):
|
||||
|
||||
1. Create a sibling `errors.ts` file (e.g. `provider/errors.ts`) with the error
|
||||
definitions as top-level named exports:
|
||||
|
||||
```ts
|
||||
// provider/errors.ts
|
||||
import z from "zod"
|
||||
import { NamedError } from "@opencode-ai/shared/util/error"
|
||||
import { ProviderID, ModelID } from "./schema"
|
||||
|
||||
export const ModelNotFoundError = NamedError.create(
|
||||
"ProviderModelNotFoundError",
|
||||
z.object({
|
||||
providerID: ProviderID.zod,
|
||||
modelID: ModelID.zod,
|
||||
suggestions: z.array(z.string()).optional(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const InitError = NamedError.create("ProviderInitError", z.object({ providerID: ProviderID.zod }))
|
||||
```
|
||||
|
||||
2. In the namespace file, re-export from the errors file to maintain backward
|
||||
compatibility:
|
||||
|
||||
```ts
|
||||
// provider/provider.ts — inside the namespace
|
||||
export { ModelNotFoundError, InitError } from "./errors"
|
||||
```
|
||||
|
||||
3. Update `cli/error.ts` (and any other light consumers) to import directly:
|
||||
|
||||
```ts
|
||||
// BEFORE
|
||||
import { Provider } from "../provider/provider"
|
||||
Provider.ModelNotFoundError.isInstance(input)
|
||||
|
||||
// AFTER
|
||||
import { ModelNotFoundError as ProviderModelNotFoundError } from "../provider/errors"
|
||||
ProviderModelNotFoundError.isInstance(input)
|
||||
```
|
||||
|
||||
**Files to split (Phase 0):**
|
||||
|
||||
| Current file | New errors file | Errors to extract |
|
||||
| ----------------------- | ------------------------------- | ----------------------------------------------------------------------------------------------------------------------- |
|
||||
| `provider/provider.ts` | `provider/errors.ts` | ModelNotFoundError, InitError |
|
||||
| `provider/auth.ts` | `provider/auth-errors.ts` | OauthMissing, OauthCodeMissing, OauthCallbackFailed, ValidationFailed |
|
||||
| `config/config.ts` | (already has `config/paths.ts`) | ConfigDirectoryTypoError → move to paths.ts |
|
||||
| `config/markdown.ts` | `config/markdown-errors.ts` | FrontmatterError |
|
||||
| `mcp/index.ts` | `mcp/errors.ts` | Failed |
|
||||
| `session/message-v2.ts` | `session/message-errors.ts` | OutputLengthError, AbortedError, StructuredOutputError, AuthError, APIError, ContextOverflowError |
|
||||
| `session/message.ts` | (shares with message-v2) | OutputLengthError, AuthError |
|
||||
| `cli/ui.ts` | `cli/ui-errors.ts` | CancelledError |
|
||||
| `skill/index.ts` | `skill/errors.ts` | InvalidError, NameMismatchError |
|
||||
| `worktree/index.ts` | `worktree/errors.ts` | NotGitError, NameGenerationFailedError, CreateFailedError, StartCommandFailedError, RemoveFailedError, ResetFailedError |
|
||||
| `storage/storage.ts` | `storage/errors.ts` | NotFoundError |
|
||||
| `npm/index.ts` | `npm/errors.ts` | InstallFailedError |
|
||||
| `ide/index.ts` | `ide/errors.ts` | AlreadyInstalledError, InstallFailedError |
|
||||
| `lsp/client.ts` | `lsp/errors.ts` | InitializeError |
|
||||
|
||||
### Phase 1: The real migration — `export namespace` → `export * as`
|
||||
|
||||
This is the phase that actually fixes tree-shaking. For each module:
|
||||
|
||||
1. **Unwrap** the `export namespace Foo { ... }` — remove the namespace wrapper,
|
||||
keep all the members as top-level `export const` / `export function` / etc.
|
||||
2. **Rename** the file if it's currently `index.ts` (e.g. `bus/index.ts` →
|
||||
`bus/bus.ts`), so the barrel can take `index.ts`.
|
||||
3. **Create the barrel** `index.ts` with one line: `export * as Foo from "./foo"`
|
||||
|
||||
The file structure change for a module that's currently a single file:
|
||||
|
||||
```
|
||||
# BEFORE
|
||||
provider/
|
||||
provider.ts ← 1709-line file with `export namespace Provider { ... }`
|
||||
|
||||
# AFTER
|
||||
provider/
|
||||
index.ts ← NEW: `export * as Provider from "./provider"`
|
||||
provider.ts ← SAME file, same name, just unwrap the namespace
|
||||
```
|
||||
|
||||
And the code change is purely removing the wrapper:
|
||||
|
||||
```ts
|
||||
// BEFORE: provider/provider.ts
|
||||
export namespace Provider {
|
||||
export class Service extends Context.Service<...>()("@opencode/Provider") {}
|
||||
export const layer = Layer.effect(Service, ...)
|
||||
export const ModelNotFoundError = NamedError.create(...)
|
||||
export function parseModel(model: string) { ... }
|
||||
}
|
||||
|
||||
// AFTER: provider/provider.ts — identical exports, no namespace keyword
|
||||
export class Service extends Context.Service<...>()("@opencode/Provider") {}
|
||||
export const layer = Layer.effect(Service, ...)
|
||||
export const ModelNotFoundError = NamedError.create(...)
|
||||
export function parseModel(model: string) { ... }
|
||||
```
|
||||
|
||||
```ts
|
||||
// NEW: provider/index.ts
|
||||
export * as Provider from "./provider"
|
||||
```
|
||||
|
||||
Consumer code barely changes — import path gets shorter:
|
||||
|
||||
```ts
|
||||
// BEFORE
|
||||
import { Provider } from "../provider/provider"
|
||||
|
||||
// AFTER — resolves to provider/index.ts, same Provider object
|
||||
import { Provider } from "../provider"
|
||||
```
|
||||
|
||||
All access like `Provider.ModelNotFoundError`, `Provider.Service`,
|
||||
`Provider.layer` works exactly as before. The difference is invisible to
|
||||
consumers but lets the bundler see inside the namespace.
|
||||
|
||||
**Once this is done, you don't need to break anything into subfiles for
|
||||
tree-shaking.** The bundler traces that `Provider.ModelNotFoundError` only
|
||||
depends on `NamedError` + `zod` + the schema file, and drops
|
||||
`Provider.layer` + all 20 AI SDK imports when they're unused. This works because
|
||||
`export * as` gives the bundler a static export list it can do inner-graph
|
||||
analysis on — it knows which exports reference which imports.
|
||||
|
||||
**Order of conversion** (by risk / size, do small modules first):
|
||||
|
||||
1. Tiny utilities: `Archive`, `Color`, `Token`, `Rpc`, `LocalContext` (~7-66 lines each)
|
||||
2. Small services: `Auth`, `Env`, `BusEvent`, `SessionStatus`, `SessionRunState`, `Editor`, `Selection` (~25-91 lines)
|
||||
3. Medium services: `Bus`, `Format`, `FileTime`, `FileWatcher`, `Command`, `Question`, `Permission`, `Vcs`, `Project`
|
||||
4. Large services: `Config`, `Provider`, `MCP`, `Session`, `SessionProcessor`, `SessionPrompt`, `ACP`
|
||||
|
||||
### Phase 2: Build configuration
|
||||
|
||||
After the module structure supports tree-shaking:
|
||||
|
||||
1. Add `"sideEffects": []` to `packages/opencode/package.json` (or
|
||||
`"sideEffects": false`) — this is safe because our services use explicit
|
||||
layer composition, not import-time side effects.
|
||||
2. Verify Bun's bundler respects the new structure. If Bun's tree-shaking is
|
||||
insufficient, evaluate whether the compiled binary path needs an esbuild
|
||||
pre-pass.
|
||||
3. Consider adding `/*#__PURE__*/` annotations to `NamedError.create(...)` calls
|
||||
— these are factory functions that return classes, and bundlers may not know
|
||||
they're side-effect-free without the annotation.
|
||||
|
||||
## Automation
|
||||
|
||||
The transformation is scripted. From `packages/opencode`:
|
||||
|
||||
```bash
|
||||
bun script/unwrap-namespace.ts <file> [--dry-run]
|
||||
```
|
||||
|
||||
The script uses ast-grep for accurate AST-based namespace boundary detection
|
||||
(no false matches from braces in strings/templates/comments), then:
|
||||
|
||||
1. Removes the `export namespace Foo {` line and its closing `}`
|
||||
2. Dedents the body by one indent level (2 spaces)
|
||||
3. If the file is `index.ts`, renames it to `<name>.ts` and creates a new
|
||||
`index.ts` barrel
|
||||
4. If the file is NOT `index.ts`, rewrites it in place and creates `index.ts`
|
||||
5. Rewrites matching import paths across src/, test/, and script/ automatically (and fixes sibling imports in the same directory)
|
||||
|
||||
### Walkthrough: converting a module
|
||||
|
||||
Using `Provider` as an example:
|
||||
|
||||
```bash
|
||||
# 1. Preview what will change
|
||||
bun script/unwrap-namespace.ts src/provider/provider.ts --dry-run
|
||||
|
||||
# 2. Apply the transformation
|
||||
bun script/unwrap-namespace.ts src/provider/provider.ts
|
||||
|
||||
# 3. Rewrite import paths (script prints the exact command)
|
||||
rg -l 'from.*provider/provider' src/ | xargs sed -i '' 's|provider/provider"|provider"|g'
|
||||
|
||||
# 4. Verify
|
||||
bun typecheck
|
||||
bun run test
|
||||
```
|
||||
|
||||
**What changes on disk:**
|
||||
|
||||
```
|
||||
# BEFORE
|
||||
provider/
|
||||
provider.ts ← 1709 lines, `export namespace Provider { ... }`
|
||||
|
||||
# AFTER
|
||||
provider/
|
||||
index.ts ← NEW: `export * as Provider from "./provider"`
|
||||
provider.ts ← same file, namespace unwrapped to flat exports
|
||||
```
|
||||
|
||||
**What changes in consumer code:**
|
||||
|
||||
```ts
|
||||
// BEFORE
|
||||
import { Provider } from "../provider/provider"
|
||||
|
||||
// AFTER — shorter path, same Provider object
|
||||
import { Provider } from "../provider"
|
||||
```
|
||||
|
||||
All property access (`Provider.Service`, `Provider.ModelNotFoundError`, etc.)
|
||||
stays identical.
|
||||
|
||||
### Two cases the script handles
|
||||
|
||||
**Case A: file is NOT `index.ts`** (e.g. `provider/provider.ts`)
|
||||
|
||||
- Rewrites the file in place (unwrap + dedent)
|
||||
- Creates `provider/index.ts` as the barrel
|
||||
- Import paths change: `"../provider/provider"` → `"../provider"`
|
||||
|
||||
**Case B: file IS `index.ts`** (e.g. `bus/index.ts`)
|
||||
|
||||
- Renames `index.ts` → `bus.ts` (kebab-case of namespace name)
|
||||
- Creates new `index.ts` as the barrel
|
||||
- **No import rewrites needed** — `"@/bus"` already resolves to `bus/index.ts`
|
||||
|
||||
## Do I need to split errors/schemas into subfiles?
|
||||
|
||||
**No.** Once you do the `export * as` conversion, the bundler can tree-shake
|
||||
individual exports within the file. If `cli/error.ts` only accesses
|
||||
`Provider.ModelNotFoundError`, the bundler traces that `ModelNotFoundError`
|
||||
doesn't reference `createAnthropic` and drops the AI SDK imports.
|
||||
|
||||
Splitting into subfiles (errors.ts, schema.ts) is still a fine idea for **code
|
||||
organization** — smaller files are easier to read and review. But it's not
|
||||
required for tree-shaking. The `export * as` conversion alone is sufficient.
|
||||
|
||||
The one case where subfile splitting provides extra tree-shake value is if an
|
||||
imported package has module-level side effects that the bundler can't prove are
|
||||
unused. In practice this is rare — most npm packages are side-effect-free — and
|
||||
adding `"sideEffects": []` to package.json handles the common cases.
|
||||
|
||||
## Scope
|
||||
|
||||
| Metric | Count |
|
||||
| ----------------------------------------------- | --------------- |
|
||||
| Files with `export namespace` | 106 |
|
||||
| Total namespace declarations | 118 (12 nested) |
|
||||
| Files with `NamedError.create` inside namespace | 15 |
|
||||
| Total error classes to extract | ~30 |
|
||||
| Files using `export * as` today | 0 |
|
||||
|
||||
Phase 1 (the `export * as` conversion) is the main change. It's mechanical and
|
||||
LLM-friendly but touches every import site, so it should be done module by
|
||||
module with type-checking between each step. Each module is an independent PR.
|
||||
|
||||
## Rules for new code
|
||||
|
||||
Going forward:
|
||||
|
||||
- **No new `export namespace`**. Use a file with flat named exports and
|
||||
`export * as` in the barrel.
|
||||
- Keep the service, layer, errors, schemas, and runtime wiring together in one
|
||||
file if you want — that's fine now. The `export * as` barrel makes everything
|
||||
individually shakeable regardless of file structure.
|
||||
- If a file grows large enough that it's hard to navigate, split by concern
|
||||
(errors.ts, schema.ts, etc.) for readability. Not for tree-shaking — the
|
||||
bundler handles that.
|
||||
|
||||
## Circular import rules
|
||||
|
||||
Barrel files (`index.ts` with `export * as`) introduce circular import risks.
|
||||
These cause `ReferenceError: Cannot access 'X' before initialization` at
|
||||
runtime — not caught by the type checker.
|
||||
|
||||
### Rule 1: Sibling files never import through their own barrel
|
||||
|
||||
Files in the same directory must import directly from the source file, never
|
||||
through `"."` or `"@/<own-dir>"`:
|
||||
|
||||
```ts
|
||||
// BAD — circular: index.ts re-exports both files, so A → index → B → index → A
|
||||
import { Sibling } from "."
|
||||
|
||||
// GOOD — direct, no cycle
|
||||
import * as Sibling from "./sibling"
|
||||
```
|
||||
|
||||
### Rule 2: Cross-directory imports must not form cycles through barrels
|
||||
|
||||
If `src/lsp/lsp.ts` imports `Config` from `"../config"`, and
|
||||
`src/config/config.ts` imports `LSPServer` from `"../lsp"`, that's a cycle:
|
||||
|
||||
```
|
||||
lsp/lsp.ts → config/index.ts → config/config.ts → lsp/index.ts → lsp/lsp.ts 💥
|
||||
```
|
||||
|
||||
Fix by importing the specific file, breaking the cycle:
|
||||
|
||||
```ts
|
||||
// In config/config.ts — import directly, not through the lsp barrel
|
||||
import * as LSPServer from "../lsp/server"
|
||||
```
|
||||
|
||||
### Why the type checker doesn't catch this
|
||||
|
||||
TypeScript resolves types lazily — it doesn't evaluate module-scope
|
||||
expressions. The `ReferenceError` only happens at runtime when a module-scope
|
||||
`const` or function call accesses a value from a circular dependency that
|
||||
hasn't finished initializing. The SDK build step (`bun run --conditions=browser
|
||||
./src/index.ts generate`) is the reliable way to catch these because it
|
||||
evaluates all modules eagerly.
|
||||
|
||||
### How to verify
|
||||
|
||||
After any namespace conversion, run:
|
||||
|
||||
```bash
|
||||
cd packages/opencode
|
||||
bun run --conditions=browser ./src/index.ts generate
|
||||
```
|
||||
|
||||
If this completes without `ReferenceError`, the module graph is safe.
|
||||
@@ -181,10 +181,10 @@ export interface Interface {
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Account") {}
|
||||
|
||||
export const layer: Layer.Layer<Service, never, AccountRepo | HttpClient.HttpClient> = Layer.effect(
|
||||
export const layer: Layer.Layer<Service, never, AccountRepo.Service | HttpClient.HttpClient> = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const repo = yield* AccountRepo
|
||||
const repo = yield* AccountRepo.Service
|
||||
const http = yield* HttpClient.HttpClient
|
||||
const httpRead = withTransientReadRetry(http)
|
||||
const httpOk = HttpClient.filterStatusOk(http)
|
||||
@@ -452,3 +452,5 @@ export const layer: Layer.Layer<Service, never, AccountRepo | HttpClient.HttpCli
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(AccountRepo.layer), Layer.provide(FetchHttpClient.layer))
|
||||
|
||||
export * as Account from "./account"
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
export * as Account from "./account"
|
||||
export {
|
||||
AccountID,
|
||||
type AccountError,
|
||||
AccountRepoError,
|
||||
AccountServiceError,
|
||||
AccountTransportError,
|
||||
AccessToken,
|
||||
RefreshToken,
|
||||
DeviceCode,
|
||||
UserCode,
|
||||
Info,
|
||||
Org,
|
||||
OrgID,
|
||||
Login,
|
||||
PollSuccess,
|
||||
PollPending,
|
||||
PollSlow,
|
||||
PollExpired,
|
||||
PollDenied,
|
||||
PollError,
|
||||
type PollResult,
|
||||
} from "./schema"
|
||||
export type { AccountOrgs, ActiveOrg } from "./account"
|
||||
@@ -13,154 +13,154 @@ type DbTransactionCallback<A> = Parameters<typeof Database.transaction<A>>[0]
|
||||
|
||||
const ACCOUNT_STATE_ID = 1
|
||||
|
||||
export namespace AccountRepo {
|
||||
export interface Service {
|
||||
readonly active: () => Effect.Effect<Option.Option<Info>, AccountRepoError>
|
||||
readonly list: () => Effect.Effect<Info[], AccountRepoError>
|
||||
readonly remove: (accountID: AccountID) => Effect.Effect<void, AccountRepoError>
|
||||
readonly use: (accountID: AccountID, orgID: Option.Option<OrgID>) => Effect.Effect<void, AccountRepoError>
|
||||
readonly getRow: (accountID: AccountID) => Effect.Effect<Option.Option<AccountRow>, AccountRepoError>
|
||||
readonly persistToken: (input: {
|
||||
accountID: AccountID
|
||||
accessToken: AccessToken
|
||||
refreshToken: RefreshToken
|
||||
expiry: Option.Option<number>
|
||||
}) => Effect.Effect<void, AccountRepoError>
|
||||
readonly persistAccount: (input: {
|
||||
id: AccountID
|
||||
email: string
|
||||
url: string
|
||||
accessToken: AccessToken
|
||||
refreshToken: RefreshToken
|
||||
expiry: number
|
||||
orgID: Option.Option<OrgID>
|
||||
}) => Effect.Effect<void, AccountRepoError>
|
||||
}
|
||||
export interface Interface {
|
||||
readonly active: () => Effect.Effect<Option.Option<Info>, AccountRepoError>
|
||||
readonly list: () => Effect.Effect<Info[], AccountRepoError>
|
||||
readonly remove: (accountID: AccountID) => Effect.Effect<void, AccountRepoError>
|
||||
readonly use: (accountID: AccountID, orgID: Option.Option<OrgID>) => Effect.Effect<void, AccountRepoError>
|
||||
readonly getRow: (accountID: AccountID) => Effect.Effect<Option.Option<AccountRow>, AccountRepoError>
|
||||
readonly persistToken: (input: {
|
||||
accountID: AccountID
|
||||
accessToken: AccessToken
|
||||
refreshToken: RefreshToken
|
||||
expiry: Option.Option<number>
|
||||
}) => Effect.Effect<void, AccountRepoError>
|
||||
readonly persistAccount: (input: {
|
||||
id: AccountID
|
||||
email: string
|
||||
url: string
|
||||
accessToken: AccessToken
|
||||
refreshToken: RefreshToken
|
||||
expiry: number
|
||||
orgID: Option.Option<OrgID>
|
||||
}) => Effect.Effect<void, AccountRepoError>
|
||||
}
|
||||
|
||||
export class AccountRepo extends Context.Service<AccountRepo, AccountRepo.Service>()("@opencode/AccountRepo") {
|
||||
static readonly layer: Layer.Layer<AccountRepo> = Layer.effect(
|
||||
AccountRepo,
|
||||
Effect.gen(function* () {
|
||||
const decode = Schema.decodeUnknownSync(Info)
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/AccountRepo") {}
|
||||
|
||||
const query = <A>(f: DbTransactionCallback<A>) =>
|
||||
Effect.try({
|
||||
try: () => Database.use(f),
|
||||
catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }),
|
||||
export const layer: Layer.Layer<Service> = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const decode = Schema.decodeUnknownSync(Info)
|
||||
|
||||
const query = <A>(f: DbTransactionCallback<A>) =>
|
||||
Effect.try({
|
||||
try: () => Database.use(f),
|
||||
catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }),
|
||||
})
|
||||
|
||||
const tx = <A>(f: DbTransactionCallback<A>) =>
|
||||
Effect.try({
|
||||
try: () => Database.transaction(f),
|
||||
catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }),
|
||||
})
|
||||
|
||||
const current = (db: DbClient) => {
|
||||
const state = db.select().from(AccountStateTable).where(eq(AccountStateTable.id, ACCOUNT_STATE_ID)).get()
|
||||
if (!state?.active_account_id) return
|
||||
const account = db.select().from(AccountTable).where(eq(AccountTable.id, state.active_account_id)).get()
|
||||
if (!account) return
|
||||
return { ...account, active_org_id: state.active_org_id ?? null }
|
||||
}
|
||||
|
||||
const state = (db: DbClient, accountID: AccountID, orgID: Option.Option<OrgID>) => {
|
||||
const id = Option.getOrNull(orgID)
|
||||
return db
|
||||
.insert(AccountStateTable)
|
||||
.values({ id: ACCOUNT_STATE_ID, active_account_id: accountID, active_org_id: id })
|
||||
.onConflictDoUpdate({
|
||||
target: AccountStateTable.id,
|
||||
set: { active_account_id: accountID, active_org_id: id },
|
||||
})
|
||||
.run()
|
||||
}
|
||||
|
||||
const tx = <A>(f: DbTransactionCallback<A>) =>
|
||||
Effect.try({
|
||||
try: () => Database.transaction(f),
|
||||
catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }),
|
||||
})
|
||||
const active = Effect.fn("AccountRepo.active")(() =>
|
||||
query((db) => current(db)).pipe(Effect.map((row) => (row ? Option.some(decode(row)) : Option.none()))),
|
||||
)
|
||||
|
||||
const current = (db: DbClient) => {
|
||||
const state = db.select().from(AccountStateTable).where(eq(AccountStateTable.id, ACCOUNT_STATE_ID)).get()
|
||||
if (!state?.active_account_id) return
|
||||
const account = db.select().from(AccountTable).where(eq(AccountTable.id, state.active_account_id)).get()
|
||||
if (!account) return
|
||||
return { ...account, active_org_id: state.active_org_id ?? null }
|
||||
}
|
||||
const list = Effect.fn("AccountRepo.list")(() =>
|
||||
query((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(AccountTable)
|
||||
.all()
|
||||
.map((row: AccountRow) => decode({ ...row, active_org_id: null })),
|
||||
),
|
||||
)
|
||||
|
||||
const state = (db: DbClient, accountID: AccountID, orgID: Option.Option<OrgID>) => {
|
||||
const id = Option.getOrNull(orgID)
|
||||
return db
|
||||
.insert(AccountStateTable)
|
||||
.values({ id: ACCOUNT_STATE_ID, active_account_id: accountID, active_org_id: id })
|
||||
.onConflictDoUpdate({
|
||||
target: AccountStateTable.id,
|
||||
set: { active_account_id: accountID, active_org_id: id },
|
||||
})
|
||||
const remove = Effect.fn("AccountRepo.remove")((accountID: AccountID) =>
|
||||
tx((db) => {
|
||||
db.update(AccountStateTable)
|
||||
.set({ active_account_id: null, active_org_id: null })
|
||||
.where(eq(AccountStateTable.active_account_id, accountID))
|
||||
.run()
|
||||
}
|
||||
db.delete(AccountTable).where(eq(AccountTable.id, accountID)).run()
|
||||
}).pipe(Effect.asVoid),
|
||||
)
|
||||
|
||||
const active = Effect.fn("AccountRepo.active")(() =>
|
||||
query((db) => current(db)).pipe(Effect.map((row) => (row ? Option.some(decode(row)) : Option.none()))),
|
||||
)
|
||||
const use = Effect.fn("AccountRepo.use")((accountID: AccountID, orgID: Option.Option<OrgID>) =>
|
||||
query((db) => state(db, accountID, orgID)).pipe(Effect.asVoid),
|
||||
)
|
||||
|
||||
const list = Effect.fn("AccountRepo.list")(() =>
|
||||
query((db) =>
|
||||
db
|
||||
.select()
|
||||
.from(AccountTable)
|
||||
.all()
|
||||
.map((row: AccountRow) => decode({ ...row, active_org_id: null })),
|
||||
),
|
||||
)
|
||||
const getRow = Effect.fn("AccountRepo.getRow")((accountID: AccountID) =>
|
||||
query((db) => db.select().from(AccountTable).where(eq(AccountTable.id, accountID)).get()).pipe(
|
||||
Effect.map(Option.fromNullishOr),
|
||||
),
|
||||
)
|
||||
|
||||
const remove = Effect.fn("AccountRepo.remove")((accountID: AccountID) =>
|
||||
tx((db) => {
|
||||
db.update(AccountStateTable)
|
||||
.set({ active_account_id: null, active_org_id: null })
|
||||
.where(eq(AccountStateTable.active_account_id, accountID))
|
||||
.run()
|
||||
db.delete(AccountTable).where(eq(AccountTable.id, accountID)).run()
|
||||
}).pipe(Effect.asVoid),
|
||||
)
|
||||
const persistToken = Effect.fn("AccountRepo.persistToken")((input) =>
|
||||
query((db) =>
|
||||
db
|
||||
.update(AccountTable)
|
||||
.set({
|
||||
access_token: input.accessToken,
|
||||
refresh_token: input.refreshToken,
|
||||
token_expiry: Option.getOrNull(input.expiry),
|
||||
})
|
||||
.where(eq(AccountTable.id, input.accountID))
|
||||
.run(),
|
||||
).pipe(Effect.asVoid),
|
||||
)
|
||||
|
||||
const use = Effect.fn("AccountRepo.use")((accountID: AccountID, orgID: Option.Option<OrgID>) =>
|
||||
query((db) => state(db, accountID, orgID)).pipe(Effect.asVoid),
|
||||
)
|
||||
const persistAccount = Effect.fn("AccountRepo.persistAccount")((input) =>
|
||||
tx((db) => {
|
||||
const url = normalizeServerUrl(input.url)
|
||||
|
||||
const getRow = Effect.fn("AccountRepo.getRow")((accountID: AccountID) =>
|
||||
query((db) => db.select().from(AccountTable).where(eq(AccountTable.id, accountID)).get()).pipe(
|
||||
Effect.map(Option.fromNullishOr),
|
||||
),
|
||||
)
|
||||
|
||||
const persistToken = Effect.fn("AccountRepo.persistToken")((input) =>
|
||||
query((db) =>
|
||||
db
|
||||
.update(AccountTable)
|
||||
.set({
|
||||
access_token: input.accessToken,
|
||||
refresh_token: input.refreshToken,
|
||||
token_expiry: Option.getOrNull(input.expiry),
|
||||
})
|
||||
.where(eq(AccountTable.id, input.accountID))
|
||||
.run(),
|
||||
).pipe(Effect.asVoid),
|
||||
)
|
||||
|
||||
const persistAccount = Effect.fn("AccountRepo.persistAccount")((input) =>
|
||||
tx((db) => {
|
||||
const url = normalizeServerUrl(input.url)
|
||||
|
||||
db.insert(AccountTable)
|
||||
.values({
|
||||
id: input.id,
|
||||
db.insert(AccountTable)
|
||||
.values({
|
||||
id: input.id,
|
||||
email: input.email,
|
||||
url,
|
||||
access_token: input.accessToken,
|
||||
refresh_token: input.refreshToken,
|
||||
token_expiry: input.expiry,
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: AccountTable.id,
|
||||
set: {
|
||||
email: input.email,
|
||||
url,
|
||||
access_token: input.accessToken,
|
||||
refresh_token: input.refreshToken,
|
||||
token_expiry: input.expiry,
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: AccountTable.id,
|
||||
set: {
|
||||
email: input.email,
|
||||
url,
|
||||
access_token: input.accessToken,
|
||||
refresh_token: input.refreshToken,
|
||||
token_expiry: input.expiry,
|
||||
},
|
||||
})
|
||||
.run()
|
||||
void state(db, input.id, input.orgID)
|
||||
}).pipe(Effect.asVoid),
|
||||
)
|
||||
},
|
||||
})
|
||||
.run()
|
||||
void state(db, input.id, input.orgID)
|
||||
}).pipe(Effect.asVoid),
|
||||
)
|
||||
|
||||
return AccountRepo.of({
|
||||
active,
|
||||
list,
|
||||
remove,
|
||||
use,
|
||||
getRow,
|
||||
persistToken,
|
||||
persistAccount,
|
||||
})
|
||||
}),
|
||||
)
|
||||
}
|
||||
return Service.of({
|
||||
active,
|
||||
list,
|
||||
remove,
|
||||
use,
|
||||
getRow,
|
||||
persistToken,
|
||||
persistAccount,
|
||||
})
|
||||
}),
|
||||
)
|
||||
|
||||
export * as AccountRepo from "./repo"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -24,389 +24,388 @@ import { InstanceState } from "@/effect"
|
||||
import * as Option from "effect/Option"
|
||||
import * as OtelTracer from "@effect/opentelemetry/Tracer"
|
||||
|
||||
export namespace Agent {
|
||||
export const Info = z
|
||||
.object({
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
mode: z.enum(["subagent", "primary", "all"]),
|
||||
native: z.boolean().optional(),
|
||||
hidden: z.boolean().optional(),
|
||||
topP: z.number().optional(),
|
||||
temperature: z.number().optional(),
|
||||
color: z.string().optional(),
|
||||
permission: Permission.Ruleset.zod,
|
||||
model: z
|
||||
.object({
|
||||
modelID: ModelID.zod,
|
||||
providerID: ProviderID.zod,
|
||||
})
|
||||
.optional(),
|
||||
variant: z.string().optional(),
|
||||
prompt: z.string().optional(),
|
||||
options: z.record(z.string(), z.any()),
|
||||
steps: z.number().int().positive().optional(),
|
||||
})
|
||||
.meta({
|
||||
ref: "Agent",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export interface Interface {
|
||||
readonly get: (agent: string) => Effect.Effect<Agent.Info>
|
||||
readonly list: () => Effect.Effect<Agent.Info[]>
|
||||
readonly defaultAgent: () => Effect.Effect<string>
|
||||
readonly generate: (input: {
|
||||
description: string
|
||||
model?: { providerID: ProviderID; modelID: ModelID }
|
||||
}) => Effect.Effect<{
|
||||
identifier: string
|
||||
whenToUse: string
|
||||
systemPrompt: string
|
||||
}>
|
||||
}
|
||||
|
||||
type State = Omit<Interface, "generate">
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Agent") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const config = yield* Config.Service
|
||||
const auth = yield* Auth.Service
|
||||
const plugin = yield* Plugin.Service
|
||||
const skill = yield* Skill.Service
|
||||
const provider = yield* Provider.Service
|
||||
|
||||
const state = yield* InstanceState.make<State>(
|
||||
Effect.fn("Agent.state")(function* (_ctx) {
|
||||
const cfg = yield* config.get()
|
||||
const skillDirs = yield* skill.dirs()
|
||||
const whitelistedDirs = [Truncate.GLOB, ...skillDirs.map((dir) => path.join(dir, "*"))]
|
||||
|
||||
const defaults = Permission.fromConfig({
|
||||
"*": "allow",
|
||||
doom_loop: "ask",
|
||||
external_directory: {
|
||||
"*": "ask",
|
||||
...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])),
|
||||
},
|
||||
question: "deny",
|
||||
plan_enter: "deny",
|
||||
plan_exit: "deny",
|
||||
// mirrors github.com/github/gitignore Node.gitignore pattern for .env files
|
||||
read: {
|
||||
"*": "allow",
|
||||
"*.env": "ask",
|
||||
"*.env.*": "ask",
|
||||
"*.env.example": "allow",
|
||||
},
|
||||
})
|
||||
|
||||
const user = Permission.fromConfig(cfg.permission ?? {})
|
||||
|
||||
const agents: Record<string, Info> = {
|
||||
build: {
|
||||
name: "build",
|
||||
description: "The default agent. Executes tools based on configured permissions.",
|
||||
options: {},
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
question: "allow",
|
||||
plan_enter: "allow",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
mode: "primary",
|
||||
native: true,
|
||||
},
|
||||
plan: {
|
||||
name: "plan",
|
||||
description: "Plan mode. Disallows all edit tools.",
|
||||
options: {},
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
question: "allow",
|
||||
plan_exit: "allow",
|
||||
external_directory: {
|
||||
[path.join(Global.Path.data, "plans", "*")]: "allow",
|
||||
},
|
||||
edit: {
|
||||
"*": "deny",
|
||||
[path.join(".opencode", "plans", "*.md")]: "allow",
|
||||
[path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]:
|
||||
"allow",
|
||||
},
|
||||
}),
|
||||
user,
|
||||
),
|
||||
mode: "primary",
|
||||
native: true,
|
||||
},
|
||||
general: {
|
||||
name: "general",
|
||||
description: `General-purpose agent for researching complex questions and executing multi-step tasks. Use this agent to execute multiple units of work in parallel.`,
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
todowrite: "deny",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
options: {},
|
||||
mode: "subagent",
|
||||
native: true,
|
||||
},
|
||||
explore: {
|
||||
name: "explore",
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
"*": "deny",
|
||||
grep: "allow",
|
||||
glob: "allow",
|
||||
list: "allow",
|
||||
bash: "allow",
|
||||
webfetch: "allow",
|
||||
websearch: "allow",
|
||||
codesearch: "allow",
|
||||
read: "allow",
|
||||
external_directory: {
|
||||
"*": "ask",
|
||||
...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])),
|
||||
},
|
||||
}),
|
||||
user,
|
||||
),
|
||||
description: `Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.`,
|
||||
prompt: PROMPT_EXPLORE,
|
||||
options: {},
|
||||
mode: "subagent",
|
||||
native: true,
|
||||
},
|
||||
compaction: {
|
||||
name: "compaction",
|
||||
mode: "primary",
|
||||
native: true,
|
||||
hidden: true,
|
||||
prompt: PROMPT_COMPACTION,
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
"*": "deny",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
options: {},
|
||||
},
|
||||
title: {
|
||||
name: "title",
|
||||
mode: "primary",
|
||||
options: {},
|
||||
native: true,
|
||||
hidden: true,
|
||||
temperature: 0.5,
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
"*": "deny",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
prompt: PROMPT_TITLE,
|
||||
},
|
||||
summary: {
|
||||
name: "summary",
|
||||
mode: "primary",
|
||||
options: {},
|
||||
native: true,
|
||||
hidden: true,
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
"*": "deny",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
prompt: PROMPT_SUMMARY,
|
||||
},
|
||||
}
|
||||
|
||||
for (const [key, value] of Object.entries(cfg.agent ?? {})) {
|
||||
if (value.disable) {
|
||||
delete agents[key]
|
||||
continue
|
||||
}
|
||||
let item = agents[key]
|
||||
if (!item)
|
||||
item = agents[key] = {
|
||||
name: key,
|
||||
mode: "all",
|
||||
permission: Permission.merge(defaults, user),
|
||||
options: {},
|
||||
native: false,
|
||||
}
|
||||
if (value.model) item.model = Provider.parseModel(value.model)
|
||||
item.variant = value.variant ?? item.variant
|
||||
item.prompt = value.prompt ?? item.prompt
|
||||
item.description = value.description ?? item.description
|
||||
item.temperature = value.temperature ?? item.temperature
|
||||
item.topP = value.top_p ?? item.topP
|
||||
item.mode = value.mode ?? item.mode
|
||||
item.color = value.color ?? item.color
|
||||
item.hidden = value.hidden ?? item.hidden
|
||||
item.name = value.name ?? item.name
|
||||
item.steps = value.steps ?? item.steps
|
||||
item.options = mergeDeep(item.options, value.options ?? {})
|
||||
item.permission = Permission.merge(item.permission, Permission.fromConfig(value.permission ?? {}))
|
||||
}
|
||||
|
||||
// Ensure Truncate.GLOB is allowed unless explicitly configured
|
||||
for (const name in agents) {
|
||||
const agent = agents[name]
|
||||
const explicit = agent.permission.some((r) => {
|
||||
if (r.permission !== "external_directory") return false
|
||||
if (r.action !== "deny") return false
|
||||
return r.pattern === Truncate.GLOB
|
||||
})
|
||||
if (explicit) continue
|
||||
|
||||
agents[name].permission = Permission.merge(
|
||||
agents[name].permission,
|
||||
Permission.fromConfig({ external_directory: { [Truncate.GLOB]: "allow" } }),
|
||||
)
|
||||
}
|
||||
|
||||
const get = Effect.fnUntraced(function* (agent: string) {
|
||||
return agents[agent]
|
||||
})
|
||||
|
||||
const list = Effect.fnUntraced(function* () {
|
||||
const cfg = yield* config.get()
|
||||
return pipe(
|
||||
agents,
|
||||
values(),
|
||||
sortBy(
|
||||
[(x) => (cfg.default_agent ? x.name === cfg.default_agent : x.name === "build"), "desc"],
|
||||
[(x) => x.name, "asc"],
|
||||
),
|
||||
)
|
||||
})
|
||||
|
||||
const defaultAgent = Effect.fnUntraced(function* () {
|
||||
const c = yield* config.get()
|
||||
if (c.default_agent) {
|
||||
const agent = agents[c.default_agent]
|
||||
if (!agent) throw new Error(`default agent "${c.default_agent}" not found`)
|
||||
if (agent.mode === "subagent") throw new Error(`default agent "${c.default_agent}" is a subagent`)
|
||||
if (agent.hidden === true) throw new Error(`default agent "${c.default_agent}" is hidden`)
|
||||
return agent.name
|
||||
}
|
||||
const visible = Object.values(agents).find((a) => a.mode !== "subagent" && a.hidden !== true)
|
||||
if (!visible) throw new Error("no primary visible agent found")
|
||||
return visible.name
|
||||
})
|
||||
|
||||
return {
|
||||
get,
|
||||
list,
|
||||
defaultAgent,
|
||||
} satisfies State
|
||||
}),
|
||||
)
|
||||
|
||||
return Service.of({
|
||||
get: Effect.fn("Agent.get")(function* (agent: string) {
|
||||
return yield* InstanceState.useEffect(state, (s) => s.get(agent))
|
||||
}),
|
||||
list: Effect.fn("Agent.list")(function* () {
|
||||
return yield* InstanceState.useEffect(state, (s) => s.list())
|
||||
}),
|
||||
defaultAgent: Effect.fn("Agent.defaultAgent")(function* () {
|
||||
return yield* InstanceState.useEffect(state, (s) => s.defaultAgent())
|
||||
}),
|
||||
generate: Effect.fn("Agent.generate")(function* (input: {
|
||||
description: string
|
||||
model?: { providerID: ProviderID; modelID: ModelID }
|
||||
}) {
|
||||
const cfg = yield* config.get()
|
||||
const model = input.model ?? (yield* provider.defaultModel())
|
||||
const resolved = yield* provider.getModel(model.providerID, model.modelID)
|
||||
const language = yield* provider.getLanguage(resolved)
|
||||
const tracer = cfg.experimental?.openTelemetry
|
||||
? Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer))
|
||||
: undefined
|
||||
|
||||
const system = [PROMPT_GENERATE]
|
||||
yield* plugin.trigger("experimental.chat.system.transform", { model: resolved }, { system })
|
||||
const existing = yield* InstanceState.useEffect(state, (s) => s.list())
|
||||
|
||||
// TODO: clean this up so provider specific logic doesnt bleed over
|
||||
const authInfo = yield* auth.get(model.providerID).pipe(Effect.orDie)
|
||||
const isOpenaiOauth = model.providerID === "openai" && authInfo?.type === "oauth"
|
||||
|
||||
const params = {
|
||||
experimental_telemetry: {
|
||||
isEnabled: cfg.experimental?.openTelemetry,
|
||||
tracer,
|
||||
metadata: {
|
||||
userId: cfg.username ?? "unknown",
|
||||
},
|
||||
},
|
||||
temperature: 0.3,
|
||||
messages: [
|
||||
...(isOpenaiOauth
|
||||
? []
|
||||
: system.map(
|
||||
(item): ModelMessage => ({
|
||||
role: "system",
|
||||
content: item,
|
||||
}),
|
||||
)),
|
||||
{
|
||||
role: "user",
|
||||
content: `Create an agent configuration based on this request: "${input.description}".\n\nIMPORTANT: The following identifiers already exist and must NOT be used: ${existing.map((i) => i.name).join(", ")}\n Return ONLY the JSON object, no other text, do not wrap in backticks`,
|
||||
},
|
||||
],
|
||||
model: language,
|
||||
schema: z.object({
|
||||
identifier: z.string(),
|
||||
whenToUse: z.string(),
|
||||
systemPrompt: z.string(),
|
||||
}),
|
||||
} satisfies Parameters<typeof generateObject>[0]
|
||||
|
||||
if (isOpenaiOauth) {
|
||||
return yield* Effect.promise(async () => {
|
||||
const result = streamObject({
|
||||
...params,
|
||||
providerOptions: ProviderTransform.providerOptions(resolved, {
|
||||
instructions: system.join("\n"),
|
||||
store: false,
|
||||
}),
|
||||
onError: () => {},
|
||||
})
|
||||
for await (const part of result.fullStream) {
|
||||
if (part.type === "error") throw part.error
|
||||
}
|
||||
return result.object
|
||||
})
|
||||
}
|
||||
|
||||
return yield* Effect.promise(() => generateObject(params).then((r) => r.object))
|
||||
}),
|
||||
export const Info = z
|
||||
.object({
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
mode: z.enum(["subagent", "primary", "all"]),
|
||||
native: z.boolean().optional(),
|
||||
hidden: z.boolean().optional(),
|
||||
topP: z.number().optional(),
|
||||
temperature: z.number().optional(),
|
||||
color: z.string().optional(),
|
||||
permission: Permission.Ruleset.zod,
|
||||
model: z
|
||||
.object({
|
||||
modelID: ModelID.zod,
|
||||
providerID: ProviderID.zod,
|
||||
})
|
||||
}),
|
||||
)
|
||||
.optional(),
|
||||
variant: z.string().optional(),
|
||||
prompt: z.string().optional(),
|
||||
options: z.record(z.string(), z.any()),
|
||||
steps: z.number().int().positive().optional(),
|
||||
})
|
||||
.meta({
|
||||
ref: "Agent",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export const defaultLayer = layer.pipe(
|
||||
Layer.provide(Plugin.defaultLayer),
|
||||
Layer.provide(Provider.defaultLayer),
|
||||
Layer.provide(Auth.defaultLayer),
|
||||
Layer.provide(Config.defaultLayer),
|
||||
Layer.provide(Skill.defaultLayer),
|
||||
)
|
||||
export interface Interface {
|
||||
readonly get: (agent: string) => Effect.Effect<Info>
|
||||
readonly list: () => Effect.Effect<Info[]>
|
||||
readonly defaultAgent: () => Effect.Effect<string>
|
||||
readonly generate: (input: {
|
||||
description: string
|
||||
model?: { providerID: ProviderID; modelID: ModelID }
|
||||
}) => Effect.Effect<{
|
||||
identifier: string
|
||||
whenToUse: string
|
||||
systemPrompt: string
|
||||
}>
|
||||
}
|
||||
|
||||
type State = Omit<Interface, "generate">
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Agent") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const config = yield* Config.Service
|
||||
const auth = yield* Auth.Service
|
||||
const plugin = yield* Plugin.Service
|
||||
const skill = yield* Skill.Service
|
||||
const provider = yield* Provider.Service
|
||||
|
||||
const state = yield* InstanceState.make<State>(
|
||||
Effect.fn("Agent.state")(function* (_ctx) {
|
||||
const cfg = yield* config.get()
|
||||
const skillDirs = yield* skill.dirs()
|
||||
const whitelistedDirs = [Truncate.GLOB, ...skillDirs.map((dir) => path.join(dir, "*"))]
|
||||
|
||||
const defaults = Permission.fromConfig({
|
||||
"*": "allow",
|
||||
doom_loop: "ask",
|
||||
external_directory: {
|
||||
"*": "ask",
|
||||
...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])),
|
||||
},
|
||||
question: "deny",
|
||||
plan_enter: "deny",
|
||||
plan_exit: "deny",
|
||||
// mirrors github.com/github/gitignore Node.gitignore pattern for .env files
|
||||
read: {
|
||||
"*": "allow",
|
||||
"*.env": "ask",
|
||||
"*.env.*": "ask",
|
||||
"*.env.example": "allow",
|
||||
},
|
||||
})
|
||||
|
||||
const user = Permission.fromConfig(cfg.permission ?? {})
|
||||
|
||||
const agents: Record<string, Info> = {
|
||||
build: {
|
||||
name: "build",
|
||||
description: "The default agent. Executes tools based on configured permissions.",
|
||||
options: {},
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
question: "allow",
|
||||
plan_enter: "allow",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
mode: "primary",
|
||||
native: true,
|
||||
},
|
||||
plan: {
|
||||
name: "plan",
|
||||
description: "Plan mode. Disallows all edit tools.",
|
||||
options: {},
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
question: "allow",
|
||||
plan_exit: "allow",
|
||||
external_directory: {
|
||||
[path.join(Global.Path.data, "plans", "*")]: "allow",
|
||||
},
|
||||
edit: {
|
||||
"*": "deny",
|
||||
[path.join(".opencode", "plans", "*.md")]: "allow",
|
||||
[path.relative(Instance.worktree, path.join(Global.Path.data, path.join("plans", "*.md")))]: "allow",
|
||||
},
|
||||
}),
|
||||
user,
|
||||
),
|
||||
mode: "primary",
|
||||
native: true,
|
||||
},
|
||||
general: {
|
||||
name: "general",
|
||||
description: `General-purpose agent for researching complex questions and executing multi-step tasks. Use this agent to execute multiple units of work in parallel.`,
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
todowrite: "deny",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
options: {},
|
||||
mode: "subagent",
|
||||
native: true,
|
||||
},
|
||||
explore: {
|
||||
name: "explore",
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
"*": "deny",
|
||||
grep: "allow",
|
||||
glob: "allow",
|
||||
list: "allow",
|
||||
bash: "allow",
|
||||
webfetch: "allow",
|
||||
websearch: "allow",
|
||||
codesearch: "allow",
|
||||
read: "allow",
|
||||
external_directory: {
|
||||
"*": "ask",
|
||||
...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])),
|
||||
},
|
||||
}),
|
||||
user,
|
||||
),
|
||||
description: `Fast agent specialized for exploring codebases. Use this when you need to quickly find files by patterns (eg. "src/components/**/*.tsx"), search code for keywords (eg. "API endpoints"), or answer questions about the codebase (eg. "how do API endpoints work?"). When calling this agent, specify the desired thoroughness level: "quick" for basic searches, "medium" for moderate exploration, or "very thorough" for comprehensive analysis across multiple locations and naming conventions.`,
|
||||
prompt: PROMPT_EXPLORE,
|
||||
options: {},
|
||||
mode: "subagent",
|
||||
native: true,
|
||||
},
|
||||
compaction: {
|
||||
name: "compaction",
|
||||
mode: "primary",
|
||||
native: true,
|
||||
hidden: true,
|
||||
prompt: PROMPT_COMPACTION,
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
"*": "deny",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
options: {},
|
||||
},
|
||||
title: {
|
||||
name: "title",
|
||||
mode: "primary",
|
||||
options: {},
|
||||
native: true,
|
||||
hidden: true,
|
||||
temperature: 0.5,
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
"*": "deny",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
prompt: PROMPT_TITLE,
|
||||
},
|
||||
summary: {
|
||||
name: "summary",
|
||||
mode: "primary",
|
||||
options: {},
|
||||
native: true,
|
||||
hidden: true,
|
||||
permission: Permission.merge(
|
||||
defaults,
|
||||
Permission.fromConfig({
|
||||
"*": "deny",
|
||||
}),
|
||||
user,
|
||||
),
|
||||
prompt: PROMPT_SUMMARY,
|
||||
},
|
||||
}
|
||||
|
||||
for (const [key, value] of Object.entries(cfg.agent ?? {})) {
|
||||
if (value.disable) {
|
||||
delete agents[key]
|
||||
continue
|
||||
}
|
||||
let item = agents[key]
|
||||
if (!item)
|
||||
item = agents[key] = {
|
||||
name: key,
|
||||
mode: "all",
|
||||
permission: Permission.merge(defaults, user),
|
||||
options: {},
|
||||
native: false,
|
||||
}
|
||||
if (value.model) item.model = Provider.parseModel(value.model)
|
||||
item.variant = value.variant ?? item.variant
|
||||
item.prompt = value.prompt ?? item.prompt
|
||||
item.description = value.description ?? item.description
|
||||
item.temperature = value.temperature ?? item.temperature
|
||||
item.topP = value.top_p ?? item.topP
|
||||
item.mode = value.mode ?? item.mode
|
||||
item.color = value.color ?? item.color
|
||||
item.hidden = value.hidden ?? item.hidden
|
||||
item.name = value.name ?? item.name
|
||||
item.steps = value.steps ?? item.steps
|
||||
item.options = mergeDeep(item.options, value.options ?? {})
|
||||
item.permission = Permission.merge(item.permission, Permission.fromConfig(value.permission ?? {}))
|
||||
}
|
||||
|
||||
// Ensure Truncate.GLOB is allowed unless explicitly configured
|
||||
for (const name in agents) {
|
||||
const agent = agents[name]
|
||||
const explicit = agent.permission.some((r) => {
|
||||
if (r.permission !== "external_directory") return false
|
||||
if (r.action !== "deny") return false
|
||||
return r.pattern === Truncate.GLOB
|
||||
})
|
||||
if (explicit) continue
|
||||
|
||||
agents[name].permission = Permission.merge(
|
||||
agents[name].permission,
|
||||
Permission.fromConfig({ external_directory: { [Truncate.GLOB]: "allow" } }),
|
||||
)
|
||||
}
|
||||
|
||||
const get = Effect.fnUntraced(function* (agent: string) {
|
||||
return agents[agent]
|
||||
})
|
||||
|
||||
const list = Effect.fnUntraced(function* () {
|
||||
const cfg = yield* config.get()
|
||||
return pipe(
|
||||
agents,
|
||||
values(),
|
||||
sortBy(
|
||||
[(x) => (cfg.default_agent ? x.name === cfg.default_agent : x.name === "build"), "desc"],
|
||||
[(x) => x.name, "asc"],
|
||||
),
|
||||
)
|
||||
})
|
||||
|
||||
const defaultAgent = Effect.fnUntraced(function* () {
|
||||
const c = yield* config.get()
|
||||
if (c.default_agent) {
|
||||
const agent = agents[c.default_agent]
|
||||
if (!agent) throw new Error(`default agent "${c.default_agent}" not found`)
|
||||
if (agent.mode === "subagent") throw new Error(`default agent "${c.default_agent}" is a subagent`)
|
||||
if (agent.hidden === true) throw new Error(`default agent "${c.default_agent}" is hidden`)
|
||||
return agent.name
|
||||
}
|
||||
const visible = Object.values(agents).find((a) => a.mode !== "subagent" && a.hidden !== true)
|
||||
if (!visible) throw new Error("no primary visible agent found")
|
||||
return visible.name
|
||||
})
|
||||
|
||||
return {
|
||||
get,
|
||||
list,
|
||||
defaultAgent,
|
||||
} satisfies State
|
||||
}),
|
||||
)
|
||||
|
||||
return Service.of({
|
||||
get: Effect.fn("Agent.get")(function* (agent: string) {
|
||||
return yield* InstanceState.useEffect(state, (s) => s.get(agent))
|
||||
}),
|
||||
list: Effect.fn("Agent.list")(function* () {
|
||||
return yield* InstanceState.useEffect(state, (s) => s.list())
|
||||
}),
|
||||
defaultAgent: Effect.fn("Agent.defaultAgent")(function* () {
|
||||
return yield* InstanceState.useEffect(state, (s) => s.defaultAgent())
|
||||
}),
|
||||
generate: Effect.fn("Agent.generate")(function* (input: {
|
||||
description: string
|
||||
model?: { providerID: ProviderID; modelID: ModelID }
|
||||
}) {
|
||||
const cfg = yield* config.get()
|
||||
const model = input.model ?? (yield* provider.defaultModel())
|
||||
const resolved = yield* provider.getModel(model.providerID, model.modelID)
|
||||
const language = yield* provider.getLanguage(resolved)
|
||||
const tracer = cfg.experimental?.openTelemetry
|
||||
? Option.getOrUndefined(yield* Effect.serviceOption(OtelTracer.OtelTracer))
|
||||
: undefined
|
||||
|
||||
const system = [PROMPT_GENERATE]
|
||||
yield* plugin.trigger("experimental.chat.system.transform", { model: resolved }, { system })
|
||||
const existing = yield* InstanceState.useEffect(state, (s) => s.list())
|
||||
|
||||
// TODO: clean this up so provider specific logic doesnt bleed over
|
||||
const authInfo = yield* auth.get(model.providerID).pipe(Effect.orDie)
|
||||
const isOpenaiOauth = model.providerID === "openai" && authInfo?.type === "oauth"
|
||||
|
||||
const params = {
|
||||
experimental_telemetry: {
|
||||
isEnabled: cfg.experimental?.openTelemetry,
|
||||
tracer,
|
||||
metadata: {
|
||||
userId: cfg.username ?? "unknown",
|
||||
},
|
||||
},
|
||||
temperature: 0.3,
|
||||
messages: [
|
||||
...(isOpenaiOauth
|
||||
? []
|
||||
: system.map(
|
||||
(item): ModelMessage => ({
|
||||
role: "system",
|
||||
content: item,
|
||||
}),
|
||||
)),
|
||||
{
|
||||
role: "user",
|
||||
content: `Create an agent configuration based on this request: "${input.description}".\n\nIMPORTANT: The following identifiers already exist and must NOT be used: ${existing.map((i) => i.name).join(", ")}\n Return ONLY the JSON object, no other text, do not wrap in backticks`,
|
||||
},
|
||||
],
|
||||
model: language,
|
||||
schema: z.object({
|
||||
identifier: z.string(),
|
||||
whenToUse: z.string(),
|
||||
systemPrompt: z.string(),
|
||||
}),
|
||||
} satisfies Parameters<typeof generateObject>[0]
|
||||
|
||||
if (isOpenaiOauth) {
|
||||
return yield* Effect.promise(async () => {
|
||||
const result = streamObject({
|
||||
...params,
|
||||
providerOptions: ProviderTransform.providerOptions(resolved, {
|
||||
instructions: system.join("\n"),
|
||||
store: false,
|
||||
}),
|
||||
onError: () => {},
|
||||
})
|
||||
for await (const part of result.fullStream) {
|
||||
if (part.type === "error") throw part.error
|
||||
}
|
||||
return result.object
|
||||
})
|
||||
}
|
||||
|
||||
return yield* Effect.promise(() => generateObject(params).then((r) => r.object))
|
||||
}),
|
||||
})
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(
|
||||
Layer.provide(Plugin.defaultLayer),
|
||||
Layer.provide(Provider.defaultLayer),
|
||||
Layer.provide(Auth.defaultLayer),
|
||||
Layer.provide(Config.defaultLayer),
|
||||
Layer.provide(Skill.defaultLayer),
|
||||
)
|
||||
|
||||
export * as Agent from "./agent"
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
import path from "path"
|
||||
import { Effect, Layer, Record, Result, Schema, Context } from "effect"
|
||||
import { zod } from "@/util/effect-zod"
|
||||
import { Global } from "../global"
|
||||
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
|
||||
|
||||
export const OAUTH_DUMMY_KEY = "opencode-oauth-dummy-key"
|
||||
|
||||
const file = path.join(Global.Path.data, "auth.json")
|
||||
|
||||
const fail = (message: string) => (cause: unknown) => new AuthError({ message, cause })
|
||||
|
||||
export class Oauth extends Schema.Class<Oauth>("OAuth")({
|
||||
type: Schema.Literal("oauth"),
|
||||
refresh: Schema.String,
|
||||
access: Schema.String,
|
||||
expires: Schema.Number,
|
||||
accountId: Schema.optional(Schema.String),
|
||||
enterpriseUrl: Schema.optional(Schema.String),
|
||||
}) {}
|
||||
|
||||
export class Api extends Schema.Class<Api>("ApiAuth")({
|
||||
type: Schema.Literal("api"),
|
||||
key: Schema.String,
|
||||
metadata: Schema.optional(Schema.Record(Schema.String, Schema.String)),
|
||||
}) {}
|
||||
|
||||
export class WellKnown extends Schema.Class<WellKnown>("WellKnownAuth")({
|
||||
type: Schema.Literal("wellknown"),
|
||||
key: Schema.String,
|
||||
token: Schema.String,
|
||||
}) {}
|
||||
|
||||
const _Info = Schema.Union([Oauth, Api, WellKnown]).annotate({ discriminator: "type", identifier: "Auth" })
|
||||
export const Info = Object.assign(_Info, { zod: zod(_Info) })
|
||||
export type Info = Schema.Schema.Type<typeof _Info>
|
||||
|
||||
export class AuthError extends Schema.TaggedErrorClass<AuthError>()("AuthError", {
|
||||
message: Schema.String,
|
||||
cause: Schema.optional(Schema.Defect),
|
||||
}) {}
|
||||
|
||||
export interface Interface {
|
||||
readonly get: (providerID: string) => Effect.Effect<Info | undefined, AuthError>
|
||||
readonly all: () => Effect.Effect<Record<string, Info>, AuthError>
|
||||
readonly set: (key: string, info: Info) => Effect.Effect<void, AuthError>
|
||||
readonly remove: (key: string) => Effect.Effect<void, AuthError>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Auth") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const fsys = yield* AppFileSystem.Service
|
||||
const decode = Schema.decodeUnknownOption(Info)
|
||||
|
||||
const all = Effect.fn("Auth.all")(function* () {
|
||||
const data = (yield* fsys.readJson(file).pipe(Effect.orElseSucceed(() => ({})))) as Record<string, unknown>
|
||||
return Record.filterMap(data, (value) => Result.fromOption(decode(value), () => undefined))
|
||||
})
|
||||
|
||||
const get = Effect.fn("Auth.get")(function* (providerID: string) {
|
||||
return (yield* all())[providerID]
|
||||
})
|
||||
|
||||
const set = Effect.fn("Auth.set")(function* (key: string, info: Info) {
|
||||
const norm = key.replace(/\/+$/, "")
|
||||
const data = yield* all()
|
||||
if (norm !== key) delete data[key]
|
||||
delete data[norm + "/"]
|
||||
yield* fsys
|
||||
.writeJson(file, { ...data, [norm]: info }, 0o600)
|
||||
.pipe(Effect.mapError(fail("Failed to write auth data")))
|
||||
})
|
||||
|
||||
const remove = Effect.fn("Auth.remove")(function* (key: string) {
|
||||
const norm = key.replace(/\/+$/, "")
|
||||
const data = yield* all()
|
||||
delete data[key]
|
||||
delete data[norm]
|
||||
yield* fsys.writeJson(file, data, 0o600).pipe(Effect.mapError(fail("Failed to write auth data")))
|
||||
})
|
||||
|
||||
return Service.of({ get, all, set, remove })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
|
||||
@@ -1,2 +1,97 @@
|
||||
export * as Auth from "./auth"
|
||||
export { OAUTH_DUMMY_KEY } from "./auth"
|
||||
import path from "path"
|
||||
import { Effect, Layer, Record, Result, Schema, Context } from "effect"
|
||||
import { zod } from "@/util/effect-zod"
|
||||
import { Global } from "../global"
|
||||
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
|
||||
|
||||
export const OAUTH_DUMMY_KEY = "opencode-oauth-dummy-key"
|
||||
|
||||
const file = path.join(Global.Path.data, "auth.json")
|
||||
|
||||
const fail = (message: string) => (cause: unknown) => new AuthError({ message, cause })
|
||||
|
||||
export class Oauth extends Schema.Class<Oauth>("OAuth")({
|
||||
type: Schema.Literal("oauth"),
|
||||
refresh: Schema.String,
|
||||
access: Schema.String,
|
||||
expires: Schema.Number,
|
||||
accountId: Schema.optional(Schema.String),
|
||||
enterpriseUrl: Schema.optional(Schema.String),
|
||||
}) {}
|
||||
|
||||
export class Api extends Schema.Class<Api>("ApiAuth")({
|
||||
type: Schema.Literal("api"),
|
||||
key: Schema.String,
|
||||
metadata: Schema.optional(Schema.Record(Schema.String, Schema.String)),
|
||||
}) {}
|
||||
|
||||
export class WellKnown extends Schema.Class<WellKnown>("WellKnownAuth")({
|
||||
type: Schema.Literal("wellknown"),
|
||||
key: Schema.String,
|
||||
token: Schema.String,
|
||||
}) {}
|
||||
|
||||
const _Info = Schema.Union([Oauth, Api, WellKnown]).annotate({ discriminator: "type", identifier: "Auth" })
|
||||
export const Info = Object.assign(_Info, { zod: zod(_Info) })
|
||||
export type Info = Schema.Schema.Type<typeof _Info>
|
||||
|
||||
export class AuthError extends Schema.TaggedErrorClass<AuthError>()("AuthError", {
|
||||
message: Schema.String,
|
||||
cause: Schema.optional(Schema.Defect),
|
||||
}) {}
|
||||
|
||||
export interface Interface {
|
||||
readonly get: (providerID: string) => Effect.Effect<Info | undefined, AuthError>
|
||||
readonly all: () => Effect.Effect<Record<string, Info>, AuthError>
|
||||
readonly set: (key: string, info: Info) => Effect.Effect<void, AuthError>
|
||||
readonly remove: (key: string) => Effect.Effect<void, AuthError>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Auth") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const fsys = yield* AppFileSystem.Service
|
||||
const decode = Schema.decodeUnknownOption(Info)
|
||||
|
||||
const all = Effect.fn("Auth.all")(function* () {
|
||||
if (process.env.OPENCODE_AUTH_CONTENT) {
|
||||
try {
|
||||
return JSON.parse(process.env.OPENCODE_AUTH_CONTENT)
|
||||
} catch (err) {}
|
||||
}
|
||||
|
||||
const data = (yield* fsys.readJson(file).pipe(Effect.orElseSucceed(() => ({})))) as Record<string, unknown>
|
||||
return Record.filterMap(data, (value) => Result.fromOption(decode(value), () => undefined))
|
||||
})
|
||||
|
||||
const get = Effect.fn("Auth.get")(function* (providerID: string) {
|
||||
return (yield* all())[providerID]
|
||||
})
|
||||
|
||||
const set = Effect.fn("Auth.set")(function* (key: string, info: Info) {
|
||||
const norm = key.replace(/\/+$/, "")
|
||||
const data = yield* all()
|
||||
if (norm !== key) delete data[key]
|
||||
delete data[norm + "/"]
|
||||
yield* fsys
|
||||
.writeJson(file, { ...data, [norm]: info }, 0o600)
|
||||
.pipe(Effect.mapError(fail("Failed to write auth data")))
|
||||
})
|
||||
|
||||
const remove = Effect.fn("Auth.remove")(function* (key: string) {
|
||||
const norm = key.replace(/\/+$/, "")
|
||||
const data = yield* all()
|
||||
delete data[key]
|
||||
delete data[norm]
|
||||
yield* fsys.writeJson(file, data, 0o600).pipe(Effect.mapError(fail("Failed to write auth data")))
|
||||
})
|
||||
|
||||
return Service.of({ get, all, set, remove })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
|
||||
|
||||
export * as Auth from "."
|
||||
|
||||
@@ -1,33 +1,33 @@
|
||||
import z from "zod"
|
||||
import type { ZodType } from "zod"
|
||||
|
||||
export namespace BusEvent {
|
||||
export type Definition = ReturnType<typeof define>
|
||||
export type Definition = ReturnType<typeof define>
|
||||
|
||||
const registry = new Map<string, Definition>()
|
||||
const registry = new Map<string, Definition>()
|
||||
|
||||
export function define<Type extends string, Properties extends ZodType>(type: Type, properties: Properties) {
|
||||
const result = {
|
||||
type,
|
||||
properties,
|
||||
}
|
||||
registry.set(type, result)
|
||||
return result
|
||||
}
|
||||
|
||||
export function payloads() {
|
||||
return registry
|
||||
.entries()
|
||||
.map(([type, def]) => {
|
||||
return z
|
||||
.object({
|
||||
type: z.literal(type),
|
||||
properties: def.properties,
|
||||
})
|
||||
.meta({
|
||||
ref: `Event.${def.type}`,
|
||||
})
|
||||
})
|
||||
.toArray()
|
||||
export function define<Type extends string, Properties extends ZodType>(type: Type, properties: Properties) {
|
||||
const result = {
|
||||
type,
|
||||
properties,
|
||||
}
|
||||
registry.set(type, result)
|
||||
return result
|
||||
}
|
||||
|
||||
export function payloads() {
|
||||
return registry
|
||||
.entries()
|
||||
.map(([type, def]) => {
|
||||
return z
|
||||
.object({
|
||||
type: z.literal(type),
|
||||
properties: def.properties,
|
||||
})
|
||||
.meta({
|
||||
ref: `Event.${def.type}`,
|
||||
})
|
||||
})
|
||||
.toArray()
|
||||
}
|
||||
|
||||
export * as BusEvent from "./bus-event"
|
||||
|
||||
@@ -1,191 +0,0 @@
|
||||
import z from "zod"
|
||||
import { Effect, Exit, Layer, PubSub, Scope, Context, Stream } from "effect"
|
||||
import { EffectBridge } from "@/effect"
|
||||
import { Log } from "../util"
|
||||
import { BusEvent } from "./bus-event"
|
||||
import { GlobalBus } from "./global"
|
||||
import { InstanceState } from "@/effect"
|
||||
import { makeRuntime } from "@/effect/run-service"
|
||||
|
||||
const log = Log.create({ service: "bus" })
|
||||
|
||||
export const InstanceDisposed = BusEvent.define(
|
||||
"server.instance.disposed",
|
||||
z.object({
|
||||
directory: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
type Payload<D extends BusEvent.Definition = BusEvent.Definition> = {
|
||||
type: D["type"]
|
||||
properties: z.infer<D["properties"]>
|
||||
}
|
||||
|
||||
type State = {
|
||||
wildcard: PubSub.PubSub<Payload>
|
||||
typed: Map<string, PubSub.PubSub<Payload>>
|
||||
}
|
||||
|
||||
export interface Interface {
|
||||
readonly publish: <D extends BusEvent.Definition>(
|
||||
def: D,
|
||||
properties: z.output<D["properties"]>,
|
||||
) => Effect.Effect<void>
|
||||
readonly subscribe: <D extends BusEvent.Definition>(def: D) => Stream.Stream<Payload<D>>
|
||||
readonly subscribeAll: () => Stream.Stream<Payload>
|
||||
readonly subscribeCallback: <D extends BusEvent.Definition>(
|
||||
def: D,
|
||||
callback: (event: Payload<D>) => unknown,
|
||||
) => Effect.Effect<() => void>
|
||||
readonly subscribeAllCallback: (callback: (event: any) => unknown) => Effect.Effect<() => void>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Bus") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const state = yield* InstanceState.make<State>(
|
||||
Effect.fn("Bus.state")(function* (ctx) {
|
||||
const wildcard = yield* PubSub.unbounded<Payload>()
|
||||
const typed = new Map<string, PubSub.PubSub<Payload>>()
|
||||
|
||||
yield* Effect.addFinalizer(() =>
|
||||
Effect.gen(function* () {
|
||||
// Publish InstanceDisposed before shutting down so subscribers see it
|
||||
yield* PubSub.publish(wildcard, {
|
||||
type: InstanceDisposed.type,
|
||||
properties: { directory: ctx.directory },
|
||||
})
|
||||
yield* PubSub.shutdown(wildcard)
|
||||
for (const ps of typed.values()) {
|
||||
yield* PubSub.shutdown(ps)
|
||||
}
|
||||
}),
|
||||
)
|
||||
|
||||
return { wildcard, typed }
|
||||
}),
|
||||
)
|
||||
|
||||
function getOrCreate<D extends BusEvent.Definition>(state: State, def: D) {
|
||||
return Effect.gen(function* () {
|
||||
let ps = state.typed.get(def.type)
|
||||
if (!ps) {
|
||||
ps = yield* PubSub.unbounded<Payload>()
|
||||
state.typed.set(def.type, ps)
|
||||
}
|
||||
return ps as unknown as PubSub.PubSub<Payload<D>>
|
||||
})
|
||||
}
|
||||
|
||||
function publish<D extends BusEvent.Definition>(def: D, properties: z.output<D["properties"]>) {
|
||||
return Effect.gen(function* () {
|
||||
const s = yield* InstanceState.get(state)
|
||||
const payload: Payload = { type: def.type, properties }
|
||||
log.info("publishing", { type: def.type })
|
||||
|
||||
const ps = s.typed.get(def.type)
|
||||
if (ps) yield* PubSub.publish(ps, payload)
|
||||
yield* PubSub.publish(s.wildcard, payload)
|
||||
|
||||
const dir = yield* InstanceState.directory
|
||||
const context = yield* InstanceState.context
|
||||
const workspace = yield* InstanceState.workspaceID
|
||||
|
||||
GlobalBus.emit("event", {
|
||||
directory: dir,
|
||||
project: context.project.id,
|
||||
workspace,
|
||||
payload,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function subscribe<D extends BusEvent.Definition>(def: D): Stream.Stream<Payload<D>> {
|
||||
log.info("subscribing", { type: def.type })
|
||||
return Stream.unwrap(
|
||||
Effect.gen(function* () {
|
||||
const s = yield* InstanceState.get(state)
|
||||
const ps = yield* getOrCreate(s, def)
|
||||
return Stream.fromPubSub(ps)
|
||||
}),
|
||||
).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: def.type }))))
|
||||
}
|
||||
|
||||
function subscribeAll(): Stream.Stream<Payload> {
|
||||
log.info("subscribing", { type: "*" })
|
||||
return Stream.unwrap(
|
||||
Effect.gen(function* () {
|
||||
const s = yield* InstanceState.get(state)
|
||||
return Stream.fromPubSub(s.wildcard)
|
||||
}),
|
||||
).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: "*" }))))
|
||||
}
|
||||
|
||||
function on<T>(pubsub: PubSub.PubSub<T>, type: string, callback: (event: T) => unknown) {
|
||||
return Effect.gen(function* () {
|
||||
log.info("subscribing", { type })
|
||||
const bridge = yield* EffectBridge.make()
|
||||
const scope = yield* Scope.make()
|
||||
const subscription = yield* Scope.provide(scope)(PubSub.subscribe(pubsub))
|
||||
|
||||
yield* Scope.provide(scope)(
|
||||
Stream.fromSubscription(subscription).pipe(
|
||||
Stream.runForEach((msg) =>
|
||||
Effect.tryPromise({
|
||||
try: () => Promise.resolve().then(() => callback(msg)),
|
||||
catch: (cause) => {
|
||||
log.error("subscriber failed", { type, cause })
|
||||
},
|
||||
}).pipe(Effect.ignore),
|
||||
),
|
||||
Effect.forkScoped,
|
||||
),
|
||||
)
|
||||
|
||||
return () => {
|
||||
log.info("unsubscribing", { type })
|
||||
bridge.fork(Scope.close(scope, Exit.void))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const subscribeCallback = Effect.fn("Bus.subscribeCallback")(function* <D extends BusEvent.Definition>(
|
||||
def: D,
|
||||
callback: (event: Payload<D>) => unknown,
|
||||
) {
|
||||
const s = yield* InstanceState.get(state)
|
||||
const ps = yield* getOrCreate(s, def)
|
||||
return yield* on(ps, def.type, callback)
|
||||
})
|
||||
|
||||
const subscribeAllCallback = Effect.fn("Bus.subscribeAllCallback")(function* (callback: (event: any) => unknown) {
|
||||
const s = yield* InstanceState.get(state)
|
||||
return yield* on(s.wildcard, "*", callback)
|
||||
})
|
||||
|
||||
return Service.of({ publish, subscribe, subscribeAll, subscribeCallback, subscribeAllCallback })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer
|
||||
|
||||
const { runPromise, runSync } = makeRuntime(Service, layer)
|
||||
|
||||
// runSync is safe here because the subscribe chain (InstanceState.get, PubSub.subscribe,
|
||||
// Scope.make, Effect.forkScoped) is entirely synchronous. If any step becomes async, this will throw.
|
||||
export async function publish<D extends BusEvent.Definition>(def: D, properties: z.output<D["properties"]>) {
|
||||
return runPromise((svc) => svc.publish(def, properties))
|
||||
}
|
||||
|
||||
export function subscribe<D extends BusEvent.Definition>(
|
||||
def: D,
|
||||
callback: (event: { type: D["type"]; properties: z.infer<D["properties"]> }) => unknown,
|
||||
) {
|
||||
return runSync((svc) => svc.subscribeCallback(def, callback))
|
||||
}
|
||||
|
||||
export function subscribeAll(callback: (event: any) => unknown) {
|
||||
return runSync((svc) => svc.subscribeAllCallback(callback))
|
||||
}
|
||||
@@ -1 +1,193 @@
|
||||
export * as Bus from "./bus"
|
||||
import z from "zod"
|
||||
import { Effect, Exit, Layer, PubSub, Scope, Context, Stream } from "effect"
|
||||
import { EffectBridge } from "@/effect"
|
||||
import { Log } from "../util"
|
||||
import { BusEvent } from "./bus-event"
|
||||
import { GlobalBus } from "./global"
|
||||
import { InstanceState } from "@/effect"
|
||||
import { makeRuntime } from "@/effect/run-service"
|
||||
|
||||
const log = Log.create({ service: "bus" })
|
||||
|
||||
export const InstanceDisposed = BusEvent.define(
|
||||
"server.instance.disposed",
|
||||
z.object({
|
||||
directory: z.string(),
|
||||
}),
|
||||
)
|
||||
|
||||
type Payload<D extends BusEvent.Definition = BusEvent.Definition> = {
|
||||
type: D["type"]
|
||||
properties: z.infer<D["properties"]>
|
||||
}
|
||||
|
||||
type State = {
|
||||
wildcard: PubSub.PubSub<Payload>
|
||||
typed: Map<string, PubSub.PubSub<Payload>>
|
||||
}
|
||||
|
||||
export interface Interface {
|
||||
readonly publish: <D extends BusEvent.Definition>(
|
||||
def: D,
|
||||
properties: z.output<D["properties"]>,
|
||||
) => Effect.Effect<void>
|
||||
readonly subscribe: <D extends BusEvent.Definition>(def: D) => Stream.Stream<Payload<D>>
|
||||
readonly subscribeAll: () => Stream.Stream<Payload>
|
||||
readonly subscribeCallback: <D extends BusEvent.Definition>(
|
||||
def: D,
|
||||
callback: (event: Payload<D>) => unknown,
|
||||
) => Effect.Effect<() => void>
|
||||
readonly subscribeAllCallback: (callback: (event: any) => unknown) => Effect.Effect<() => void>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Bus") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const state = yield* InstanceState.make<State>(
|
||||
Effect.fn("Bus.state")(function* (ctx) {
|
||||
const wildcard = yield* PubSub.unbounded<Payload>()
|
||||
const typed = new Map<string, PubSub.PubSub<Payload>>()
|
||||
|
||||
yield* Effect.addFinalizer(() =>
|
||||
Effect.gen(function* () {
|
||||
// Publish InstanceDisposed before shutting down so subscribers see it
|
||||
yield* PubSub.publish(wildcard, {
|
||||
type: InstanceDisposed.type,
|
||||
properties: { directory: ctx.directory },
|
||||
})
|
||||
yield* PubSub.shutdown(wildcard)
|
||||
for (const ps of typed.values()) {
|
||||
yield* PubSub.shutdown(ps)
|
||||
}
|
||||
}),
|
||||
)
|
||||
|
||||
return { wildcard, typed }
|
||||
}),
|
||||
)
|
||||
|
||||
function getOrCreate<D extends BusEvent.Definition>(state: State, def: D) {
|
||||
return Effect.gen(function* () {
|
||||
let ps = state.typed.get(def.type)
|
||||
if (!ps) {
|
||||
ps = yield* PubSub.unbounded<Payload>()
|
||||
state.typed.set(def.type, ps)
|
||||
}
|
||||
return ps as unknown as PubSub.PubSub<Payload<D>>
|
||||
})
|
||||
}
|
||||
|
||||
function publish<D extends BusEvent.Definition>(def: D, properties: z.output<D["properties"]>) {
|
||||
return Effect.gen(function* () {
|
||||
const s = yield* InstanceState.get(state)
|
||||
const payload: Payload = { type: def.type, properties }
|
||||
log.info("publishing", { type: def.type })
|
||||
|
||||
const ps = s.typed.get(def.type)
|
||||
if (ps) yield* PubSub.publish(ps, payload)
|
||||
yield* PubSub.publish(s.wildcard, payload)
|
||||
|
||||
const dir = yield* InstanceState.directory
|
||||
const context = yield* InstanceState.context
|
||||
const workspace = yield* InstanceState.workspaceID
|
||||
|
||||
GlobalBus.emit("event", {
|
||||
directory: dir,
|
||||
project: context.project.id,
|
||||
workspace,
|
||||
payload,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function subscribe<D extends BusEvent.Definition>(def: D): Stream.Stream<Payload<D>> {
|
||||
log.info("subscribing", { type: def.type })
|
||||
return Stream.unwrap(
|
||||
Effect.gen(function* () {
|
||||
const s = yield* InstanceState.get(state)
|
||||
const ps = yield* getOrCreate(s, def)
|
||||
return Stream.fromPubSub(ps)
|
||||
}),
|
||||
).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: def.type }))))
|
||||
}
|
||||
|
||||
function subscribeAll(): Stream.Stream<Payload> {
|
||||
log.info("subscribing", { type: "*" })
|
||||
return Stream.unwrap(
|
||||
Effect.gen(function* () {
|
||||
const s = yield* InstanceState.get(state)
|
||||
return Stream.fromPubSub(s.wildcard)
|
||||
}),
|
||||
).pipe(Stream.ensuring(Effect.sync(() => log.info("unsubscribing", { type: "*" }))))
|
||||
}
|
||||
|
||||
function on<T>(pubsub: PubSub.PubSub<T>, type: string, callback: (event: T) => unknown) {
|
||||
return Effect.gen(function* () {
|
||||
log.info("subscribing", { type })
|
||||
const bridge = yield* EffectBridge.make()
|
||||
const scope = yield* Scope.make()
|
||||
const subscription = yield* Scope.provide(scope)(PubSub.subscribe(pubsub))
|
||||
|
||||
yield* Scope.provide(scope)(
|
||||
Stream.fromSubscription(subscription).pipe(
|
||||
Stream.runForEach((msg) =>
|
||||
Effect.tryPromise({
|
||||
try: () => Promise.resolve().then(() => callback(msg)),
|
||||
catch: (cause) => {
|
||||
log.error("subscriber failed", { type, cause })
|
||||
},
|
||||
}).pipe(Effect.ignore),
|
||||
),
|
||||
Effect.forkScoped,
|
||||
),
|
||||
)
|
||||
|
||||
return () => {
|
||||
log.info("unsubscribing", { type })
|
||||
bridge.fork(Scope.close(scope, Exit.void))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const subscribeCallback = Effect.fn("Bus.subscribeCallback")(function* <D extends BusEvent.Definition>(
|
||||
def: D,
|
||||
callback: (event: Payload<D>) => unknown,
|
||||
) {
|
||||
const s = yield* InstanceState.get(state)
|
||||
const ps = yield* getOrCreate(s, def)
|
||||
return yield* on(ps, def.type, callback)
|
||||
})
|
||||
|
||||
const subscribeAllCallback = Effect.fn("Bus.subscribeAllCallback")(function* (callback: (event: any) => unknown) {
|
||||
const s = yield* InstanceState.get(state)
|
||||
return yield* on(s.wildcard, "*", callback)
|
||||
})
|
||||
|
||||
return Service.of({ publish, subscribe, subscribeAll, subscribeCallback, subscribeAllCallback })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer
|
||||
|
||||
const { runPromise, runSync } = makeRuntime(Service, layer)
|
||||
|
||||
// runSync is safe here because the subscribe chain (InstanceState.get, PubSub.subscribe,
|
||||
// Scope.make, Effect.forkScoped) is entirely synchronous. If any step becomes async, this will throw.
|
||||
export async function publish<D extends BusEvent.Definition>(def: D, properties: z.output<D["properties"]>) {
|
||||
return runPromise((svc) => svc.publish(def, properties))
|
||||
}
|
||||
|
||||
export function subscribe<D extends BusEvent.Definition>(
|
||||
def: D,
|
||||
callback: (event: { type: D["type"]; properties: z.infer<D["properties"]> }) => unknown,
|
||||
) {
|
||||
return runSync((svc) => svc.subscribeCallback(def, callback))
|
||||
}
|
||||
|
||||
export function subscribeAll(callback: (event: any) => unknown) {
|
||||
return runSync((svc) => svc.subscribeAllCallback(callback))
|
||||
}
|
||||
|
||||
export * as Bus from "."
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { cmd } from "./cmd"
|
||||
import { Duration, Effect, Match, Option } from "effect"
|
||||
import { UI } from "../ui"
|
||||
import { AccountID, Account, OrgID, PollExpired, type PollResult } from "@/account"
|
||||
import { type AccountError } from "@/account/schema"
|
||||
import { Account } from "@/account/account"
|
||||
import { AccountID, OrgID, PollExpired, type PollResult, type AccountError } from "@/account/schema"
|
||||
import { AppRuntime } from "@/effect/app-runtime"
|
||||
import * as Prompt from "../effect/prompt"
|
||||
import open from "open"
|
||||
|
||||
@@ -8,6 +8,7 @@ import { MCP } from "../../mcp"
|
||||
import { McpAuth } from "../../mcp/auth"
|
||||
import { McpOAuthProvider } from "../../mcp/oauth-provider"
|
||||
import { Config } from "../../config"
|
||||
import { ConfigMCP } from "../../config/mcp"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { Installation } from "../../installation"
|
||||
import { InstallationVersion } from "../../installation/version"
|
||||
@@ -43,7 +44,7 @@ function getAuthStatusText(status: MCP.AuthStatus): string {
|
||||
|
||||
type McpEntry = NonNullable<Config.Info["mcp"]>[string]
|
||||
|
||||
type McpConfigured = Config.Mcp
|
||||
type McpConfigured = ConfigMCP.Info
|
||||
function isMcpConfigured(config: McpEntry): config is McpConfigured {
|
||||
return typeof config === "object" && config !== null && "type" in config
|
||||
}
|
||||
@@ -426,7 +427,7 @@ async function resolveConfigPath(baseDir: string, global = false) {
|
||||
return candidates[0]
|
||||
}
|
||||
|
||||
async function addMcpToConfig(name: string, mcpConfig: Config.Mcp, configPath: string) {
|
||||
async function addMcpToConfig(name: string, mcpConfig: ConfigMCP.Info, configPath: string) {
|
||||
let text = "{}"
|
||||
if (await Filesystem.exists(configPath)) {
|
||||
text = await Filesystem.readText(configPath)
|
||||
@@ -514,7 +515,7 @@ export const McpAddCommand = cmd({
|
||||
})
|
||||
if (prompts.isCancel(command)) throw new UI.CancelledError()
|
||||
|
||||
const mcpConfig: Config.Mcp = {
|
||||
const mcpConfig: ConfigMCP.Info = {
|
||||
type: "local",
|
||||
command: command.split(" "),
|
||||
}
|
||||
@@ -544,7 +545,7 @@ export const McpAddCommand = cmd({
|
||||
})
|
||||
if (prompts.isCancel(useOAuth)) throw new UI.CancelledError()
|
||||
|
||||
let mcpConfig: Config.Mcp
|
||||
let mcpConfig: ConfigMCP.Info
|
||||
|
||||
if (useOAuth) {
|
||||
const hasClientId = await prompts.confirm({
|
||||
|
||||
@@ -297,7 +297,9 @@ export const ProvidersLoginCommand = cmd({
|
||||
prompts.intro("Add credential")
|
||||
if (args.url) {
|
||||
const url = args.url.replace(/\/+$/, "")
|
||||
const wellknown = await fetch(`${url}/.well-known/opencode`).then((x) => x.json() as any)
|
||||
const wellknown = (await fetch(`${url}/.well-known/opencode`).then((x) => x.json())) as {
|
||||
auth: { command: string[]; env: string }
|
||||
}
|
||||
prompts.log.info(`Running \`${wellknown.auth.command.join(" ")}\``)
|
||||
const proc = Process.spawn(wellknown.auth.command, {
|
||||
stdout: "pipe",
|
||||
|
||||
130
packages/opencode/src/cli/cmd/tui/component/bg-pulse.tsx
Normal file
130
packages/opencode/src/cli/cmd/tui/component/bg-pulse.tsx
Normal file
@@ -0,0 +1,130 @@
|
||||
import { BoxRenderable, RGBA } from "@opentui/core"
|
||||
import { createMemo, createSignal, For, onCleanup, onMount } from "solid-js"
|
||||
import { tint, useTheme } from "@tui/context/theme"
|
||||
|
||||
const PERIOD = 4600
|
||||
const RINGS = 3
|
||||
const WIDTH = 3.8
|
||||
const TAIL = 9.5
|
||||
const AMP = 0.55
|
||||
const TAIL_AMP = 0.16
|
||||
const BREATH_AMP = 0.05
|
||||
const BREATH_SPEED = 0.0008
|
||||
// Offset so bg ring emits from GO center at the moment the logo pulse peaks.
|
||||
const PHASE_OFFSET = 0.29
|
||||
|
||||
export type BgPulseMask = {
|
||||
x: number
|
||||
y: number
|
||||
width: number
|
||||
height: number
|
||||
pad?: number
|
||||
strength?: number
|
||||
}
|
||||
|
||||
export function BgPulse(props: { centerX?: number; centerY?: number; masks?: BgPulseMask[] }) {
|
||||
const { theme } = useTheme()
|
||||
const [now, setNow] = createSignal(performance.now())
|
||||
const [size, setSize] = createSignal<{ width: number; height: number }>({ width: 0, height: 0 })
|
||||
let box: BoxRenderable | undefined
|
||||
|
||||
const timer = setInterval(() => setNow(performance.now()), 50)
|
||||
onCleanup(() => clearInterval(timer))
|
||||
|
||||
const sync = () => {
|
||||
if (!box) return
|
||||
setSize({ width: box.width, height: box.height })
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
sync()
|
||||
box?.on("resize", sync)
|
||||
})
|
||||
|
||||
onCleanup(() => {
|
||||
box?.off("resize", sync)
|
||||
})
|
||||
|
||||
const grid = createMemo(() => {
|
||||
const t = now()
|
||||
const w = size().width
|
||||
const h = size().height
|
||||
if (w === 0 || h === 0) return [] as RGBA[][]
|
||||
const cxv = props.centerX ?? w / 2
|
||||
const cyv = props.centerY ?? h / 2
|
||||
const reach = Math.hypot(Math.max(cxv, w - cxv), Math.max(cyv, h - cyv) * 2) + TAIL
|
||||
const ringStates = Array.from({ length: RINGS }, (_, i) => {
|
||||
const offset = i / RINGS
|
||||
const phase = (t / PERIOD + offset - PHASE_OFFSET + 1) % 1
|
||||
const envelope = Math.sin(phase * Math.PI)
|
||||
const eased = envelope * envelope * (3 - 2 * envelope)
|
||||
return {
|
||||
head: phase * reach,
|
||||
eased,
|
||||
}
|
||||
})
|
||||
const normalizedMasks = props.masks?.map((m) => {
|
||||
const pad = m.pad ?? 2
|
||||
return {
|
||||
left: m.x - pad,
|
||||
right: m.x + m.width + pad,
|
||||
top: m.y - pad,
|
||||
bottom: m.y + m.height + pad,
|
||||
pad,
|
||||
strength: m.strength ?? 0.85,
|
||||
}
|
||||
})
|
||||
const rows = [] as RGBA[][]
|
||||
for (let y = 0; y < h; y++) {
|
||||
const row = [] as RGBA[]
|
||||
for (let x = 0; x < w; x++) {
|
||||
const dx = x + 0.5 - cxv
|
||||
const dy = (y + 0.5 - cyv) * 2
|
||||
const dist = Math.hypot(dx, dy)
|
||||
let level = 0
|
||||
for (const ring of ringStates) {
|
||||
const delta = dist - ring.head
|
||||
const crest = Math.abs(delta) < WIDTH ? 0.5 + 0.5 * Math.cos((delta / WIDTH) * Math.PI) : 0
|
||||
const tail = delta < 0 && delta > -TAIL ? (1 + delta / TAIL) ** 2.3 : 0
|
||||
level += (crest * AMP + tail * TAIL_AMP) * ring.eased
|
||||
}
|
||||
const edgeFalloff = Math.max(0, 1 - (dist / (reach * 0.85)) ** 2)
|
||||
const breath = (0.5 + 0.5 * Math.sin(t * BREATH_SPEED)) * BREATH_AMP
|
||||
let maskAtten = 1
|
||||
if (normalizedMasks) {
|
||||
for (const m of normalizedMasks) {
|
||||
if (x < m.left || x > m.right || y < m.top || y > m.bottom) continue
|
||||
const inX = Math.min(x - m.left, m.right - x)
|
||||
const inY = Math.min(y - m.top, m.bottom - y)
|
||||
const edge = Math.min(inX / m.pad, inY / m.pad, 1)
|
||||
const eased = edge * edge * (3 - 2 * edge)
|
||||
const reduce = 1 - m.strength * eased
|
||||
if (reduce < maskAtten) maskAtten = reduce
|
||||
}
|
||||
}
|
||||
const strength = Math.min(1, ((level / RINGS) * edgeFalloff + breath * edgeFalloff) * maskAtten)
|
||||
row.push(tint(theme.backgroundPanel, theme.primary, strength * 0.7))
|
||||
}
|
||||
rows.push(row)
|
||||
}
|
||||
return rows
|
||||
})
|
||||
|
||||
return (
|
||||
<box ref={(item: BoxRenderable) => (box = item)} width="100%" height="100%">
|
||||
<For each={grid()}>
|
||||
{(row) => (
|
||||
<box flexDirection="row">
|
||||
<For each={row}>
|
||||
{(color) => (
|
||||
<text bg={color} fg={color} selectable={false}>
|
||||
{" "}
|
||||
</text>
|
||||
)}
|
||||
</For>
|
||||
</box>
|
||||
)}
|
||||
</For>
|
||||
</box>
|
||||
)
|
||||
}
|
||||
@@ -1,12 +1,16 @@
|
||||
import { RGBA, TextAttributes } from "@opentui/core"
|
||||
import { BoxRenderable, RGBA, TextAttributes } from "@opentui/core"
|
||||
import { useKeyboard } from "@opentui/solid"
|
||||
import open from "open"
|
||||
import { createSignal } from "solid-js"
|
||||
import { createSignal, onCleanup, onMount } from "solid-js"
|
||||
import { selectedForeground, useTheme } from "@tui/context/theme"
|
||||
import { useDialog, type DialogContext } from "@tui/ui/dialog"
|
||||
import { Link } from "@tui/ui/link"
|
||||
import { GoLogo } from "./logo"
|
||||
import { BgPulse, type BgPulseMask } from "./bg-pulse"
|
||||
|
||||
const GO_URL = "https://opencode.ai/go"
|
||||
const PAD_X = 3
|
||||
const PAD_TOP_OUTER = 1
|
||||
|
||||
export type DialogGoUpsellProps = {
|
||||
onClose?: (dontShowAgain?: boolean) => void
|
||||
@@ -27,62 +31,116 @@ export function DialogGoUpsell(props: DialogGoUpsellProps) {
|
||||
const dialog = useDialog()
|
||||
const { theme } = useTheme()
|
||||
const fg = selectedForeground(theme)
|
||||
const [selected, setSelected] = createSignal(0)
|
||||
const [selected, setSelected] = createSignal<"dismiss" | "subscribe">("subscribe")
|
||||
const [center, setCenter] = createSignal<{ x: number; y: number } | undefined>()
|
||||
const [masks, setMasks] = createSignal<BgPulseMask[]>([])
|
||||
let content: BoxRenderable | undefined
|
||||
let logoBox: BoxRenderable | undefined
|
||||
let headingBox: BoxRenderable | undefined
|
||||
let descBox: BoxRenderable | undefined
|
||||
let buttonsBox: BoxRenderable | undefined
|
||||
|
||||
const sync = () => {
|
||||
if (!content || !logoBox) return
|
||||
setCenter({
|
||||
x: logoBox.x - content.x + logoBox.width / 2,
|
||||
y: logoBox.y - content.y + logoBox.height / 2 + PAD_TOP_OUTER,
|
||||
})
|
||||
const next: BgPulseMask[] = []
|
||||
const baseY = PAD_TOP_OUTER
|
||||
for (const b of [headingBox, descBox, buttonsBox]) {
|
||||
if (!b) continue
|
||||
next.push({
|
||||
x: b.x - content.x,
|
||||
y: b.y - content.y + baseY,
|
||||
width: b.width,
|
||||
height: b.height,
|
||||
pad: 2,
|
||||
strength: 0.78,
|
||||
})
|
||||
}
|
||||
setMasks(next)
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
sync()
|
||||
for (const b of [content, logoBox, headingBox, descBox, buttonsBox]) b?.on("resize", sync)
|
||||
})
|
||||
|
||||
onCleanup(() => {
|
||||
for (const b of [content, logoBox, headingBox, descBox, buttonsBox]) b?.off("resize", sync)
|
||||
})
|
||||
|
||||
useKeyboard((evt) => {
|
||||
if (evt.name === "left" || evt.name === "right" || evt.name === "tab") {
|
||||
setSelected((s) => (s === 0 ? 1 : 0))
|
||||
setSelected((s) => (s === "subscribe" ? "dismiss" : "subscribe"))
|
||||
return
|
||||
}
|
||||
if (evt.name !== "return") return
|
||||
if (selected() === 0) subscribe(props, dialog)
|
||||
else dismiss(props, dialog)
|
||||
if (evt.name === "return") {
|
||||
if (selected() === "subscribe") subscribe(props, dialog)
|
||||
else dismiss(props, dialog)
|
||||
}
|
||||
})
|
||||
|
||||
return (
|
||||
<box paddingLeft={2} paddingRight={2} gap={1}>
|
||||
<box flexDirection="row" justifyContent="space-between">
|
||||
<text attributes={TextAttributes.BOLD} fg={theme.text}>
|
||||
Free limit reached
|
||||
</text>
|
||||
<text fg={theme.textMuted} onMouseUp={() => dialog.clear()}>
|
||||
esc
|
||||
</text>
|
||||
<box ref={(item: BoxRenderable) => (content = item)}>
|
||||
<box position="absolute" top={-PAD_TOP_OUTER} left={0} right={0} bottom={0} zIndex={0}>
|
||||
<BgPulse centerX={center()?.x} centerY={center()?.y} masks={masks()} />
|
||||
</box>
|
||||
<box gap={1} paddingBottom={1}>
|
||||
<text fg={theme.textMuted}>
|
||||
Subscribe to OpenCode Go to keep going with reliable access to the best open-source models, starting at
|
||||
$5/month.
|
||||
</text>
|
||||
<box flexDirection="row" gap={1}>
|
||||
<box paddingLeft={PAD_X} paddingRight={PAD_X} paddingBottom={1} gap={1}>
|
||||
<box ref={(item: BoxRenderable) => (headingBox = item)} flexDirection="row" justifyContent="space-between">
|
||||
<text attributes={TextAttributes.BOLD} fg={theme.text}>
|
||||
Free limit reached
|
||||
</text>
|
||||
<text fg={theme.textMuted} onMouseUp={() => dialog.clear()}>
|
||||
esc
|
||||
</text>
|
||||
</box>
|
||||
<box ref={(item: BoxRenderable) => (descBox = item)} gap={0}>
|
||||
<box flexDirection="row">
|
||||
<text fg={theme.textMuted}>Subscribe to </text>
|
||||
<text attributes={TextAttributes.BOLD} fg={theme.textMuted}>
|
||||
OpenCode Go
|
||||
</text>
|
||||
<text fg={theme.textMuted}> for reliable access to the</text>
|
||||
</box>
|
||||
<text fg={theme.textMuted}>best open-source models, starting at $5/month.</text>
|
||||
</box>
|
||||
<box alignItems="center" gap={1} paddingBottom={1}>
|
||||
<box ref={(item: BoxRenderable) => (logoBox = item)}>
|
||||
<GoLogo />
|
||||
</box>
|
||||
<Link href={GO_URL} fg={theme.primary} />
|
||||
</box>
|
||||
</box>
|
||||
<box flexDirection="row" justifyContent="flex-end" gap={1} paddingBottom={1}>
|
||||
<box
|
||||
paddingLeft={3}
|
||||
paddingRight={3}
|
||||
backgroundColor={selected() === 0 ? theme.primary : RGBA.fromInts(0, 0, 0, 0)}
|
||||
onMouseOver={() => setSelected(0)}
|
||||
onMouseUp={() => subscribe(props, dialog)}
|
||||
>
|
||||
<text fg={selected() === 0 ? fg : theme.text} attributes={selected() === 0 ? TextAttributes.BOLD : undefined}>
|
||||
subscribe
|
||||
</text>
|
||||
</box>
|
||||
<box
|
||||
paddingLeft={3}
|
||||
paddingRight={3}
|
||||
backgroundColor={selected() === 1 ? theme.primary : RGBA.fromInts(0, 0, 0, 0)}
|
||||
onMouseOver={() => setSelected(1)}
|
||||
onMouseUp={() => dismiss(props, dialog)}
|
||||
>
|
||||
<text
|
||||
fg={selected() === 1 ? fg : theme.textMuted}
|
||||
attributes={selected() === 1 ? TextAttributes.BOLD : undefined}
|
||||
<box ref={(item: BoxRenderable) => (buttonsBox = item)} flexDirection="row" justifyContent="space-between">
|
||||
<box
|
||||
paddingLeft={2}
|
||||
paddingRight={2}
|
||||
backgroundColor={selected() === "dismiss" ? theme.primary : RGBA.fromInts(0, 0, 0, 0)}
|
||||
onMouseOver={() => setSelected("dismiss")}
|
||||
onMouseUp={() => dismiss(props, dialog)}
|
||||
>
|
||||
don't show again
|
||||
</text>
|
||||
<text
|
||||
fg={selected() === "dismiss" ? fg : theme.textMuted}
|
||||
attributes={selected() === "dismiss" ? TextAttributes.BOLD : undefined}
|
||||
>
|
||||
don't show again
|
||||
</text>
|
||||
</box>
|
||||
<box
|
||||
paddingLeft={2}
|
||||
paddingRight={2}
|
||||
backgroundColor={selected() === "subscribe" ? theme.primary : RGBA.fromInts(0, 0, 0, 0)}
|
||||
onMouseOver={() => setSelected("subscribe")}
|
||||
onMouseUp={() => subscribe(props, dialog)}
|
||||
>
|
||||
<text
|
||||
fg={selected() === "subscribe" ? fg : theme.text}
|
||||
attributes={selected() === "subscribe" ? TextAttributes.BOLD : undefined}
|
||||
>
|
||||
subscribe
|
||||
</text>
|
||||
</box>
|
||||
</box>
|
||||
</box>
|
||||
</box>
|
||||
|
||||
@@ -0,0 +1,101 @@
|
||||
import { TextAttributes } from "@opentui/core"
|
||||
import { useTheme } from "../context/theme"
|
||||
import { useDialog } from "../ui/dialog"
|
||||
import { createStore } from "solid-js/store"
|
||||
import { For } from "solid-js"
|
||||
import { useKeyboard } from "@opentui/solid"
|
||||
|
||||
export function DialogSessionDeleteFailed(props: {
|
||||
session: string
|
||||
workspace: string
|
||||
onDelete?: () => boolean | void | Promise<boolean | void>
|
||||
onRestore?: () => boolean | void | Promise<boolean | void>
|
||||
onDone?: () => void
|
||||
}) {
|
||||
const dialog = useDialog()
|
||||
const { theme } = useTheme()
|
||||
const [store, setStore] = createStore({
|
||||
active: "delete" as "delete" | "restore",
|
||||
})
|
||||
|
||||
const options = [
|
||||
{
|
||||
id: "delete" as const,
|
||||
title: "Delete workspace",
|
||||
description: "Delete the workspace and all sessions attached to it.",
|
||||
run: props.onDelete,
|
||||
},
|
||||
{
|
||||
id: "restore" as const,
|
||||
title: "Restore to new workspace",
|
||||
description: "Try to restore this session into a new workspace.",
|
||||
run: props.onRestore,
|
||||
},
|
||||
]
|
||||
|
||||
async function confirm() {
|
||||
const result = await options.find((item) => item.id === store.active)?.run?.()
|
||||
if (result === false) return
|
||||
props.onDone?.()
|
||||
if (!props.onDone) dialog.clear()
|
||||
}
|
||||
|
||||
useKeyboard((evt) => {
|
||||
if (evt.name === "return") {
|
||||
void confirm()
|
||||
}
|
||||
if (evt.name === "left" || evt.name === "up") {
|
||||
setStore("active", "delete")
|
||||
}
|
||||
if (evt.name === "right" || evt.name === "down") {
|
||||
setStore("active", "restore")
|
||||
}
|
||||
})
|
||||
|
||||
return (
|
||||
<box paddingLeft={2} paddingRight={2} gap={1}>
|
||||
<box flexDirection="row" justifyContent="space-between">
|
||||
<text attributes={TextAttributes.BOLD} fg={theme.text}>
|
||||
Failed to Delete Session
|
||||
</text>
|
||||
<text fg={theme.textMuted} onMouseUp={() => dialog.clear()}>
|
||||
esc
|
||||
</text>
|
||||
</box>
|
||||
<text fg={theme.textMuted} wrapMode="word">
|
||||
{`The session "${props.session}" could not be deleted because the workspace "${props.workspace}" is not available.`}
|
||||
</text>
|
||||
<text fg={theme.textMuted} wrapMode="word">
|
||||
Choose how you want to recover this broken workspace session.
|
||||
</text>
|
||||
<box flexDirection="column" paddingBottom={1} gap={1}>
|
||||
<For each={options}>
|
||||
{(item) => (
|
||||
<box
|
||||
flexDirection="column"
|
||||
paddingLeft={1}
|
||||
paddingRight={1}
|
||||
paddingTop={1}
|
||||
paddingBottom={1}
|
||||
backgroundColor={item.id === store.active ? theme.primary : undefined}
|
||||
onMouseUp={() => {
|
||||
setStore("active", item.id)
|
||||
void confirm()
|
||||
}}
|
||||
>
|
||||
<text
|
||||
attributes={TextAttributes.BOLD}
|
||||
fg={item.id === store.active ? theme.selectedListItemText : theme.text}
|
||||
>
|
||||
{item.title}
|
||||
</text>
|
||||
<text fg={item.id === store.active ? theme.selectedListItemText : theme.textMuted} wrapMode="word">
|
||||
{item.description}
|
||||
</text>
|
||||
</box>
|
||||
)}
|
||||
</For>
|
||||
</box>
|
||||
</box>
|
||||
)
|
||||
}
|
||||
@@ -13,8 +13,10 @@ import { DialogSessionRename } from "./dialog-session-rename"
|
||||
import { Keybind } from "@/util"
|
||||
import { createDebouncedSignal } from "../util/signal"
|
||||
import { useToast } from "../ui/toast"
|
||||
import { DialogWorkspaceCreate, openWorkspaceSession } from "./dialog-workspace-create"
|
||||
import { DialogWorkspaceCreate, openWorkspaceSession, restoreWorkspaceSession } from "./dialog-workspace-create"
|
||||
import { Spinner } from "./spinner"
|
||||
import { errorMessage } from "@/util/error"
|
||||
import { DialogSessionDeleteFailed } from "./dialog-session-delete-failed"
|
||||
|
||||
type WorkspaceStatus = "connected" | "connecting" | "disconnected" | "error"
|
||||
|
||||
@@ -30,7 +32,7 @@ export function DialogSessionList() {
|
||||
const [toDelete, setToDelete] = createSignal<string>()
|
||||
const [search, setSearch] = createDebouncedSignal("", 150)
|
||||
|
||||
const [searchResults] = createResource(search, async (query) => {
|
||||
const [searchResults, { refetch }] = createResource(search, async (query) => {
|
||||
if (!query) return undefined
|
||||
const result = await sdk.client.session.list({ search: query, limit: 30 })
|
||||
return result.data ?? []
|
||||
@@ -56,11 +58,66 @@ export function DialogSessionList() {
|
||||
))
|
||||
}
|
||||
|
||||
function recover(session: NonNullable<ReturnType<typeof sessions>[number]>) {
|
||||
const workspace = project.workspace.get(session.workspaceID!)
|
||||
const list = () => dialog.replace(() => <DialogSessionList />)
|
||||
dialog.replace(() => (
|
||||
<DialogSessionDeleteFailed
|
||||
session={session.title}
|
||||
workspace={workspace?.name ?? session.workspaceID!}
|
||||
onDone={list}
|
||||
onDelete={async () => {
|
||||
const current = currentSessionID()
|
||||
const info = current ? sync.data.session.find((item) => item.id === current) : undefined
|
||||
const result = await sdk.client.experimental.workspace.remove({ id: session.workspaceID! })
|
||||
if (result.error) {
|
||||
toast.show({
|
||||
variant: "error",
|
||||
title: "Failed to delete workspace",
|
||||
message: errorMessage(result.error),
|
||||
})
|
||||
return false
|
||||
}
|
||||
await project.workspace.sync()
|
||||
await sync.session.refresh()
|
||||
if (search()) await refetch()
|
||||
if (info?.workspaceID === session.workspaceID) {
|
||||
route.navigate({ type: "home" })
|
||||
}
|
||||
return true
|
||||
}}
|
||||
onRestore={() => {
|
||||
dialog.replace(() => (
|
||||
<DialogWorkspaceCreate
|
||||
onSelect={(workspaceID) =>
|
||||
restoreWorkspaceSession({
|
||||
dialog,
|
||||
sdk,
|
||||
sync,
|
||||
project,
|
||||
toast,
|
||||
workspaceID,
|
||||
sessionID: session.id,
|
||||
done: list,
|
||||
})
|
||||
}
|
||||
/>
|
||||
))
|
||||
return false
|
||||
}}
|
||||
/>
|
||||
))
|
||||
}
|
||||
|
||||
const options = createMemo(() => {
|
||||
const today = new Date().toDateString()
|
||||
return sessions()
|
||||
.filter((x) => x.parentID === undefined)
|
||||
.toSorted((a, b) => b.time.updated - a.time.updated)
|
||||
.toSorted((a, b) => {
|
||||
const updatedDay = new Date(b.time.updated).setHours(0, 0, 0, 0) - new Date(a.time.updated).setHours(0, 0, 0, 0)
|
||||
if (updatedDay !== 0) return updatedDay
|
||||
return b.time.created - a.time.created
|
||||
})
|
||||
.map((x) => {
|
||||
const workspace = x.workspaceID ? project.workspace.get(x.workspaceID) : undefined
|
||||
|
||||
@@ -145,9 +202,43 @@ export function DialogSessionList() {
|
||||
title: "delete",
|
||||
onTrigger: async (option) => {
|
||||
if (toDelete() === option.value) {
|
||||
void sdk.client.session.delete({
|
||||
sessionID: option.value,
|
||||
})
|
||||
const session = sessions().find((item) => item.id === option.value)
|
||||
const status = session?.workspaceID ? project.workspace.status(session.workspaceID) : undefined
|
||||
|
||||
try {
|
||||
const result = await sdk.client.session.delete({
|
||||
sessionID: option.value,
|
||||
})
|
||||
if (result.error) {
|
||||
if (session?.workspaceID) {
|
||||
recover(session)
|
||||
} else {
|
||||
toast.show({
|
||||
variant: "error",
|
||||
title: "Failed to delete session",
|
||||
message: errorMessage(result.error),
|
||||
})
|
||||
}
|
||||
setToDelete(undefined)
|
||||
return
|
||||
}
|
||||
} catch (err) {
|
||||
if (session?.workspaceID) {
|
||||
recover(session)
|
||||
} else {
|
||||
toast.show({
|
||||
variant: "error",
|
||||
title: "Failed to delete session",
|
||||
message: errorMessage(err),
|
||||
})
|
||||
}
|
||||
setToDelete(undefined)
|
||||
return
|
||||
}
|
||||
if (status && status !== "connected") {
|
||||
await sync.session.refresh()
|
||||
}
|
||||
if (search()) await refetch()
|
||||
setToDelete(undefined)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -6,6 +6,8 @@ import { useSync } from "@tui/context/sync"
|
||||
import { useProject } from "@tui/context/project"
|
||||
import { createMemo, createSignal, onMount } from "solid-js"
|
||||
import { setTimeout as sleep } from "node:timers/promises"
|
||||
import { errorData, errorMessage } from "@/util/error"
|
||||
import * as Log from "@/util/log"
|
||||
import { useSDK } from "../context/sdk"
|
||||
import { useToast } from "../ui/toast"
|
||||
|
||||
@@ -15,6 +17,8 @@ type Adaptor = {
|
||||
description: string
|
||||
}
|
||||
|
||||
const log = Log.Default.clone().tag("service", "tui-workspace")
|
||||
|
||||
function scoped(sdk: ReturnType<typeof useSDK>, sync: ReturnType<typeof useSync>, workspaceID: string) {
|
||||
return createOpencodeClient({
|
||||
baseUrl: sdk.url,
|
||||
@@ -33,8 +37,18 @@ export async function openWorkspaceSession(input: {
|
||||
workspaceID: string
|
||||
}) {
|
||||
const client = scoped(input.sdk, input.sync, input.workspaceID)
|
||||
log.info("workspace session create requested", {
|
||||
workspaceID: input.workspaceID,
|
||||
})
|
||||
|
||||
while (true) {
|
||||
const result = await client.session.create({ workspaceID: input.workspaceID }).catch(() => undefined)
|
||||
const result = await client.session.create({ workspace: input.workspaceID }).catch((err) => {
|
||||
log.error("workspace session create request failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
error: errorData(err),
|
||||
})
|
||||
return undefined
|
||||
})
|
||||
if (!result) {
|
||||
input.toast.show({
|
||||
message: "Failed to create workspace session",
|
||||
@@ -42,26 +56,113 @@ export async function openWorkspaceSession(input: {
|
||||
})
|
||||
return
|
||||
}
|
||||
if (result.response.status >= 500 && result.response.status < 600) {
|
||||
log.info("workspace session create response", {
|
||||
workspaceID: input.workspaceID,
|
||||
status: result.response?.status,
|
||||
sessionID: result.data?.id,
|
||||
})
|
||||
if (result.response?.status && result.response.status >= 500 && result.response.status < 600) {
|
||||
log.warn("workspace session create retrying after server error", {
|
||||
workspaceID: input.workspaceID,
|
||||
status: result.response.status,
|
||||
})
|
||||
await sleep(1000)
|
||||
continue
|
||||
}
|
||||
if (!result.data) {
|
||||
log.error("workspace session create returned no data", {
|
||||
workspaceID: input.workspaceID,
|
||||
status: result.response?.status,
|
||||
})
|
||||
input.toast.show({
|
||||
message: "Failed to create workspace session",
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
input.route.navigate({
|
||||
type: "session",
|
||||
sessionID: result.data.id,
|
||||
})
|
||||
log.info("workspace session create complete", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: result.data.id,
|
||||
})
|
||||
input.dialog.clear()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
export async function restoreWorkspaceSession(input: {
|
||||
dialog: ReturnType<typeof useDialog>
|
||||
sdk: ReturnType<typeof useSDK>
|
||||
sync: ReturnType<typeof useSync>
|
||||
project: ReturnType<typeof useProject>
|
||||
toast: ReturnType<typeof useToast>
|
||||
workspaceID: string
|
||||
sessionID: string
|
||||
done?: () => void
|
||||
}) {
|
||||
log.info("session restore requested", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
})
|
||||
const result = await input.sdk.client.experimental.workspace
|
||||
.sessionRestore({ id: input.workspaceID, sessionID: input.sessionID })
|
||||
.catch((err) => {
|
||||
log.error("session restore request failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
error: errorData(err),
|
||||
})
|
||||
return undefined
|
||||
})
|
||||
if (!result?.data) {
|
||||
log.error("session restore failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
status: result?.response?.status,
|
||||
error: result?.error ? errorData(result.error) : undefined,
|
||||
})
|
||||
input.toast.show({
|
||||
message: `Failed to restore session: ${errorMessage(result?.error ?? "no response")}`,
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
log.info("session restore response", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
status: result.response?.status,
|
||||
total: result.data.total,
|
||||
})
|
||||
|
||||
await Promise.all([input.project.workspace.sync(), input.sync.session.refresh()]).catch((err) => {
|
||||
log.error("session restore refresh failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
error: errorData(err),
|
||||
})
|
||||
throw err
|
||||
})
|
||||
|
||||
log.info("session restore complete", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
total: result.data.total,
|
||||
})
|
||||
|
||||
input.toast.show({
|
||||
message: "Session restored into the new workspace",
|
||||
variant: "success",
|
||||
})
|
||||
input.done?.()
|
||||
if (input.done) return
|
||||
input.dialog.clear()
|
||||
}
|
||||
|
||||
export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) => Promise<void> | void }) {
|
||||
const dialog = useDialog()
|
||||
const sync = useSync()
|
||||
@@ -123,18 +224,43 @@ export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) =
|
||||
const create = async (type: string) => {
|
||||
if (creating()) return
|
||||
setCreating(type)
|
||||
log.info("workspace create requested", {
|
||||
type,
|
||||
})
|
||||
|
||||
const result = await sdk.client.experimental.workspace.create({ type, branch: null }).catch((err) => {
|
||||
log.error("workspace create request failed", {
|
||||
type,
|
||||
error: errorData(err),
|
||||
})
|
||||
return undefined
|
||||
})
|
||||
|
||||
const result = await sdk.client.experimental.workspace.create({ type, branch: null }).catch(() => undefined)
|
||||
const workspace = result?.data
|
||||
if (!workspace) {
|
||||
setCreating(undefined)
|
||||
log.error("workspace create failed", {
|
||||
type,
|
||||
status: result?.response.status,
|
||||
error: result?.error ? errorData(result.error) : undefined,
|
||||
})
|
||||
toast.show({
|
||||
message: "Failed to create workspace",
|
||||
message: `Failed to create workspace: ${errorMessage(result?.error ?? "no response")}`,
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
}
|
||||
log.info("workspace create response", {
|
||||
type,
|
||||
workspaceID: workspace.id,
|
||||
status: result.response?.status,
|
||||
})
|
||||
|
||||
await project.workspace.sync()
|
||||
log.info("workspace create synced", {
|
||||
type,
|
||||
workspaceID: workspace.id,
|
||||
})
|
||||
await props.onSelect(workspace.id)
|
||||
setCreating(undefined)
|
||||
}
|
||||
|
||||
@@ -1,8 +1,14 @@
|
||||
import { BoxRenderable, MouseButton, MouseEvent, RGBA, TextAttributes } from "@opentui/core"
|
||||
import { For, createMemo, createSignal, onCleanup, type JSX } from "solid-js"
|
||||
import { For, createMemo, createSignal, onCleanup, onMount, type JSX } from "solid-js"
|
||||
import { useTheme, tint } from "@tui/context/theme"
|
||||
import * as Sound from "@tui/util/sound"
|
||||
import { logo } from "@/cli/logo"
|
||||
import { go, logo } from "@/cli/logo"
|
||||
import { shimmerConfig, type ShimmerConfig } from "./shimmer-config"
|
||||
|
||||
export type LogoShape = {
|
||||
left: string[]
|
||||
right: string[]
|
||||
}
|
||||
|
||||
// Shadow markers (rendered chars in parens):
|
||||
// _ = full shadow cell (space with bg=shadow)
|
||||
@@ -74,9 +80,6 @@ type Frame = {
|
||||
spark: number
|
||||
}
|
||||
|
||||
const LEFT = logo.left[0]?.length ?? 0
|
||||
const FULL = logo.left.map((line, i) => line + " ".repeat(GAP) + logo.right[i])
|
||||
const SPAN = Math.hypot(FULL[0]?.length ?? 0, FULL.length * 2) * 0.94
|
||||
const NEAR = [
|
||||
[1, 0],
|
||||
[1, 1],
|
||||
@@ -140,7 +143,7 @@ function noise(x: number, y: number, t: number) {
|
||||
}
|
||||
|
||||
function lit(char: string) {
|
||||
return char !== " " && char !== "_" && char !== "~"
|
||||
return char !== " " && char !== "_" && char !== "~" && char !== ","
|
||||
}
|
||||
|
||||
function key(x: number, y: number) {
|
||||
@@ -188,12 +191,12 @@ function route(list: Array<{ x: number; y: number }>) {
|
||||
return path
|
||||
}
|
||||
|
||||
function mapGlyphs() {
|
||||
function mapGlyphs(full: string[]) {
|
||||
const cells = [] as Array<{ x: number; y: number }>
|
||||
|
||||
for (let y = 0; y < FULL.length; y++) {
|
||||
for (let x = 0; x < (FULL[y]?.length ?? 0); x++) {
|
||||
if (lit(FULL[y]?.[x] ?? " ")) cells.push({ x, y })
|
||||
for (let y = 0; y < full.length; y++) {
|
||||
for (let x = 0; x < (full[y]?.length ?? 0); x++) {
|
||||
if (lit(full[y]?.[x] ?? " ")) cells.push({ x, y })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -237,9 +240,25 @@ function mapGlyphs() {
|
||||
return { glyph, trace, center }
|
||||
}
|
||||
|
||||
const MAP = mapGlyphs()
|
||||
type LogoContext = {
|
||||
LEFT: number
|
||||
FULL: string[]
|
||||
SPAN: number
|
||||
MAP: ReturnType<typeof mapGlyphs>
|
||||
shape: LogoShape
|
||||
}
|
||||
|
||||
function shimmer(x: number, y: number, frame: Frame) {
|
||||
function build(shape: LogoShape): LogoContext {
|
||||
const LEFT = shape.left[0]?.length ?? 0
|
||||
const FULL = shape.left.map((line, i) => line + " ".repeat(GAP) + shape.right[i])
|
||||
const SPAN = Math.hypot(FULL[0]?.length ?? 0, FULL.length * 2) * 0.94
|
||||
return { LEFT, FULL, SPAN, MAP: mapGlyphs(FULL), shape }
|
||||
}
|
||||
|
||||
const DEFAULT = build(logo)
|
||||
const GO = build(go)
|
||||
|
||||
function shimmer(x: number, y: number, frame: Frame, ctx: LogoContext) {
|
||||
return frame.list.reduce((best, item) => {
|
||||
const age = frame.t - item.at
|
||||
if (age < SHIMMER_IN || age > LIFE) return best
|
||||
@@ -247,7 +266,7 @@ function shimmer(x: number, y: number, frame: Frame) {
|
||||
const dy = y * 2 + 1 - item.y
|
||||
const dist = Math.hypot(dx, dy)
|
||||
const p = age / LIFE
|
||||
const r = SPAN * (1 - (1 - p) ** EXPAND)
|
||||
const r = ctx.SPAN * (1 - (1 - p) ** EXPAND)
|
||||
const lag = r - dist
|
||||
if (lag < 0.18 || lag > SHIMMER_OUT) return best
|
||||
const band = Math.exp(-(((lag - 1.05) / 0.68) ** 2))
|
||||
@@ -258,19 +277,19 @@ function shimmer(x: number, y: number, frame: Frame) {
|
||||
}, 0)
|
||||
}
|
||||
|
||||
function remain(x: number, y: number, item: Release, t: number) {
|
||||
function remain(x: number, y: number, item: Release, t: number, ctx: LogoContext) {
|
||||
const age = t - item.at
|
||||
if (age < 0 || age > LIFE) return 0
|
||||
const p = age / LIFE
|
||||
const dx = x + 0.5 - item.x - 0.5
|
||||
const dy = y * 2 + 1 - item.y * 2 - 1
|
||||
const dist = Math.hypot(dx, dy)
|
||||
const r = SPAN * (1 - (1 - p) ** EXPAND)
|
||||
const r = ctx.SPAN * (1 - (1 - p) ** EXPAND)
|
||||
if (dist > r) return 1
|
||||
return clamp((r - dist) / 1.35 < 1 ? 1 - (r - dist) / 1.35 : 0)
|
||||
}
|
||||
|
||||
function wave(x: number, y: number, frame: Frame, live: boolean) {
|
||||
function wave(x: number, y: number, frame: Frame, live: boolean, ctx: LogoContext) {
|
||||
return frame.list.reduce((sum, item) => {
|
||||
const age = frame.t - item.at
|
||||
if (age < 0 || age > LIFE) return sum
|
||||
@@ -278,7 +297,7 @@ function wave(x: number, y: number, frame: Frame, live: boolean) {
|
||||
const dx = x + 0.5 - item.x
|
||||
const dy = y * 2 + 1 - item.y
|
||||
const dist = Math.hypot(dx, dy)
|
||||
const r = SPAN * (1 - (1 - p) ** EXPAND)
|
||||
const r = ctx.SPAN * (1 - (1 - p) ** EXPAND)
|
||||
const fade = (1 - p) ** 1.32
|
||||
const j = 1.02 + noise(x + item.x * 0.7, y + item.y * 0.7, item.at * 0.002 + age * 0.06) * 0.52
|
||||
const edge = Math.exp(-(((dist - r) / WIDTH) ** 2)) * GAIN * fade * item.force * j
|
||||
@@ -292,7 +311,7 @@ function wave(x: number, y: number, frame: Frame, live: boolean) {
|
||||
}, 0)
|
||||
}
|
||||
|
||||
function field(x: number, y: number, frame: Frame) {
|
||||
function field(x: number, y: number, frame: Frame, ctx: LogoContext) {
|
||||
const held = frame.hold
|
||||
const rest = frame.release
|
||||
const item = held ?? rest
|
||||
@@ -326,11 +345,11 @@ function field(x: number, y: number, frame: Frame) {
|
||||
Math.max(0, noise(item.x * 3.1, item.y * 2.7, frame.t * 1.7) - 0.72) *
|
||||
Math.exp(-(dist * dist) / 0.15) *
|
||||
lerp(0.08, 0.42, body)
|
||||
const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1
|
||||
const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t, ctx) : 1
|
||||
return (core + shell + ember + ring + fork + glitch + lash + flicker - dim) * fade
|
||||
}
|
||||
|
||||
function pick(x: number, y: number, frame: Frame) {
|
||||
function pick(x: number, y: number, frame: Frame, ctx: LogoContext) {
|
||||
const held = frame.hold
|
||||
const rest = frame.release
|
||||
const item = held ?? rest
|
||||
@@ -339,26 +358,26 @@ function pick(x: number, y: number, frame: Frame) {
|
||||
const dx = x + 0.5 - item.x - 0.5
|
||||
const dy = y * 2 + 1 - item.y * 2 - 1
|
||||
const dist = Math.hypot(dx, dy)
|
||||
const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1
|
||||
const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t, ctx) : 1
|
||||
return Math.exp(-(dist * dist) / 1.7) * lerp(0.2, 0.96, rise) * fade
|
||||
}
|
||||
|
||||
function select(x: number, y: number) {
|
||||
const direct = MAP.glyph.get(key(x, y))
|
||||
function select(x: number, y: number, ctx: LogoContext) {
|
||||
const direct = ctx.MAP.glyph.get(key(x, y))
|
||||
if (direct !== undefined) return direct
|
||||
|
||||
const near = NEAR.map(([dx, dy]) => MAP.glyph.get(key(x + dx, y + dy))).find(
|
||||
const near = NEAR.map(([dx, dy]) => ctx.MAP.glyph.get(key(x + dx, y + dy))).find(
|
||||
(item): item is number => item !== undefined,
|
||||
)
|
||||
return near
|
||||
}
|
||||
|
||||
function trace(x: number, y: number, frame: Frame) {
|
||||
function trace(x: number, y: number, frame: Frame, ctx: LogoContext) {
|
||||
const held = frame.hold
|
||||
const rest = frame.release
|
||||
const item = held ?? rest
|
||||
if (!item || item.glyph === undefined) return 0
|
||||
const step = MAP.trace.get(key(x, y))
|
||||
const step = ctx.MAP.trace.get(key(x, y))
|
||||
if (!step || step.glyph !== item.glyph || step.l < 2) return 0
|
||||
const age = frame.t - item.at
|
||||
const rise = held ? ramp(age, HOLD, CHARGE) : rest!.rise
|
||||
@@ -368,29 +387,125 @@ function trace(x: number, y: number, frame: Frame) {
|
||||
const dist = Math.min(Math.abs(step.i - head), step.l - Math.abs(step.i - head))
|
||||
const tail = (head - TAIL + step.l) % step.l
|
||||
const lag = Math.min(Math.abs(step.i - tail), step.l - Math.abs(step.i - tail))
|
||||
const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1
|
||||
const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t, ctx) : 1
|
||||
const core = Math.exp(-((dist / 1.05) ** 2)) * lerp(0.8, 2.35, rise)
|
||||
const glow = Math.exp(-((dist / 1.85) ** 2)) * lerp(0.08, 0.34, rise)
|
||||
const trail = Math.exp(-((lag / 1.45) ** 2)) * lerp(0.04, 0.42, rise)
|
||||
return (core + glow + trail) * appear * fade
|
||||
}
|
||||
|
||||
function bloom(x: number, y: number, frame: Frame) {
|
||||
function idle(
|
||||
x: number,
|
||||
pixelY: number,
|
||||
frame: Frame,
|
||||
ctx: LogoContext,
|
||||
state: IdleState,
|
||||
): { glow: number; peak: number; primary: number } {
|
||||
const cfg = state.cfg
|
||||
const dx = x + 0.5 - cfg.originX
|
||||
const dy = pixelY - cfg.originY
|
||||
const dist = Math.hypot(dx, dy)
|
||||
const angle = Math.atan2(dy, dx)
|
||||
const wob1 = noise(x * 0.32, pixelY * 0.25, frame.t * 0.0005) - 0.5
|
||||
const wob2 = noise(x * 0.12, pixelY * 0.08, frame.t * 0.00022) - 0.5
|
||||
const ripple = Math.sin(angle * 3 + frame.t * 0.0012) * 0.3
|
||||
const jitter = (wob1 * 0.55 + wob2 * 0.32 + ripple * 0.18) * cfg.noise
|
||||
const traveled = dist + jitter
|
||||
let glow = 0
|
||||
let peak = 0
|
||||
let halo = 0
|
||||
let primary = 0
|
||||
let ambient = 0
|
||||
for (const active of state.active) {
|
||||
const head = active.head
|
||||
const eased = active.eased
|
||||
const delta = traveled - head
|
||||
// Use shallower exponent (1.6 vs 2) for softer edges on the Gaussians
|
||||
// so adjacent pixels have smaller brightness deltas
|
||||
const core = Math.exp(-(Math.abs(delta / cfg.coreWidth) ** 1.8))
|
||||
const soft = Math.exp(-(Math.abs(delta / cfg.softWidth) ** 1.6))
|
||||
const tailRange = cfg.tail * 2.6
|
||||
const tail = delta < 0 && delta > -tailRange ? (1 + delta / tailRange) ** 2.6 : 0
|
||||
const haloDelta = delta + cfg.haloOffset
|
||||
const haloBand = Math.exp(-(Math.abs(haloDelta / cfg.haloWidth) ** 1.6))
|
||||
glow += (soft * cfg.softAmp + tail * cfg.tailAmp) * eased
|
||||
peak += core * cfg.coreAmp * eased
|
||||
halo += haloBand * cfg.haloAmp * eased
|
||||
// Primary-tinted fringe follows the halo (which trails behind the core) and the tail
|
||||
primary += (haloBand + tail * 0.6) * eased
|
||||
ambient += active.ambient
|
||||
}
|
||||
ambient /= state.rings
|
||||
return {
|
||||
glow: glow / state.rings,
|
||||
peak: cfg.breathBase + ambient + (peak + halo) / state.rings,
|
||||
primary: (primary / state.rings) * cfg.primaryMix,
|
||||
}
|
||||
}
|
||||
|
||||
function bloom(x: number, y: number, frame: Frame, ctx: LogoContext) {
|
||||
const item = frame.glow
|
||||
if (!item) return 0
|
||||
const glyph = MAP.glyph.get(key(x, y))
|
||||
const glyph = ctx.MAP.glyph.get(key(x, y))
|
||||
if (glyph !== item.glyph) return 0
|
||||
const age = frame.t - item.at
|
||||
if (age < 0 || age > GLOW_OUT) return 0
|
||||
const p = age / GLOW_OUT
|
||||
const flash = (1 - p) ** 2
|
||||
const dx = x + 0.5 - MAP.center.get(item.glyph)!.x
|
||||
const dy = y * 2 + 1 - MAP.center.get(item.glyph)!.y
|
||||
const dx = x + 0.5 - ctx.MAP.center.get(item.glyph)!.x
|
||||
const dy = y * 2 + 1 - ctx.MAP.center.get(item.glyph)!.y
|
||||
const bias = Math.exp(-((Math.hypot(dx, dy) / 2.8) ** 2))
|
||||
return lerp(item.force, item.force * 0.18, p) * lerp(0.72, 1.1, bias) * flash
|
||||
}
|
||||
|
||||
export function Logo() {
|
||||
type IdleState = {
|
||||
cfg: ShimmerConfig
|
||||
reach: number
|
||||
rings: number
|
||||
active: Array<{
|
||||
head: number
|
||||
eased: number
|
||||
ambient: number
|
||||
}>
|
||||
}
|
||||
|
||||
function buildIdleState(t: number, ctx: LogoContext): IdleState {
|
||||
const cfg = shimmerConfig
|
||||
const w = ctx.FULL[0]?.length ?? 1
|
||||
const h = ctx.FULL.length * 2
|
||||
const corners: [number, number][] = [
|
||||
[0, 0],
|
||||
[w, 0],
|
||||
[0, h],
|
||||
[w, h],
|
||||
]
|
||||
let maxCorner = 0
|
||||
for (const [cx, cy] of corners) {
|
||||
const d = Math.hypot(cx - cfg.originX, cy - cfg.originY)
|
||||
if (d > maxCorner) maxCorner = d
|
||||
}
|
||||
const reach = maxCorner + cfg.tail * 2
|
||||
const rings = Math.max(1, Math.floor(cfg.rings))
|
||||
const active = [] as IdleState["active"]
|
||||
for (let i = 0; i < rings; i++) {
|
||||
const offset = i / rings
|
||||
const cyclePhase = (t / cfg.period + offset) % 1
|
||||
if (cyclePhase >= cfg.sweepFraction) continue
|
||||
const phase = cyclePhase / cfg.sweepFraction
|
||||
const envelope = Math.sin(phase * Math.PI)
|
||||
const eased = envelope * envelope * (3 - 2 * envelope)
|
||||
const d = (phase - cfg.ambientCenter) / cfg.ambientWidth
|
||||
active.push({
|
||||
head: phase * reach,
|
||||
eased,
|
||||
ambient: Math.abs(d) < 1 ? (1 - d * d) ** 2 * cfg.ambientAmp : 0,
|
||||
})
|
||||
}
|
||||
return { cfg, reach, rings, active }
|
||||
}
|
||||
|
||||
export function Logo(props: { shape?: LogoShape; ink?: RGBA; idle?: boolean } = {}) {
|
||||
const ctx = props.shape ? build(props.shape) : DEFAULT
|
||||
const { theme } = useTheme()
|
||||
const [rings, setRings] = createSignal<Ring[]>([])
|
||||
const [hold, setHold] = createSignal<Hold>()
|
||||
@@ -430,6 +545,7 @@ export function Logo() {
|
||||
}
|
||||
if (!live) setRelease(undefined)
|
||||
if (live || hold() || release() || glow()) return
|
||||
if (props.idle) return
|
||||
stop()
|
||||
}
|
||||
|
||||
@@ -438,8 +554,20 @@ export function Logo() {
|
||||
timer = setInterval(tick, 16)
|
||||
}
|
||||
|
||||
onCleanup(() => {
|
||||
stop()
|
||||
hum = false
|
||||
Sound.dispose()
|
||||
})
|
||||
|
||||
onMount(() => {
|
||||
if (!props.idle) return
|
||||
setNow(performance.now())
|
||||
start()
|
||||
})
|
||||
|
||||
const hit = (x: number, y: number) => {
|
||||
const char = FULL[y]?.[x]
|
||||
const char = ctx.FULL[y]?.[x]
|
||||
return char !== undefined && char !== " "
|
||||
}
|
||||
|
||||
@@ -448,7 +576,7 @@ export function Logo() {
|
||||
if (last) burst(last.x, last.y)
|
||||
setNow(t)
|
||||
if (!last) setRelease(undefined)
|
||||
setHold({ x, y, at: t, glyph: select(x, y) })
|
||||
setHold({ x, y, at: t, glyph: select(x, y, ctx) })
|
||||
hum = false
|
||||
start()
|
||||
}
|
||||
@@ -508,6 +636,8 @@ export function Logo() {
|
||||
}
|
||||
})
|
||||
|
||||
const idleState = createMemo(() => (props.idle ? buildIdleState(frame().t, ctx) : undefined))
|
||||
|
||||
const renderLine = (
|
||||
line: string,
|
||||
y: number,
|
||||
@@ -516,24 +646,64 @@ export function Logo() {
|
||||
off: number,
|
||||
frame: Frame,
|
||||
dusk: Frame,
|
||||
state: IdleState | undefined,
|
||||
): JSX.Element[] => {
|
||||
const shadow = tint(theme.background, ink, 0.25)
|
||||
const attrs = bold ? TextAttributes.BOLD : undefined
|
||||
|
||||
return Array.from(line).map((char, i) => {
|
||||
const h = field(off + i, y, frame)
|
||||
const n = wave(off + i, y, frame, lit(char)) + h
|
||||
const s = wave(off + i, y, dusk, false) + h
|
||||
const p = lit(char) ? pick(off + i, y, frame) : 0
|
||||
const e = lit(char) ? trace(off + i, y, frame) : 0
|
||||
const b = lit(char) ? bloom(off + i, y, frame) : 0
|
||||
const q = shimmer(off + i, y, frame)
|
||||
if (char === " ") {
|
||||
return (
|
||||
<text fg={ink} attributes={attrs} selectable={false}>
|
||||
{char}
|
||||
</text>
|
||||
)
|
||||
}
|
||||
|
||||
const h = field(off + i, y, frame, ctx)
|
||||
const charLit = lit(char)
|
||||
// Sub-pixel sampling: cells are 2 pixels tall. Sample at top (y*2) and bottom (y*2+1) pixel rows.
|
||||
const pulseTop = state ? idle(off + i, y * 2, frame, ctx, state) : { glow: 0, peak: 0, primary: 0 }
|
||||
const pulseBot = state ? idle(off + i, y * 2 + 1, frame, ctx, state) : { glow: 0, peak: 0, primary: 0 }
|
||||
const peakMixTop = charLit ? Math.min(1, pulseTop.peak) : 0
|
||||
const peakMixBot = charLit ? Math.min(1, pulseBot.peak) : 0
|
||||
const primaryMixTop = charLit ? Math.min(1, pulseTop.primary) : 0
|
||||
const primaryMixBot = charLit ? Math.min(1, pulseBot.primary) : 0
|
||||
// Layer primary tint first, then white peak on top — so the halo/tail pulls toward primary,
|
||||
// while the bright core stays pure white
|
||||
const inkTopTint = primaryMixTop > 0 ? tint(ink, theme.primary, primaryMixTop) : ink
|
||||
const inkBotTint = primaryMixBot > 0 ? tint(ink, theme.primary, primaryMixBot) : ink
|
||||
const inkTop = peakMixTop > 0 ? tint(inkTopTint, PEAK, peakMixTop) : inkTopTint
|
||||
const inkBot = peakMixBot > 0 ? tint(inkBotTint, PEAK, peakMixBot) : inkBotTint
|
||||
// For the non-peak-aware brightness channels, use the average of top/bot
|
||||
const pulse = {
|
||||
glow: (pulseTop.glow + pulseBot.glow) / 2,
|
||||
peak: (pulseTop.peak + pulseBot.peak) / 2,
|
||||
primary: (pulseTop.primary + pulseBot.primary) / 2,
|
||||
}
|
||||
const peakMix = charLit ? Math.min(1, pulse.peak) : 0
|
||||
const primaryMix = charLit ? Math.min(1, pulse.primary) : 0
|
||||
const inkPrimary = primaryMix > 0 ? tint(ink, theme.primary, primaryMix) : ink
|
||||
const inkTinted = peakMix > 0 ? tint(inkPrimary, PEAK, peakMix) : inkPrimary
|
||||
const shadowMixCfg = state?.cfg.shadowMix ?? shimmerConfig.shadowMix
|
||||
const shadowMixTop = Math.min(1, pulseTop.peak * shadowMixCfg)
|
||||
const shadowMixBot = Math.min(1, pulseBot.peak * shadowMixCfg)
|
||||
const shadowTop = shadowMixTop > 0 ? tint(shadow, PEAK, shadowMixTop) : shadow
|
||||
const shadowBot = shadowMixBot > 0 ? tint(shadow, PEAK, shadowMixBot) : shadow
|
||||
const shadowMix = Math.min(1, pulse.peak * shadowMixCfg)
|
||||
const shadowTinted = shadowMix > 0 ? tint(shadow, PEAK, shadowMix) : shadow
|
||||
const n = wave(off + i, y, frame, charLit, ctx) + h
|
||||
const s = wave(off + i, y, dusk, false, ctx) + h
|
||||
const p = charLit ? pick(off + i, y, frame, ctx) : 0
|
||||
const e = charLit ? trace(off + i, y, frame, ctx) : 0
|
||||
const b = charLit ? bloom(off + i, y, frame, ctx) : 0
|
||||
const q = shimmer(off + i, y, frame, ctx)
|
||||
|
||||
if (char === "_") {
|
||||
return (
|
||||
<text
|
||||
fg={shade(ink, theme, s * 0.08)}
|
||||
bg={shade(shadow, theme, ghost(s, 0.24) + ghost(q, 0.06))}
|
||||
fg={shade(inkTinted, theme, s * 0.08)}
|
||||
bg={shade(shadowTinted, theme, ghost(s, 0.24) + ghost(q, 0.06))}
|
||||
attributes={attrs}
|
||||
selectable={false}
|
||||
>
|
||||
@@ -545,8 +715,8 @@ export function Logo() {
|
||||
if (char === "^") {
|
||||
return (
|
||||
<text
|
||||
fg={shade(ink, theme, n + p + e + b)}
|
||||
bg={shade(shadow, theme, ghost(s, 0.18) + ghost(q, 0.05) + ghost(b, 0.08))}
|
||||
fg={shade(inkTop, theme, n + p + e + b)}
|
||||
bg={shade(shadowBot, theme, ghost(s, 0.18) + ghost(q, 0.05) + ghost(b, 0.08))}
|
||||
attributes={attrs}
|
||||
selectable={false}
|
||||
>
|
||||
@@ -557,34 +727,60 @@ export function Logo() {
|
||||
|
||||
if (char === "~") {
|
||||
return (
|
||||
<text fg={shade(shadow, theme, ghost(s, 0.22) + ghost(q, 0.05))} attributes={attrs} selectable={false}>
|
||||
<text fg={shade(shadowTop, theme, ghost(s, 0.22) + ghost(q, 0.05))} attributes={attrs} selectable={false}>
|
||||
▀
|
||||
</text>
|
||||
)
|
||||
}
|
||||
|
||||
if (char === " ") {
|
||||
if (char === ",") {
|
||||
return (
|
||||
<text fg={ink} attributes={attrs} selectable={false}>
|
||||
{char}
|
||||
<text fg={shade(shadowBot, theme, ghost(s, 0.22) + ghost(q, 0.05))} attributes={attrs} selectable={false}>
|
||||
▄
|
||||
</text>
|
||||
)
|
||||
}
|
||||
|
||||
// Solid █: render as ▀ so the top pixel (fg) and bottom pixel (bg) can carry independent shimmer values
|
||||
if (char === "█") {
|
||||
return (
|
||||
<text
|
||||
fg={shade(inkTop, theme, n + p + e + b)}
|
||||
bg={shade(inkBot, theme, n + p + e + b)}
|
||||
attributes={attrs}
|
||||
selectable={false}
|
||||
>
|
||||
▀
|
||||
</text>
|
||||
)
|
||||
}
|
||||
|
||||
// ▀ top-half-lit: fg uses top-pixel sample, bg stays transparent/panel
|
||||
if (char === "▀") {
|
||||
return (
|
||||
<text fg={shade(inkTop, theme, n + p + e + b)} attributes={attrs} selectable={false}>
|
||||
▀
|
||||
</text>
|
||||
)
|
||||
}
|
||||
|
||||
// ▄ bottom-half-lit: fg uses bottom-pixel sample
|
||||
if (char === "▄") {
|
||||
return (
|
||||
<text fg={shade(inkBot, theme, n + p + e + b)} attributes={attrs} selectable={false}>
|
||||
▄
|
||||
</text>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<text fg={shade(ink, theme, n + p + e + b)} attributes={attrs} selectable={false}>
|
||||
<text fg={shade(inkTinted, theme, n + p + e + b)} attributes={attrs} selectable={false}>
|
||||
{char}
|
||||
</text>
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
onCleanup(() => {
|
||||
stop()
|
||||
hum = false
|
||||
Sound.dispose()
|
||||
})
|
||||
|
||||
const mouse = (evt: MouseEvent) => {
|
||||
if (!box) return
|
||||
if ((evt.type === "down" || evt.type === "drag") && evt.button === MouseButton.LEFT) {
|
||||
@@ -613,17 +809,28 @@ export function Logo() {
|
||||
position="absolute"
|
||||
top={0}
|
||||
left={0}
|
||||
width={FULL[0]?.length ?? 0}
|
||||
height={FULL.length}
|
||||
width={ctx.FULL[0]?.length ?? 0}
|
||||
height={ctx.FULL.length}
|
||||
zIndex={1}
|
||||
onMouse={mouse}
|
||||
/>
|
||||
<For each={logo.left}>
|
||||
<For each={ctx.shape.left}>
|
||||
{(line, index) => (
|
||||
<box flexDirection="row" gap={1}>
|
||||
<box flexDirection="row">{renderLine(line, index(), theme.textMuted, false, 0, frame(), dusk())}</box>
|
||||
<box flexDirection="row">
|
||||
{renderLine(logo.right[index()], index(), theme.text, true, LEFT + GAP, frame(), dusk())}
|
||||
{renderLine(line, index(), props.ink ?? theme.textMuted, !!props.ink, 0, frame(), dusk(), idleState())}
|
||||
</box>
|
||||
<box flexDirection="row">
|
||||
{renderLine(
|
||||
ctx.shape.right[index()],
|
||||
index(),
|
||||
props.ink ?? theme.text,
|
||||
true,
|
||||
ctx.LEFT + GAP,
|
||||
frame(),
|
||||
dusk(),
|
||||
idleState(),
|
||||
)}
|
||||
</box>
|
||||
</box>
|
||||
)}
|
||||
@@ -631,3 +838,9 @@ export function Logo() {
|
||||
</box>
|
||||
)
|
||||
}
|
||||
|
||||
export function GoLogo() {
|
||||
const { theme } = useTheme()
|
||||
const base = tint(theme.background, theme.text, 0.62)
|
||||
return <Logo shape={go} ink={base} idle />
|
||||
}
|
||||
|
||||
@@ -617,9 +617,7 @@ export function Prompt(props: PromptProps) {
|
||||
|
||||
let sessionID = props.sessionID
|
||||
if (sessionID == null) {
|
||||
const res = await sdk.client.session.create({
|
||||
workspaceID: props.workspaceID,
|
||||
})
|
||||
const res = await sdk.client.session.create({ workspace: props.workspaceID })
|
||||
|
||||
if (res.error) {
|
||||
console.log("Creating a session failed:", res.error)
|
||||
@@ -1033,6 +1031,10 @@ export function Prompt(props: PromptProps) {
|
||||
return
|
||||
}
|
||||
|
||||
// Once we cross an async boundary below, the terminal may perform its
|
||||
// default paste unless we suppress it first and handle insertion ourselves.
|
||||
event.preventDefault()
|
||||
|
||||
const filepath = iife(() => {
|
||||
const raw = pastedContent.replace(/^['"]+|['"]+$/g, "")
|
||||
if (raw.startsWith("file://")) {
|
||||
@@ -1050,7 +1052,6 @@ export function Prompt(props: PromptProps) {
|
||||
const filename = path.basename(filepath)
|
||||
// Handle SVG as raw text content, not as base64 image
|
||||
if (mime === "image/svg+xml") {
|
||||
event.preventDefault()
|
||||
const content = await Filesystem.readText(filepath).catch(() => {})
|
||||
if (content) {
|
||||
pasteText(content, `[SVG: ${filename ?? "image"}]`)
|
||||
@@ -1058,7 +1059,6 @@ export function Prompt(props: PromptProps) {
|
||||
}
|
||||
}
|
||||
if (mime.startsWith("image/") || mime === "application/pdf") {
|
||||
event.preventDefault()
|
||||
const content = await Filesystem.readArrayBuffer(filepath)
|
||||
.then((buffer) => Buffer.from(buffer).toString("base64"))
|
||||
.catch(() => {})
|
||||
@@ -1080,11 +1080,12 @@ export function Prompt(props: PromptProps) {
|
||||
(lineCount >= 3 || pastedContent.length > 150) &&
|
||||
!sync.data.config.experimental?.disable_paste_summary
|
||||
) {
|
||||
event.preventDefault()
|
||||
pasteText(pastedContent, `[Pasted ~${lineCount} lines]`)
|
||||
return
|
||||
}
|
||||
|
||||
input.insertText(normalizedText)
|
||||
|
||||
// Force layout update and render for the pasted content
|
||||
setTimeout(() => {
|
||||
// setTimeout is a workaround and needs to be addressed properly
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
export type ShimmerConfig = {
|
||||
period: number
|
||||
rings: number
|
||||
sweepFraction: number
|
||||
coreWidth: number
|
||||
coreAmp: number
|
||||
softWidth: number
|
||||
softAmp: number
|
||||
tail: number
|
||||
tailAmp: number
|
||||
haloWidth: number
|
||||
haloOffset: number
|
||||
haloAmp: number
|
||||
breathBase: number
|
||||
noise: number
|
||||
ambientAmp: number
|
||||
ambientCenter: number
|
||||
ambientWidth: number
|
||||
shadowMix: number
|
||||
primaryMix: number
|
||||
originX: number
|
||||
originY: number
|
||||
}
|
||||
|
||||
export const shimmerDefaults: ShimmerConfig = {
|
||||
period: 4600,
|
||||
rings: 2,
|
||||
sweepFraction: 1,
|
||||
coreWidth: 1.2,
|
||||
coreAmp: 1.9,
|
||||
softWidth: 10,
|
||||
softAmp: 1.6,
|
||||
tail: 5,
|
||||
tailAmp: 0.64,
|
||||
haloWidth: 4.3,
|
||||
haloOffset: 0.6,
|
||||
haloAmp: 0.16,
|
||||
breathBase: 0.04,
|
||||
noise: 0.1,
|
||||
ambientAmp: 0.36,
|
||||
ambientCenter: 0.5,
|
||||
ambientWidth: 0.34,
|
||||
shadowMix: 0.1,
|
||||
primaryMix: 0.3,
|
||||
originX: 4.5,
|
||||
originY: 13.5,
|
||||
}
|
||||
|
||||
export const shimmerConfig: ShimmerConfig = { ...shimmerDefaults }
|
||||
@@ -26,7 +26,6 @@ const TuiLegacy = z
|
||||
interface MigrateInput {
|
||||
cwd: string
|
||||
directories: string[]
|
||||
custom?: string
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
export * as TuiConfig from "./tui"
|
||||
|
||||
import z from "zod"
|
||||
import { mergeDeep, unique } from "remeda"
|
||||
import { Context, Effect, Fiber, Layer } from "effect"
|
||||
import { ConfigParse } from "@/config/parse"
|
||||
import * as ConfigPaths from "@/config/paths"
|
||||
import { migrateTuiConfig } from "./tui-migrate"
|
||||
import { TuiInfo } from "./tui-schema"
|
||||
@@ -15,194 +18,198 @@ import { ConfigKeybinds } from "@/config/keybinds"
|
||||
import { InstallationLocal, InstallationVersion } from "@/installation/version"
|
||||
import { makeRuntime } from "@/cli/effect/runtime"
|
||||
import { Filesystem, Log } from "@/util"
|
||||
import { ConfigVariable } from "@/config/variable"
|
||||
|
||||
export namespace TuiConfig {
|
||||
const log = Log.create({ service: "tui.config" })
|
||||
const log = Log.create({ service: "tui.config" })
|
||||
|
||||
export const Info = TuiInfo
|
||||
export const Info = TuiInfo
|
||||
|
||||
type Acc = {
|
||||
result: Info
|
||||
}
|
||||
type Acc = {
|
||||
result: Info
|
||||
}
|
||||
|
||||
type State = {
|
||||
config: Info
|
||||
deps: Array<Fiber.Fiber<void, AppFileSystem.Error>>
|
||||
}
|
||||
type State = {
|
||||
config: Info
|
||||
deps: Array<Fiber.Fiber<void, AppFileSystem.Error>>
|
||||
}
|
||||
|
||||
export type Info = z.output<typeof Info> & {
|
||||
// Internal resolved plugin list used by runtime loading.
|
||||
plugin_origins?: ConfigPlugin.Origin[]
|
||||
}
|
||||
export type Info = z.output<typeof Info> & {
|
||||
// Internal resolved plugin list used by runtime loading.
|
||||
plugin_origins?: ConfigPlugin.Origin[]
|
||||
}
|
||||
|
||||
export interface Interface {
|
||||
readonly get: () => Effect.Effect<Info>
|
||||
readonly waitForDependencies: () => Effect.Effect<void>
|
||||
}
|
||||
export interface Interface {
|
||||
readonly get: () => Effect.Effect<Info>
|
||||
readonly waitForDependencies: () => Effect.Effect<void>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/TuiConfig") {}
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/TuiConfig") {}
|
||||
|
||||
function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope {
|
||||
if (Filesystem.contains(ctx.directory, file)) return "local"
|
||||
// if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local"
|
||||
return "global"
|
||||
}
|
||||
function pluginScope(file: string, ctx: { directory: string }): ConfigPlugin.Scope {
|
||||
if (Filesystem.contains(ctx.directory, file)) return "local"
|
||||
// if (ctx.worktree !== "/" && Filesystem.contains(ctx.worktree, file)) return "local"
|
||||
return "global"
|
||||
}
|
||||
|
||||
function customPath() {
|
||||
return Flag.OPENCODE_TUI_CONFIG
|
||||
}
|
||||
|
||||
function normalize(raw: Record<string, unknown>) {
|
||||
const data = { ...raw }
|
||||
if (!("tui" in data)) return data
|
||||
if (!isRecord(data.tui)) {
|
||||
delete data.tui
|
||||
return data
|
||||
}
|
||||
|
||||
const tui = data.tui
|
||||
function normalize(raw: Record<string, unknown>) {
|
||||
const data = { ...raw }
|
||||
if (!("tui" in data)) return data
|
||||
if (!isRecord(data.tui)) {
|
||||
delete data.tui
|
||||
return {
|
||||
...tui,
|
||||
...data,
|
||||
}
|
||||
}
|
||||
|
||||
async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) {
|
||||
const data = await loadFile(file)
|
||||
acc.result = mergeDeep(acc.result, data)
|
||||
if (!data.plugin?.length) return
|
||||
|
||||
const scope = pluginScope(file, ctx)
|
||||
const plugins = ConfigPlugin.deduplicatePluginOrigins([
|
||||
...(acc.result.plugin_origins ?? []),
|
||||
...data.plugin.map((spec) => ({ spec, scope, source: file })),
|
||||
])
|
||||
acc.result.plugin = plugins.map((item) => item.spec)
|
||||
acc.result.plugin_origins = plugins
|
||||
}
|
||||
|
||||
async function loadState(ctx: { directory: string }) {
|
||||
let projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory)
|
||||
const directories = await ConfigPaths.directories(ctx.directory)
|
||||
const custom = customPath()
|
||||
await migrateTuiConfig({ directories, custom, cwd: ctx.directory })
|
||||
// Re-compute after migration since migrateTuiConfig may have created new tui.json files
|
||||
projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory)
|
||||
|
||||
const acc: Acc = {
|
||||
result: {},
|
||||
}
|
||||
|
||||
for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) {
|
||||
await mergeFile(acc, file, ctx)
|
||||
}
|
||||
|
||||
if (custom) {
|
||||
await mergeFile(acc, custom, ctx)
|
||||
log.debug("loaded custom tui config", { path: custom })
|
||||
}
|
||||
|
||||
for (const file of projectFiles) {
|
||||
await mergeFile(acc, file, ctx)
|
||||
}
|
||||
|
||||
const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR)
|
||||
|
||||
for (const dir of dirs) {
|
||||
if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue
|
||||
for (const file of ConfigPaths.fileInDirectory(dir, "tui")) {
|
||||
await mergeFile(acc, file, ctx)
|
||||
}
|
||||
}
|
||||
|
||||
const keybinds = { ...(acc.result.keybinds ?? {}) }
|
||||
if (process.platform === "win32") {
|
||||
// Native Windows terminals do not support POSIX suspend, so prefer prompt undo.
|
||||
keybinds.terminal_suspend = "none"
|
||||
keybinds.input_undo ??= unique([
|
||||
"ctrl+z",
|
||||
...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","),
|
||||
]).join(",")
|
||||
}
|
||||
acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds)
|
||||
|
||||
return {
|
||||
config: acc.result,
|
||||
dirs: acc.result.plugin?.length ? dirs : [],
|
||||
}
|
||||
}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const directory = yield* CurrentWorkingDirectory
|
||||
const npm = yield* Npm.Service
|
||||
const data = yield* Effect.promise(() => loadState({ directory }))
|
||||
const deps = yield* Effect.forEach(
|
||||
data.dirs,
|
||||
(dir) =>
|
||||
npm
|
||||
.install(dir, {
|
||||
add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)],
|
||||
})
|
||||
.pipe(Effect.forkScoped),
|
||||
{
|
||||
concurrency: "unbounded",
|
||||
},
|
||||
)
|
||||
|
||||
const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config))
|
||||
|
||||
const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() =>
|
||||
Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid),
|
||||
)
|
||||
return Service.of({ get, waitForDependencies })
|
||||
}).pipe(Effect.withSpan("TuiConfig.layer")),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer))
|
||||
|
||||
const { runPromise } = makeRuntime(Service, defaultLayer)
|
||||
|
||||
export async function waitForDependencies() {
|
||||
await runPromise((svc) => svc.waitForDependencies())
|
||||
}
|
||||
|
||||
export async function get() {
|
||||
return runPromise((svc) => svc.get())
|
||||
}
|
||||
|
||||
async function loadFile(filepath: string): Promise<Info> {
|
||||
const text = await ConfigPaths.readFile(filepath)
|
||||
if (!text) return {}
|
||||
return load(text, filepath).catch((error) => {
|
||||
log.warn("failed to load tui config", { path: filepath, error })
|
||||
return {}
|
||||
})
|
||||
}
|
||||
|
||||
async function load(text: string, configFilepath: string): Promise<Info> {
|
||||
const raw = await ConfigPaths.parseText(text, configFilepath, "empty")
|
||||
if (!isRecord(raw)) return {}
|
||||
|
||||
// Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json
|
||||
// (mirroring the old opencode.json shape) still get their settings applied.
|
||||
const normalized = normalize(raw)
|
||||
|
||||
const parsed = Info.safeParse(normalized)
|
||||
if (!parsed.success) {
|
||||
log.warn("invalid tui config", { path: configFilepath, issues: parsed.error.issues })
|
||||
return {}
|
||||
}
|
||||
|
||||
const data = parsed.data
|
||||
if (data.plugin) {
|
||||
for (let i = 0; i < data.plugin.length; i++) {
|
||||
data.plugin[i] = await ConfigPlugin.resolvePluginSpec(data.plugin[i], configFilepath)
|
||||
}
|
||||
}
|
||||
|
||||
return data
|
||||
}
|
||||
|
||||
const tui = data.tui
|
||||
delete data.tui
|
||||
return {
|
||||
...tui,
|
||||
...data,
|
||||
}
|
||||
}
|
||||
|
||||
async function resolvePlugins(config: Info, configFilepath: string) {
|
||||
if (!config.plugin) return config
|
||||
for (let i = 0; i < config.plugin.length; i++) {
|
||||
config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath)
|
||||
}
|
||||
return config
|
||||
}
|
||||
|
||||
async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) {
|
||||
const data = await loadFile(file)
|
||||
acc.result = mergeDeep(acc.result, data)
|
||||
if (!data.plugin?.length) return
|
||||
|
||||
const scope = pluginScope(file, ctx)
|
||||
const plugins = ConfigPlugin.deduplicatePluginOrigins([
|
||||
...(acc.result.plugin_origins ?? []),
|
||||
...data.plugin.map((spec) => ({ spec, scope, source: file })),
|
||||
])
|
||||
acc.result.plugin = plugins.map((item) => item.spec)
|
||||
acc.result.plugin_origins = plugins
|
||||
}
|
||||
|
||||
async function loadState(ctx: { directory: string }) {
|
||||
// Every config dir we may read from: global config dir, any `.opencode`
|
||||
// folders between cwd and home, and OPENCODE_CONFIG_DIR.
|
||||
const directories = await ConfigPaths.directories(ctx.directory)
|
||||
// One-time migration: extract tui keys (theme/keybinds/tui) from existing
|
||||
// opencode.json files into sibling tui.json files.
|
||||
await migrateTuiConfig({ directories, cwd: ctx.directory })
|
||||
|
||||
const projectFiles = Flag.OPENCODE_DISABLE_PROJECT_CONFIG ? [] : await ConfigPaths.projectFiles("tui", ctx.directory)
|
||||
|
||||
const acc: Acc = {
|
||||
result: {},
|
||||
}
|
||||
|
||||
// 1. Global tui config (lowest precedence).
|
||||
for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) {
|
||||
await mergeFile(acc, file, ctx)
|
||||
}
|
||||
|
||||
// 2. Explicit OPENCODE_TUI_CONFIG override, if set.
|
||||
if (Flag.OPENCODE_TUI_CONFIG) {
|
||||
await mergeFile(acc, Flag.OPENCODE_TUI_CONFIG, ctx)
|
||||
log.debug("loaded custom tui config", { path: Flag.OPENCODE_TUI_CONFIG })
|
||||
}
|
||||
|
||||
// 3. Project tui files, applied root-first so the closest file wins.
|
||||
for (const file of projectFiles) {
|
||||
await mergeFile(acc, file, ctx)
|
||||
}
|
||||
|
||||
// 4. `.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while
|
||||
// walking up the tree. Also returned below so callers can install plugin
|
||||
// dependencies from each location.
|
||||
const dirs = unique(directories).filter((dir) => dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR)
|
||||
|
||||
for (const dir of dirs) {
|
||||
if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue
|
||||
for (const file of ConfigPaths.fileInDirectory(dir, "tui")) {
|
||||
await mergeFile(acc, file, ctx)
|
||||
}
|
||||
}
|
||||
|
||||
const keybinds = { ...(acc.result.keybinds ?? {}) }
|
||||
if (process.platform === "win32") {
|
||||
// Native Windows terminals do not support POSIX suspend, so prefer prompt undo.
|
||||
keybinds.terminal_suspend = "none"
|
||||
keybinds.input_undo ??= unique([
|
||||
"ctrl+z",
|
||||
...ConfigKeybinds.Keybinds.shape.input_undo.parse(undefined).split(","),
|
||||
]).join(",")
|
||||
}
|
||||
acc.result.keybinds = ConfigKeybinds.Keybinds.parse(keybinds)
|
||||
|
||||
return {
|
||||
config: acc.result,
|
||||
dirs: acc.result.plugin?.length ? dirs : [],
|
||||
}
|
||||
}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const directory = yield* CurrentWorkingDirectory
|
||||
const npm = yield* Npm.Service
|
||||
const data = yield* Effect.promise(() => loadState({ directory }))
|
||||
const deps = yield* Effect.forEach(
|
||||
data.dirs,
|
||||
(dir) =>
|
||||
npm
|
||||
.install(dir, {
|
||||
add: ["@opencode-ai/plugin" + (InstallationLocal ? "" : "@" + InstallationVersion)],
|
||||
})
|
||||
.pipe(Effect.forkScoped),
|
||||
{
|
||||
concurrency: "unbounded",
|
||||
},
|
||||
)
|
||||
|
||||
const get = Effect.fn("TuiConfig.get")(() => Effect.succeed(data.config))
|
||||
|
||||
const waitForDependencies = Effect.fn("TuiConfig.waitForDependencies")(() =>
|
||||
Effect.forEach(deps, Fiber.join, { concurrency: "unbounded" }).pipe(Effect.ignore(), Effect.asVoid),
|
||||
)
|
||||
return Service.of({ get, waitForDependencies })
|
||||
}).pipe(Effect.withSpan("TuiConfig.layer")),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(Npm.defaultLayer))
|
||||
|
||||
const { runPromise } = makeRuntime(Service, defaultLayer)
|
||||
|
||||
export async function waitForDependencies() {
|
||||
await runPromise((svc) => svc.waitForDependencies())
|
||||
}
|
||||
|
||||
export async function get() {
|
||||
return runPromise((svc) => svc.get())
|
||||
}
|
||||
|
||||
async function loadFile(filepath: string): Promise<Info> {
|
||||
const text = await ConfigPaths.readFile(filepath)
|
||||
if (!text) return {}
|
||||
return load(text, filepath).catch((error) => {
|
||||
log.warn("failed to load tui config", { path: filepath, error })
|
||||
return {}
|
||||
})
|
||||
}
|
||||
|
||||
async function load(text: string, configFilepath: string): Promise<Info> {
|
||||
return ConfigVariable.substitute({ text, type: "path", path: configFilepath, missing: "empty" })
|
||||
.then((expanded) => ConfigParse.jsonc(expanded, configFilepath))
|
||||
.then((data) => {
|
||||
if (!isRecord(data)) return {}
|
||||
|
||||
// Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json
|
||||
// (mirroring the old opencode.json shape) still get their settings applied.
|
||||
return ConfigParse.schema(Info, normalize(data), configFilepath)
|
||||
})
|
||||
.then((data) => resolvePlugins(data, configFilepath))
|
||||
.catch((error) => {
|
||||
log.warn("invalid tui config", { path: configFilepath, error })
|
||||
return {}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ export const { use: useKV, provider: KVProvider } = createSimpleContext({
|
||||
const [store, setStore] = createStore<Record<string, any>>()
|
||||
const filePath = path.join(Global.Path.state, "kv.json")
|
||||
|
||||
Filesystem.readJson(filePath)
|
||||
Filesystem.readJson<Record<string, any>>(filePath)
|
||||
.then((x) => {
|
||||
setStore(x)
|
||||
})
|
||||
|
||||
@@ -463,6 +463,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
|
||||
return store.status
|
||||
},
|
||||
get ready() {
|
||||
if (process.env.OPENCODE_FAST_BOOT) return true
|
||||
return store.status !== "loading"
|
||||
},
|
||||
get path() {
|
||||
@@ -474,6 +475,13 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
|
||||
if (match.found) return store.session[match.index]
|
||||
return undefined
|
||||
},
|
||||
async refresh() {
|
||||
const start = Date.now() - 30 * 24 * 60 * 60 * 1000
|
||||
const list = await sdk.client.session
|
||||
.list({ start })
|
||||
.then((x) => (x.data ?? []).toSorted((a, b) => a.id.localeCompare(b.id)))
|
||||
setStore("session", reconcile(list))
|
||||
},
|
||||
status(sessionID: string) {
|
||||
const session = result.session.get(sessionID)
|
||||
if (!session) return "idle"
|
||||
@@ -486,12 +494,11 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
|
||||
},
|
||||
async sync(sessionID: string) {
|
||||
if (fullSyncedSessions.has(sessionID)) return
|
||||
const workspace = project.workspace.current()
|
||||
const [session, messages, todo, diff] = await Promise.all([
|
||||
sdk.client.session.get({ sessionID, workspace }, { throwOnError: true }),
|
||||
sdk.client.session.messages({ sessionID, limit: 100, workspace }),
|
||||
sdk.client.session.todo({ sessionID, workspace }),
|
||||
sdk.client.session.diff({ sessionID, workspace }),
|
||||
sdk.client.session.get({ sessionID }, { throwOnError: true }),
|
||||
sdk.client.session.messages({ sessionID, limit: 100 }),
|
||||
sdk.client.session.todo({ sessionID }),
|
||||
sdk.client.session.diff({ sessionID }),
|
||||
])
|
||||
setStore(
|
||||
produce((draft) => {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// import "@opentui/solid/runtime-plugin-support"
|
||||
import "@opentui/solid/runtime-plugin-support"
|
||||
import {
|
||||
type TuiDispose,
|
||||
type TuiPlugin,
|
||||
@@ -16,6 +16,7 @@ import { TuiConfig } from "@/cli/cmd/tui/config/tui"
|
||||
import { Log } from "@/util"
|
||||
import { errorData, errorMessage } from "@/util/error"
|
||||
import { isRecord } from "@/util/record"
|
||||
import { Instance } from "@/project/instance"
|
||||
import {
|
||||
readPackageThemes,
|
||||
readPluginId,
|
||||
@@ -789,7 +790,13 @@ async function addPluginBySpec(state: RuntimeState | undefined, raw: string) {
|
||||
state.pending.delete(spec)
|
||||
return true
|
||||
}
|
||||
const ready = await resolveExternalPlugins([cfg], () => TuiConfig.waitForDependencies())
|
||||
const ready = await Instance.provide({
|
||||
directory: state.directory,
|
||||
fn: () => resolveExternalPlugins([cfg], () => TuiConfig.waitForDependencies()),
|
||||
}).catch((error) => {
|
||||
fail("failed to add tui plugin", { path: next, error })
|
||||
return [] as PluginLoad[]
|
||||
})
|
||||
if (!ready.length) {
|
||||
return false
|
||||
}
|
||||
@@ -911,108 +918,113 @@ async function installPluginBySpec(
|
||||
}
|
||||
}
|
||||
|
||||
export namespace TuiPluginRuntime {
|
||||
let dir = ""
|
||||
let loaded: Promise<void> | undefined
|
||||
let runtime: RuntimeState | undefined
|
||||
export const Slot = View
|
||||
let dir = ""
|
||||
let loaded: Promise<void> | undefined
|
||||
let runtime: RuntimeState | undefined
|
||||
export const Slot = View
|
||||
|
||||
export async function init(input: { api: HostPluginApi; config: TuiConfig.Info }) {
|
||||
const cwd = process.cwd()
|
||||
if (loaded) {
|
||||
if (dir !== cwd) {
|
||||
throw new Error(`TuiPluginRuntime.init() called with a different working directory. expected=${dir} got=${cwd}`)
|
||||
}
|
||||
return loaded
|
||||
export async function init(input: { api: HostPluginApi; config: TuiConfig.Info }) {
|
||||
const cwd = process.cwd()
|
||||
if (loaded) {
|
||||
if (dir !== cwd) {
|
||||
throw new Error(`TuiPluginRuntime.init() called with a different working directory. expected=${dir} got=${cwd}`)
|
||||
}
|
||||
|
||||
dir = cwd
|
||||
loaded = load(input)
|
||||
return loaded
|
||||
}
|
||||
|
||||
export function list() {
|
||||
if (!runtime) return []
|
||||
return listPluginStatus(runtime)
|
||||
}
|
||||
dir = cwd
|
||||
loaded = load(input)
|
||||
return loaded
|
||||
}
|
||||
|
||||
export async function activatePlugin(id: string) {
|
||||
return activatePluginById(runtime, id, true)
|
||||
}
|
||||
export function list() {
|
||||
if (!runtime) return []
|
||||
return listPluginStatus(runtime)
|
||||
}
|
||||
|
||||
export async function deactivatePlugin(id: string) {
|
||||
return deactivatePluginById(runtime, id, true)
|
||||
}
|
||||
export async function activatePlugin(id: string) {
|
||||
return activatePluginById(runtime, id, true)
|
||||
}
|
||||
|
||||
export async function addPlugin(spec: string) {
|
||||
return addPluginBySpec(runtime, spec)
|
||||
}
|
||||
export async function deactivatePlugin(id: string) {
|
||||
return deactivatePluginById(runtime, id, true)
|
||||
}
|
||||
|
||||
export async function installPlugin(spec: string, options?: { global?: boolean }) {
|
||||
return installPluginBySpec(runtime, spec, options?.global)
|
||||
}
|
||||
export async function addPlugin(spec: string) {
|
||||
return addPluginBySpec(runtime, spec)
|
||||
}
|
||||
|
||||
export async function dispose() {
|
||||
const task = loaded
|
||||
loaded = undefined
|
||||
dir = ""
|
||||
if (task) await task
|
||||
const state = runtime
|
||||
runtime = undefined
|
||||
if (!state) return
|
||||
const queue = [...state.plugins].reverse()
|
||||
for (const plugin of queue) {
|
||||
await deactivatePluginEntry(state, plugin, false)
|
||||
}
|
||||
}
|
||||
export async function installPlugin(spec: string, options?: { global?: boolean }) {
|
||||
return installPluginBySpec(runtime, spec, options?.global)
|
||||
}
|
||||
|
||||
async function load(input: { api: Api; config: TuiConfig.Info }) {
|
||||
const { api, config } = input
|
||||
const cwd = process.cwd()
|
||||
const slots = setupSlots(api)
|
||||
const next: RuntimeState = {
|
||||
directory: cwd,
|
||||
api,
|
||||
slots,
|
||||
plugins: [],
|
||||
plugins_by_id: new Map(),
|
||||
pending: new Map(),
|
||||
}
|
||||
runtime = next
|
||||
try {
|
||||
const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? [])
|
||||
if (Flag.OPENCODE_PURE && config.plugin_origins?.length) {
|
||||
log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length })
|
||||
}
|
||||
|
||||
for (const item of INTERNAL_TUI_PLUGINS) {
|
||||
log.info("loading internal tui plugin", { id: item.id })
|
||||
const entry = loadInternalPlugin(item)
|
||||
const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id)
|
||||
addPluginEntry(next, {
|
||||
id: entry.id,
|
||||
load: entry,
|
||||
meta,
|
||||
themes: {},
|
||||
plugin: entry.module.tui,
|
||||
enabled: true,
|
||||
})
|
||||
}
|
||||
|
||||
const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies())
|
||||
await addExternalPluginEntries(next, ready)
|
||||
|
||||
applyInitialPluginEnabledState(next, config)
|
||||
for (const plugin of next.plugins) {
|
||||
if (!plugin.enabled) continue
|
||||
// Keep plugin execution sequential for deterministic side effects:
|
||||
// command registration order affects keybind/command precedence,
|
||||
// route registration is last-wins when ids collide,
|
||||
// and hook chains rely on stable plugin ordering.
|
||||
await activatePluginEntry(next, plugin, false)
|
||||
}
|
||||
} catch (error) {
|
||||
fail("failed to load tui plugins", { directory: cwd, error })
|
||||
}
|
||||
export async function dispose() {
|
||||
const task = loaded
|
||||
loaded = undefined
|
||||
dir = ""
|
||||
if (task) await task
|
||||
const state = runtime
|
||||
runtime = undefined
|
||||
if (!state) return
|
||||
const queue = [...state.plugins].reverse()
|
||||
for (const plugin of queue) {
|
||||
await deactivatePluginEntry(state, plugin, false)
|
||||
}
|
||||
}
|
||||
|
||||
async function load(input: { api: Api; config: TuiConfig.Info }) {
|
||||
const { api, config } = input
|
||||
const cwd = process.cwd()
|
||||
const slots = setupSlots(api)
|
||||
const next: RuntimeState = {
|
||||
directory: cwd,
|
||||
api,
|
||||
slots,
|
||||
plugins: [],
|
||||
plugins_by_id: new Map(),
|
||||
pending: new Map(),
|
||||
}
|
||||
runtime = next
|
||||
try {
|
||||
await Instance.provide({
|
||||
directory: cwd,
|
||||
fn: async () => {
|
||||
const records = Flag.OPENCODE_PURE ? [] : (config.plugin_origins ?? [])
|
||||
if (Flag.OPENCODE_PURE && config.plugin_origins?.length) {
|
||||
log.info("skipping external tui plugins in pure mode", { count: config.plugin_origins.length })
|
||||
}
|
||||
|
||||
for (const item of INTERNAL_TUI_PLUGINS) {
|
||||
log.info("loading internal tui plugin", { id: item.id })
|
||||
const entry = loadInternalPlugin(item)
|
||||
const meta = createMeta(entry.source, entry.spec, entry.target, undefined, entry.id)
|
||||
addPluginEntry(next, {
|
||||
id: entry.id,
|
||||
load: entry,
|
||||
meta,
|
||||
themes: {},
|
||||
plugin: entry.module.tui,
|
||||
enabled: true,
|
||||
})
|
||||
}
|
||||
|
||||
const ready = await resolveExternalPlugins(records, () => TuiConfig.waitForDependencies())
|
||||
await addExternalPluginEntries(next, ready)
|
||||
|
||||
applyInitialPluginEnabledState(next, config)
|
||||
for (const plugin of next.plugins) {
|
||||
if (!plugin.enabled) continue
|
||||
// Keep plugin execution sequential for deterministic side effects:
|
||||
// command registration order affects keybind/command precedence,
|
||||
// route registration is last-wins when ids collide,
|
||||
// and hook chains rely on stable plugin ordering.
|
||||
await activatePluginEntry(next, plugin, false)
|
||||
}
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
fail("failed to load tui plugins", { directory: cwd, error })
|
||||
}
|
||||
}
|
||||
|
||||
export * as TuiPluginRuntime from "./runtime"
|
||||
|
||||
@@ -44,6 +44,8 @@ import type { GrepTool } from "@/tool/grep"
|
||||
import type { EditTool } from "@/tool/edit"
|
||||
import type { ApplyPatchTool } from "@/tool/apply_patch"
|
||||
import type { WebFetchTool } from "@/tool/webfetch"
|
||||
import type { CodeSearchTool } from "@/tool/codesearch"
|
||||
import type { WebSearchTool } from "@/tool/websearch"
|
||||
import type { TaskTool } from "@/tool/task"
|
||||
import type { QuestionTool } from "@/tool/question"
|
||||
import type { SkillTool } from "@/tool/skill"
|
||||
@@ -52,7 +54,6 @@ import { useSDK } from "@tui/context/sdk"
|
||||
import { useCommandDialog } from "@tui/component/dialog-command"
|
||||
import type { DialogContext } from "@tui/ui/dialog"
|
||||
import { useKeybind } from "@tui/context/keybind"
|
||||
import { parsePatch } from "diff"
|
||||
import { useDialog } from "../../ui/dialog"
|
||||
import { TodoItem } from "../../component/todo-item"
|
||||
import { DialogMessage } from "./dialog-message"
|
||||
@@ -86,6 +87,7 @@ import { getScrollAcceleration } from "../../util/scroll"
|
||||
import { TuiPluginRuntime } from "../../plugin"
|
||||
import { DialogGoUpsell } from "../../component/dialog-go-upsell"
|
||||
import { SessionRetry } from "@/session/retry"
|
||||
import { getRevertDiffFiles } from "../../util/revert-diff"
|
||||
|
||||
addDefaultParsers(parsers.parsers)
|
||||
|
||||
@@ -597,7 +599,7 @@ export function Session() {
|
||||
{
|
||||
title: conceal() ? "Disable code concealment" : "Enable code concealment",
|
||||
value: "session.toggle.conceal",
|
||||
keybind: "messages_toggle_conceal" as any,
|
||||
keybind: "messages_toggle_conceal",
|
||||
category: "Session",
|
||||
onSelect: (dialog) => {
|
||||
setConceal((prev) => !prev)
|
||||
@@ -989,31 +991,7 @@ export function Session() {
|
||||
const revertInfo = createMemo(() => session()?.revert)
|
||||
const revertMessageID = createMemo(() => revertInfo()?.messageID)
|
||||
|
||||
const revertDiffFiles = createMemo(() => {
|
||||
const diffText = revertInfo()?.diff ?? ""
|
||||
if (!diffText) return []
|
||||
|
||||
try {
|
||||
const patches = parsePatch(diffText)
|
||||
return patches.map((patch) => {
|
||||
const filename = patch.newFileName || patch.oldFileName || "unknown"
|
||||
const cleanFilename = filename.replace(/^[ab]\//, "")
|
||||
return {
|
||||
filename: cleanFilename,
|
||||
additions: patch.hunks.reduce(
|
||||
(sum, hunk) => sum + hunk.lines.filter((line) => line.startsWith("+")).length,
|
||||
0,
|
||||
),
|
||||
deletions: patch.hunks.reduce(
|
||||
(sum, hunk) => sum + hunk.lines.filter((line) => line.startsWith("-")).length,
|
||||
0,
|
||||
),
|
||||
}
|
||||
})
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
})
|
||||
const revertDiffFiles = createMemo(() => getRevertDiffFiles(revertInfo()?.diff ?? ""))
|
||||
|
||||
const revertRevertedMessages = createMemo(() => {
|
||||
const messageID = revertMessageID()
|
||||
@@ -1934,28 +1912,26 @@ function Grep(props: ToolProps<typeof GrepTool>) {
|
||||
|
||||
function WebFetch(props: ToolProps<typeof WebFetchTool>) {
|
||||
return (
|
||||
<InlineTool icon="%" pending="Fetching from the web..." complete={(props.input as any).url} part={props.part}>
|
||||
WebFetch {(props.input as any).url}
|
||||
<InlineTool icon="%" pending="Fetching from the web..." complete={props.input.url} part={props.part}>
|
||||
WebFetch {props.input.url}
|
||||
</InlineTool>
|
||||
)
|
||||
}
|
||||
|
||||
function CodeSearch(props: ToolProps<any>) {
|
||||
const input = props.input as any
|
||||
const metadata = props.metadata as any
|
||||
function CodeSearch(props: ToolProps<typeof CodeSearchTool>) {
|
||||
const metadata = props.metadata as { results?: number }
|
||||
return (
|
||||
<InlineTool icon="◇" pending="Searching code..." complete={input.query} part={props.part}>
|
||||
Exa Code Search "{input.query}" <Show when={metadata.results}>({metadata.results} results)</Show>
|
||||
<InlineTool icon="◇" pending="Searching code..." complete={props.input.query} part={props.part}>
|
||||
Exa Code Search "{props.input.query}" <Show when={metadata.results}>({metadata.results} results)</Show>
|
||||
</InlineTool>
|
||||
)
|
||||
}
|
||||
|
||||
function WebSearch(props: ToolProps<any>) {
|
||||
const input = props.input as any
|
||||
const metadata = props.metadata as any
|
||||
function WebSearch(props: ToolProps<typeof WebSearchTool>) {
|
||||
const metadata = props.metadata as { numResults?: number }
|
||||
return (
|
||||
<InlineTool icon="◈" pending="Searching web..." complete={input.query} part={props.part}>
|
||||
Exa Web Search "{input.query}" <Show when={metadata.numResults}>({metadata.numResults} results)</Show>
|
||||
<InlineTool icon="◈" pending="Searching web..." complete={props.input.query} part={props.part}>
|
||||
Exa Web Search "{props.input.query}" <Show when={metadata.numResults}>({metadata.numResults} results)</Show>
|
||||
</InlineTool>
|
||||
)
|
||||
}
|
||||
@@ -1979,7 +1955,9 @@ function Task(props: ToolProps<typeof TaskTool>) {
|
||||
)
|
||||
})
|
||||
|
||||
const current = createMemo(() => tools().findLast((x) => (x.state as any).title))
|
||||
const current = createMemo(() =>
|
||||
tools().findLast((x) => (x.state.status === "running" || x.state.status === "completed") && x.state.title),
|
||||
)
|
||||
|
||||
const isRunning = createMemo(() => props.part.state.status === "running")
|
||||
|
||||
@@ -1996,8 +1974,11 @@ function Task(props: ToolProps<typeof TaskTool>) {
|
||||
|
||||
if (isRunning() && tools().length > 0) {
|
||||
// content[0] += ` · ${tools().length} toolcalls`
|
||||
if (current()) content.push(`↳ ${Locale.titlecase(current()!.tool)} ${(current()!.state as any).title}`)
|
||||
else content.push(`↳ ${tools().length} toolcalls`)
|
||||
if (current()) {
|
||||
const state = current()!.state
|
||||
const title = state.status === "running" || state.status === "completed" ? state.title : undefined
|
||||
content.push(`↳ ${Locale.titlecase(current()!.tool)} ${title}`)
|
||||
} else content.push(`↳ ${tools().length} toolcalls`)
|
||||
}
|
||||
|
||||
if (props.part.state.status === "completed") {
|
||||
|
||||
18
packages/opencode/src/cli/cmd/tui/util/revert-diff.ts
Normal file
18
packages/opencode/src/cli/cmd/tui/util/revert-diff.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { parsePatch } from "diff"
|
||||
|
||||
export function getRevertDiffFiles(diffText: string) {
|
||||
if (!diffText) return []
|
||||
|
||||
try {
|
||||
return parsePatch(diffText).map((patch) => {
|
||||
const filename = [patch.newFileName, patch.oldFileName].find((item) => item && item !== "/dev/null") ?? "unknown"
|
||||
return {
|
||||
filename: filename.replace(/^[ab]\//, ""),
|
||||
additions: patch.hunks.reduce((sum, hunk) => sum + hunk.lines.filter((line) => line.startsWith("+")).length, 0),
|
||||
deletions: patch.hunks.reduce((sum, hunk) => sum + hunk.lines.filter((line) => line.startsWith("-")).length, 0),
|
||||
}
|
||||
})
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
import { dlopen, ptr } from "bun:ffi"
|
||||
import type { ReadStream } from "node:tty"
|
||||
|
||||
const STD_INPUT_HANDLE = -10
|
||||
const ENABLE_PROCESSED_INPUT = 0x0001
|
||||
@@ -71,7 +72,7 @@ export function win32InstallCtrlCGuard() {
|
||||
if (!load()) return
|
||||
if (unhook) return unhook
|
||||
|
||||
const stdin = process.stdin as any
|
||||
const stdin = process.stdin as ReadStream
|
||||
const original = stdin.setRawMode
|
||||
|
||||
const handle = k32!.symbols.GetStdHandle(STD_INPUT_HANDLE)
|
||||
@@ -93,7 +94,7 @@ export function win32InstallCtrlCGuard() {
|
||||
setImmediate(enforce)
|
||||
}
|
||||
|
||||
let wrapped: ((mode: boolean) => unknown) | undefined
|
||||
let wrapped: ReadStream["setRawMode"] | undefined
|
||||
|
||||
if (typeof original === "function") {
|
||||
wrapped = (mode: boolean) => {
|
||||
|
||||
@@ -6,7 +6,7 @@ export const memoMap = Layer.makeMemoMapUnsafe()
|
||||
export function makeRuntime<I, S, E>(service: Context.Service<I, S>, layer: Layer.Layer<I, E>) {
|
||||
let rt: ManagedRuntime.ManagedRuntime<I, E> | undefined
|
||||
const getRuntime = () =>
|
||||
(rt ??= ManagedRuntime.make(Layer.merge(layer, Observability.layer) as Layer.Layer<I, E>, { memoMap }))
|
||||
(rt ??= ManagedRuntime.make(Layer.provideMerge(layer, Observability.layer) as Layer.Layer<I, E>, { memoMap }))
|
||||
|
||||
return {
|
||||
runSync: <A, Err>(fn: (svc: S) => Effect.Effect<A, Err, I>) => getRuntime().runSync(service.use(fn)),
|
||||
|
||||
@@ -28,10 +28,10 @@ export function FormatError(input: unknown) {
|
||||
// ProviderModelNotFoundError: { providerID: string, modelID: string, suggestions?: string[] }
|
||||
if (NamedError.hasName(input, "ProviderModelNotFoundError")) {
|
||||
const data = (input as ErrorLike).data
|
||||
const suggestions = data?.suggestions as string[] | undefined
|
||||
const suggestions: string[] = Array.isArray(data?.suggestions) ? data.suggestions : []
|
||||
return [
|
||||
`Model not found: ${data?.providerID}/${data?.modelID}`,
|
||||
...(Array.isArray(suggestions) && suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []),
|
||||
...(suggestions.length ? ["Did you mean: " + suggestions.join(", ")] : []),
|
||||
`Try: \`opencode models\` to list available models`,
|
||||
`Or check your config (opencode.json) provider/model names`,
|
||||
].join("\n")
|
||||
@@ -64,10 +64,10 @@ export function FormatError(input: unknown) {
|
||||
const data = (input as ErrorLike).data
|
||||
const path = data?.path
|
||||
const message = data?.message
|
||||
const issues = data?.issues as Array<{ message: string; path: string[] }> | undefined
|
||||
const issues: Array<{ message: string; path: string[] }> = Array.isArray(data?.issues) ? data.issues : []
|
||||
return [
|
||||
`Configuration is invalid${path && path !== "config" ? ` at ${path}` : ""}` + (message ? `: ${message}` : ""),
|
||||
...(issues?.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")) ?? []),
|
||||
...issues.map((issue) => "↳ " + issue.message + " " + issue.path.join(".")),
|
||||
].join("\n")
|
||||
}
|
||||
|
||||
|
||||
@@ -8,52 +8,52 @@ const log = Log.create({ service: "heap" })
|
||||
const MINUTE = 60_000
|
||||
const LIMIT = 2 * 1024 * 1024 * 1024
|
||||
|
||||
export namespace Heap {
|
||||
let timer: Timer | undefined
|
||||
let lock = false
|
||||
let armed = true
|
||||
let timer: Timer | undefined
|
||||
let lock = false
|
||||
let armed = true
|
||||
|
||||
export function start() {
|
||||
if (!Flag.OPENCODE_AUTO_HEAP_SNAPSHOT) return
|
||||
if (timer) return
|
||||
export function start() {
|
||||
if (!Flag.OPENCODE_AUTO_HEAP_SNAPSHOT) return
|
||||
if (timer) return
|
||||
|
||||
const run = async () => {
|
||||
if (lock) return
|
||||
const run = async () => {
|
||||
if (lock) return
|
||||
|
||||
const stat = process.memoryUsage()
|
||||
if (stat.rss <= LIMIT) {
|
||||
armed = true
|
||||
return
|
||||
}
|
||||
if (!armed) return
|
||||
const stat = process.memoryUsage()
|
||||
if (stat.rss <= LIMIT) {
|
||||
armed = true
|
||||
return
|
||||
}
|
||||
if (!armed) return
|
||||
|
||||
lock = true
|
||||
armed = false
|
||||
const file = path.join(
|
||||
Global.Path.log,
|
||||
`heap-${process.pid}-${new Date().toISOString().replace(/[:.]/g, "")}.heapsnapshot`,
|
||||
)
|
||||
log.warn("heap usage exceeded limit", {
|
||||
rss: stat.rss,
|
||||
heap: stat.heapUsed,
|
||||
file,
|
||||
lock = true
|
||||
armed = false
|
||||
const file = path.join(
|
||||
Global.Path.log,
|
||||
`heap-${process.pid}-${new Date().toISOString().replace(/[:.]/g, "")}.heapsnapshot`,
|
||||
)
|
||||
log.warn("heap usage exceeded limit", {
|
||||
rss: stat.rss,
|
||||
heap: stat.heapUsed,
|
||||
file,
|
||||
})
|
||||
|
||||
await Promise.resolve()
|
||||
.then(() => writeHeapSnapshot(file))
|
||||
.catch((err) => {
|
||||
log.error("failed to write heap snapshot", {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
file,
|
||||
})
|
||||
})
|
||||
|
||||
await Promise.resolve()
|
||||
.then(() => writeHeapSnapshot(file))
|
||||
.catch((err) => {
|
||||
log.error("failed to write heap snapshot", {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
file,
|
||||
})
|
||||
})
|
||||
|
||||
lock = false
|
||||
}
|
||||
|
||||
timer = setInterval(() => {
|
||||
void run()
|
||||
}, MINUTE)
|
||||
timer.unref?.()
|
||||
lock = false
|
||||
}
|
||||
|
||||
timer = setInterval(() => {
|
||||
void run()
|
||||
}, MINUTE)
|
||||
timer.unref?.()
|
||||
}
|
||||
|
||||
export * as Heap from "./heap"
|
||||
|
||||
@@ -3,4 +3,9 @@ export const logo = {
|
||||
right: [" ▄ ", "█▀▀▀ █▀▀█ █▀▀█ █▀▀█", "█___ █__█ █__█ █^^^", "▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀"],
|
||||
}
|
||||
|
||||
export const marks = "_^~"
|
||||
export const go = {
|
||||
left: [" ", "█▀▀▀", "█_^█", "▀▀▀▀"],
|
||||
right: [" ", "█▀▀█", "█__█", "▀▀▀▀"],
|
||||
}
|
||||
|
||||
export const marks = "_^~,"
|
||||
|
||||
@@ -3,131 +3,131 @@ import { EOL } from "os"
|
||||
import { NamedError } from "@opencode-ai/shared/util/error"
|
||||
import { logo as glyphs } from "./logo"
|
||||
|
||||
export namespace UI {
|
||||
const wordmark = [
|
||||
`⠀ ▄ `,
|
||||
`█▀▀█ █▀▀█ █▀▀█ █▀▀▄ █▀▀▀ █▀▀█ █▀▀█ █▀▀█`,
|
||||
`█ █ █ █ █▀▀▀ █ █ █ █ █ █ █ █▀▀▀`,
|
||||
`▀▀▀▀ █▀▀▀ ▀▀▀▀ ▀ ▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀`,
|
||||
]
|
||||
const wordmark = [
|
||||
`⠀ ▄ `,
|
||||
`█▀▀█ █▀▀█ █▀▀█ █▀▀▄ █▀▀▀ █▀▀█ █▀▀█ █▀▀█`,
|
||||
`█ █ █ █ █▀▀▀ █ █ █ █ █ █ █ █▀▀▀`,
|
||||
`▀▀▀▀ █▀▀▀ ▀▀▀▀ ▀ ▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀ ▀▀▀▀`,
|
||||
]
|
||||
|
||||
export const CancelledError = NamedError.create("UICancelledError", z.void())
|
||||
export const CancelledError = NamedError.create("UICancelledError", z.void())
|
||||
|
||||
export const Style = {
|
||||
TEXT_HIGHLIGHT: "\x1b[96m",
|
||||
TEXT_HIGHLIGHT_BOLD: "\x1b[96m\x1b[1m",
|
||||
TEXT_DIM: "\x1b[90m",
|
||||
TEXT_DIM_BOLD: "\x1b[90m\x1b[1m",
|
||||
TEXT_NORMAL: "\x1b[0m",
|
||||
TEXT_NORMAL_BOLD: "\x1b[1m",
|
||||
TEXT_WARNING: "\x1b[93m",
|
||||
TEXT_WARNING_BOLD: "\x1b[93m\x1b[1m",
|
||||
TEXT_DANGER: "\x1b[91m",
|
||||
TEXT_DANGER_BOLD: "\x1b[91m\x1b[1m",
|
||||
TEXT_SUCCESS: "\x1b[92m",
|
||||
TEXT_SUCCESS_BOLD: "\x1b[92m\x1b[1m",
|
||||
TEXT_INFO: "\x1b[94m",
|
||||
TEXT_INFO_BOLD: "\x1b[94m\x1b[1m",
|
||||
}
|
||||
export const Style = {
|
||||
TEXT_HIGHLIGHT: "\x1b[96m",
|
||||
TEXT_HIGHLIGHT_BOLD: "\x1b[96m\x1b[1m",
|
||||
TEXT_DIM: "\x1b[90m",
|
||||
TEXT_DIM_BOLD: "\x1b[90m\x1b[1m",
|
||||
TEXT_NORMAL: "\x1b[0m",
|
||||
TEXT_NORMAL_BOLD: "\x1b[1m",
|
||||
TEXT_WARNING: "\x1b[93m",
|
||||
TEXT_WARNING_BOLD: "\x1b[93m\x1b[1m",
|
||||
TEXT_DANGER: "\x1b[91m",
|
||||
TEXT_DANGER_BOLD: "\x1b[91m\x1b[1m",
|
||||
TEXT_SUCCESS: "\x1b[92m",
|
||||
TEXT_SUCCESS_BOLD: "\x1b[92m\x1b[1m",
|
||||
TEXT_INFO: "\x1b[94m",
|
||||
TEXT_INFO_BOLD: "\x1b[94m\x1b[1m",
|
||||
}
|
||||
|
||||
export function println(...message: string[]) {
|
||||
print(...message)
|
||||
process.stderr.write(EOL)
|
||||
}
|
||||
export function println(...message: string[]) {
|
||||
print(...message)
|
||||
process.stderr.write(EOL)
|
||||
}
|
||||
|
||||
export function print(...message: string[]) {
|
||||
blank = false
|
||||
process.stderr.write(message.join(" "))
|
||||
}
|
||||
export function print(...message: string[]) {
|
||||
blank = false
|
||||
process.stderr.write(message.join(" "))
|
||||
}
|
||||
|
||||
let blank = false
|
||||
export function empty() {
|
||||
if (blank) return
|
||||
println("" + Style.TEXT_NORMAL)
|
||||
blank = true
|
||||
}
|
||||
let blank = false
|
||||
export function empty() {
|
||||
if (blank) return
|
||||
println("" + Style.TEXT_NORMAL)
|
||||
blank = true
|
||||
}
|
||||
|
||||
export function logo(pad?: string) {
|
||||
if (!process.stdout.isTTY && !process.stderr.isTTY) {
|
||||
const result = []
|
||||
for (const row of wordmark) {
|
||||
if (pad) result.push(pad)
|
||||
result.push(row)
|
||||
result.push(EOL)
|
||||
}
|
||||
return result.join("").trimEnd()
|
||||
}
|
||||
|
||||
const result: string[] = []
|
||||
const reset = "\x1b[0m"
|
||||
const left = {
|
||||
fg: "\x1b[90m",
|
||||
shadow: "\x1b[38;5;235m",
|
||||
bg: "\x1b[48;5;235m",
|
||||
}
|
||||
const right = {
|
||||
fg: reset,
|
||||
shadow: "\x1b[38;5;238m",
|
||||
bg: "\x1b[48;5;238m",
|
||||
}
|
||||
const gap = " "
|
||||
const draw = (line: string, fg: string, shadow: string, bg: string) => {
|
||||
const parts: string[] = []
|
||||
for (const char of line) {
|
||||
if (char === "_") {
|
||||
parts.push(bg, " ", reset)
|
||||
continue
|
||||
}
|
||||
if (char === "^") {
|
||||
parts.push(fg, bg, "▀", reset)
|
||||
continue
|
||||
}
|
||||
if (char === "~") {
|
||||
parts.push(shadow, "▀", reset)
|
||||
continue
|
||||
}
|
||||
if (char === " ") {
|
||||
parts.push(" ")
|
||||
continue
|
||||
}
|
||||
parts.push(fg, char, reset)
|
||||
}
|
||||
return parts.join("")
|
||||
}
|
||||
glyphs.left.forEach((row, index) => {
|
||||
export function logo(pad?: string) {
|
||||
if (!process.stdout.isTTY && !process.stderr.isTTY) {
|
||||
const result = []
|
||||
for (const row of wordmark) {
|
||||
if (pad) result.push(pad)
|
||||
result.push(draw(row, left.fg, left.shadow, left.bg))
|
||||
result.push(gap)
|
||||
const other = glyphs.right[index] ?? ""
|
||||
result.push(draw(other, right.fg, right.shadow, right.bg))
|
||||
result.push(row)
|
||||
result.push(EOL)
|
||||
})
|
||||
}
|
||||
return result.join("").trimEnd()
|
||||
}
|
||||
|
||||
export async function input(prompt: string): Promise<string> {
|
||||
const readline = require("readline")
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
})
|
||||
|
||||
return new Promise((resolve) => {
|
||||
rl.question(prompt, (answer: string) => {
|
||||
rl.close()
|
||||
resolve(answer.trim())
|
||||
})
|
||||
})
|
||||
const result: string[] = []
|
||||
const reset = "\x1b[0m"
|
||||
const left = {
|
||||
fg: "\x1b[90m",
|
||||
shadow: "\x1b[38;5;235m",
|
||||
bg: "\x1b[48;5;235m",
|
||||
}
|
||||
|
||||
export function error(message: string) {
|
||||
if (message.startsWith("Error: ")) {
|
||||
message = message.slice("Error: ".length)
|
||||
const right = {
|
||||
fg: reset,
|
||||
shadow: "\x1b[38;5;238m",
|
||||
bg: "\x1b[48;5;238m",
|
||||
}
|
||||
const gap = " "
|
||||
const draw = (line: string, fg: string, shadow: string, bg: string) => {
|
||||
const parts: string[] = []
|
||||
for (const char of line) {
|
||||
if (char === "_") {
|
||||
parts.push(bg, " ", reset)
|
||||
continue
|
||||
}
|
||||
if (char === "^") {
|
||||
parts.push(fg, bg, "▀", reset)
|
||||
continue
|
||||
}
|
||||
if (char === "~") {
|
||||
parts.push(shadow, "▀", reset)
|
||||
continue
|
||||
}
|
||||
if (char === " ") {
|
||||
parts.push(" ")
|
||||
continue
|
||||
}
|
||||
parts.push(fg, char, reset)
|
||||
}
|
||||
println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message)
|
||||
}
|
||||
|
||||
export function markdown(text: string): string {
|
||||
return text
|
||||
return parts.join("")
|
||||
}
|
||||
glyphs.left.forEach((row, index) => {
|
||||
if (pad) result.push(pad)
|
||||
result.push(draw(row, left.fg, left.shadow, left.bg))
|
||||
result.push(gap)
|
||||
const other = glyphs.right[index] ?? ""
|
||||
result.push(draw(other, right.fg, right.shadow, right.bg))
|
||||
result.push(EOL)
|
||||
})
|
||||
return result.join("").trimEnd()
|
||||
}
|
||||
|
||||
export async function input(prompt: string): Promise<string> {
|
||||
const readline = require("readline")
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
})
|
||||
|
||||
return new Promise((resolve) => {
|
||||
rl.question(prompt, (answer: string) => {
|
||||
rl.close()
|
||||
resolve(answer.trim())
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
export function error(message: string) {
|
||||
if (message.startsWith("Error: ")) {
|
||||
message = message.slice("Error: ".length)
|
||||
}
|
||||
println(Style.TEXT_DANGER_BOLD + "Error: " + Style.TEXT_NORMAL + message)
|
||||
}
|
||||
|
||||
export function markdown(text: string): string {
|
||||
return text
|
||||
}
|
||||
|
||||
export * as UI from "./ui"
|
||||
|
||||
@@ -1,186 +0,0 @@
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { InstanceState } from "@/effect"
|
||||
import { EffectBridge } from "@/effect"
|
||||
import type { InstanceContext } from "@/project/instance"
|
||||
import { SessionID, MessageID } from "@/session/schema"
|
||||
import { Effect, Layer, Context } from "effect"
|
||||
import z from "zod"
|
||||
import { Config } from "../config"
|
||||
import { MCP } from "../mcp"
|
||||
import { Skill } from "../skill"
|
||||
import PROMPT_INITIALIZE from "./template/initialize.txt"
|
||||
import PROMPT_REVIEW from "./template/review.txt"
|
||||
|
||||
type State = {
|
||||
commands: Record<string, Info>
|
||||
}
|
||||
|
||||
export const Event = {
|
||||
Executed: BusEvent.define(
|
||||
"command.executed",
|
||||
z.object({
|
||||
name: z.string(),
|
||||
sessionID: SessionID.zod,
|
||||
arguments: z.string(),
|
||||
messageID: MessageID.zod,
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
agent: z.string().optional(),
|
||||
model: z.string().optional(),
|
||||
source: z.enum(["command", "mcp", "skill"]).optional(),
|
||||
// workaround for zod not supporting async functions natively so we use getters
|
||||
// https://zod.dev/v4/changelog?id=zfunction
|
||||
template: z.promise(z.string()).or(z.string()),
|
||||
subtask: z.boolean().optional(),
|
||||
hints: z.array(z.string()),
|
||||
})
|
||||
.meta({
|
||||
ref: "Command",
|
||||
})
|
||||
|
||||
// for some reason zod is inferring `string` for z.promise(z.string()).or(z.string()) so we have to manually override it
|
||||
export type Info = Omit<z.infer<typeof Info>, "template"> & { template: Promise<string> | string }
|
||||
|
||||
export function hints(template: string) {
|
||||
const result: string[] = []
|
||||
const numbered = template.match(/\$\d+/g)
|
||||
if (numbered) {
|
||||
for (const match of [...new Set(numbered)].sort()) result.push(match)
|
||||
}
|
||||
if (template.includes("$ARGUMENTS")) result.push("$ARGUMENTS")
|
||||
return result
|
||||
}
|
||||
|
||||
export const Default = {
|
||||
INIT: "init",
|
||||
REVIEW: "review",
|
||||
} as const
|
||||
|
||||
export interface Interface {
|
||||
readonly get: (name: string) => Effect.Effect<Info | undefined>
|
||||
readonly list: () => Effect.Effect<Info[]>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Command") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const config = yield* Config.Service
|
||||
const mcp = yield* MCP.Service
|
||||
const skill = yield* Skill.Service
|
||||
|
||||
const init = Effect.fn("Command.state")(function* (ctx: InstanceContext) {
|
||||
const cfg = yield* config.get()
|
||||
const bridge = yield* EffectBridge.make()
|
||||
const commands: Record<string, Info> = {}
|
||||
|
||||
commands[Default.INIT] = {
|
||||
name: Default.INIT,
|
||||
description: "guided AGENTS.md setup",
|
||||
source: "command",
|
||||
get template() {
|
||||
return PROMPT_INITIALIZE.replace("${path}", ctx.worktree)
|
||||
},
|
||||
hints: hints(PROMPT_INITIALIZE),
|
||||
}
|
||||
commands[Default.REVIEW] = {
|
||||
name: Default.REVIEW,
|
||||
description: "review changes [commit|branch|pr], defaults to uncommitted",
|
||||
source: "command",
|
||||
get template() {
|
||||
return PROMPT_REVIEW.replace("${path}", ctx.worktree)
|
||||
},
|
||||
subtask: true,
|
||||
hints: hints(PROMPT_REVIEW),
|
||||
}
|
||||
|
||||
for (const [name, command] of Object.entries(cfg.command ?? {})) {
|
||||
commands[name] = {
|
||||
name,
|
||||
agent: command.agent,
|
||||
model: command.model,
|
||||
description: command.description,
|
||||
source: "command",
|
||||
get template() {
|
||||
return command.template
|
||||
},
|
||||
subtask: command.subtask,
|
||||
hints: hints(command.template),
|
||||
}
|
||||
}
|
||||
|
||||
for (const [name, prompt] of Object.entries(yield* mcp.prompts())) {
|
||||
commands[name] = {
|
||||
name,
|
||||
source: "mcp",
|
||||
description: prompt.description,
|
||||
get template() {
|
||||
return bridge.promise(
|
||||
mcp
|
||||
.getPrompt(
|
||||
prompt.client,
|
||||
prompt.name,
|
||||
prompt.arguments
|
||||
? Object.fromEntries(prompt.arguments.map((argument, i) => [argument.name, `$${i + 1}`]))
|
||||
: {},
|
||||
)
|
||||
.pipe(
|
||||
Effect.map(
|
||||
(template) =>
|
||||
template?.messages
|
||||
.map((message) => (message.content.type === "text" ? message.content.text : ""))
|
||||
.join("\n") || "",
|
||||
),
|
||||
),
|
||||
)
|
||||
},
|
||||
hints: prompt.arguments?.map((_, i) => `$${i + 1}`) ?? [],
|
||||
}
|
||||
}
|
||||
|
||||
for (const item of yield* skill.all()) {
|
||||
if (commands[item.name]) continue
|
||||
commands[item.name] = {
|
||||
name: item.name,
|
||||
description: item.description,
|
||||
source: "skill",
|
||||
get template() {
|
||||
return item.content
|
||||
},
|
||||
hints: [],
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
commands,
|
||||
}
|
||||
})
|
||||
|
||||
const state = yield* InstanceState.make<State>((ctx) => init(ctx))
|
||||
|
||||
const get = Effect.fn("Command.get")(function* (name: string) {
|
||||
const s = yield* InstanceState.get(state)
|
||||
return s.commands[name]
|
||||
})
|
||||
|
||||
const list = Effect.fn("Command.list")(function* () {
|
||||
const s = yield* InstanceState.get(state)
|
||||
return Object.values(s.commands)
|
||||
})
|
||||
|
||||
return Service.of({ get, list })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(
|
||||
Layer.provide(Config.defaultLayer),
|
||||
Layer.provide(MCP.defaultLayer),
|
||||
Layer.provide(Skill.defaultLayer),
|
||||
)
|
||||
@@ -1 +1,188 @@
|
||||
export * as Command from "./command"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { InstanceState } from "@/effect"
|
||||
import { EffectBridge } from "@/effect"
|
||||
import type { InstanceContext } from "@/project/instance"
|
||||
import { SessionID, MessageID } from "@/session/schema"
|
||||
import { Effect, Layer, Context } from "effect"
|
||||
import z from "zod"
|
||||
import { Config } from "../config"
|
||||
import { MCP } from "../mcp"
|
||||
import { Skill } from "../skill"
|
||||
import PROMPT_INITIALIZE from "./template/initialize.txt"
|
||||
import PROMPT_REVIEW from "./template/review.txt"
|
||||
|
||||
type State = {
|
||||
commands: Record<string, Info>
|
||||
}
|
||||
|
||||
export const Event = {
|
||||
Executed: BusEvent.define(
|
||||
"command.executed",
|
||||
z.object({
|
||||
name: z.string(),
|
||||
sessionID: SessionID.zod,
|
||||
arguments: z.string(),
|
||||
messageID: MessageID.zod,
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
agent: z.string().optional(),
|
||||
model: z.string().optional(),
|
||||
source: z.enum(["command", "mcp", "skill"]).optional(),
|
||||
// workaround for zod not supporting async functions natively so we use getters
|
||||
// https://zod.dev/v4/changelog?id=zfunction
|
||||
template: z.promise(z.string()).or(z.string()),
|
||||
subtask: z.boolean().optional(),
|
||||
hints: z.array(z.string()),
|
||||
})
|
||||
.meta({
|
||||
ref: "Command",
|
||||
})
|
||||
|
||||
// for some reason zod is inferring `string` for z.promise(z.string()).or(z.string()) so we have to manually override it
|
||||
export type Info = Omit<z.infer<typeof Info>, "template"> & { template: Promise<string> | string }
|
||||
|
||||
export function hints(template: string) {
|
||||
const result: string[] = []
|
||||
const numbered = template.match(/\$\d+/g)
|
||||
if (numbered) {
|
||||
for (const match of [...new Set(numbered)].sort()) result.push(match)
|
||||
}
|
||||
if (template.includes("$ARGUMENTS")) result.push("$ARGUMENTS")
|
||||
return result
|
||||
}
|
||||
|
||||
export const Default = {
|
||||
INIT: "init",
|
||||
REVIEW: "review",
|
||||
} as const
|
||||
|
||||
export interface Interface {
|
||||
readonly get: (name: string) => Effect.Effect<Info | undefined>
|
||||
readonly list: () => Effect.Effect<Info[]>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Command") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const config = yield* Config.Service
|
||||
const mcp = yield* MCP.Service
|
||||
const skill = yield* Skill.Service
|
||||
|
||||
const init = Effect.fn("Command.state")(function* (ctx: InstanceContext) {
|
||||
const cfg = yield* config.get()
|
||||
const bridge = yield* EffectBridge.make()
|
||||
const commands: Record<string, Info> = {}
|
||||
|
||||
commands[Default.INIT] = {
|
||||
name: Default.INIT,
|
||||
description: "guided AGENTS.md setup",
|
||||
source: "command",
|
||||
get template() {
|
||||
return PROMPT_INITIALIZE.replace("${path}", ctx.worktree)
|
||||
},
|
||||
hints: hints(PROMPT_INITIALIZE),
|
||||
}
|
||||
commands[Default.REVIEW] = {
|
||||
name: Default.REVIEW,
|
||||
description: "review changes [commit|branch|pr], defaults to uncommitted",
|
||||
source: "command",
|
||||
get template() {
|
||||
return PROMPT_REVIEW.replace("${path}", ctx.worktree)
|
||||
},
|
||||
subtask: true,
|
||||
hints: hints(PROMPT_REVIEW),
|
||||
}
|
||||
|
||||
for (const [name, command] of Object.entries(cfg.command ?? {})) {
|
||||
commands[name] = {
|
||||
name,
|
||||
agent: command.agent,
|
||||
model: command.model,
|
||||
description: command.description,
|
||||
source: "command",
|
||||
get template() {
|
||||
return command.template
|
||||
},
|
||||
subtask: command.subtask,
|
||||
hints: hints(command.template),
|
||||
}
|
||||
}
|
||||
|
||||
for (const [name, prompt] of Object.entries(yield* mcp.prompts())) {
|
||||
commands[name] = {
|
||||
name,
|
||||
source: "mcp",
|
||||
description: prompt.description,
|
||||
get template() {
|
||||
return bridge.promise(
|
||||
mcp
|
||||
.getPrompt(
|
||||
prompt.client,
|
||||
prompt.name,
|
||||
prompt.arguments
|
||||
? Object.fromEntries(prompt.arguments.map((argument, i) => [argument.name, `$${i + 1}`]))
|
||||
: {},
|
||||
)
|
||||
.pipe(
|
||||
Effect.map(
|
||||
(template) =>
|
||||
template?.messages
|
||||
.map((message) => (message.content.type === "text" ? message.content.text : ""))
|
||||
.join("\n") || "",
|
||||
),
|
||||
),
|
||||
)
|
||||
},
|
||||
hints: prompt.arguments?.map((_, i) => `$${i + 1}`) ?? [],
|
||||
}
|
||||
}
|
||||
|
||||
for (const item of yield* skill.all()) {
|
||||
if (commands[item.name]) continue
|
||||
commands[item.name] = {
|
||||
name: item.name,
|
||||
description: item.description,
|
||||
source: "skill",
|
||||
get template() {
|
||||
return item.content
|
||||
},
|
||||
hints: [],
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
commands,
|
||||
}
|
||||
})
|
||||
|
||||
const state = yield* InstanceState.make<State>((ctx) => init(ctx))
|
||||
|
||||
const get = Effect.fn("Command.get")(function* (name: string) {
|
||||
const s = yield* InstanceState.get(state)
|
||||
return s.commands[name]
|
||||
})
|
||||
|
||||
const list = Effect.fn("Command.list")(function* () {
|
||||
const s = yield* InstanceState.get(state)
|
||||
return Object.values(s.commands)
|
||||
})
|
||||
|
||||
return Service.of({ get, list })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(
|
||||
Layer.provide(Config.defaultLayer),
|
||||
Layer.provide(MCP.defaultLayer),
|
||||
Layer.provide(Skill.defaultLayer),
|
||||
)
|
||||
|
||||
export * as Command from "."
|
||||
|
||||
171
packages/opencode/src/config/agent.ts
Normal file
171
packages/opencode/src/config/agent.ts
Normal file
@@ -0,0 +1,171 @@
|
||||
export * as ConfigAgent from "./agent"
|
||||
|
||||
import { Log } from "../util"
|
||||
import z from "zod"
|
||||
import { NamedError } from "@opencode-ai/shared/util/error"
|
||||
import { Glob } from "@opencode-ai/shared/util/glob"
|
||||
import { Bus } from "@/bus"
|
||||
import { configEntryNameFromPath } from "./entry-name"
|
||||
import { InvalidError } from "./error"
|
||||
import * as ConfigMarkdown from "./markdown"
|
||||
import { ConfigModelID } from "./model-id"
|
||||
import { ConfigPermission } from "./permission"
|
||||
|
||||
const log = Log.create({ service: "config" })
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
model: ConfigModelID.optional(),
|
||||
variant: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("Default model variant for this agent (applies only when using the agent's configured model)."),
|
||||
temperature: z.number().optional(),
|
||||
top_p: z.number().optional(),
|
||||
prompt: z.string().optional(),
|
||||
tools: z.record(z.string(), z.boolean()).optional().describe("@deprecated Use 'permission' field instead"),
|
||||
disable: z.boolean().optional(),
|
||||
description: z.string().optional().describe("Description of when to use the agent"),
|
||||
mode: z.enum(["subagent", "primary", "all"]).optional(),
|
||||
hidden: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe("Hide this subagent from the @ autocomplete menu (default: false, only applies to mode: subagent)"),
|
||||
options: z.record(z.string(), z.any()).optional(),
|
||||
color: z
|
||||
.union([
|
||||
z.string().regex(/^#[0-9a-fA-F]{6}$/, "Invalid hex color format"),
|
||||
z.enum(["primary", "secondary", "accent", "success", "warning", "error", "info"]),
|
||||
])
|
||||
.optional()
|
||||
.describe("Hex color code (e.g., #FF5733) or theme color (e.g., primary)"),
|
||||
steps: z
|
||||
.number()
|
||||
.int()
|
||||
.positive()
|
||||
.optional()
|
||||
.describe("Maximum number of agentic iterations before forcing text-only response"),
|
||||
maxSteps: z.number().int().positive().optional().describe("@deprecated Use 'steps' field instead."),
|
||||
permission: ConfigPermission.Info.optional(),
|
||||
})
|
||||
.catchall(z.any())
|
||||
.transform((agent, _ctx) => {
|
||||
const knownKeys = new Set([
|
||||
"name",
|
||||
"model",
|
||||
"variant",
|
||||
"prompt",
|
||||
"description",
|
||||
"temperature",
|
||||
"top_p",
|
||||
"mode",
|
||||
"hidden",
|
||||
"color",
|
||||
"steps",
|
||||
"maxSteps",
|
||||
"options",
|
||||
"permission",
|
||||
"disable",
|
||||
"tools",
|
||||
])
|
||||
|
||||
const options: Record<string, unknown> = { ...agent.options }
|
||||
for (const [key, value] of Object.entries(agent)) {
|
||||
if (!knownKeys.has(key)) options[key] = value
|
||||
}
|
||||
|
||||
const permission: ConfigPermission.Info = {}
|
||||
for (const [tool, enabled] of Object.entries(agent.tools ?? {})) {
|
||||
const action = enabled ? "allow" : "deny"
|
||||
if (tool === "write" || tool === "edit" || tool === "patch" || tool === "multiedit") {
|
||||
permission.edit = action
|
||||
continue
|
||||
}
|
||||
permission[tool] = action
|
||||
}
|
||||
Object.assign(permission, agent.permission)
|
||||
|
||||
const steps = agent.steps ?? agent.maxSteps
|
||||
|
||||
return { ...agent, options, permission, steps } as typeof agent & {
|
||||
options?: Record<string, unknown>
|
||||
permission?: ConfigPermission.Info
|
||||
steps?: number
|
||||
}
|
||||
})
|
||||
.meta({
|
||||
ref: "AgentConfig",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export async function load(dir: string) {
|
||||
const result: Record<string, Info> = {}
|
||||
for (const item of await Glob.scan("{agent,agents}/**/*.md", {
|
||||
cwd: dir,
|
||||
absolute: true,
|
||||
dot: true,
|
||||
symlink: true,
|
||||
})) {
|
||||
const md = await ConfigMarkdown.parse(item).catch(async (err) => {
|
||||
const message = ConfigMarkdown.FrontmatterError.isInstance(err)
|
||||
? err.data.message
|
||||
: `Failed to parse agent ${item}`
|
||||
const { Session } = await import("@/session")
|
||||
void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })
|
||||
log.error("failed to load agent", { agent: item, err })
|
||||
return undefined
|
||||
})
|
||||
if (!md) continue
|
||||
|
||||
const patterns = ["/.opencode/agent/", "/.opencode/agents/", "/agent/", "/agents/"]
|
||||
const name = configEntryNameFromPath(item, patterns)
|
||||
|
||||
const config = {
|
||||
name,
|
||||
...md.data,
|
||||
prompt: md.content.trim(),
|
||||
}
|
||||
const parsed = Info.safeParse(config)
|
||||
if (parsed.success) {
|
||||
result[config.name] = parsed.data
|
||||
continue
|
||||
}
|
||||
throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error })
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
export async function loadMode(dir: string) {
|
||||
const result: Record<string, Info> = {}
|
||||
for (const item of await Glob.scan("{mode,modes}/*.md", {
|
||||
cwd: dir,
|
||||
absolute: true,
|
||||
dot: true,
|
||||
symlink: true,
|
||||
})) {
|
||||
const md = await ConfigMarkdown.parse(item).catch(async (err) => {
|
||||
const message = ConfigMarkdown.FrontmatterError.isInstance(err)
|
||||
? err.data.message
|
||||
: `Failed to parse mode ${item}`
|
||||
const { Session } = await import("@/session")
|
||||
void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })
|
||||
log.error("failed to load mode", { mode: item, err })
|
||||
return undefined
|
||||
})
|
||||
if (!md) continue
|
||||
|
||||
const config = {
|
||||
name: configEntryNameFromPath(item, []),
|
||||
...md.data,
|
||||
prompt: md.content.trim(),
|
||||
}
|
||||
const parsed = Info.safeParse(config)
|
||||
if (parsed.success) {
|
||||
result[config.name] = {
|
||||
...parsed.data,
|
||||
mode: "primary" as const,
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
60
packages/opencode/src/config/command.ts
Normal file
60
packages/opencode/src/config/command.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
export * as ConfigCommand from "./command"
|
||||
|
||||
import { Log } from "../util"
|
||||
import z from "zod"
|
||||
import { NamedError } from "@opencode-ai/shared/util/error"
|
||||
import { Glob } from "@opencode-ai/shared/util/glob"
|
||||
import { Bus } from "@/bus"
|
||||
import { configEntryNameFromPath } from "./entry-name"
|
||||
import { InvalidError } from "./error"
|
||||
import * as ConfigMarkdown from "./markdown"
|
||||
import { ConfigModelID } from "./model-id"
|
||||
|
||||
const log = Log.create({ service: "config" })
|
||||
|
||||
export const Info = z.object({
|
||||
template: z.string(),
|
||||
description: z.string().optional(),
|
||||
agent: z.string().optional(),
|
||||
model: ConfigModelID.optional(),
|
||||
subtask: z.boolean().optional(),
|
||||
})
|
||||
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export async function load(dir: string) {
|
||||
const result: Record<string, Info> = {}
|
||||
for (const item of await Glob.scan("{command,commands}/**/*.md", {
|
||||
cwd: dir,
|
||||
absolute: true,
|
||||
dot: true,
|
||||
symlink: true,
|
||||
})) {
|
||||
const md = await ConfigMarkdown.parse(item).catch(async (err) => {
|
||||
const message = ConfigMarkdown.FrontmatterError.isInstance(err)
|
||||
? err.data.message
|
||||
: `Failed to parse command ${item}`
|
||||
const { Session } = await import("@/session")
|
||||
void Bus.publish(Session.Event.Error, { error: new NamedError.Unknown({ message }).toObject() })
|
||||
log.error("failed to load command", { command: item, err })
|
||||
return undefined
|
||||
})
|
||||
if (!md) continue
|
||||
|
||||
const patterns = ["/.opencode/command/", "/.opencode/commands/", "/command/", "/commands/"]
|
||||
const name = configEntryNameFromPath(item, patterns)
|
||||
|
||||
const config = {
|
||||
name,
|
||||
...md.data,
|
||||
template: md.content.trim(),
|
||||
}
|
||||
const parsed = Info.safeParse(config)
|
||||
if (parsed.success) {
|
||||
result[config.name] = parsed.data
|
||||
continue
|
||||
}
|
||||
throw new InvalidError({ path: item, issues: parsed.error.issues }, { cause: parsed.error })
|
||||
}
|
||||
return result
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
16
packages/opencode/src/config/entry-name.ts
Normal file
16
packages/opencode/src/config/entry-name.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import path from "path"
|
||||
|
||||
function sliceAfterMatch(filePath: string, searchRoots: string[]) {
|
||||
const normalizedPath = filePath.replaceAll("\\", "/")
|
||||
for (const searchRoot of searchRoots) {
|
||||
const index = normalizedPath.indexOf(searchRoot)
|
||||
if (index === -1) continue
|
||||
return normalizedPath.slice(index + searchRoot.length)
|
||||
}
|
||||
}
|
||||
|
||||
export function configEntryNameFromPath(filePath: string, searchRoots: string[]) {
|
||||
const candidate = sliceAfterMatch(filePath, searchRoots) ?? path.basename(filePath)
|
||||
const ext = path.extname(candidate)
|
||||
return ext.length ? candidate.slice(0, -ext.length) : candidate
|
||||
}
|
||||
21
packages/opencode/src/config/error.ts
Normal file
21
packages/opencode/src/config/error.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
export * as ConfigError from "./error"
|
||||
|
||||
import z from "zod"
|
||||
import { NamedError } from "@opencode-ai/shared/util/error"
|
||||
|
||||
export const JsonError = NamedError.create(
|
||||
"ConfigJsonError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const InvalidError = NamedError.create(
|
||||
"ConfigInvalidError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
issues: z.custom<z.core.$ZodIssue[]>().optional(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
13
packages/opencode/src/config/formatter.ts
Normal file
13
packages/opencode/src/config/formatter.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
export * as ConfigFormatter from "./formatter"
|
||||
|
||||
import z from "zod"
|
||||
|
||||
export const Entry = z.object({
|
||||
disabled: z.boolean().optional(),
|
||||
command: z.array(z.string()).optional(),
|
||||
environment: z.record(z.string(), z.string()).optional(),
|
||||
extensions: z.array(z.string()).optional(),
|
||||
})
|
||||
|
||||
export const Info = z.union([z.boolean(), z.record(z.string(), Entry)])
|
||||
export type Info = z.infer<typeof Info>
|
||||
@@ -1,3 +1,16 @@
|
||||
export * as Config from "./config"
|
||||
export * as ConfigAgent from "./agent"
|
||||
export * as ConfigCommand from "./command"
|
||||
export * as ConfigError from "./error"
|
||||
export * as ConfigFormatter from "./formatter"
|
||||
export * as ConfigLSP from "./lsp"
|
||||
export * as ConfigVariable from "./variable"
|
||||
export { ConfigManaged } from "./managed"
|
||||
export * as ConfigMarkdown from "./markdown"
|
||||
export * as ConfigMCP from "./mcp"
|
||||
export { ConfigModelID } from "./model-id"
|
||||
export * as ConfigParse from "./parse"
|
||||
export * as ConfigPermission from "./permission"
|
||||
export * as ConfigPaths from "./paths"
|
||||
export * as ConfigProvider from "./provider"
|
||||
export * as ConfigSkills from "./skills"
|
||||
|
||||
@@ -1,164 +1,166 @@
|
||||
export * as ConfigKeybinds from "./keybinds"
|
||||
|
||||
import z from "zod"
|
||||
|
||||
export namespace ConfigKeybinds {
|
||||
export const Keybinds = z
|
||||
.object({
|
||||
leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"),
|
||||
app_exit: z.string().optional().default("ctrl+c,ctrl+d,<leader>q").describe("Exit the application"),
|
||||
editor_open: z.string().optional().default("<leader>e").describe("Open external editor"),
|
||||
theme_list: z.string().optional().default("<leader>t").describe("List available themes"),
|
||||
sidebar_toggle: z.string().optional().default("<leader>b").describe("Toggle sidebar"),
|
||||
scrollbar_toggle: z.string().optional().default("none").describe("Toggle session scrollbar"),
|
||||
username_toggle: z.string().optional().default("none").describe("Toggle username visibility"),
|
||||
status_view: z.string().optional().default("<leader>s").describe("View status"),
|
||||
session_export: z.string().optional().default("<leader>x").describe("Export session to editor"),
|
||||
session_new: z.string().optional().default("<leader>n").describe("Create a new session"),
|
||||
session_list: z.string().optional().default("<leader>l").describe("List all sessions"),
|
||||
session_timeline: z.string().optional().default("<leader>g").describe("Show session timeline"),
|
||||
session_fork: z.string().optional().default("none").describe("Fork session from message"),
|
||||
session_rename: z.string().optional().default("ctrl+r").describe("Rename session"),
|
||||
session_delete: z.string().optional().default("ctrl+d").describe("Delete session"),
|
||||
stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"),
|
||||
model_provider_list: z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"),
|
||||
model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"),
|
||||
session_share: z.string().optional().default("none").describe("Share current session"),
|
||||
session_unshare: z.string().optional().default("none").describe("Unshare current session"),
|
||||
session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"),
|
||||
session_compact: z.string().optional().default("<leader>c").describe("Compact the session"),
|
||||
messages_page_up: z.string().optional().default("pageup,ctrl+alt+b").describe("Scroll messages up by one page"),
|
||||
messages_page_down: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("pagedown,ctrl+alt+f")
|
||||
.describe("Scroll messages down by one page"),
|
||||
messages_line_up: z.string().optional().default("ctrl+alt+y").describe("Scroll messages up by one line"),
|
||||
messages_line_down: z.string().optional().default("ctrl+alt+e").describe("Scroll messages down by one line"),
|
||||
messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"),
|
||||
messages_half_page_down: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("ctrl+alt+d")
|
||||
.describe("Scroll messages down by half page"),
|
||||
messages_first: z.string().optional().default("ctrl+g,home").describe("Navigate to first message"),
|
||||
messages_last: z.string().optional().default("ctrl+alt+g,end").describe("Navigate to last message"),
|
||||
messages_next: z.string().optional().default("none").describe("Navigate to next message"),
|
||||
messages_previous: z.string().optional().default("none").describe("Navigate to previous message"),
|
||||
messages_last_user: z.string().optional().default("none").describe("Navigate to last user message"),
|
||||
messages_copy: z.string().optional().default("<leader>y").describe("Copy message"),
|
||||
messages_undo: z.string().optional().default("<leader>u").describe("Undo message"),
|
||||
messages_redo: z.string().optional().default("<leader>r").describe("Redo message"),
|
||||
messages_toggle_conceal: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("<leader>h")
|
||||
.describe("Toggle code block concealment in messages"),
|
||||
tool_details: z.string().optional().default("none").describe("Toggle tool details visibility"),
|
||||
model_list: z.string().optional().default("<leader>m").describe("List available models"),
|
||||
model_cycle_recent: z.string().optional().default("f2").describe("Next recently used model"),
|
||||
model_cycle_recent_reverse: z.string().optional().default("shift+f2").describe("Previous recently used model"),
|
||||
model_cycle_favorite: z.string().optional().default("none").describe("Next favorite model"),
|
||||
model_cycle_favorite_reverse: z.string().optional().default("none").describe("Previous favorite model"),
|
||||
command_list: z.string().optional().default("ctrl+p").describe("List available commands"),
|
||||
agent_list: z.string().optional().default("<leader>a").describe("List agents"),
|
||||
agent_cycle: z.string().optional().default("tab").describe("Next agent"),
|
||||
agent_cycle_reverse: z.string().optional().default("shift+tab").describe("Previous agent"),
|
||||
variant_cycle: z.string().optional().default("ctrl+t").describe("Cycle model variants"),
|
||||
variant_list: z.string().optional().default("none").describe("List model variants"),
|
||||
input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"),
|
||||
input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"),
|
||||
input_submit: z.string().optional().default("return").describe("Submit input"),
|
||||
input_newline: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("shift+return,ctrl+return,alt+return,ctrl+j")
|
||||
.describe("Insert newline in input"),
|
||||
input_move_left: z.string().optional().default("left,ctrl+b").describe("Move cursor left in input"),
|
||||
input_move_right: z.string().optional().default("right,ctrl+f").describe("Move cursor right in input"),
|
||||
input_move_up: z.string().optional().default("up").describe("Move cursor up in input"),
|
||||
input_move_down: z.string().optional().default("down").describe("Move cursor down in input"),
|
||||
input_select_left: z.string().optional().default("shift+left").describe("Select left in input"),
|
||||
input_select_right: z.string().optional().default("shift+right").describe("Select right in input"),
|
||||
input_select_up: z.string().optional().default("shift+up").describe("Select up in input"),
|
||||
input_select_down: z.string().optional().default("shift+down").describe("Select down in input"),
|
||||
input_line_home: z.string().optional().default("ctrl+a").describe("Move to start of line in input"),
|
||||
input_line_end: z.string().optional().default("ctrl+e").describe("Move to end of line in input"),
|
||||
input_select_line_home: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("ctrl+shift+a")
|
||||
.describe("Select to start of line in input"),
|
||||
input_select_line_end: z.string().optional().default("ctrl+shift+e").describe("Select to end of line in input"),
|
||||
input_visual_line_home: z.string().optional().default("alt+a").describe("Move to start of visual line in input"),
|
||||
input_visual_line_end: z.string().optional().default("alt+e").describe("Move to end of visual line in input"),
|
||||
input_select_visual_line_home: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+shift+a")
|
||||
.describe("Select to start of visual line in input"),
|
||||
input_select_visual_line_end: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+shift+e")
|
||||
.describe("Select to end of visual line in input"),
|
||||
input_buffer_home: z.string().optional().default("home").describe("Move to start of buffer in input"),
|
||||
input_buffer_end: z.string().optional().default("end").describe("Move to end of buffer in input"),
|
||||
input_select_buffer_home: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("shift+home")
|
||||
.describe("Select to start of buffer in input"),
|
||||
input_select_buffer_end: z.string().optional().default("shift+end").describe("Select to end of buffer in input"),
|
||||
input_delete_line: z.string().optional().default("ctrl+shift+d").describe("Delete line in input"),
|
||||
input_delete_to_line_end: z.string().optional().default("ctrl+k").describe("Delete to end of line in input"),
|
||||
input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"),
|
||||
input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"),
|
||||
input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"),
|
||||
input_undo: z.string().optional().default("ctrl+-,super+z").describe("Undo in input"),
|
||||
input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"),
|
||||
input_word_forward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+f,alt+right,ctrl+right")
|
||||
.describe("Move word forward in input"),
|
||||
input_word_backward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+b,alt+left,ctrl+left")
|
||||
.describe("Move word backward in input"),
|
||||
input_select_word_forward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+shift+f,alt+shift+right")
|
||||
.describe("Select word forward in input"),
|
||||
input_select_word_backward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+shift+b,alt+shift+left")
|
||||
.describe("Select word backward in input"),
|
||||
input_delete_word_forward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+d,alt+delete,ctrl+delete")
|
||||
.describe("Delete word forward in input"),
|
||||
input_delete_word_backward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("ctrl+w,ctrl+backspace,alt+backspace")
|
||||
.describe("Delete word backward in input"),
|
||||
history_previous: z.string().optional().default("up").describe("Previous history item"),
|
||||
history_next: z.string().optional().default("down").describe("Next history item"),
|
||||
session_child_first: z.string().optional().default("<leader>down").describe("Go to first child session"),
|
||||
session_child_cycle: z.string().optional().default("right").describe("Go to next child session"),
|
||||
session_child_cycle_reverse: z.string().optional().default("left").describe("Go to previous child session"),
|
||||
session_parent: z.string().optional().default("up").describe("Go to parent session"),
|
||||
terminal_suspend: z.string().optional().default("ctrl+z").describe("Suspend terminal"),
|
||||
terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"),
|
||||
tips_toggle: z.string().optional().default("<leader>h").describe("Toggle tips on home screen"),
|
||||
plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"),
|
||||
display_thinking: z.string().optional().default("none").describe("Toggle thinking blocks visibility"),
|
||||
})
|
||||
.strict()
|
||||
.meta({
|
||||
ref: "KeybindsConfig",
|
||||
})
|
||||
}
|
||||
export const Keybinds = z
|
||||
.object({
|
||||
leader: z.string().optional().default("ctrl+x").describe("Leader key for keybind combinations"),
|
||||
app_exit: z.string().optional().default("ctrl+c,ctrl+d,<leader>q").describe("Exit the application"),
|
||||
editor_open: z.string().optional().default("<leader>e").describe("Open external editor"),
|
||||
theme_list: z.string().optional().default("<leader>t").describe("List available themes"),
|
||||
sidebar_toggle: z.string().optional().default("<leader>b").describe("Toggle sidebar"),
|
||||
scrollbar_toggle: z.string().optional().default("none").describe("Toggle session scrollbar"),
|
||||
username_toggle: z.string().optional().default("none").describe("Toggle username visibility"),
|
||||
status_view: z.string().optional().default("<leader>s").describe("View status"),
|
||||
session_export: z.string().optional().default("<leader>x").describe("Export session to editor"),
|
||||
session_new: z.string().optional().default("<leader>n").describe("Create a new session"),
|
||||
session_list: z.string().optional().default("<leader>l").describe("List all sessions"),
|
||||
session_timeline: z.string().optional().default("<leader>g").describe("Show session timeline"),
|
||||
session_fork: z.string().optional().default("none").describe("Fork session from message"),
|
||||
session_rename: z.string().optional().default("ctrl+r").describe("Rename session"),
|
||||
session_delete: z.string().optional().default("ctrl+d").describe("Delete session"),
|
||||
stash_delete: z.string().optional().default("ctrl+d").describe("Delete stash entry"),
|
||||
model_provider_list: z.string().optional().default("ctrl+a").describe("Open provider list from model dialog"),
|
||||
model_favorite_toggle: z.string().optional().default("ctrl+f").describe("Toggle model favorite status"),
|
||||
session_share: z.string().optional().default("none").describe("Share current session"),
|
||||
session_unshare: z.string().optional().default("none").describe("Unshare current session"),
|
||||
session_interrupt: z.string().optional().default("escape").describe("Interrupt current session"),
|
||||
session_compact: z.string().optional().default("<leader>c").describe("Compact the session"),
|
||||
messages_page_up: z.string().optional().default("pageup,ctrl+alt+b").describe("Scroll messages up by one page"),
|
||||
messages_page_down: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("pagedown,ctrl+alt+f")
|
||||
.describe("Scroll messages down by one page"),
|
||||
messages_line_up: z.string().optional().default("ctrl+alt+y").describe("Scroll messages up by one line"),
|
||||
messages_line_down: z.string().optional().default("ctrl+alt+e").describe("Scroll messages down by one line"),
|
||||
messages_half_page_up: z.string().optional().default("ctrl+alt+u").describe("Scroll messages up by half page"),
|
||||
messages_half_page_down: z.string().optional().default("ctrl+alt+d").describe("Scroll messages down by half page"),
|
||||
messages_first: z.string().optional().default("ctrl+g,home").describe("Navigate to first message"),
|
||||
messages_last: z.string().optional().default("ctrl+alt+g,end").describe("Navigate to last message"),
|
||||
messages_next: z.string().optional().default("none").describe("Navigate to next message"),
|
||||
messages_previous: z.string().optional().default("none").describe("Navigate to previous message"),
|
||||
messages_last_user: z.string().optional().default("none").describe("Navigate to last user message"),
|
||||
messages_copy: z.string().optional().default("<leader>y").describe("Copy message"),
|
||||
messages_undo: z.string().optional().default("<leader>u").describe("Undo message"),
|
||||
messages_redo: z.string().optional().default("<leader>r").describe("Redo message"),
|
||||
messages_toggle_conceal: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("<leader>h")
|
||||
.describe("Toggle code block concealment in messages"),
|
||||
tool_details: z.string().optional().default("none").describe("Toggle tool details visibility"),
|
||||
model_list: z.string().optional().default("<leader>m").describe("List available models"),
|
||||
model_cycle_recent: z.string().optional().default("f2").describe("Next recently used model"),
|
||||
model_cycle_recent_reverse: z.string().optional().default("shift+f2").describe("Previous recently used model"),
|
||||
model_cycle_favorite: z.string().optional().default("none").describe("Next favorite model"),
|
||||
model_cycle_favorite_reverse: z.string().optional().default("none").describe("Previous favorite model"),
|
||||
command_list: z.string().optional().default("ctrl+p").describe("List available commands"),
|
||||
agent_list: z.string().optional().default("<leader>a").describe("List agents"),
|
||||
agent_cycle: z.string().optional().default("tab").describe("Next agent"),
|
||||
agent_cycle_reverse: z.string().optional().default("shift+tab").describe("Previous agent"),
|
||||
variant_cycle: z.string().optional().default("ctrl+t").describe("Cycle model variants"),
|
||||
variant_list: z.string().optional().default("none").describe("List model variants"),
|
||||
input_clear: z.string().optional().default("ctrl+c").describe("Clear input field"),
|
||||
input_paste: z.string().optional().default("ctrl+v").describe("Paste from clipboard"),
|
||||
input_submit: z.string().optional().default("return").describe("Submit input"),
|
||||
input_newline: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("shift+return,ctrl+return,alt+return,ctrl+j")
|
||||
.describe("Insert newline in input"),
|
||||
input_move_left: z.string().optional().default("left,ctrl+b").describe("Move cursor left in input"),
|
||||
input_move_right: z.string().optional().default("right,ctrl+f").describe("Move cursor right in input"),
|
||||
input_move_up: z.string().optional().default("up").describe("Move cursor up in input"),
|
||||
input_move_down: z.string().optional().default("down").describe("Move cursor down in input"),
|
||||
input_select_left: z.string().optional().default("shift+left").describe("Select left in input"),
|
||||
input_select_right: z.string().optional().default("shift+right").describe("Select right in input"),
|
||||
input_select_up: z.string().optional().default("shift+up").describe("Select up in input"),
|
||||
input_select_down: z.string().optional().default("shift+down").describe("Select down in input"),
|
||||
input_line_home: z.string().optional().default("ctrl+a").describe("Move to start of line in input"),
|
||||
input_line_end: z.string().optional().default("ctrl+e").describe("Move to end of line in input"),
|
||||
input_select_line_home: z.string().optional().default("ctrl+shift+a").describe("Select to start of line in input"),
|
||||
input_select_line_end: z.string().optional().default("ctrl+shift+e").describe("Select to end of line in input"),
|
||||
input_visual_line_home: z.string().optional().default("alt+a").describe("Move to start of visual line in input"),
|
||||
input_visual_line_end: z.string().optional().default("alt+e").describe("Move to end of visual line in input"),
|
||||
input_select_visual_line_home: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+shift+a")
|
||||
.describe("Select to start of visual line in input"),
|
||||
input_select_visual_line_end: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+shift+e")
|
||||
.describe("Select to end of visual line in input"),
|
||||
input_buffer_home: z.string().optional().default("home").describe("Move to start of buffer in input"),
|
||||
input_buffer_end: z.string().optional().default("end").describe("Move to end of buffer in input"),
|
||||
input_select_buffer_home: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("shift+home")
|
||||
.describe("Select to start of buffer in input"),
|
||||
input_select_buffer_end: z.string().optional().default("shift+end").describe("Select to end of buffer in input"),
|
||||
input_delete_line: z.string().optional().default("ctrl+shift+d").describe("Delete line in input"),
|
||||
input_delete_to_line_end: z.string().optional().default("ctrl+k").describe("Delete to end of line in input"),
|
||||
input_delete_to_line_start: z.string().optional().default("ctrl+u").describe("Delete to start of line in input"),
|
||||
input_backspace: z.string().optional().default("backspace,shift+backspace").describe("Backspace in input"),
|
||||
input_delete: z.string().optional().default("ctrl+d,delete,shift+delete").describe("Delete character in input"),
|
||||
input_undo: z
|
||||
.string()
|
||||
.optional()
|
||||
// On Windows prepend ctrl+z since terminal_suspend releases the binding.
|
||||
.default(process.platform === "win32" ? "ctrl+z,ctrl+-,super+z" : "ctrl+-,super+z")
|
||||
.describe("Undo in input"),
|
||||
input_redo: z.string().optional().default("ctrl+.,super+shift+z").describe("Redo in input"),
|
||||
input_word_forward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+f,alt+right,ctrl+right")
|
||||
.describe("Move word forward in input"),
|
||||
input_word_backward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+b,alt+left,ctrl+left")
|
||||
.describe("Move word backward in input"),
|
||||
input_select_word_forward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+shift+f,alt+shift+right")
|
||||
.describe("Select word forward in input"),
|
||||
input_select_word_backward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+shift+b,alt+shift+left")
|
||||
.describe("Select word backward in input"),
|
||||
input_delete_word_forward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("alt+d,alt+delete,ctrl+delete")
|
||||
.describe("Delete word forward in input"),
|
||||
input_delete_word_backward: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("ctrl+w,ctrl+backspace,alt+backspace")
|
||||
.describe("Delete word backward in input"),
|
||||
history_previous: z.string().optional().default("up").describe("Previous history item"),
|
||||
history_next: z.string().optional().default("down").describe("Next history item"),
|
||||
session_child_first: z.string().optional().default("<leader>down").describe("Go to first child session"),
|
||||
session_child_cycle: z.string().optional().default("right").describe("Go to next child session"),
|
||||
session_child_cycle_reverse: z.string().optional().default("left").describe("Go to previous child session"),
|
||||
session_parent: z.string().optional().default("up").describe("Go to parent session"),
|
||||
terminal_suspend: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("ctrl+z")
|
||||
.transform((v) => (process.platform === "win32" ? "none" : v))
|
||||
.describe("Suspend terminal"),
|
||||
terminal_title_toggle: z.string().optional().default("none").describe("Toggle terminal title"),
|
||||
tips_toggle: z.string().optional().default("<leader>h").describe("Toggle tips on home screen"),
|
||||
plugin_manager: z.string().optional().default("none").describe("Open plugin manager dialog"),
|
||||
display_thinking: z.string().optional().default("none").describe("Toggle thinking blocks visibility"),
|
||||
})
|
||||
.strict()
|
||||
.meta({
|
||||
ref: "KeybindsConfig",
|
||||
})
|
||||
|
||||
37
packages/opencode/src/config/lsp.ts
Normal file
37
packages/opencode/src/config/lsp.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
export * as ConfigLSP from "./lsp"
|
||||
|
||||
import z from "zod"
|
||||
import * as LSPServer from "../lsp/server"
|
||||
|
||||
export const Disabled = z.object({
|
||||
disabled: z.literal(true),
|
||||
})
|
||||
|
||||
export const Entry = z.union([
|
||||
Disabled,
|
||||
z.object({
|
||||
command: z.array(z.string()),
|
||||
extensions: z.array(z.string()).optional(),
|
||||
disabled: z.boolean().optional(),
|
||||
env: z.record(z.string(), z.string()).optional(),
|
||||
initialization: z.record(z.string(), z.any()).optional(),
|
||||
}),
|
||||
])
|
||||
|
||||
export const Info = z.union([z.boolean(), z.record(z.string(), Entry)]).refine(
|
||||
(data) => {
|
||||
if (typeof data === "boolean") return true
|
||||
const serverIds = new Set(Object.values(LSPServer).map((server) => server.id))
|
||||
|
||||
return Object.entries(data).every(([id, config]) => {
|
||||
if (config.disabled) return true
|
||||
if (serverIds.has(id)) return true
|
||||
return Boolean(config.extensions)
|
||||
})
|
||||
},
|
||||
{
|
||||
error: "For custom LSP servers, 'extensions' array is required.",
|
||||
},
|
||||
)
|
||||
|
||||
export type Info = z.infer<typeof Info>
|
||||
70
packages/opencode/src/config/managed.ts
Normal file
70
packages/opencode/src/config/managed.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
export * as ConfigManaged from "./managed"
|
||||
|
||||
import { existsSync } from "fs"
|
||||
import os from "os"
|
||||
import path from "path"
|
||||
import { Log, Process } from "../util"
|
||||
import { warn } from "console"
|
||||
|
||||
const log = Log.create({ service: "config" })
|
||||
|
||||
const MANAGED_PLIST_DOMAIN = "ai.opencode.managed"
|
||||
|
||||
// Keys injected by macOS/MDM into the managed plist that are not OpenCode config
|
||||
const PLIST_META = new Set([
|
||||
"PayloadDisplayName",
|
||||
"PayloadIdentifier",
|
||||
"PayloadType",
|
||||
"PayloadUUID",
|
||||
"PayloadVersion",
|
||||
"_manualProfile",
|
||||
])
|
||||
|
||||
function systemManagedConfigDir(): string {
|
||||
switch (process.platform) {
|
||||
case "darwin":
|
||||
return "/Library/Application Support/opencode"
|
||||
case "win32":
|
||||
return path.join(process.env.ProgramData || "C:\\ProgramData", "opencode")
|
||||
default:
|
||||
return "/etc/opencode"
|
||||
}
|
||||
}
|
||||
|
||||
export function managedConfigDir() {
|
||||
return process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || systemManagedConfigDir()
|
||||
}
|
||||
|
||||
export function parseManagedPlist(json: string): string {
|
||||
const raw = JSON.parse(json)
|
||||
for (const key of Object.keys(raw)) {
|
||||
if (PLIST_META.has(key)) delete raw[key]
|
||||
}
|
||||
return JSON.stringify(raw)
|
||||
}
|
||||
|
||||
export async function readManagedPreferences() {
|
||||
if (process.platform !== "darwin") return
|
||||
|
||||
const user = os.userInfo().username
|
||||
const paths = [
|
||||
path.join("/Library/Managed Preferences", user, `${MANAGED_PLIST_DOMAIN}.plist`),
|
||||
path.join("/Library/Managed Preferences", `${MANAGED_PLIST_DOMAIN}.plist`),
|
||||
]
|
||||
|
||||
for (const plist of paths) {
|
||||
if (!existsSync(plist)) continue
|
||||
log.info("reading macOS managed preferences", { path: plist })
|
||||
const result = await Process.run(["plutil", "-convert", "json", "-o", "-", plist], { nothrow: true })
|
||||
if (result.code !== 0) {
|
||||
log.warn("failed to convert managed preferences plist", { path: plist })
|
||||
continue
|
||||
}
|
||||
return {
|
||||
source: `mobileconfig:${plist}`,
|
||||
text: parseManagedPlist(result.stdout.toString()),
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
68
packages/opencode/src/config/mcp.ts
Normal file
68
packages/opencode/src/config/mcp.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
import z from "zod"
|
||||
|
||||
export const Local = z
|
||||
.object({
|
||||
type: z.literal("local").describe("Type of MCP server connection"),
|
||||
command: z.string().array().describe("Command and arguments to run the MCP server"),
|
||||
environment: z
|
||||
.record(z.string(), z.string())
|
||||
.optional()
|
||||
.describe("Environment variables to set when running the MCP server"),
|
||||
enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
|
||||
timeout: z
|
||||
.number()
|
||||
.int()
|
||||
.positive()
|
||||
.optional()
|
||||
.describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."),
|
||||
})
|
||||
.strict()
|
||||
.meta({
|
||||
ref: "McpLocalConfig",
|
||||
})
|
||||
|
||||
export const OAuth = z
|
||||
.object({
|
||||
clientId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("OAuth client ID. If not provided, dynamic client registration (RFC 7591) will be attempted."),
|
||||
clientSecret: z.string().optional().describe("OAuth client secret (if required by the authorization server)"),
|
||||
scope: z.string().optional().describe("OAuth scopes to request during authorization"),
|
||||
redirectUri: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("OAuth redirect URI (default: http://127.0.0.1:19876/mcp/oauth/callback)."),
|
||||
})
|
||||
.strict()
|
||||
.meta({
|
||||
ref: "McpOAuthConfig",
|
||||
})
|
||||
export type OAuth = z.infer<typeof OAuth>
|
||||
|
||||
export const Remote = z
|
||||
.object({
|
||||
type: z.literal("remote").describe("Type of MCP server connection"),
|
||||
url: z.string().describe("URL of the remote MCP server"),
|
||||
enabled: z.boolean().optional().describe("Enable or disable the MCP server on startup"),
|
||||
headers: z.record(z.string(), z.string()).optional().describe("Headers to send with the request"),
|
||||
oauth: z
|
||||
.union([OAuth, z.literal(false)])
|
||||
.optional()
|
||||
.describe("OAuth authentication configuration for the MCP server. Set to false to disable OAuth auto-detection."),
|
||||
timeout: z
|
||||
.number()
|
||||
.int()
|
||||
.positive()
|
||||
.optional()
|
||||
.describe("Timeout in ms for MCP server requests. Defaults to 5000 (5 seconds) if not specified."),
|
||||
})
|
||||
.strict()
|
||||
.meta({
|
||||
ref: "McpRemoteConfig",
|
||||
})
|
||||
|
||||
export const Info = z.discriminatedUnion("type", [Local, Remote])
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export * as ConfigMCP from "./mcp"
|
||||
3
packages/opencode/src/config/model-id.ts
Normal file
3
packages/opencode/src/config/model-id.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
import z from "zod"
|
||||
|
||||
export const ConfigModelID = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" })
|
||||
44
packages/opencode/src/config/parse.ts
Normal file
44
packages/opencode/src/config/parse.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
export * as ConfigParse from "./parse"
|
||||
|
||||
import { type ParseError as JsoncParseError, parse as parseJsoncImpl, printParseErrorCode } from "jsonc-parser"
|
||||
import z from "zod"
|
||||
import { InvalidError, JsonError } from "./error"
|
||||
|
||||
type Schema<T> = z.ZodType<T>
|
||||
|
||||
export function jsonc(text: string, filepath: string): unknown {
|
||||
const errors: JsoncParseError[] = []
|
||||
const data = parseJsoncImpl(text, errors, { allowTrailingComma: true })
|
||||
if (errors.length) {
|
||||
const lines = text.split("\n")
|
||||
const issues = errors
|
||||
.map((e) => {
|
||||
const beforeOffset = text.substring(0, e.offset).split("\n")
|
||||
const line = beforeOffset.length
|
||||
const column = beforeOffset[beforeOffset.length - 1].length + 1
|
||||
const problemLine = lines[line - 1]
|
||||
|
||||
const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}`
|
||||
if (!problemLine) return error
|
||||
|
||||
return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^`
|
||||
})
|
||||
.join("\n")
|
||||
throw new JsonError({
|
||||
path: filepath,
|
||||
message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${issues}\n--- End ---`,
|
||||
})
|
||||
}
|
||||
|
||||
return data
|
||||
}
|
||||
|
||||
export function schema<T>(schema: Schema<T>, data: unknown, source: string): T {
|
||||
const parsed = schema.safeParse(data)
|
||||
if (parsed.success) return parsed.data
|
||||
|
||||
throw new InvalidError({
|
||||
path: source,
|
||||
issues: parsed.error.issues,
|
||||
})
|
||||
}
|
||||
@@ -1,18 +1,18 @@
|
||||
export * as ConfigPaths from "./paths"
|
||||
|
||||
import path from "path"
|
||||
import os from "os"
|
||||
import z from "zod"
|
||||
import { type ParseError as JsoncParseError, parse as parseJsonc, printParseErrorCode } from "jsonc-parser"
|
||||
import { NamedError } from "@opencode-ai/shared/util/error"
|
||||
import { Filesystem } from "@/util"
|
||||
import { Flag } from "@/flag/flag"
|
||||
import { Global } from "@/global"
|
||||
import { unique } from "remeda"
|
||||
import { JsonError } from "./error"
|
||||
|
||||
export async function projectFiles(name: string, directory: string, worktree?: string) {
|
||||
return Filesystem.findUp([`${name}.json`, `${name}.jsonc`], directory, worktree, { rootFirst: true })
|
||||
}
|
||||
|
||||
export async function directories(directory: string, worktree?: string) {
|
||||
return [
|
||||
return unique([
|
||||
Global.Path.config,
|
||||
...(!Flag.OPENCODE_DISABLE_PROJECT_CONFIG
|
||||
? await Array.fromAsync(
|
||||
@@ -31,30 +31,13 @@ export async function directories(directory: string, worktree?: string) {
|
||||
}),
|
||||
)),
|
||||
...(Flag.OPENCODE_CONFIG_DIR ? [Flag.OPENCODE_CONFIG_DIR] : []),
|
||||
]
|
||||
])
|
||||
}
|
||||
|
||||
export function fileInDirectory(dir: string, name: string) {
|
||||
return [path.join(dir, `${name}.json`), path.join(dir, `${name}.jsonc`)]
|
||||
}
|
||||
|
||||
export const JsonError = NamedError.create(
|
||||
"ConfigJsonError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
|
||||
export const InvalidError = NamedError.create(
|
||||
"ConfigInvalidError",
|
||||
z.object({
|
||||
path: z.string(),
|
||||
issues: z.custom<z.core.$ZodIssue[]>().optional(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
|
||||
/** Read a config file, returning undefined for missing files and throwing JsonError for other failures. */
|
||||
export async function readFile(filepath: string) {
|
||||
return Filesystem.readText(filepath).catch((err: NodeJS.ErrnoException) => {
|
||||
@@ -62,104 +45,3 @@ export async function readFile(filepath: string) {
|
||||
throw new JsonError({ path: filepath }, { cause: err })
|
||||
})
|
||||
}
|
||||
|
||||
type ParseSource = string | { source: string; dir: string }
|
||||
|
||||
function source(input: ParseSource) {
|
||||
return typeof input === "string" ? input : input.source
|
||||
}
|
||||
|
||||
function dir(input: ParseSource) {
|
||||
return typeof input === "string" ? path.dirname(input) : input.dir
|
||||
}
|
||||
|
||||
/** Apply {env:VAR} and {file:path} substitutions to config text. */
|
||||
async function substitute(text: string, input: ParseSource, missing: "error" | "empty" = "error") {
|
||||
text = text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
|
||||
return process.env[varName] || ""
|
||||
})
|
||||
|
||||
const fileMatches = Array.from(text.matchAll(/\{file:[^}]+\}/g))
|
||||
if (!fileMatches.length) return text
|
||||
|
||||
const configDir = dir(input)
|
||||
const configSource = source(input)
|
||||
let out = ""
|
||||
let cursor = 0
|
||||
|
||||
for (const match of fileMatches) {
|
||||
const token = match[0]
|
||||
const index = match.index!
|
||||
out += text.slice(cursor, index)
|
||||
|
||||
const lineStart = text.lastIndexOf("\n", index - 1) + 1
|
||||
const prefix = text.slice(lineStart, index).trimStart()
|
||||
if (prefix.startsWith("//")) {
|
||||
out += token
|
||||
cursor = index + token.length
|
||||
continue
|
||||
}
|
||||
|
||||
let filePath = token.replace(/^\{file:/, "").replace(/\}$/, "")
|
||||
if (filePath.startsWith("~/")) {
|
||||
filePath = path.join(os.homedir(), filePath.slice(2))
|
||||
}
|
||||
|
||||
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
|
||||
const fileContent = (
|
||||
await Filesystem.readText(resolvedPath).catch((error: NodeJS.ErrnoException) => {
|
||||
if (missing === "empty") return ""
|
||||
|
||||
const errMsg = `bad file reference: "${token}"`
|
||||
if (error.code === "ENOENT") {
|
||||
throw new InvalidError(
|
||||
{
|
||||
path: configSource,
|
||||
message: errMsg + ` ${resolvedPath} does not exist`,
|
||||
},
|
||||
{ cause: error },
|
||||
)
|
||||
}
|
||||
throw new InvalidError({ path: configSource, message: errMsg }, { cause: error })
|
||||
})
|
||||
).trim()
|
||||
|
||||
out += JSON.stringify(fileContent).slice(1, -1)
|
||||
cursor = index + token.length
|
||||
}
|
||||
|
||||
out += text.slice(cursor)
|
||||
return out
|
||||
}
|
||||
|
||||
/** Substitute and parse JSONC text, throwing JsonError on syntax errors. */
|
||||
export async function parseText(text: string, input: ParseSource, missing: "error" | "empty" = "error") {
|
||||
const configSource = source(input)
|
||||
text = await substitute(text, input, missing)
|
||||
|
||||
const errors: JsoncParseError[] = []
|
||||
const data = parseJsonc(text, errors, { allowTrailingComma: true })
|
||||
if (errors.length) {
|
||||
const lines = text.split("\n")
|
||||
const errorDetails = errors
|
||||
.map((e) => {
|
||||
const beforeOffset = text.substring(0, e.offset).split("\n")
|
||||
const line = beforeOffset.length
|
||||
const column = beforeOffset[beforeOffset.length - 1].length + 1
|
||||
const problemLine = lines[line - 1]
|
||||
|
||||
const error = `${printParseErrorCode(e.error)} at line ${line}, column ${column}`
|
||||
if (!problemLine) return error
|
||||
|
||||
return `${error}\n Line ${line}: ${problemLine}\n${"".padStart(column + 9)}^`
|
||||
})
|
||||
.join("\n")
|
||||
|
||||
throw new JsonError({
|
||||
path: configSource,
|
||||
message: `\n--- JSONC Input ---\n${text}\n--- Errors ---\n${errorDetails}\n--- End ---`,
|
||||
})
|
||||
}
|
||||
|
||||
return data
|
||||
}
|
||||
|
||||
68
packages/opencode/src/config/permission.ts
Normal file
68
packages/opencode/src/config/permission.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
export * as ConfigPermission from "./permission"
|
||||
import z from "zod"
|
||||
|
||||
const permissionPreprocess = (val: unknown) => {
|
||||
if (typeof val === "object" && val !== null && !Array.isArray(val)) {
|
||||
return { __originalKeys: globalThis.Object.keys(val), ...val }
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
export const Action = z.enum(["ask", "allow", "deny"]).meta({
|
||||
ref: "PermissionActionConfig",
|
||||
})
|
||||
export type Action = z.infer<typeof Action>
|
||||
|
||||
export const Object = z.record(z.string(), Action).meta({
|
||||
ref: "PermissionObjectConfig",
|
||||
})
|
||||
export type Object = z.infer<typeof Object>
|
||||
|
||||
export const Rule = z.union([Action, Object]).meta({
|
||||
ref: "PermissionRuleConfig",
|
||||
})
|
||||
export type Rule = z.infer<typeof Rule>
|
||||
|
||||
const transform = (x: unknown): Record<string, Rule> => {
|
||||
if (typeof x === "string") return { "*": x as Action }
|
||||
const obj = x as { __originalKeys?: string[] } & Record<string, unknown>
|
||||
const { __originalKeys, ...rest } = obj
|
||||
if (!__originalKeys) return rest as Record<string, Rule>
|
||||
const result: Record<string, Rule> = {}
|
||||
for (const key of __originalKeys) {
|
||||
if (key in rest) result[key] = rest[key] as Rule
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
export const Info = z
|
||||
.preprocess(
|
||||
permissionPreprocess,
|
||||
z
|
||||
.object({
|
||||
__originalKeys: z.string().array().optional(),
|
||||
read: Rule.optional(),
|
||||
edit: Rule.optional(),
|
||||
glob: Rule.optional(),
|
||||
grep: Rule.optional(),
|
||||
list: Rule.optional(),
|
||||
bash: Rule.optional(),
|
||||
task: Rule.optional(),
|
||||
external_directory: Rule.optional(),
|
||||
todowrite: Action.optional(),
|
||||
question: Action.optional(),
|
||||
webfetch: Action.optional(),
|
||||
websearch: Action.optional(),
|
||||
codesearch: Action.optional(),
|
||||
lsp: Rule.optional(),
|
||||
doom_loop: Action.optional(),
|
||||
skill: Rule.optional(),
|
||||
})
|
||||
.catchall(Rule)
|
||||
.or(Action),
|
||||
)
|
||||
.transform(transform)
|
||||
.meta({
|
||||
ref: "PermissionConfig",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
@@ -4,72 +4,81 @@ import { pathToFileURL } from "url"
|
||||
import { isPathPluginSpec, parsePluginSpecifier, resolvePathPluginTarget } from "@/plugin/shared"
|
||||
import path from "path"
|
||||
|
||||
export namespace ConfigPlugin {
|
||||
const Options = z.record(z.string(), z.unknown())
|
||||
export type Options = z.infer<typeof Options>
|
||||
const Options = z.record(z.string(), z.unknown())
|
||||
export type Options = z.infer<typeof Options>
|
||||
|
||||
export const Spec = z.union([z.string(), z.tuple([z.string(), Options])])
|
||||
export type Spec = z.infer<typeof Spec>
|
||||
// Spec is the user-config value: either just a plugin identifier, or the identifier plus inline options.
|
||||
// It answers "what should we load?" but says nothing about where that value came from.
|
||||
export const Spec = z.union([z.string(), z.tuple([z.string(), Options])])
|
||||
export type Spec = z.infer<typeof Spec>
|
||||
|
||||
export type Scope = "global" | "local"
|
||||
export type Scope = "global" | "local"
|
||||
|
||||
export type Origin = {
|
||||
spec: Spec
|
||||
source: string
|
||||
scope: Scope
|
||||
}
|
||||
|
||||
export async function load(dir: string) {
|
||||
const plugins: ConfigPlugin.Spec[] = []
|
||||
|
||||
for (const item of await Glob.scan("{plugin,plugins}/*.{ts,js}", {
|
||||
cwd: dir,
|
||||
absolute: true,
|
||||
dot: true,
|
||||
symlink: true,
|
||||
})) {
|
||||
plugins.push(pathToFileURL(item).href)
|
||||
}
|
||||
return plugins
|
||||
}
|
||||
|
||||
export function pluginSpecifier(plugin: ConfigPlugin.Spec): string {
|
||||
return Array.isArray(plugin) ? plugin[0] : plugin
|
||||
}
|
||||
|
||||
export function pluginOptions(plugin: Spec): Options | undefined {
|
||||
return Array.isArray(plugin) ? plugin[1] : undefined
|
||||
}
|
||||
|
||||
export async function resolvePluginSpec(plugin: Spec, configFilepath: string): Promise<Spec> {
|
||||
const spec = pluginSpecifier(plugin)
|
||||
if (!isPathPluginSpec(spec)) return plugin
|
||||
|
||||
const base = path.dirname(configFilepath)
|
||||
const file = (() => {
|
||||
if (spec.startsWith("file://")) return spec
|
||||
if (path.isAbsolute(spec) || /^[A-Za-z]:[\\/]/.test(spec)) return pathToFileURL(spec).href
|
||||
return pathToFileURL(path.resolve(base, spec)).href
|
||||
})()
|
||||
|
||||
const resolved = await resolvePathPluginTarget(file).catch(() => file)
|
||||
|
||||
if (Array.isArray(plugin)) return [resolved, plugin[1]]
|
||||
return resolved
|
||||
}
|
||||
|
||||
export function deduplicatePluginOrigins(plugins: Origin[]): Origin[] {
|
||||
const seen = new Set<string>()
|
||||
const list: Origin[] = []
|
||||
|
||||
for (const plugin of plugins.toReversed()) {
|
||||
const spec = pluginSpecifier(plugin.spec)
|
||||
const name = spec.startsWith("file://") ? spec : parsePluginSpecifier(spec).pkg
|
||||
if (seen.has(name)) continue
|
||||
seen.add(name)
|
||||
list.push(plugin)
|
||||
}
|
||||
|
||||
return list.toReversed()
|
||||
}
|
||||
// Origin keeps the original config provenance attached to a spec.
|
||||
// After multiple config files are merged, callers still need to know which file declared the plugin
|
||||
// and whether it should behave like a global or project-local plugin.
|
||||
export type Origin = {
|
||||
spec: Spec
|
||||
source: string
|
||||
scope: Scope
|
||||
}
|
||||
|
||||
export async function load(dir: string) {
|
||||
const plugins: Spec[] = []
|
||||
|
||||
for (const item of await Glob.scan("{plugin,plugins}/*.{ts,js}", {
|
||||
cwd: dir,
|
||||
absolute: true,
|
||||
dot: true,
|
||||
symlink: true,
|
||||
})) {
|
||||
plugins.push(pathToFileURL(item).href)
|
||||
}
|
||||
return plugins
|
||||
}
|
||||
|
||||
export function pluginSpecifier(plugin: Spec): string {
|
||||
return Array.isArray(plugin) ? plugin[0] : plugin
|
||||
}
|
||||
|
||||
export function pluginOptions(plugin: Spec): Options | undefined {
|
||||
return Array.isArray(plugin) ? plugin[1] : undefined
|
||||
}
|
||||
|
||||
// Path-like specs are resolved relative to the config file that declared them so merges later on do not
|
||||
// accidentally reinterpret `./plugin.ts` relative to some other directory.
|
||||
export async function resolvePluginSpec(plugin: Spec, configFilepath: string): Promise<Spec> {
|
||||
const spec = pluginSpecifier(plugin)
|
||||
if (!isPathPluginSpec(spec)) return plugin
|
||||
|
||||
const base = path.dirname(configFilepath)
|
||||
const file = (() => {
|
||||
if (spec.startsWith("file://")) return spec
|
||||
if (path.isAbsolute(spec) || /^[A-Za-z]:[\\/]/.test(spec)) return pathToFileURL(spec).href
|
||||
return pathToFileURL(path.resolve(base, spec)).href
|
||||
})()
|
||||
|
||||
const resolved = await resolvePathPluginTarget(file).catch(() => file)
|
||||
|
||||
if (Array.isArray(plugin)) return [resolved, plugin[1]]
|
||||
return resolved
|
||||
}
|
||||
|
||||
// Dedupe on the load identity (package name for npm specs, exact file URL for local specs), but keep the
|
||||
// full Origin so downstream code still knows which config file won and where follow-up writes should go.
|
||||
export function deduplicatePluginOrigins(plugins: Origin[]): Origin[] {
|
||||
const seen = new Set<string>()
|
||||
const list: Origin[] = []
|
||||
|
||||
for (const plugin of plugins.toReversed()) {
|
||||
const spec = pluginSpecifier(plugin.spec)
|
||||
const name = spec.startsWith("file://") ? spec : parsePluginSpecifier(spec).pkg
|
||||
if (seen.has(name)) continue
|
||||
seen.add(name)
|
||||
list.push(plugin)
|
||||
}
|
||||
|
||||
return list.toReversed()
|
||||
}
|
||||
|
||||
export * as ConfigPlugin from "./plugin"
|
||||
|
||||
120
packages/opencode/src/config/provider.ts
Normal file
120
packages/opencode/src/config/provider.ts
Normal file
@@ -0,0 +1,120 @@
|
||||
import z from "zod"
|
||||
|
||||
export const Model = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
family: z.string().optional(),
|
||||
release_date: z.string(),
|
||||
attachment: z.boolean(),
|
||||
reasoning: z.boolean(),
|
||||
temperature: z.boolean(),
|
||||
tool_call: z.boolean(),
|
||||
interleaved: z
|
||||
.union([
|
||||
z.literal(true),
|
||||
z
|
||||
.object({
|
||||
field: z.enum(["reasoning_content", "reasoning_details"]),
|
||||
})
|
||||
.strict(),
|
||||
])
|
||||
.optional(),
|
||||
cost: z
|
||||
.object({
|
||||
input: z.number(),
|
||||
output: z.number(),
|
||||
cache_read: z.number().optional(),
|
||||
cache_write: z.number().optional(),
|
||||
context_over_200k: z
|
||||
.object({
|
||||
input: z.number(),
|
||||
output: z.number(),
|
||||
cache_read: z.number().optional(),
|
||||
cache_write: z.number().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
.optional(),
|
||||
limit: z.object({
|
||||
context: z.number(),
|
||||
input: z.number().optional(),
|
||||
output: z.number(),
|
||||
}),
|
||||
modalities: z
|
||||
.object({
|
||||
input: z.array(z.enum(["text", "audio", "image", "video", "pdf"])),
|
||||
output: z.array(z.enum(["text", "audio", "image", "video", "pdf"])),
|
||||
})
|
||||
.optional(),
|
||||
experimental: z.boolean().optional(),
|
||||
status: z.enum(["alpha", "beta", "deprecated"]).optional(),
|
||||
provider: z.object({ npm: z.string().optional(), api: z.string().optional() }).optional(),
|
||||
options: z.record(z.string(), z.any()),
|
||||
headers: z.record(z.string(), z.string()).optional(),
|
||||
variants: z
|
||||
.record(
|
||||
z.string(),
|
||||
z
|
||||
.object({
|
||||
disabled: z.boolean().optional().describe("Disable this variant for the model"),
|
||||
})
|
||||
.catchall(z.any()),
|
||||
)
|
||||
.optional()
|
||||
.describe("Variant-specific configuration"),
|
||||
})
|
||||
.partial()
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
api: z.string().optional(),
|
||||
name: z.string(),
|
||||
env: z.array(z.string()),
|
||||
id: z.string(),
|
||||
npm: z.string().optional(),
|
||||
whitelist: z.array(z.string()).optional(),
|
||||
blacklist: z.array(z.string()).optional(),
|
||||
options: z
|
||||
.object({
|
||||
apiKey: z.string().optional(),
|
||||
baseURL: z.string().optional(),
|
||||
enterpriseUrl: z.string().optional().describe("GitHub Enterprise URL for copilot authentication"),
|
||||
setCacheKey: z.boolean().optional().describe("Enable promptCacheKey for this provider (default false)"),
|
||||
timeout: z
|
||||
.union([
|
||||
z
|
||||
.number()
|
||||
.int()
|
||||
.positive()
|
||||
.describe(
|
||||
"Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.",
|
||||
),
|
||||
z.literal(false).describe("Disable timeout for this provider entirely."),
|
||||
])
|
||||
.optional()
|
||||
.describe(
|
||||
"Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.",
|
||||
),
|
||||
chunkTimeout: z
|
||||
.number()
|
||||
.int()
|
||||
.positive()
|
||||
.optional()
|
||||
.describe(
|
||||
"Timeout in milliseconds between streamed SSE chunks for this provider. If no chunk arrives within this window, the request is aborted.",
|
||||
),
|
||||
})
|
||||
.catchall(z.any())
|
||||
.optional(),
|
||||
models: z.record(z.string(), Model).optional(),
|
||||
})
|
||||
.partial()
|
||||
.strict()
|
||||
.meta({
|
||||
ref: "ProviderConfig",
|
||||
})
|
||||
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export * as ConfigProvider from "./provider"
|
||||
13
packages/opencode/src/config/skills.ts
Normal file
13
packages/opencode/src/config/skills.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import z from "zod"
|
||||
|
||||
export const Info = z.object({
|
||||
paths: z.array(z.string()).optional().describe("Additional paths to skill folders"),
|
||||
urls: z
|
||||
.array(z.string())
|
||||
.optional()
|
||||
.describe("URLs to fetch skills from (e.g., https://example.com/.well-known/skills/)"),
|
||||
})
|
||||
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export * as ConfigSkills from "./skills"
|
||||
90
packages/opencode/src/config/variable.ts
Normal file
90
packages/opencode/src/config/variable.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
export * as ConfigVariable from "./variable"
|
||||
|
||||
import path from "path"
|
||||
import os from "os"
|
||||
import { Filesystem } from "@/util"
|
||||
import { InvalidError } from "./error"
|
||||
|
||||
type ParseSource =
|
||||
| {
|
||||
type: "path"
|
||||
path: string
|
||||
}
|
||||
| {
|
||||
type: "virtual"
|
||||
source: string
|
||||
dir: string
|
||||
}
|
||||
|
||||
type SubstituteInput = ParseSource & {
|
||||
text: string
|
||||
missing?: "error" | "empty"
|
||||
}
|
||||
|
||||
function source(input: ParseSource) {
|
||||
return input.type === "path" ? input.path : input.source
|
||||
}
|
||||
|
||||
function dir(input: ParseSource) {
|
||||
return input.type === "path" ? path.dirname(input.path) : input.dir
|
||||
}
|
||||
|
||||
/** Apply {env:VAR} and {file:path} substitutions to config text. */
|
||||
export async function substitute(input: SubstituteInput) {
|
||||
const missing = input.missing ?? "error"
|
||||
let text = input.text.replace(/\{env:([^}]+)\}/g, (_, varName) => {
|
||||
return process.env[varName] || ""
|
||||
})
|
||||
|
||||
const fileMatches = Array.from(text.matchAll(/\{file:[^}]+\}/g))
|
||||
if (!fileMatches.length) return text
|
||||
|
||||
const configDir = dir(input)
|
||||
const configSource = source(input)
|
||||
let out = ""
|
||||
let cursor = 0
|
||||
|
||||
for (const match of fileMatches) {
|
||||
const token = match[0]
|
||||
const index = match.index!
|
||||
out += text.slice(cursor, index)
|
||||
|
||||
const lineStart = text.lastIndexOf("\n", index - 1) + 1
|
||||
const prefix = text.slice(lineStart, index).trimStart()
|
||||
if (prefix.startsWith("//")) {
|
||||
out += token
|
||||
cursor = index + token.length
|
||||
continue
|
||||
}
|
||||
|
||||
let filePath = token.replace(/^\{file:/, "").replace(/\}$/, "")
|
||||
if (filePath.startsWith("~/")) {
|
||||
filePath = path.join(os.homedir(), filePath.slice(2))
|
||||
}
|
||||
|
||||
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(configDir, filePath)
|
||||
const fileContent = (
|
||||
await Filesystem.readText(resolvedPath).catch((error: NodeJS.ErrnoException) => {
|
||||
if (missing === "empty") return ""
|
||||
|
||||
const errMsg = `bad file reference: "${token}"`
|
||||
if (error.code === "ENOENT") {
|
||||
throw new InvalidError(
|
||||
{
|
||||
path: configSource,
|
||||
message: errMsg + ` ${resolvedPath} does not exist`,
|
||||
},
|
||||
{ cause: error },
|
||||
)
|
||||
}
|
||||
throw new InvalidError({ path: configSource, message: errMsg }, { cause: error })
|
||||
})
|
||||
).trim()
|
||||
|
||||
out += JSON.stringify(fileContent).slice(1, -1)
|
||||
cursor = index + token.length
|
||||
}
|
||||
|
||||
out += text.slice(cursor)
|
||||
return out
|
||||
}
|
||||
@@ -28,7 +28,7 @@ export type WorkspaceAdaptor = {
|
||||
name: string
|
||||
description: string
|
||||
configure(info: WorkspaceInfo): WorkspaceInfo | Promise<WorkspaceInfo>
|
||||
create(info: WorkspaceInfo, from?: WorkspaceInfo): Promise<void>
|
||||
create(info: WorkspaceInfo, env: Record<string, string>, from?: WorkspaceInfo): Promise<void>
|
||||
remove(info: WorkspaceInfo): Promise<void>
|
||||
target(info: WorkspaceInfo): Target | Promise<Target>
|
||||
}
|
||||
|
||||
@@ -2,17 +2,17 @@ import { LocalContext } from "../util"
|
||||
import type { WorkspaceID } from "../control-plane/schema"
|
||||
|
||||
export interface WorkspaceContext {
|
||||
workspaceID: string
|
||||
workspaceID: WorkspaceID
|
||||
}
|
||||
|
||||
const context = LocalContext.create<WorkspaceContext>("instance")
|
||||
|
||||
export const WorkspaceContext = {
|
||||
async provide<R>(input: { workspaceID: WorkspaceID; fn: () => R }): Promise<R> {
|
||||
return context.provide({ workspaceID: input.workspaceID as string }, () => input.fn())
|
||||
return context.provide({ workspaceID: input.workspaceID }, () => input.fn())
|
||||
},
|
||||
|
||||
restore<R>(workspaceID: string, fn: () => R): R {
|
||||
restore<R>(workspaceID: WorkspaceID, fn: () => R): R {
|
||||
return context.provide({ workspaceID }, fn)
|
||||
},
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import { Database, asc, eq, inArray } from "@/storage"
|
||||
import { Project } from "@/project"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { GlobalBus } from "@/bus/global"
|
||||
import { Auth } from "@/auth"
|
||||
import { SyncEvent } from "@/sync"
|
||||
import { EventTable } from "@/sync/event.sql"
|
||||
import { Flag } from "@/flag/flag"
|
||||
@@ -25,168 +26,239 @@ import { AppRuntime } from "@/effect/app-runtime"
|
||||
import { EventSequenceTable } from "@/sync/event.sql"
|
||||
import { waitEvent } from "./util"
|
||||
|
||||
export namespace Workspace {
|
||||
export const Info = WorkspaceInfo.meta({
|
||||
ref: "Workspace",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
export const Info = WorkspaceInfo.meta({
|
||||
ref: "Workspace",
|
||||
})
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export const ConnectionStatus = z.object({
|
||||
workspaceID: WorkspaceID.zod,
|
||||
status: z.enum(["connected", "connecting", "disconnected", "error"]),
|
||||
error: z.string().optional(),
|
||||
})
|
||||
export type ConnectionStatus = z.infer<typeof ConnectionStatus>
|
||||
export const ConnectionStatus = z.object({
|
||||
workspaceID: WorkspaceID.zod,
|
||||
status: z.enum(["connected", "connecting", "disconnected", "error"]),
|
||||
error: z.string().optional(),
|
||||
})
|
||||
export type ConnectionStatus = z.infer<typeof ConnectionStatus>
|
||||
|
||||
const Restore = z.object({
|
||||
workspaceID: WorkspaceID.zod,
|
||||
sessionID: SessionID.zod,
|
||||
total: z.number().int().min(0),
|
||||
step: z.number().int().min(0),
|
||||
})
|
||||
const Restore = z.object({
|
||||
workspaceID: WorkspaceID.zod,
|
||||
sessionID: SessionID.zod,
|
||||
total: z.number().int().min(0),
|
||||
step: z.number().int().min(0),
|
||||
})
|
||||
|
||||
export const Event = {
|
||||
Ready: BusEvent.define(
|
||||
"workspace.ready",
|
||||
z.object({
|
||||
name: z.string(),
|
||||
}),
|
||||
),
|
||||
Failed: BusEvent.define(
|
||||
"workspace.failed",
|
||||
z.object({
|
||||
message: z.string(),
|
||||
}),
|
||||
),
|
||||
Restore: BusEvent.define("workspace.restore", Restore),
|
||||
Status: BusEvent.define("workspace.status", ConnectionStatus),
|
||||
export const Event = {
|
||||
Ready: BusEvent.define(
|
||||
"workspace.ready",
|
||||
z.object({
|
||||
name: z.string(),
|
||||
}),
|
||||
),
|
||||
Failed: BusEvent.define(
|
||||
"workspace.failed",
|
||||
z.object({
|
||||
message: z.string(),
|
||||
}),
|
||||
),
|
||||
Restore: BusEvent.define("workspace.restore", Restore),
|
||||
Status: BusEvent.define("workspace.status", ConnectionStatus),
|
||||
}
|
||||
|
||||
function fromRow(row: typeof WorkspaceTable.$inferSelect): Info {
|
||||
return {
|
||||
id: row.id,
|
||||
type: row.type,
|
||||
branch: row.branch,
|
||||
name: row.name,
|
||||
directory: row.directory,
|
||||
extra: row.extra,
|
||||
projectID: row.project_id,
|
||||
}
|
||||
}
|
||||
|
||||
const CreateInput = z.object({
|
||||
id: WorkspaceID.zod.optional(),
|
||||
type: Info.shape.type,
|
||||
branch: Info.shape.branch,
|
||||
projectID: ProjectID.zod,
|
||||
extra: Info.shape.extra,
|
||||
})
|
||||
|
||||
export const create = fn(CreateInput, async (input) => {
|
||||
const id = WorkspaceID.ascending(input.id)
|
||||
const adaptor = await getAdaptor(input.projectID, input.type)
|
||||
|
||||
const config = await adaptor.configure({ ...input, id, name: Slug.create(), directory: null })
|
||||
|
||||
const info: Info = {
|
||||
id,
|
||||
type: config.type,
|
||||
branch: config.branch ?? null,
|
||||
name: config.name ?? null,
|
||||
directory: config.directory ?? null,
|
||||
extra: config.extra ?? null,
|
||||
projectID: input.projectID,
|
||||
}
|
||||
|
||||
function fromRow(row: typeof WorkspaceTable.$inferSelect): Info {
|
||||
return {
|
||||
id: row.id,
|
||||
type: row.type,
|
||||
branch: row.branch,
|
||||
name: row.name,
|
||||
directory: row.directory,
|
||||
extra: row.extra,
|
||||
projectID: row.project_id,
|
||||
}
|
||||
}
|
||||
|
||||
const CreateInput = z.object({
|
||||
id: WorkspaceID.zod.optional(),
|
||||
type: Info.shape.type,
|
||||
branch: Info.shape.branch,
|
||||
projectID: ProjectID.zod,
|
||||
extra: Info.shape.extra,
|
||||
Database.use((db) => {
|
||||
db.insert(WorkspaceTable)
|
||||
.values({
|
||||
id: info.id,
|
||||
type: info.type,
|
||||
branch: info.branch,
|
||||
name: info.name,
|
||||
directory: info.directory,
|
||||
extra: info.extra,
|
||||
project_id: info.projectID,
|
||||
})
|
||||
.run()
|
||||
})
|
||||
|
||||
export const create = fn(CreateInput, async (input) => {
|
||||
const id = WorkspaceID.ascending(input.id)
|
||||
const adaptor = await getAdaptor(input.projectID, input.type)
|
||||
const env = {
|
||||
OPENCODE_AUTH_CONTENT: JSON.stringify(await AppRuntime.runPromise(Auth.Service.use((auth) => auth.all()))),
|
||||
OPENCODE_WORKSPACE_ID: config.id,
|
||||
OPENCODE_EXPERIMENTAL_WORKSPACES: "true",
|
||||
}
|
||||
await adaptor.create(config, env)
|
||||
|
||||
const config = await adaptor.configure({ ...input, id, name: Slug.create(), directory: null })
|
||||
startSync(info)
|
||||
|
||||
const info: Info = {
|
||||
id,
|
||||
type: config.type,
|
||||
branch: config.branch ?? null,
|
||||
name: config.name ?? null,
|
||||
directory: config.directory ?? null,
|
||||
extra: config.extra ?? null,
|
||||
projectID: input.projectID,
|
||||
}
|
||||
await waitEvent({
|
||||
timeout: TIMEOUT,
|
||||
fn(event) {
|
||||
if (event.workspace === info.id && event.payload.type === Event.Status.type) {
|
||||
const { status } = event.payload.properties
|
||||
return status === "error" || status === "connected"
|
||||
}
|
||||
return false
|
||||
},
|
||||
})
|
||||
|
||||
Database.use((db) => {
|
||||
db.insert(WorkspaceTable)
|
||||
.values({
|
||||
id: info.id,
|
||||
type: info.type,
|
||||
branch: info.branch,
|
||||
name: info.name,
|
||||
directory: info.directory,
|
||||
extra: info.extra,
|
||||
project_id: info.projectID,
|
||||
})
|
||||
.run()
|
||||
})
|
||||
return info
|
||||
})
|
||||
|
||||
await adaptor.create(config)
|
||||
const SessionRestoreInput = z.object({
|
||||
workspaceID: WorkspaceID.zod,
|
||||
sessionID: SessionID.zod,
|
||||
})
|
||||
|
||||
startSync(info)
|
||||
export const sessionRestore = fn(SessionRestoreInput, async (input) => {
|
||||
log.info("session restore requested", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
})
|
||||
try {
|
||||
const space = await get(input.workspaceID)
|
||||
if (!space) throw new Error(`Workspace not found: ${input.workspaceID}`)
|
||||
|
||||
await waitEvent({
|
||||
timeout: TIMEOUT,
|
||||
fn(event) {
|
||||
if (event.workspace === info.id && event.payload.type === Event.Status.type) {
|
||||
const { status } = event.payload.properties
|
||||
return status === "error" || status === "connected"
|
||||
}
|
||||
return false
|
||||
const adaptor = await getAdaptor(space.projectID, space.type)
|
||||
const target = await adaptor.target(space)
|
||||
|
||||
// Need to switch the workspace of the session
|
||||
SyncEvent.run(Session.Event.Updated, {
|
||||
sessionID: input.sessionID,
|
||||
info: {
|
||||
workspaceID: input.workspaceID,
|
||||
},
|
||||
})
|
||||
|
||||
return info
|
||||
})
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select({
|
||||
id: EventTable.id,
|
||||
aggregateID: EventTable.aggregate_id,
|
||||
seq: EventTable.seq,
|
||||
type: EventTable.type,
|
||||
data: EventTable.data,
|
||||
})
|
||||
.from(EventTable)
|
||||
.where(eq(EventTable.aggregate_id, input.sessionID))
|
||||
.orderBy(asc(EventTable.seq))
|
||||
.all(),
|
||||
)
|
||||
if (rows.length === 0) throw new Error(`No events found for session: ${input.sessionID}`)
|
||||
|
||||
const SessionRestoreInput = z.object({
|
||||
workspaceID: WorkspaceID.zod,
|
||||
sessionID: SessionID.zod,
|
||||
})
|
||||
const all = rows
|
||||
|
||||
export const sessionRestore = fn(SessionRestoreInput, async (input) => {
|
||||
log.info("session restore requested", {
|
||||
const size = 10
|
||||
const sets = Array.from({ length: Math.ceil(all.length / size) }, (_, i) => all.slice(i * size, (i + 1) * size))
|
||||
const total = sets.length
|
||||
log.info("session restore prepared", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
workspaceType: space.type,
|
||||
directory: space.directory,
|
||||
target: target.type === "remote" ? String(route(target.url, "/sync/replay")) : target.directory,
|
||||
events: all.length,
|
||||
batches: total,
|
||||
first: all[0]?.seq,
|
||||
last: all.at(-1)?.seq,
|
||||
})
|
||||
try {
|
||||
const space = await get(input.workspaceID)
|
||||
if (!space) throw new Error(`Workspace not found: ${input.workspaceID}`)
|
||||
|
||||
const adaptor = await getAdaptor(space.projectID, space.type)
|
||||
const target = await adaptor.target(space)
|
||||
|
||||
// Need to switch the workspace of the session
|
||||
SyncEvent.run(Session.Event.Updated, {
|
||||
sessionID: input.sessionID,
|
||||
info: {
|
||||
GlobalBus.emit("event", {
|
||||
directory: "global",
|
||||
workspace: input.workspaceID,
|
||||
payload: {
|
||||
type: Event.Restore.type,
|
||||
properties: {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
total,
|
||||
step: 0,
|
||||
},
|
||||
})
|
||||
|
||||
const rows = Database.use((db) =>
|
||||
db
|
||||
.select({
|
||||
id: EventTable.id,
|
||||
aggregateID: EventTable.aggregate_id,
|
||||
seq: EventTable.seq,
|
||||
type: EventTable.type,
|
||||
data: EventTable.data,
|
||||
})
|
||||
.from(EventTable)
|
||||
.where(eq(EventTable.aggregate_id, input.sessionID))
|
||||
.orderBy(asc(EventTable.seq))
|
||||
.all(),
|
||||
)
|
||||
if (rows.length === 0) throw new Error(`No events found for session: ${input.sessionID}`)
|
||||
|
||||
const all = rows
|
||||
|
||||
const size = 10
|
||||
const sets = Array.from({ length: Math.ceil(all.length / size) }, (_, i) => all.slice(i * size, (i + 1) * size))
|
||||
const total = sets.length
|
||||
log.info("session restore prepared", {
|
||||
},
|
||||
})
|
||||
for (const [i, events] of sets.entries()) {
|
||||
log.info("session restore batch starting", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
workspaceType: space.type,
|
||||
directory: space.directory,
|
||||
step: i + 1,
|
||||
total,
|
||||
events: events.length,
|
||||
first: events[0]?.seq,
|
||||
last: events.at(-1)?.seq,
|
||||
target: target.type === "remote" ? String(route(target.url, "/sync/replay")) : target.directory,
|
||||
events: all.length,
|
||||
batches: total,
|
||||
first: all[0]?.seq,
|
||||
last: all.at(-1)?.seq,
|
||||
})
|
||||
if (target.type === "local") {
|
||||
SyncEvent.replayAll(events)
|
||||
log.info("session restore batch replayed locally", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
events: events.length,
|
||||
})
|
||||
} else {
|
||||
const url = route(target.url, "/sync/replay")
|
||||
const headers = new Headers(target.headers)
|
||||
headers.set("content-type", "application/json")
|
||||
const res = await fetch(url, {
|
||||
method: "POST",
|
||||
headers,
|
||||
body: JSON.stringify({
|
||||
directory: space.directory ?? "",
|
||||
events,
|
||||
}),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const body = await res.text()
|
||||
log.error("session restore batch failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
status: res.status,
|
||||
body,
|
||||
})
|
||||
throw new Error(
|
||||
`Failed to replay session ${input.sessionID} into workspace ${input.workspaceID}: HTTP ${res.status} ${body}`,
|
||||
)
|
||||
}
|
||||
log.info("session restore batch posted", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
status: res.status,
|
||||
})
|
||||
}
|
||||
GlobalBus.emit("event", {
|
||||
directory: "global",
|
||||
workspace: input.workspaceID,
|
||||
@@ -196,330 +268,263 @@ export namespace Workspace {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
total,
|
||||
step: 0,
|
||||
step: i + 1,
|
||||
},
|
||||
},
|
||||
})
|
||||
for (const [i, events] of sets.entries()) {
|
||||
log.info("session restore batch starting", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
events: events.length,
|
||||
first: events[0]?.seq,
|
||||
last: events.at(-1)?.seq,
|
||||
target: target.type === "remote" ? String(route(target.url, "/sync/replay")) : target.directory,
|
||||
}
|
||||
|
||||
log.info("session restore complete", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
batches: total,
|
||||
})
|
||||
|
||||
return {
|
||||
total,
|
||||
}
|
||||
} catch (err) {
|
||||
log.error("session restore failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
error: errorData(err),
|
||||
})
|
||||
throw err
|
||||
}
|
||||
})
|
||||
|
||||
export function list(project: Project.Info) {
|
||||
const rows = Database.use((db) =>
|
||||
db.select().from(WorkspaceTable).where(eq(WorkspaceTable.project_id, project.id)).all(),
|
||||
)
|
||||
const spaces = rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id))
|
||||
|
||||
for (const space of spaces) startSync(space)
|
||||
return spaces
|
||||
}
|
||||
|
||||
function lookup(id: WorkspaceID) {
|
||||
const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get())
|
||||
if (!row) return
|
||||
return fromRow(row)
|
||||
}
|
||||
|
||||
export const get = fn(WorkspaceID.zod, async (id) => {
|
||||
const space = lookup(id)
|
||||
if (!space) return
|
||||
startSync(space)
|
||||
return space
|
||||
})
|
||||
|
||||
export const remove = fn(WorkspaceID.zod, async (id) => {
|
||||
const sessions = Database.use((db) =>
|
||||
db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.workspace_id, id)).all(),
|
||||
)
|
||||
for (const session of sessions) {
|
||||
await AppRuntime.runPromise(Session.Service.use((svc) => svc.remove(session.id)))
|
||||
}
|
||||
|
||||
const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get())
|
||||
|
||||
if (row) {
|
||||
stopSync(id)
|
||||
|
||||
const info = fromRow(row)
|
||||
try {
|
||||
const adaptor = await getAdaptor(info.projectID, row.type)
|
||||
await adaptor.remove(info)
|
||||
} catch {
|
||||
log.error("adaptor not available when removing workspace", { type: row.type })
|
||||
}
|
||||
Database.use((db) => db.delete(WorkspaceTable).where(eq(WorkspaceTable.id, id)).run())
|
||||
return info
|
||||
}
|
||||
})
|
||||
|
||||
const connections = new Map<WorkspaceID, ConnectionStatus>()
|
||||
const aborts = new Map<WorkspaceID, AbortController>()
|
||||
const TIMEOUT = 5000
|
||||
|
||||
function setStatus(id: WorkspaceID, status: ConnectionStatus["status"], error?: string) {
|
||||
const prev = connections.get(id)
|
||||
if (prev?.status === status && prev?.error === error) return
|
||||
const next = { workspaceID: id, status, error }
|
||||
connections.set(id, next)
|
||||
|
||||
if (status === "error") {
|
||||
aborts.delete(id)
|
||||
}
|
||||
|
||||
GlobalBus.emit("event", {
|
||||
directory: "global",
|
||||
workspace: id,
|
||||
payload: {
|
||||
type: Event.Status.type,
|
||||
properties: next,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function status(): ConnectionStatus[] {
|
||||
return [...connections.values()]
|
||||
}
|
||||
|
||||
function synced(state: Record<string, number>) {
|
||||
const ids = Object.keys(state)
|
||||
if (ids.length === 0) return true
|
||||
|
||||
const done = Object.fromEntries(
|
||||
Database.use((db) =>
|
||||
db
|
||||
.select({
|
||||
id: EventSequenceTable.aggregate_id,
|
||||
seq: EventSequenceTable.seq,
|
||||
})
|
||||
if (target.type === "local") {
|
||||
SyncEvent.replayAll(events)
|
||||
log.info("session restore batch replayed locally", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
events: events.length,
|
||||
})
|
||||
} else {
|
||||
const url = route(target.url, "/sync/replay")
|
||||
const headers = new Headers(target.headers)
|
||||
headers.set("content-type", "application/json")
|
||||
const res = await fetch(url, {
|
||||
method: "POST",
|
||||
headers,
|
||||
body: JSON.stringify({
|
||||
directory: space.directory ?? "",
|
||||
events,
|
||||
}),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const body = await res.text()
|
||||
log.error("session restore batch failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
status: res.status,
|
||||
body,
|
||||
})
|
||||
throw new Error(
|
||||
`Failed to replay session ${input.sessionID} into workspace ${input.workspaceID}: HTTP ${res.status} ${body}`,
|
||||
)
|
||||
}
|
||||
log.info("session restore batch posted", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
status: res.status,
|
||||
})
|
||||
.from(EventSequenceTable)
|
||||
.where(inArray(EventSequenceTable.aggregate_id, ids))
|
||||
.all(),
|
||||
).map((row) => [row.id, row.seq]),
|
||||
) as Record<string, number>
|
||||
|
||||
return ids.every((id) => {
|
||||
return (done[id] ?? -1) >= state[id]
|
||||
})
|
||||
}
|
||||
|
||||
export async function isSyncing(workspaceID: WorkspaceID) {
|
||||
return aborts.has(workspaceID)
|
||||
}
|
||||
|
||||
export async function waitForSync(workspaceID: WorkspaceID, state: Record<string, number>, signal?: AbortSignal) {
|
||||
if (synced(state)) return
|
||||
|
||||
try {
|
||||
await waitEvent({
|
||||
timeout: TIMEOUT,
|
||||
signal,
|
||||
fn(event) {
|
||||
if (event.workspace !== workspaceID && event.payload.type !== "sync") {
|
||||
return false
|
||||
}
|
||||
GlobalBus.emit("event", {
|
||||
directory: "global",
|
||||
workspace: input.workspaceID,
|
||||
payload: {
|
||||
type: Event.Restore.type,
|
||||
properties: {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
total,
|
||||
step: i + 1,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
log.info("session restore complete", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
batches: total,
|
||||
})
|
||||
|
||||
return {
|
||||
total,
|
||||
}
|
||||
} catch (err) {
|
||||
log.error("session restore failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
error: errorData(err),
|
||||
})
|
||||
throw err
|
||||
}
|
||||
})
|
||||
|
||||
export function list(project: Project.Info) {
|
||||
const rows = Database.use((db) =>
|
||||
db.select().from(WorkspaceTable).where(eq(WorkspaceTable.project_id, project.id)).all(),
|
||||
)
|
||||
const spaces = rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id))
|
||||
|
||||
for (const space of spaces) startSync(space)
|
||||
return spaces
|
||||
}
|
||||
|
||||
function lookup(id: WorkspaceID) {
|
||||
const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get())
|
||||
if (!row) return
|
||||
return fromRow(row)
|
||||
}
|
||||
|
||||
export const get = fn(WorkspaceID.zod, async (id) => {
|
||||
const space = lookup(id)
|
||||
if (!space) return
|
||||
startSync(space)
|
||||
return space
|
||||
})
|
||||
|
||||
export const remove = fn(WorkspaceID.zod, async (id) => {
|
||||
const sessions = Database.use((db) =>
|
||||
db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.workspace_id, id)).all(),
|
||||
)
|
||||
for (const session of sessions) {
|
||||
await AppRuntime.runPromise(Session.Service.use((svc) => svc.remove(session.id)))
|
||||
}
|
||||
|
||||
const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get())
|
||||
|
||||
if (row) {
|
||||
stopSync(id)
|
||||
|
||||
const info = fromRow(row)
|
||||
try {
|
||||
const adaptor = await getAdaptor(info.projectID, row.type)
|
||||
await adaptor.remove(info)
|
||||
} catch {
|
||||
log.error("adaptor not available when removing workspace", { type: row.type })
|
||||
}
|
||||
Database.use((db) => db.delete(WorkspaceTable).where(eq(WorkspaceTable.id, id)).run())
|
||||
return info
|
||||
}
|
||||
})
|
||||
|
||||
const connections = new Map<WorkspaceID, ConnectionStatus>()
|
||||
const aborts = new Map<WorkspaceID, AbortController>()
|
||||
const TIMEOUT = 5000
|
||||
|
||||
function setStatus(id: WorkspaceID, status: ConnectionStatus["status"], error?: string) {
|
||||
const prev = connections.get(id)
|
||||
if (prev?.status === status && prev?.error === error) return
|
||||
const next = { workspaceID: id, status, error }
|
||||
connections.set(id, next)
|
||||
|
||||
if (status === "error") {
|
||||
aborts.delete(id)
|
||||
}
|
||||
|
||||
GlobalBus.emit("event", {
|
||||
directory: "global",
|
||||
workspace: id,
|
||||
payload: {
|
||||
type: Event.Status.type,
|
||||
properties: next,
|
||||
return synced(state)
|
||||
},
|
||||
})
|
||||
} catch {
|
||||
if (signal?.aborted) throw signal.reason ?? new Error("Request aborted")
|
||||
throw new Error(`Timed out waiting for sync fence: ${JSON.stringify(state)}`)
|
||||
}
|
||||
}
|
||||
|
||||
export function status(): ConnectionStatus[] {
|
||||
return [...connections.values()]
|
||||
}
|
||||
const log = Log.create({ service: "workspace-sync" })
|
||||
|
||||
function synced(state: Record<string, number>) {
|
||||
const ids = Object.keys(state)
|
||||
if (ids.length === 0) return true
|
||||
function route(url: string | URL, path: string) {
|
||||
const next = new URL(url)
|
||||
next.pathname = `${next.pathname.replace(/\/$/, "")}${path}`
|
||||
next.search = ""
|
||||
next.hash = ""
|
||||
return next
|
||||
}
|
||||
|
||||
const done = Object.fromEntries(
|
||||
Database.use((db) =>
|
||||
db
|
||||
.select({
|
||||
id: EventSequenceTable.aggregate_id,
|
||||
seq: EventSequenceTable.seq,
|
||||
})
|
||||
.from(EventSequenceTable)
|
||||
.where(inArray(EventSequenceTable.aggregate_id, ids))
|
||||
.all(),
|
||||
).map((row) => [row.id, row.seq]),
|
||||
) as Record<string, number>
|
||||
|
||||
return ids.every((id) => {
|
||||
return (done[id] ?? -1) >= state[id]
|
||||
})
|
||||
}
|
||||
|
||||
export async function isSyncing(workspaceID: WorkspaceID) {
|
||||
return aborts.has(workspaceID)
|
||||
}
|
||||
|
||||
export async function waitForSync(workspaceID: WorkspaceID, state: Record<string, number>, signal?: AbortSignal) {
|
||||
if (synced(state)) return
|
||||
|
||||
try {
|
||||
await waitEvent({
|
||||
timeout: TIMEOUT,
|
||||
signal,
|
||||
fn(event) {
|
||||
if (event.workspace !== workspaceID && event.payload.type !== "sync") {
|
||||
return false
|
||||
}
|
||||
return synced(state)
|
||||
},
|
||||
})
|
||||
} catch {
|
||||
if (signal?.aborted) throw signal.reason ?? new Error("Request aborted")
|
||||
throw new Error(`Timed out waiting for sync fence: ${JSON.stringify(state)}`)
|
||||
}
|
||||
}
|
||||
|
||||
const log = Log.create({ service: "workspace-sync" })
|
||||
|
||||
function route(url: string | URL, path: string) {
|
||||
const next = new URL(url)
|
||||
next.pathname = `${next.pathname.replace(/\/$/, "")}${path}`
|
||||
next.search = ""
|
||||
next.hash = ""
|
||||
return next
|
||||
}
|
||||
|
||||
async function syncWorkspace(space: Info, signal: AbortSignal) {
|
||||
while (!signal.aborted) {
|
||||
log.info("connecting to global sync", { workspace: space.name })
|
||||
setStatus(space.id, "connecting")
|
||||
|
||||
const adaptor = await getAdaptor(space.projectID, space.type)
|
||||
const target = await adaptor.target(space)
|
||||
|
||||
if (target.type === "local") return
|
||||
|
||||
const res = await fetch(route(target.url, "/global/event"), {
|
||||
method: "GET",
|
||||
headers: target.headers,
|
||||
signal,
|
||||
}).catch((err: unknown) => {
|
||||
setStatus(space.id, "error", err instanceof Error ? err.message : String(err))
|
||||
|
||||
log.info("failed to connect to global sync", {
|
||||
workspace: space.name,
|
||||
error: err,
|
||||
})
|
||||
return undefined
|
||||
})
|
||||
|
||||
if (!res || !res.ok || !res.body) {
|
||||
const error = !res ? "No response from global sync" : `Global sync HTTP ${res.status}`
|
||||
log.info("failed to connect to global sync", { workspace: space.name, error })
|
||||
setStatus(space.id, "error", error)
|
||||
await sleep(1000)
|
||||
continue
|
||||
}
|
||||
|
||||
log.info("global sync connected", { workspace: space.name })
|
||||
setStatus(space.id, "connected")
|
||||
|
||||
await parseSSE(res.body, signal, (evt: any) => {
|
||||
try {
|
||||
if (!("payload" in evt)) return
|
||||
|
||||
if (evt.payload.type === "sync") {
|
||||
// This name -> type is temporary
|
||||
SyncEvent.replay({ ...evt.payload, type: evt.payload.name } as SyncEvent.SerializedEvent)
|
||||
}
|
||||
|
||||
GlobalBus.emit("event", {
|
||||
directory: evt.directory,
|
||||
project: evt.project,
|
||||
workspace: space.id,
|
||||
payload: evt.payload,
|
||||
})
|
||||
} catch (err) {
|
||||
log.info("failed to replay global event", {
|
||||
workspaceID: space.id,
|
||||
error: err,
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
log.info("disconnected from global sync: " + space.id)
|
||||
setStatus(space.id, "disconnected")
|
||||
|
||||
// TODO: Implement exponential backoff
|
||||
await sleep(1000)
|
||||
}
|
||||
}
|
||||
|
||||
async function startSync(space: Info) {
|
||||
if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) return
|
||||
async function syncWorkspace(space: Info, signal: AbortSignal) {
|
||||
while (!signal.aborted) {
|
||||
log.info("connecting to global sync", { workspace: space.name })
|
||||
setStatus(space.id, "connecting")
|
||||
|
||||
const adaptor = await getAdaptor(space.projectID, space.type)
|
||||
const target = await adaptor.target(space)
|
||||
|
||||
if (target.type === "local") {
|
||||
void Filesystem.exists(target.directory).then((exists) => {
|
||||
setStatus(space.id, exists ? "connected" : "error", exists ? undefined : "directory does not exist")
|
||||
if (target.type === "local") return
|
||||
|
||||
const res = await fetch(route(target.url, "/global/event"), {
|
||||
method: "GET",
|
||||
headers: target.headers,
|
||||
signal,
|
||||
}).catch((err: unknown) => {
|
||||
setStatus(space.id, "error", err instanceof Error ? err.message : String(err))
|
||||
|
||||
log.info("failed to connect to global sync", {
|
||||
workspace: space.name,
|
||||
error: err,
|
||||
})
|
||||
return
|
||||
return undefined
|
||||
})
|
||||
|
||||
if (!res || !res.ok || !res.body) {
|
||||
const error = !res ? "No response from global sync" : `Global sync HTTP ${res.status}`
|
||||
log.info("failed to connect to global sync", { workspace: space.name, error })
|
||||
setStatus(space.id, "error", error)
|
||||
await sleep(1000)
|
||||
continue
|
||||
}
|
||||
|
||||
if (aborts.has(space.id)) return true
|
||||
log.info("global sync connected", { workspace: space.name })
|
||||
setStatus(space.id, "connected")
|
||||
|
||||
await parseSSE(res.body, signal, (evt: any) => {
|
||||
try {
|
||||
if (!("payload" in evt)) return
|
||||
|
||||
if (evt.payload.type === "sync") {
|
||||
SyncEvent.replay(evt.payload.syncEvent as SyncEvent.SerializedEvent)
|
||||
}
|
||||
|
||||
GlobalBus.emit("event", {
|
||||
directory: evt.directory,
|
||||
project: evt.project,
|
||||
workspace: space.id,
|
||||
payload: evt.payload,
|
||||
})
|
||||
} catch (err) {
|
||||
log.info("failed to replay global event", {
|
||||
workspaceID: space.id,
|
||||
error: err,
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
log.info("disconnected from global sync: " + space.id)
|
||||
setStatus(space.id, "disconnected")
|
||||
|
||||
const abort = new AbortController()
|
||||
aborts.set(space.id, abort)
|
||||
|
||||
void syncWorkspace(space, abort.signal).catch((error) => {
|
||||
aborts.delete(space.id)
|
||||
|
||||
setStatus(space.id, "error", String(error))
|
||||
log.warn("workspace listener failed", {
|
||||
workspaceID: space.id,
|
||||
error,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function stopSync(id: WorkspaceID) {
|
||||
aborts.get(id)?.abort()
|
||||
aborts.delete(id)
|
||||
connections.delete(id)
|
||||
// TODO: Implement exponential backoff
|
||||
await sleep(1000)
|
||||
}
|
||||
}
|
||||
|
||||
async function startSync(space: Info) {
|
||||
if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) return
|
||||
|
||||
const adaptor = await getAdaptor(space.projectID, space.type)
|
||||
const target = await adaptor.target(space)
|
||||
|
||||
if (target.type === "local") {
|
||||
void Filesystem.exists(target.directory).then((exists) => {
|
||||
setStatus(space.id, exists ? "connected" : "error", exists ? undefined : "directory does not exist")
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (aborts.has(space.id)) return true
|
||||
|
||||
setStatus(space.id, "disconnected")
|
||||
|
||||
const abort = new AbortController()
|
||||
aborts.set(space.id, abort)
|
||||
|
||||
void syncWorkspace(space, abort.signal).catch((error) => {
|
||||
aborts.delete(space.id)
|
||||
|
||||
setStatus(space.id, "error", String(error))
|
||||
log.warn("workspace listener failed", {
|
||||
workspaceID: space.id,
|
||||
error,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function stopSync(id: WorkspaceID) {
|
||||
aborts.get(id)?.abort()
|
||||
aborts.delete(id)
|
||||
connections.delete(id)
|
||||
}
|
||||
|
||||
export * as Workspace from "./workspace"
|
||||
|
||||
@@ -5,7 +5,7 @@ import * as Observability from "./observability"
|
||||
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
|
||||
import { Bus } from "@/bus"
|
||||
import { Auth } from "@/auth"
|
||||
import { Account } from "@/account"
|
||||
import { Account } from "@/account/account"
|
||||
import { Config } from "@/config"
|
||||
import { Git } from "@/git"
|
||||
import { Ripgrep } from "@/file/ripgrep"
|
||||
|
||||
@@ -10,9 +10,11 @@ import { File } from "@/file"
|
||||
import { Vcs } from "@/project"
|
||||
import { Snapshot } from "@/snapshot"
|
||||
import { Bus } from "@/bus"
|
||||
import { Config } from "@/config"
|
||||
import * as Observability from "./observability"
|
||||
|
||||
export const BootstrapLayer = Layer.mergeAll(
|
||||
Config.defaultLayer,
|
||||
Plugin.defaultLayer,
|
||||
ShareNext.defaultLayer,
|
||||
Format.defaultLayer,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Effect, Fiber } from "effect"
|
||||
import { WorkspaceContext } from "@/control-plane/workspace-context"
|
||||
import { Instance, type InstanceContext } from "@/project/instance"
|
||||
import type { WorkspaceID } from "@/control-plane/schema"
|
||||
import { LocalContext } from "@/util"
|
||||
import { InstanceRef, WorkspaceRef } from "./instance-ref"
|
||||
import { attachWith } from "./run-service"
|
||||
@@ -10,7 +11,7 @@ export interface Shape {
|
||||
readonly fork: <A, E, R>(effect: Effect.Effect<A, E, R>) => Fiber.Fiber<A, E>
|
||||
}
|
||||
|
||||
function restore<R>(instance: InstanceContext | undefined, workspace: string | undefined, fn: () => R): R {
|
||||
function restore<R>(instance: InstanceContext | undefined, workspace: WorkspaceID | undefined, fn: () => R): R {
|
||||
if (instance && workspace !== undefined) {
|
||||
return WorkspaceContext.restore(workspace, () => Instance.restore(instance, fn))
|
||||
}
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { Context } from "effect"
|
||||
import type { InstanceContext } from "@/project/instance"
|
||||
import type { WorkspaceID } from "@/control-plane/schema"
|
||||
|
||||
export const InstanceRef = Context.Reference<InstanceContext | undefined>("~opencode/InstanceRef", {
|
||||
defaultValue: () => undefined,
|
||||
})
|
||||
|
||||
export const WorkspaceRef = Context.Reference<string | undefined>("~opencode/WorkspaceRef", {
|
||||
export const WorkspaceRef = Context.Reference<WorkspaceID | undefined>("~opencode/WorkspaceRef", {
|
||||
defaultValue: () => undefined,
|
||||
})
|
||||
|
||||
35
packages/opencode/src/env/env.ts
vendored
35
packages/opencode/src/env/env.ts
vendored
@@ -1,35 +0,0 @@
|
||||
import { Context, Effect, Layer } from "effect"
|
||||
import { InstanceState } from "@/effect"
|
||||
|
||||
type State = Record<string, string | undefined>
|
||||
|
||||
export interface Interface {
|
||||
readonly get: (key: string) => Effect.Effect<string | undefined>
|
||||
readonly all: () => Effect.Effect<State>
|
||||
readonly set: (key: string, value: string) => Effect.Effect<void>
|
||||
readonly remove: (key: string) => Effect.Effect<void>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Env") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const state = yield* InstanceState.make<State>(Effect.fn("Env.state")(() => Effect.succeed({ ...process.env })))
|
||||
|
||||
const get = Effect.fn("Env.get")((key: string) => InstanceState.use(state, (env) => env[key]))
|
||||
const all = Effect.fn("Env.all")(() => InstanceState.get(state))
|
||||
const set = Effect.fn("Env.set")(function* (key: string, value: string) {
|
||||
const env = yield* InstanceState.get(state)
|
||||
env[key] = value
|
||||
})
|
||||
const remove = Effect.fn("Env.remove")(function* (key: string) {
|
||||
const env = yield* InstanceState.get(state)
|
||||
delete env[key]
|
||||
})
|
||||
|
||||
return Service.of({ get, all, set, remove })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer
|
||||
38
packages/opencode/src/env/index.ts
vendored
38
packages/opencode/src/env/index.ts
vendored
@@ -1 +1,37 @@
|
||||
export * as Env from "./env"
|
||||
import { Context, Effect, Layer } from "effect"
|
||||
import { InstanceState } from "@/effect"
|
||||
|
||||
type State = Record<string, string | undefined>
|
||||
|
||||
export interface Interface {
|
||||
readonly get: (key: string) => Effect.Effect<string | undefined>
|
||||
readonly all: () => Effect.Effect<State>
|
||||
readonly set: (key: string, value: string) => Effect.Effect<void>
|
||||
readonly remove: (key: string) => Effect.Effect<void>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Env") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const state = yield* InstanceState.make<State>(Effect.fn("Env.state")(() => Effect.succeed({ ...process.env })))
|
||||
|
||||
const get = Effect.fn("Env.get")((key: string) => InstanceState.use(state, (env) => env[key]))
|
||||
const all = Effect.fn("Env.all")(() => InstanceState.get(state))
|
||||
const set = Effect.fn("Env.set")(function* (key: string, value: string) {
|
||||
const env = yield* InstanceState.get(state)
|
||||
env[key] = value
|
||||
})
|
||||
const remove = Effect.fn("Env.remove")(function* (key: string) {
|
||||
const env = yield* InstanceState.get(state)
|
||||
delete env[key]
|
||||
})
|
||||
|
||||
return Service.of({ get, all, set, remove })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer
|
||||
|
||||
export * as Env from "."
|
||||
|
||||
@@ -1,654 +0,0 @@
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { InstanceState } from "@/effect"
|
||||
|
||||
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
|
||||
import { Git } from "@/git"
|
||||
import { Effect, Layer, Context, Scope } from "effect"
|
||||
import * as Stream from "effect/Stream"
|
||||
import { formatPatch, structuredPatch } from "diff"
|
||||
import fuzzysort from "fuzzysort"
|
||||
import ignore from "ignore"
|
||||
import path from "path"
|
||||
import z from "zod"
|
||||
import { Global } from "../global"
|
||||
import { Instance } from "../project/instance"
|
||||
import { Log } from "../util"
|
||||
import { Protected } from "./protected"
|
||||
import { Ripgrep } from "./ripgrep"
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
path: z.string(),
|
||||
added: z.number().int(),
|
||||
removed: z.number().int(),
|
||||
status: z.enum(["added", "deleted", "modified"]),
|
||||
})
|
||||
.meta({
|
||||
ref: "File",
|
||||
})
|
||||
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export const Node = z
|
||||
.object({
|
||||
name: z.string(),
|
||||
path: z.string(),
|
||||
absolute: z.string(),
|
||||
type: z.enum(["file", "directory"]),
|
||||
ignored: z.boolean(),
|
||||
})
|
||||
.meta({
|
||||
ref: "FileNode",
|
||||
})
|
||||
export type Node = z.infer<typeof Node>
|
||||
|
||||
export const Content = z
|
||||
.object({
|
||||
type: z.enum(["text", "binary"]),
|
||||
content: z.string(),
|
||||
diff: z.string().optional(),
|
||||
patch: z
|
||||
.object({
|
||||
oldFileName: z.string(),
|
||||
newFileName: z.string(),
|
||||
oldHeader: z.string().optional(),
|
||||
newHeader: z.string().optional(),
|
||||
hunks: z.array(
|
||||
z.object({
|
||||
oldStart: z.number(),
|
||||
oldLines: z.number(),
|
||||
newStart: z.number(),
|
||||
newLines: z.number(),
|
||||
lines: z.array(z.string()),
|
||||
}),
|
||||
),
|
||||
index: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
encoding: z.literal("base64").optional(),
|
||||
mimeType: z.string().optional(),
|
||||
})
|
||||
.meta({
|
||||
ref: "FileContent",
|
||||
})
|
||||
export type Content = z.infer<typeof Content>
|
||||
|
||||
export const Event = {
|
||||
Edited: BusEvent.define(
|
||||
"file.edited",
|
||||
z.object({
|
||||
file: z.string(),
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
const log = Log.create({ service: "file" })
|
||||
|
||||
const binary = new Set([
|
||||
"exe",
|
||||
"dll",
|
||||
"pdb",
|
||||
"bin",
|
||||
"so",
|
||||
"dylib",
|
||||
"o",
|
||||
"a",
|
||||
"lib",
|
||||
"wav",
|
||||
"mp3",
|
||||
"ogg",
|
||||
"oga",
|
||||
"ogv",
|
||||
"ogx",
|
||||
"flac",
|
||||
"aac",
|
||||
"wma",
|
||||
"m4a",
|
||||
"weba",
|
||||
"mp4",
|
||||
"avi",
|
||||
"mov",
|
||||
"wmv",
|
||||
"flv",
|
||||
"webm",
|
||||
"mkv",
|
||||
"zip",
|
||||
"tar",
|
||||
"gz",
|
||||
"gzip",
|
||||
"bz",
|
||||
"bz2",
|
||||
"bzip",
|
||||
"bzip2",
|
||||
"7z",
|
||||
"rar",
|
||||
"xz",
|
||||
"lz",
|
||||
"z",
|
||||
"pdf",
|
||||
"doc",
|
||||
"docx",
|
||||
"ppt",
|
||||
"pptx",
|
||||
"xls",
|
||||
"xlsx",
|
||||
"dmg",
|
||||
"iso",
|
||||
"img",
|
||||
"vmdk",
|
||||
"ttf",
|
||||
"otf",
|
||||
"woff",
|
||||
"woff2",
|
||||
"eot",
|
||||
"sqlite",
|
||||
"db",
|
||||
"mdb",
|
||||
"apk",
|
||||
"ipa",
|
||||
"aab",
|
||||
"xapk",
|
||||
"app",
|
||||
"pkg",
|
||||
"deb",
|
||||
"rpm",
|
||||
"snap",
|
||||
"flatpak",
|
||||
"appimage",
|
||||
"msi",
|
||||
"msp",
|
||||
"jar",
|
||||
"war",
|
||||
"ear",
|
||||
"class",
|
||||
"kotlin_module",
|
||||
"dex",
|
||||
"vdex",
|
||||
"odex",
|
||||
"oat",
|
||||
"art",
|
||||
"wasm",
|
||||
"wat",
|
||||
"bc",
|
||||
"ll",
|
||||
"s",
|
||||
"ko",
|
||||
"sys",
|
||||
"drv",
|
||||
"efi",
|
||||
"rom",
|
||||
"com",
|
||||
])
|
||||
|
||||
const image = new Set([
|
||||
"png",
|
||||
"jpg",
|
||||
"jpeg",
|
||||
"gif",
|
||||
"bmp",
|
||||
"webp",
|
||||
"ico",
|
||||
"tif",
|
||||
"tiff",
|
||||
"svg",
|
||||
"svgz",
|
||||
"avif",
|
||||
"apng",
|
||||
"jxl",
|
||||
"heic",
|
||||
"heif",
|
||||
"raw",
|
||||
"cr2",
|
||||
"nef",
|
||||
"arw",
|
||||
"dng",
|
||||
"orf",
|
||||
"raf",
|
||||
"pef",
|
||||
"x3f",
|
||||
])
|
||||
|
||||
const text = new Set([
|
||||
"ts",
|
||||
"tsx",
|
||||
"mts",
|
||||
"cts",
|
||||
"mtsx",
|
||||
"ctsx",
|
||||
"js",
|
||||
"jsx",
|
||||
"mjs",
|
||||
"cjs",
|
||||
"sh",
|
||||
"bash",
|
||||
"zsh",
|
||||
"fish",
|
||||
"ps1",
|
||||
"psm1",
|
||||
"cmd",
|
||||
"bat",
|
||||
"json",
|
||||
"jsonc",
|
||||
"json5",
|
||||
"yaml",
|
||||
"yml",
|
||||
"toml",
|
||||
"md",
|
||||
"mdx",
|
||||
"txt",
|
||||
"xml",
|
||||
"html",
|
||||
"htm",
|
||||
"css",
|
||||
"scss",
|
||||
"sass",
|
||||
"less",
|
||||
"graphql",
|
||||
"gql",
|
||||
"sql",
|
||||
"ini",
|
||||
"cfg",
|
||||
"conf",
|
||||
"env",
|
||||
])
|
||||
|
||||
const textName = new Set([
|
||||
"dockerfile",
|
||||
"makefile",
|
||||
".gitignore",
|
||||
".gitattributes",
|
||||
".editorconfig",
|
||||
".npmrc",
|
||||
".nvmrc",
|
||||
".prettierrc",
|
||||
".eslintrc",
|
||||
])
|
||||
|
||||
const mime: Record<string, string> = {
|
||||
png: "image/png",
|
||||
jpg: "image/jpeg",
|
||||
jpeg: "image/jpeg",
|
||||
gif: "image/gif",
|
||||
bmp: "image/bmp",
|
||||
webp: "image/webp",
|
||||
ico: "image/x-icon",
|
||||
tif: "image/tiff",
|
||||
tiff: "image/tiff",
|
||||
svg: "image/svg+xml",
|
||||
svgz: "image/svg+xml",
|
||||
avif: "image/avif",
|
||||
apng: "image/apng",
|
||||
jxl: "image/jxl",
|
||||
heic: "image/heic",
|
||||
heif: "image/heif",
|
||||
}
|
||||
|
||||
type Entry = { files: string[]; dirs: string[] }
|
||||
|
||||
const ext = (file: string) => path.extname(file).toLowerCase().slice(1)
|
||||
const name = (file: string) => path.basename(file).toLowerCase()
|
||||
const isImageByExtension = (file: string) => image.has(ext(file))
|
||||
const isTextByExtension = (file: string) => text.has(ext(file))
|
||||
const isTextByName = (file: string) => textName.has(name(file))
|
||||
const isBinaryByExtension = (file: string) => binary.has(ext(file))
|
||||
const isImage = (mimeType: string) => mimeType.startsWith("image/")
|
||||
const getImageMimeType = (file: string) => mime[ext(file)] || "image/" + ext(file)
|
||||
|
||||
function shouldEncode(mimeType: string) {
|
||||
const type = mimeType.toLowerCase()
|
||||
log.debug("shouldEncode", { type })
|
||||
if (!type) return false
|
||||
if (type.startsWith("text/")) return false
|
||||
if (type.includes("charset=")) return false
|
||||
const top = type.split("/", 2)[0]
|
||||
return ["image", "audio", "video", "font", "model", "multipart"].includes(top)
|
||||
}
|
||||
|
||||
const hidden = (item: string) => {
|
||||
const normalized = item.replaceAll("\\", "/").replace(/\/+$/, "")
|
||||
return normalized.split("/").some((part) => part.startsWith(".") && part.length > 1)
|
||||
}
|
||||
|
||||
const sortHiddenLast = (items: string[], prefer: boolean) => {
|
||||
if (prefer) return items
|
||||
const visible: string[] = []
|
||||
const hiddenItems: string[] = []
|
||||
for (const item of items) {
|
||||
if (hidden(item)) hiddenItems.push(item)
|
||||
else visible.push(item)
|
||||
}
|
||||
return [...visible, ...hiddenItems]
|
||||
}
|
||||
|
||||
interface State {
|
||||
cache: Entry
|
||||
}
|
||||
|
||||
export interface Interface {
|
||||
readonly init: () => Effect.Effect<void>
|
||||
readonly status: () => Effect.Effect<Info[]>
|
||||
readonly read: (file: string) => Effect.Effect<Content>
|
||||
readonly list: (dir?: string) => Effect.Effect<Node[]>
|
||||
readonly search: (input: {
|
||||
query: string
|
||||
limit?: number
|
||||
dirs?: boolean
|
||||
type?: "file" | "directory"
|
||||
}) => Effect.Effect<string[]>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/File") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const appFs = yield* AppFileSystem.Service
|
||||
const rg = yield* Ripgrep.Service
|
||||
const git = yield* Git.Service
|
||||
const scope = yield* Scope.Scope
|
||||
|
||||
const state = yield* InstanceState.make<State>(
|
||||
Effect.fn("File.state")(() =>
|
||||
Effect.succeed({
|
||||
cache: { files: [], dirs: [] } as Entry,
|
||||
}),
|
||||
),
|
||||
)
|
||||
|
||||
const scan = Effect.fn("File.scan")(function* () {
|
||||
if (Instance.directory === path.parse(Instance.directory).root) return
|
||||
const isGlobalHome = Instance.directory === Global.Path.home && Instance.project.id === "global"
|
||||
const next: Entry = { files: [], dirs: [] }
|
||||
|
||||
if (isGlobalHome) {
|
||||
const dirs = new Set<string>()
|
||||
const protectedNames = Protected.names()
|
||||
const ignoreNested = new Set(["node_modules", "dist", "build", "target", "vendor"])
|
||||
const shouldIgnoreName = (name: string) => name.startsWith(".") || protectedNames.has(name)
|
||||
const shouldIgnoreNested = (name: string) => name.startsWith(".") || ignoreNested.has(name)
|
||||
const top = yield* appFs.readDirectoryEntries(Instance.directory).pipe(Effect.orElseSucceed(() => []))
|
||||
|
||||
for (const entry of top) {
|
||||
if (entry.type !== "directory") continue
|
||||
if (shouldIgnoreName(entry.name)) continue
|
||||
dirs.add(entry.name + "/")
|
||||
|
||||
const base = path.join(Instance.directory, entry.name)
|
||||
const children = yield* appFs.readDirectoryEntries(base).pipe(Effect.orElseSucceed(() => []))
|
||||
for (const child of children) {
|
||||
if (child.type !== "directory") continue
|
||||
if (shouldIgnoreNested(child.name)) continue
|
||||
dirs.add(entry.name + "/" + child.name + "/")
|
||||
}
|
||||
}
|
||||
|
||||
next.dirs = Array.from(dirs).toSorted()
|
||||
} else {
|
||||
const files = yield* rg.files({ cwd: Instance.directory }).pipe(
|
||||
Stream.runCollect,
|
||||
Effect.map((chunk) => [...chunk]),
|
||||
)
|
||||
const seen = new Set<string>()
|
||||
for (const file of files) {
|
||||
next.files.push(file)
|
||||
let current = file
|
||||
while (true) {
|
||||
const dir = path.dirname(current)
|
||||
if (dir === ".") break
|
||||
if (dir === current) break
|
||||
current = dir
|
||||
if (seen.has(dir)) continue
|
||||
seen.add(dir)
|
||||
next.dirs.push(dir + "/")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const s = yield* InstanceState.get(state)
|
||||
s.cache = next
|
||||
})
|
||||
|
||||
let cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void)))
|
||||
|
||||
const ensure = Effect.fn("File.ensure")(function* () {
|
||||
yield* cachedScan
|
||||
cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void)))
|
||||
})
|
||||
|
||||
const gitText = Effect.fnUntraced(function* (args: string[]) {
|
||||
return (yield* git.run(args, { cwd: Instance.directory })).text()
|
||||
})
|
||||
|
||||
const init = Effect.fn("File.init")(function* () {
|
||||
yield* ensure().pipe(Effect.forkIn(scope))
|
||||
})
|
||||
|
||||
const status = Effect.fn("File.status")(function* () {
|
||||
if (Instance.project.vcs !== "git") return []
|
||||
|
||||
const diffOutput = yield* gitText([
|
||||
"-c",
|
||||
"core.fsmonitor=false",
|
||||
"-c",
|
||||
"core.quotepath=false",
|
||||
"diff",
|
||||
"--numstat",
|
||||
"HEAD",
|
||||
])
|
||||
|
||||
const changed: Info[] = []
|
||||
|
||||
if (diffOutput.trim()) {
|
||||
for (const line of diffOutput.trim().split("\n")) {
|
||||
const [added, removed, file] = line.split("\t")
|
||||
changed.push({
|
||||
path: file,
|
||||
added: added === "-" ? 0 : parseInt(added, 10),
|
||||
removed: removed === "-" ? 0 : parseInt(removed, 10),
|
||||
status: "modified",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const untrackedOutput = yield* gitText([
|
||||
"-c",
|
||||
"core.fsmonitor=false",
|
||||
"-c",
|
||||
"core.quotepath=false",
|
||||
"ls-files",
|
||||
"--others",
|
||||
"--exclude-standard",
|
||||
])
|
||||
|
||||
if (untrackedOutput.trim()) {
|
||||
for (const file of untrackedOutput.trim().split("\n")) {
|
||||
const content = yield* appFs
|
||||
.readFileString(path.join(Instance.directory, file))
|
||||
.pipe(Effect.catch(() => Effect.succeed<string | undefined>(undefined)))
|
||||
if (content === undefined) continue
|
||||
changed.push({
|
||||
path: file,
|
||||
added: content.split("\n").length,
|
||||
removed: 0,
|
||||
status: "added",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const deletedOutput = yield* gitText([
|
||||
"-c",
|
||||
"core.fsmonitor=false",
|
||||
"-c",
|
||||
"core.quotepath=false",
|
||||
"diff",
|
||||
"--name-only",
|
||||
"--diff-filter=D",
|
||||
"HEAD",
|
||||
])
|
||||
|
||||
if (deletedOutput.trim()) {
|
||||
for (const file of deletedOutput.trim().split("\n")) {
|
||||
changed.push({
|
||||
path: file,
|
||||
added: 0,
|
||||
removed: 0,
|
||||
status: "deleted",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return changed.map((item) => {
|
||||
const full = path.isAbsolute(item.path) ? item.path : path.join(Instance.directory, item.path)
|
||||
return {
|
||||
...item,
|
||||
path: path.relative(Instance.directory, full),
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
const read: Interface["read"] = Effect.fn("File.read")(function* (file: string) {
|
||||
using _ = log.time("read", { file })
|
||||
const full = path.join(Instance.directory, file)
|
||||
|
||||
if (!Instance.containsPath(full)) throw new Error("Access denied: path escapes project directory")
|
||||
|
||||
if (isImageByExtension(file)) {
|
||||
const exists = yield* appFs.existsSafe(full)
|
||||
if (exists) {
|
||||
const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
|
||||
return {
|
||||
type: "text" as const,
|
||||
content: Buffer.from(bytes).toString("base64"),
|
||||
mimeType: getImageMimeType(file),
|
||||
encoding: "base64" as const,
|
||||
}
|
||||
}
|
||||
return { type: "text" as const, content: "" }
|
||||
}
|
||||
|
||||
const knownText = isTextByExtension(file) || isTextByName(file)
|
||||
|
||||
if (isBinaryByExtension(file) && !knownText) return { type: "binary" as const, content: "" }
|
||||
|
||||
const exists = yield* appFs.existsSafe(full)
|
||||
if (!exists) return { type: "text" as const, content: "" }
|
||||
|
||||
const mimeType = AppFileSystem.mimeType(full)
|
||||
const encode = knownText ? false : shouldEncode(mimeType)
|
||||
|
||||
if (encode && !isImage(mimeType)) return { type: "binary" as const, content: "", mimeType }
|
||||
|
||||
if (encode) {
|
||||
const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
|
||||
return {
|
||||
type: "text" as const,
|
||||
content: Buffer.from(bytes).toString("base64"),
|
||||
mimeType,
|
||||
encoding: "base64" as const,
|
||||
}
|
||||
}
|
||||
|
||||
const content = yield* appFs.readFileString(full).pipe(
|
||||
Effect.map((s) => s.trim()),
|
||||
Effect.catch(() => Effect.succeed("")),
|
||||
)
|
||||
|
||||
if (Instance.project.vcs === "git") {
|
||||
let diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--", file])
|
||||
if (!diff.trim()) {
|
||||
diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--staged", "--", file])
|
||||
}
|
||||
if (diff.trim()) {
|
||||
const original = yield* git.show(Instance.directory, "HEAD", file)
|
||||
const patch = structuredPatch(file, file, original, content, "old", "new", {
|
||||
context: Infinity,
|
||||
ignoreWhitespace: true,
|
||||
})
|
||||
return { type: "text" as const, content, patch, diff: formatPatch(patch) }
|
||||
}
|
||||
return { type: "text" as const, content }
|
||||
}
|
||||
|
||||
return { type: "text" as const, content }
|
||||
})
|
||||
|
||||
const list = Effect.fn("File.list")(function* (dir?: string) {
|
||||
const exclude = [".git", ".DS_Store"]
|
||||
let ignored = (_: string) => false
|
||||
if (Instance.project.vcs === "git") {
|
||||
const ig = ignore()
|
||||
const gitignore = path.join(Instance.project.worktree, ".gitignore")
|
||||
const gitignoreText = yield* appFs.readFileString(gitignore).pipe(Effect.catch(() => Effect.succeed("")))
|
||||
if (gitignoreText) ig.add(gitignoreText)
|
||||
const ignoreFile = path.join(Instance.project.worktree, ".ignore")
|
||||
const ignoreText = yield* appFs.readFileString(ignoreFile).pipe(Effect.catch(() => Effect.succeed("")))
|
||||
if (ignoreText) ig.add(ignoreText)
|
||||
ignored = ig.ignores.bind(ig)
|
||||
}
|
||||
|
||||
const resolved = dir ? path.join(Instance.directory, dir) : Instance.directory
|
||||
if (!Instance.containsPath(resolved)) throw new Error("Access denied: path escapes project directory")
|
||||
|
||||
const entries = yield* appFs.readDirectoryEntries(resolved).pipe(Effect.orElseSucceed(() => []))
|
||||
|
||||
const nodes: Node[] = []
|
||||
for (const entry of entries) {
|
||||
if (exclude.includes(entry.name)) continue
|
||||
const absolute = path.join(resolved, entry.name)
|
||||
const file = path.relative(Instance.directory, absolute)
|
||||
const type = entry.type === "directory" ? "directory" : "file"
|
||||
nodes.push({
|
||||
name: entry.name,
|
||||
path: file,
|
||||
absolute,
|
||||
type,
|
||||
ignored: ignored(type === "directory" ? file + "/" : file),
|
||||
})
|
||||
}
|
||||
return nodes.sort((a, b) => {
|
||||
if (a.type !== b.type) return a.type === "directory" ? -1 : 1
|
||||
return a.name.localeCompare(b.name)
|
||||
})
|
||||
})
|
||||
|
||||
const search = Effect.fn("File.search")(function* (input: {
|
||||
query: string
|
||||
limit?: number
|
||||
dirs?: boolean
|
||||
type?: "file" | "directory"
|
||||
}) {
|
||||
yield* ensure()
|
||||
const { cache } = yield* InstanceState.get(state)
|
||||
|
||||
const query = input.query.trim()
|
||||
const limit = input.limit ?? 100
|
||||
const kind = input.type ?? (input.dirs === false ? "file" : "all")
|
||||
log.info("search", { query, kind })
|
||||
|
||||
const preferHidden = query.startsWith(".") || query.includes("/.")
|
||||
|
||||
if (!query) {
|
||||
if (kind === "file") return cache.files.slice(0, limit)
|
||||
return sortHiddenLast(cache.dirs.toSorted(), preferHidden).slice(0, limit)
|
||||
}
|
||||
|
||||
const items = kind === "file" ? cache.files : kind === "directory" ? cache.dirs : [...cache.files, ...cache.dirs]
|
||||
|
||||
const searchLimit = kind === "directory" && !preferHidden ? limit * 20 : limit
|
||||
const sorted = fuzzysort.go(query, items, { limit: searchLimit }).map((item) => item.target)
|
||||
const output = kind === "directory" ? sortHiddenLast(sorted, preferHidden).slice(0, limit) : sorted
|
||||
|
||||
log.info("search", { query, kind, results: output.length })
|
||||
return output
|
||||
})
|
||||
|
||||
log.info("init")
|
||||
return Service.of({ init, status, read, list, search })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(
|
||||
Layer.provide(Ripgrep.defaultLayer),
|
||||
Layer.provide(AppFileSystem.defaultLayer),
|
||||
Layer.provide(Git.defaultLayer),
|
||||
)
|
||||
@@ -1,81 +1,81 @@
|
||||
import { Glob } from "@opencode-ai/shared/util/glob"
|
||||
|
||||
export namespace FileIgnore {
|
||||
const FOLDERS = new Set([
|
||||
"node_modules",
|
||||
"bower_components",
|
||||
".pnpm-store",
|
||||
"vendor",
|
||||
".npm",
|
||||
"dist",
|
||||
"build",
|
||||
"out",
|
||||
".next",
|
||||
"target",
|
||||
"bin",
|
||||
"obj",
|
||||
".git",
|
||||
".svn",
|
||||
".hg",
|
||||
".vscode",
|
||||
".idea",
|
||||
".turbo",
|
||||
".output",
|
||||
"desktop",
|
||||
".sst",
|
||||
".cache",
|
||||
".webkit-cache",
|
||||
"__pycache__",
|
||||
".pytest_cache",
|
||||
"mypy_cache",
|
||||
".history",
|
||||
".gradle",
|
||||
])
|
||||
const FOLDERS = new Set([
|
||||
"node_modules",
|
||||
"bower_components",
|
||||
".pnpm-store",
|
||||
"vendor",
|
||||
".npm",
|
||||
"dist",
|
||||
"build",
|
||||
"out",
|
||||
".next",
|
||||
"target",
|
||||
"bin",
|
||||
"obj",
|
||||
".git",
|
||||
".svn",
|
||||
".hg",
|
||||
".vscode",
|
||||
".idea",
|
||||
".turbo",
|
||||
".output",
|
||||
"desktop",
|
||||
".sst",
|
||||
".cache",
|
||||
".webkit-cache",
|
||||
"__pycache__",
|
||||
".pytest_cache",
|
||||
"mypy_cache",
|
||||
".history",
|
||||
".gradle",
|
||||
])
|
||||
|
||||
const FILES = [
|
||||
"**/*.swp",
|
||||
"**/*.swo",
|
||||
const FILES = [
|
||||
"**/*.swp",
|
||||
"**/*.swo",
|
||||
|
||||
"**/*.pyc",
|
||||
"**/*.pyc",
|
||||
|
||||
// OS
|
||||
"**/.DS_Store",
|
||||
"**/Thumbs.db",
|
||||
// OS
|
||||
"**/.DS_Store",
|
||||
"**/Thumbs.db",
|
||||
|
||||
// Logs & temp
|
||||
"**/logs/**",
|
||||
"**/tmp/**",
|
||||
"**/temp/**",
|
||||
"**/*.log",
|
||||
// Logs & temp
|
||||
"**/logs/**",
|
||||
"**/tmp/**",
|
||||
"**/temp/**",
|
||||
"**/*.log",
|
||||
|
||||
// Coverage/test outputs
|
||||
"**/coverage/**",
|
||||
"**/.nyc_output/**",
|
||||
]
|
||||
// Coverage/test outputs
|
||||
"**/coverage/**",
|
||||
"**/.nyc_output/**",
|
||||
]
|
||||
|
||||
export const PATTERNS = [...FILES, ...FOLDERS]
|
||||
export const PATTERNS = [...FILES, ...FOLDERS]
|
||||
|
||||
export function match(
|
||||
filepath: string,
|
||||
opts?: {
|
||||
extra?: string[]
|
||||
whitelist?: string[]
|
||||
},
|
||||
) {
|
||||
for (const pattern of opts?.whitelist || []) {
|
||||
if (Glob.match(pattern, filepath)) return false
|
||||
}
|
||||
|
||||
const parts = filepath.split(/[/\\]/)
|
||||
for (let i = 0; i < parts.length; i++) {
|
||||
if (FOLDERS.has(parts[i])) return true
|
||||
}
|
||||
|
||||
const extra = opts?.extra || []
|
||||
for (const pattern of [...FILES, ...extra]) {
|
||||
if (Glob.match(pattern, filepath)) return true
|
||||
}
|
||||
|
||||
return false
|
||||
export function match(
|
||||
filepath: string,
|
||||
opts?: {
|
||||
extra?: string[]
|
||||
whitelist?: string[]
|
||||
},
|
||||
) {
|
||||
for (const pattern of opts?.whitelist || []) {
|
||||
if (Glob.match(pattern, filepath)) return false
|
||||
}
|
||||
|
||||
const parts = filepath.split(/[/\\]/)
|
||||
for (let i = 0; i < parts.length; i++) {
|
||||
if (FOLDERS.has(parts[i])) return true
|
||||
}
|
||||
|
||||
const extra = opts?.extra || []
|
||||
for (const pattern of [...FILES, ...extra]) {
|
||||
if (Glob.match(pattern, filepath)) return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
export * as FileIgnore from "./ignore"
|
||||
|
||||
@@ -1 +1,656 @@
|
||||
export * as File from "./file"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { InstanceState } from "@/effect"
|
||||
|
||||
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
|
||||
import { Git } from "@/git"
|
||||
import { Effect, Layer, Context, Scope } from "effect"
|
||||
import * as Stream from "effect/Stream"
|
||||
import { formatPatch, structuredPatch } from "diff"
|
||||
import fuzzysort from "fuzzysort"
|
||||
import ignore from "ignore"
|
||||
import path from "path"
|
||||
import z from "zod"
|
||||
import { Global } from "../global"
|
||||
import { Instance } from "../project/instance"
|
||||
import { Log } from "../util"
|
||||
import { Protected } from "./protected"
|
||||
import { Ripgrep } from "./ripgrep"
|
||||
|
||||
export const Info = z
|
||||
.object({
|
||||
path: z.string(),
|
||||
added: z.number().int(),
|
||||
removed: z.number().int(),
|
||||
status: z.enum(["added", "deleted", "modified"]),
|
||||
})
|
||||
.meta({
|
||||
ref: "File",
|
||||
})
|
||||
|
||||
export type Info = z.infer<typeof Info>
|
||||
|
||||
export const Node = z
|
||||
.object({
|
||||
name: z.string(),
|
||||
path: z.string(),
|
||||
absolute: z.string(),
|
||||
type: z.enum(["file", "directory"]),
|
||||
ignored: z.boolean(),
|
||||
})
|
||||
.meta({
|
||||
ref: "FileNode",
|
||||
})
|
||||
export type Node = z.infer<typeof Node>
|
||||
|
||||
export const Content = z
|
||||
.object({
|
||||
type: z.enum(["text", "binary"]),
|
||||
content: z.string(),
|
||||
diff: z.string().optional(),
|
||||
patch: z
|
||||
.object({
|
||||
oldFileName: z.string(),
|
||||
newFileName: z.string(),
|
||||
oldHeader: z.string().optional(),
|
||||
newHeader: z.string().optional(),
|
||||
hunks: z.array(
|
||||
z.object({
|
||||
oldStart: z.number(),
|
||||
oldLines: z.number(),
|
||||
newStart: z.number(),
|
||||
newLines: z.number(),
|
||||
lines: z.array(z.string()),
|
||||
}),
|
||||
),
|
||||
index: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
encoding: z.literal("base64").optional(),
|
||||
mimeType: z.string().optional(),
|
||||
})
|
||||
.meta({
|
||||
ref: "FileContent",
|
||||
})
|
||||
export type Content = z.infer<typeof Content>
|
||||
|
||||
export const Event = {
|
||||
Edited: BusEvent.define(
|
||||
"file.edited",
|
||||
z.object({
|
||||
file: z.string(),
|
||||
}),
|
||||
),
|
||||
}
|
||||
|
||||
const log = Log.create({ service: "file" })
|
||||
|
||||
const binary = new Set([
|
||||
"exe",
|
||||
"dll",
|
||||
"pdb",
|
||||
"bin",
|
||||
"so",
|
||||
"dylib",
|
||||
"o",
|
||||
"a",
|
||||
"lib",
|
||||
"wav",
|
||||
"mp3",
|
||||
"ogg",
|
||||
"oga",
|
||||
"ogv",
|
||||
"ogx",
|
||||
"flac",
|
||||
"aac",
|
||||
"wma",
|
||||
"m4a",
|
||||
"weba",
|
||||
"mp4",
|
||||
"avi",
|
||||
"mov",
|
||||
"wmv",
|
||||
"flv",
|
||||
"webm",
|
||||
"mkv",
|
||||
"zip",
|
||||
"tar",
|
||||
"gz",
|
||||
"gzip",
|
||||
"bz",
|
||||
"bz2",
|
||||
"bzip",
|
||||
"bzip2",
|
||||
"7z",
|
||||
"rar",
|
||||
"xz",
|
||||
"lz",
|
||||
"z",
|
||||
"pdf",
|
||||
"doc",
|
||||
"docx",
|
||||
"ppt",
|
||||
"pptx",
|
||||
"xls",
|
||||
"xlsx",
|
||||
"dmg",
|
||||
"iso",
|
||||
"img",
|
||||
"vmdk",
|
||||
"ttf",
|
||||
"otf",
|
||||
"woff",
|
||||
"woff2",
|
||||
"eot",
|
||||
"sqlite",
|
||||
"db",
|
||||
"mdb",
|
||||
"apk",
|
||||
"ipa",
|
||||
"aab",
|
||||
"xapk",
|
||||
"app",
|
||||
"pkg",
|
||||
"deb",
|
||||
"rpm",
|
||||
"snap",
|
||||
"flatpak",
|
||||
"appimage",
|
||||
"msi",
|
||||
"msp",
|
||||
"jar",
|
||||
"war",
|
||||
"ear",
|
||||
"class",
|
||||
"kotlin_module",
|
||||
"dex",
|
||||
"vdex",
|
||||
"odex",
|
||||
"oat",
|
||||
"art",
|
||||
"wasm",
|
||||
"wat",
|
||||
"bc",
|
||||
"ll",
|
||||
"s",
|
||||
"ko",
|
||||
"sys",
|
||||
"drv",
|
||||
"efi",
|
||||
"rom",
|
||||
"com",
|
||||
])
|
||||
|
||||
const image = new Set([
|
||||
"png",
|
||||
"jpg",
|
||||
"jpeg",
|
||||
"gif",
|
||||
"bmp",
|
||||
"webp",
|
||||
"ico",
|
||||
"tif",
|
||||
"tiff",
|
||||
"svg",
|
||||
"svgz",
|
||||
"avif",
|
||||
"apng",
|
||||
"jxl",
|
||||
"heic",
|
||||
"heif",
|
||||
"raw",
|
||||
"cr2",
|
||||
"nef",
|
||||
"arw",
|
||||
"dng",
|
||||
"orf",
|
||||
"raf",
|
||||
"pef",
|
||||
"x3f",
|
||||
])
|
||||
|
||||
const text = new Set([
|
||||
"ts",
|
||||
"tsx",
|
||||
"mts",
|
||||
"cts",
|
||||
"mtsx",
|
||||
"ctsx",
|
||||
"js",
|
||||
"jsx",
|
||||
"mjs",
|
||||
"cjs",
|
||||
"sh",
|
||||
"bash",
|
||||
"zsh",
|
||||
"fish",
|
||||
"ps1",
|
||||
"psm1",
|
||||
"cmd",
|
||||
"bat",
|
||||
"json",
|
||||
"jsonc",
|
||||
"json5",
|
||||
"yaml",
|
||||
"yml",
|
||||
"toml",
|
||||
"md",
|
||||
"mdx",
|
||||
"txt",
|
||||
"xml",
|
||||
"html",
|
||||
"htm",
|
||||
"css",
|
||||
"scss",
|
||||
"sass",
|
||||
"less",
|
||||
"graphql",
|
||||
"gql",
|
||||
"sql",
|
||||
"ini",
|
||||
"cfg",
|
||||
"conf",
|
||||
"env",
|
||||
])
|
||||
|
||||
const textName = new Set([
|
||||
"dockerfile",
|
||||
"makefile",
|
||||
".gitignore",
|
||||
".gitattributes",
|
||||
".editorconfig",
|
||||
".npmrc",
|
||||
".nvmrc",
|
||||
".prettierrc",
|
||||
".eslintrc",
|
||||
])
|
||||
|
||||
const mime: Record<string, string> = {
|
||||
png: "image/png",
|
||||
jpg: "image/jpeg",
|
||||
jpeg: "image/jpeg",
|
||||
gif: "image/gif",
|
||||
bmp: "image/bmp",
|
||||
webp: "image/webp",
|
||||
ico: "image/x-icon",
|
||||
tif: "image/tiff",
|
||||
tiff: "image/tiff",
|
||||
svg: "image/svg+xml",
|
||||
svgz: "image/svg+xml",
|
||||
avif: "image/avif",
|
||||
apng: "image/apng",
|
||||
jxl: "image/jxl",
|
||||
heic: "image/heic",
|
||||
heif: "image/heif",
|
||||
}
|
||||
|
||||
type Entry = { files: string[]; dirs: string[] }
|
||||
|
||||
const ext = (file: string) => path.extname(file).toLowerCase().slice(1)
|
||||
const name = (file: string) => path.basename(file).toLowerCase()
|
||||
const isImageByExtension = (file: string) => image.has(ext(file))
|
||||
const isTextByExtension = (file: string) => text.has(ext(file))
|
||||
const isTextByName = (file: string) => textName.has(name(file))
|
||||
const isBinaryByExtension = (file: string) => binary.has(ext(file))
|
||||
const isImage = (mimeType: string) => mimeType.startsWith("image/")
|
||||
const getImageMimeType = (file: string) => mime[ext(file)] || "image/" + ext(file)
|
||||
|
||||
function shouldEncode(mimeType: string) {
|
||||
const type = mimeType.toLowerCase()
|
||||
log.debug("shouldEncode", { type })
|
||||
if (!type) return false
|
||||
if (type.startsWith("text/")) return false
|
||||
if (type.includes("charset=")) return false
|
||||
const top = type.split("/", 2)[0]
|
||||
return ["image", "audio", "video", "font", "model", "multipart"].includes(top)
|
||||
}
|
||||
|
||||
const hidden = (item: string) => {
|
||||
const normalized = item.replaceAll("\\", "/").replace(/\/+$/, "")
|
||||
return normalized.split("/").some((part) => part.startsWith(".") && part.length > 1)
|
||||
}
|
||||
|
||||
const sortHiddenLast = (items: string[], prefer: boolean) => {
|
||||
if (prefer) return items
|
||||
const visible: string[] = []
|
||||
const hiddenItems: string[] = []
|
||||
for (const item of items) {
|
||||
if (hidden(item)) hiddenItems.push(item)
|
||||
else visible.push(item)
|
||||
}
|
||||
return [...visible, ...hiddenItems]
|
||||
}
|
||||
|
||||
interface State {
|
||||
cache: Entry
|
||||
}
|
||||
|
||||
export interface Interface {
|
||||
readonly init: () => Effect.Effect<void>
|
||||
readonly status: () => Effect.Effect<Info[]>
|
||||
readonly read: (file: string) => Effect.Effect<Content>
|
||||
readonly list: (dir?: string) => Effect.Effect<Node[]>
|
||||
readonly search: (input: {
|
||||
query: string
|
||||
limit?: number
|
||||
dirs?: boolean
|
||||
type?: "file" | "directory"
|
||||
}) => Effect.Effect<string[]>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/File") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const appFs = yield* AppFileSystem.Service
|
||||
const rg = yield* Ripgrep.Service
|
||||
const git = yield* Git.Service
|
||||
const scope = yield* Scope.Scope
|
||||
|
||||
const state = yield* InstanceState.make<State>(
|
||||
Effect.fn("File.state")(() =>
|
||||
Effect.succeed({
|
||||
cache: { files: [], dirs: [] } as Entry,
|
||||
}),
|
||||
),
|
||||
)
|
||||
|
||||
const scan = Effect.fn("File.scan")(function* () {
|
||||
if (Instance.directory === path.parse(Instance.directory).root) return
|
||||
const isGlobalHome = Instance.directory === Global.Path.home && Instance.project.id === "global"
|
||||
const next: Entry = { files: [], dirs: [] }
|
||||
|
||||
if (isGlobalHome) {
|
||||
const dirs = new Set<string>()
|
||||
const protectedNames = Protected.names()
|
||||
const ignoreNested = new Set(["node_modules", "dist", "build", "target", "vendor"])
|
||||
const shouldIgnoreName = (name: string) => name.startsWith(".") || protectedNames.has(name)
|
||||
const shouldIgnoreNested = (name: string) => name.startsWith(".") || ignoreNested.has(name)
|
||||
const top = yield* appFs.readDirectoryEntries(Instance.directory).pipe(Effect.orElseSucceed(() => []))
|
||||
|
||||
for (const entry of top) {
|
||||
if (entry.type !== "directory") continue
|
||||
if (shouldIgnoreName(entry.name)) continue
|
||||
dirs.add(entry.name + "/")
|
||||
|
||||
const base = path.join(Instance.directory, entry.name)
|
||||
const children = yield* appFs.readDirectoryEntries(base).pipe(Effect.orElseSucceed(() => []))
|
||||
for (const child of children) {
|
||||
if (child.type !== "directory") continue
|
||||
if (shouldIgnoreNested(child.name)) continue
|
||||
dirs.add(entry.name + "/" + child.name + "/")
|
||||
}
|
||||
}
|
||||
|
||||
next.dirs = Array.from(dirs).toSorted()
|
||||
} else {
|
||||
const files = yield* rg.files({ cwd: Instance.directory }).pipe(
|
||||
Stream.runCollect,
|
||||
Effect.map((chunk) => [...chunk]),
|
||||
)
|
||||
const seen = new Set<string>()
|
||||
for (const file of files) {
|
||||
next.files.push(file)
|
||||
let current = file
|
||||
while (true) {
|
||||
const dir = path.dirname(current)
|
||||
if (dir === ".") break
|
||||
if (dir === current) break
|
||||
current = dir
|
||||
if (seen.has(dir)) continue
|
||||
seen.add(dir)
|
||||
next.dirs.push(dir + "/")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const s = yield* InstanceState.get(state)
|
||||
s.cache = next
|
||||
})
|
||||
|
||||
let cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void)))
|
||||
|
||||
const ensure = Effect.fn("File.ensure")(function* () {
|
||||
yield* cachedScan
|
||||
cachedScan = yield* Effect.cached(scan().pipe(Effect.catchCause(() => Effect.void)))
|
||||
})
|
||||
|
||||
const gitText = Effect.fnUntraced(function* (args: string[]) {
|
||||
return (yield* git.run(args, { cwd: Instance.directory })).text()
|
||||
})
|
||||
|
||||
const init = Effect.fn("File.init")(function* () {
|
||||
yield* ensure().pipe(Effect.forkIn(scope))
|
||||
})
|
||||
|
||||
const status = Effect.fn("File.status")(function* () {
|
||||
if (Instance.project.vcs !== "git") return []
|
||||
|
||||
const diffOutput = yield* gitText([
|
||||
"-c",
|
||||
"core.fsmonitor=false",
|
||||
"-c",
|
||||
"core.quotepath=false",
|
||||
"diff",
|
||||
"--numstat",
|
||||
"HEAD",
|
||||
])
|
||||
|
||||
const changed: Info[] = []
|
||||
|
||||
if (diffOutput.trim()) {
|
||||
for (const line of diffOutput.trim().split("\n")) {
|
||||
const [added, removed, file] = line.split("\t")
|
||||
changed.push({
|
||||
path: file,
|
||||
added: added === "-" ? 0 : parseInt(added, 10),
|
||||
removed: removed === "-" ? 0 : parseInt(removed, 10),
|
||||
status: "modified",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const untrackedOutput = yield* gitText([
|
||||
"-c",
|
||||
"core.fsmonitor=false",
|
||||
"-c",
|
||||
"core.quotepath=false",
|
||||
"ls-files",
|
||||
"--others",
|
||||
"--exclude-standard",
|
||||
])
|
||||
|
||||
if (untrackedOutput.trim()) {
|
||||
for (const file of untrackedOutput.trim().split("\n")) {
|
||||
const content = yield* appFs
|
||||
.readFileString(path.join(Instance.directory, file))
|
||||
.pipe(Effect.catch(() => Effect.succeed<string | undefined>(undefined)))
|
||||
if (content === undefined) continue
|
||||
changed.push({
|
||||
path: file,
|
||||
added: content.split("\n").length,
|
||||
removed: 0,
|
||||
status: "added",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const deletedOutput = yield* gitText([
|
||||
"-c",
|
||||
"core.fsmonitor=false",
|
||||
"-c",
|
||||
"core.quotepath=false",
|
||||
"diff",
|
||||
"--name-only",
|
||||
"--diff-filter=D",
|
||||
"HEAD",
|
||||
])
|
||||
|
||||
if (deletedOutput.trim()) {
|
||||
for (const file of deletedOutput.trim().split("\n")) {
|
||||
changed.push({
|
||||
path: file,
|
||||
added: 0,
|
||||
removed: 0,
|
||||
status: "deleted",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return changed.map((item) => {
|
||||
const full = path.isAbsolute(item.path) ? item.path : path.join(Instance.directory, item.path)
|
||||
return {
|
||||
...item,
|
||||
path: path.relative(Instance.directory, full),
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
const read: Interface["read"] = Effect.fn("File.read")(function* (file: string) {
|
||||
using _ = log.time("read", { file })
|
||||
const full = path.join(Instance.directory, file)
|
||||
|
||||
if (!Instance.containsPath(full)) throw new Error("Access denied: path escapes project directory")
|
||||
|
||||
if (isImageByExtension(file)) {
|
||||
const exists = yield* appFs.existsSafe(full)
|
||||
if (exists) {
|
||||
const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
|
||||
return {
|
||||
type: "text" as const,
|
||||
content: Buffer.from(bytes).toString("base64"),
|
||||
mimeType: getImageMimeType(file),
|
||||
encoding: "base64" as const,
|
||||
}
|
||||
}
|
||||
return { type: "text" as const, content: "" }
|
||||
}
|
||||
|
||||
const knownText = isTextByExtension(file) || isTextByName(file)
|
||||
|
||||
if (isBinaryByExtension(file) && !knownText) return { type: "binary" as const, content: "" }
|
||||
|
||||
const exists = yield* appFs.existsSafe(full)
|
||||
if (!exists) return { type: "text" as const, content: "" }
|
||||
|
||||
const mimeType = AppFileSystem.mimeType(full)
|
||||
const encode = knownText ? false : shouldEncode(mimeType)
|
||||
|
||||
if (encode && !isImage(mimeType)) return { type: "binary" as const, content: "", mimeType }
|
||||
|
||||
if (encode) {
|
||||
const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
|
||||
return {
|
||||
type: "text" as const,
|
||||
content: Buffer.from(bytes).toString("base64"),
|
||||
mimeType,
|
||||
encoding: "base64" as const,
|
||||
}
|
||||
}
|
||||
|
||||
const content = yield* appFs.readFileString(full).pipe(
|
||||
Effect.map((s) => s.trim()),
|
||||
Effect.catch(() => Effect.succeed("")),
|
||||
)
|
||||
|
||||
if (Instance.project.vcs === "git") {
|
||||
let diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--", file])
|
||||
if (!diff.trim()) {
|
||||
diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--staged", "--", file])
|
||||
}
|
||||
if (diff.trim()) {
|
||||
const original = yield* git.show(Instance.directory, "HEAD", file)
|
||||
const patch = structuredPatch(file, file, original, content, "old", "new", {
|
||||
context: Infinity,
|
||||
ignoreWhitespace: true,
|
||||
})
|
||||
return { type: "text" as const, content, patch, diff: formatPatch(patch) }
|
||||
}
|
||||
return { type: "text" as const, content }
|
||||
}
|
||||
|
||||
return { type: "text" as const, content }
|
||||
})
|
||||
|
||||
const list = Effect.fn("File.list")(function* (dir?: string) {
|
||||
const exclude = [".git", ".DS_Store"]
|
||||
let ignored = (_: string) => false
|
||||
if (Instance.project.vcs === "git") {
|
||||
const ig = ignore()
|
||||
const gitignore = path.join(Instance.project.worktree, ".gitignore")
|
||||
const gitignoreText = yield* appFs.readFileString(gitignore).pipe(Effect.catch(() => Effect.succeed("")))
|
||||
if (gitignoreText) ig.add(gitignoreText)
|
||||
const ignoreFile = path.join(Instance.project.worktree, ".ignore")
|
||||
const ignoreText = yield* appFs.readFileString(ignoreFile).pipe(Effect.catch(() => Effect.succeed("")))
|
||||
if (ignoreText) ig.add(ignoreText)
|
||||
ignored = ig.ignores.bind(ig)
|
||||
}
|
||||
|
||||
const resolved = dir ? path.join(Instance.directory, dir) : Instance.directory
|
||||
if (!Instance.containsPath(resolved)) throw new Error("Access denied: path escapes project directory")
|
||||
|
||||
const entries = yield* appFs.readDirectoryEntries(resolved).pipe(Effect.orElseSucceed(() => []))
|
||||
|
||||
const nodes: Node[] = []
|
||||
for (const entry of entries) {
|
||||
if (exclude.includes(entry.name)) continue
|
||||
const absolute = path.join(resolved, entry.name)
|
||||
const file = path.relative(Instance.directory, absolute)
|
||||
const type = entry.type === "directory" ? "directory" : "file"
|
||||
nodes.push({
|
||||
name: entry.name,
|
||||
path: file,
|
||||
absolute,
|
||||
type,
|
||||
ignored: ignored(type === "directory" ? file + "/" : file),
|
||||
})
|
||||
}
|
||||
return nodes.sort((a, b) => {
|
||||
if (a.type !== b.type) return a.type === "directory" ? -1 : 1
|
||||
return a.name.localeCompare(b.name)
|
||||
})
|
||||
})
|
||||
|
||||
const search = Effect.fn("File.search")(function* (input: {
|
||||
query: string
|
||||
limit?: number
|
||||
dirs?: boolean
|
||||
type?: "file" | "directory"
|
||||
}) {
|
||||
yield* ensure()
|
||||
const { cache } = yield* InstanceState.get(state)
|
||||
|
||||
const query = input.query.trim()
|
||||
const limit = input.limit ?? 100
|
||||
const kind = input.type ?? (input.dirs === false ? "file" : "all")
|
||||
log.info("search", { query, kind })
|
||||
|
||||
const preferHidden = query.startsWith(".") || query.includes("/.")
|
||||
|
||||
if (!query) {
|
||||
if (kind === "file") return cache.files.slice(0, limit)
|
||||
return sortHiddenLast(cache.dirs.toSorted(), preferHidden).slice(0, limit)
|
||||
}
|
||||
|
||||
const items = kind === "file" ? cache.files : kind === "directory" ? cache.dirs : [...cache.files, ...cache.dirs]
|
||||
|
||||
const searchLimit = kind === "directory" && !preferHidden ? limit * 20 : limit
|
||||
const sorted = fuzzysort.go(query, items, { limit: searchLimit }).map((item) => item.target)
|
||||
const output = kind === "directory" ? sortHiddenLast(sorted, preferHidden).slice(0, limit) : sorted
|
||||
|
||||
log.info("search", { query, kind, results: output.length })
|
||||
return output
|
||||
})
|
||||
|
||||
log.info("init")
|
||||
return Service.of({ init, status, read, list, search })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(
|
||||
Layer.provide(Ripgrep.defaultLayer),
|
||||
Layer.provide(AppFileSystem.defaultLayer),
|
||||
Layer.provide(Git.defaultLayer),
|
||||
)
|
||||
|
||||
export * as File from "."
|
||||
|
||||
@@ -37,23 +37,23 @@ const DARWIN_ROOT = ["/.DocumentRevisions-V100", "/.Spotlight-V100", "/.Trashes"
|
||||
|
||||
const WIN32_HOME = ["AppData", "Downloads", "Desktop", "Documents", "Pictures", "Music", "Videos", "OneDrive"]
|
||||
|
||||
export namespace Protected {
|
||||
/** Directory basenames to skip when scanning the home directory. */
|
||||
export function names(): ReadonlySet<string> {
|
||||
if (process.platform === "darwin") return new Set(DARWIN_HOME)
|
||||
if (process.platform === "win32") return new Set(WIN32_HOME)
|
||||
return new Set()
|
||||
}
|
||||
|
||||
/** Absolute paths that should never be watched, stated, or scanned. */
|
||||
export function paths(): string[] {
|
||||
if (process.platform === "darwin")
|
||||
return [
|
||||
...DARWIN_HOME.map((n) => path.join(home, n)),
|
||||
...DARWIN_LIBRARY.map((n) => path.join(home, "Library", n)),
|
||||
...DARWIN_ROOT,
|
||||
]
|
||||
if (process.platform === "win32") return WIN32_HOME.map((n) => path.join(home, n))
|
||||
return []
|
||||
}
|
||||
/** Directory basenames to skip when scanning the home directory. */
|
||||
export function names(): ReadonlySet<string> {
|
||||
if (process.platform === "darwin") return new Set(DARWIN_HOME)
|
||||
if (process.platform === "win32") return new Set(WIN32_HOME)
|
||||
return new Set()
|
||||
}
|
||||
|
||||
/** Absolute paths that should never be watched, stated, or scanned. */
|
||||
export function paths(): string[] {
|
||||
if (process.platform === "darwin")
|
||||
return [
|
||||
...DARWIN_HOME.map((n) => path.join(home, n)),
|
||||
...DARWIN_LIBRARY.map((n) => path.join(home, "Library", n)),
|
||||
...DARWIN_ROOT,
|
||||
]
|
||||
if (process.platform === "win32") return WIN32_HOME.map((n) => path.join(home, n))
|
||||
return []
|
||||
}
|
||||
|
||||
export * as Protected from "./protected"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -5,109 +5,109 @@ import { Flag } from "@/flag/flag"
|
||||
import type { SessionID } from "@/session/schema"
|
||||
import { Log } from "../util"
|
||||
|
||||
export namespace FileTime {
|
||||
const log = Log.create({ service: "file.time" })
|
||||
const log = Log.create({ service: "file.time" })
|
||||
|
||||
export type Stamp = {
|
||||
readonly read: Date
|
||||
readonly mtime: number | undefined
|
||||
readonly size: number | undefined
|
||||
}
|
||||
|
||||
const session = (reads: Map<SessionID, Map<string, Stamp>>, sessionID: SessionID) => {
|
||||
const value = reads.get(sessionID)
|
||||
if (value) return value
|
||||
|
||||
const next = new Map<string, Stamp>()
|
||||
reads.set(sessionID, next)
|
||||
return next
|
||||
}
|
||||
|
||||
interface State {
|
||||
reads: Map<SessionID, Map<string, Stamp>>
|
||||
locks: Map<string, Semaphore.Semaphore>
|
||||
}
|
||||
|
||||
export interface Interface {
|
||||
readonly read: (sessionID: SessionID, file: string) => Effect.Effect<void>
|
||||
readonly get: (sessionID: SessionID, file: string) => Effect.Effect<Date | undefined>
|
||||
readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect<void>
|
||||
readonly withLock: <T>(filepath: string, fn: () => Effect.Effect<T>) => Effect.Effect<T>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/FileTime") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const fsys = yield* AppFileSystem.Service
|
||||
const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK
|
||||
|
||||
const stamp = Effect.fnUntraced(function* (file: string) {
|
||||
const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void))
|
||||
return {
|
||||
read: yield* DateTime.nowAsDate,
|
||||
mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined,
|
||||
size: info ? Number(info.size) : undefined,
|
||||
}
|
||||
})
|
||||
const state = yield* InstanceState.make<State>(
|
||||
Effect.fn("FileTime.state")(() =>
|
||||
Effect.succeed({
|
||||
reads: new Map<SessionID, Map<string, Stamp>>(),
|
||||
locks: new Map<string, Semaphore.Semaphore>(),
|
||||
}),
|
||||
),
|
||||
)
|
||||
|
||||
const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) {
|
||||
filepath = AppFileSystem.normalizePath(filepath)
|
||||
const locks = (yield* InstanceState.get(state)).locks
|
||||
const lock = locks.get(filepath)
|
||||
if (lock) return lock
|
||||
|
||||
const next = Semaphore.makeUnsafe(1)
|
||||
locks.set(filepath, next)
|
||||
return next
|
||||
})
|
||||
|
||||
const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) {
|
||||
file = AppFileSystem.normalizePath(file)
|
||||
const reads = (yield* InstanceState.get(state)).reads
|
||||
log.info("read", { sessionID, file })
|
||||
session(reads, sessionID).set(file, yield* stamp(file))
|
||||
})
|
||||
|
||||
const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) {
|
||||
file = AppFileSystem.normalizePath(file)
|
||||
const reads = (yield* InstanceState.get(state)).reads
|
||||
return reads.get(sessionID)?.get(file)?.read
|
||||
})
|
||||
|
||||
const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) {
|
||||
if (disableCheck) return
|
||||
filepath = AppFileSystem.normalizePath(filepath)
|
||||
|
||||
const reads = (yield* InstanceState.get(state)).reads
|
||||
const time = reads.get(sessionID)?.get(filepath)
|
||||
if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`)
|
||||
|
||||
const next = yield* stamp(filepath)
|
||||
const changed = next.mtime !== time.mtime || next.size !== time.size
|
||||
if (!changed) return
|
||||
|
||||
throw new Error(
|
||||
`File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`,
|
||||
)
|
||||
})
|
||||
|
||||
const withLock = Effect.fn("FileTime.withLock")(function* <T>(filepath: string, fn: () => Effect.Effect<T>) {
|
||||
return yield* fn().pipe((yield* getLock(filepath)).withPermits(1))
|
||||
})
|
||||
|
||||
return Service.of({ read, get, assert, withLock })
|
||||
}),
|
||||
).pipe(Layer.orDie)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
|
||||
export type Stamp = {
|
||||
readonly read: Date
|
||||
readonly mtime: number | undefined
|
||||
readonly size: number | undefined
|
||||
}
|
||||
|
||||
const session = (reads: Map<SessionID, Map<string, Stamp>>, sessionID: SessionID) => {
|
||||
const value = reads.get(sessionID)
|
||||
if (value) return value
|
||||
|
||||
const next = new Map<string, Stamp>()
|
||||
reads.set(sessionID, next)
|
||||
return next
|
||||
}
|
||||
|
||||
interface State {
|
||||
reads: Map<SessionID, Map<string, Stamp>>
|
||||
locks: Map<string, Semaphore.Semaphore>
|
||||
}
|
||||
|
||||
export interface Interface {
|
||||
readonly read: (sessionID: SessionID, file: string) => Effect.Effect<void>
|
||||
readonly get: (sessionID: SessionID, file: string) => Effect.Effect<Date | undefined>
|
||||
readonly assert: (sessionID: SessionID, filepath: string) => Effect.Effect<void>
|
||||
readonly withLock: <T>(filepath: string, fn: () => Effect.Effect<T>) => Effect.Effect<T>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/FileTime") {}
|
||||
|
||||
/**
 * Live FileTime layer.
 *
 * Per-instance state holds:
 *  - reads: sessionID -> (normalized path -> Stamp of last read)
 *  - locks: normalized path -> single-permit semaphore for `withLock`
 * Construction failures become defects via `Layer.orDie`.
 */
export const layer = Layer.effect(
  Service,
  Effect.gen(function* () {
    const fsys = yield* AppFileSystem.Service
    // Escape hatch: when the flag is set, `assert` is a no-op.
    const disableCheck = yield* Flag.OPENCODE_DISABLE_FILETIME_CHECK

    // Capture the current wall-clock time plus the file's on-disk mtime/size.
    // A failed stat leaves mtime/size undefined rather than failing the effect.
    const stamp = Effect.fnUntraced(function* (file: string) {
      const info = yield* fsys.stat(file).pipe(Effect.catch(() => Effect.void))
      return {
        read: yield* DateTime.nowAsDate,
        mtime: info ? Option.getOrUndefined(info.mtime)?.getTime() : undefined,
        size: info ? Number(info.size) : undefined,
      }
    })
    const state = yield* InstanceState.make<State>(
      Effect.fn("FileTime.state")(() =>
        Effect.succeed({
          reads: new Map<SessionID, Map<string, Stamp>>(),
          locks: new Map<string, Semaphore.Semaphore>(),
        }),
      ),
    )

    // Lazily create (and memoize) the single-permit semaphore for a path.
    const getLock = Effect.fn("FileTime.lock")(function* (filepath: string) {
      filepath = AppFileSystem.normalizePath(filepath)
      const locks = (yield* InstanceState.get(state)).locks
      const lock = locks.get(filepath)
      if (lock) return lock

      const next = Semaphore.makeUnsafe(1)
      locks.set(filepath, next)
      return next
    })

    // Stamp `file` as read by `sessionID`, replacing any previous stamp.
    const read = Effect.fn("FileTime.read")(function* (sessionID: SessionID, file: string) {
      file = AppFileSystem.normalizePath(file)
      const reads = (yield* InstanceState.get(state)).reads
      log.info("read", { sessionID, file })
      session(reads, sessionID).set(file, yield* stamp(file))
    })

    // Fetch the last-read time, or undefined if the session never read the file.
    const get = Effect.fn("FileTime.get")(function* (sessionID: SessionID, file: string) {
      file = AppFileSystem.normalizePath(file)
      const reads = (yield* InstanceState.get(state)).reads
      return reads.get(sessionID)?.get(file)?.read
    })

    // Guard against blind overwrites: the session must have read the file,
    // and its mtime/size must be unchanged since that read. Violations are
    // thrown as plain Errors (surfaced as defects to the caller).
    const assert = Effect.fn("FileTime.assert")(function* (sessionID: SessionID, filepath: string) {
      if (disableCheck) return
      filepath = AppFileSystem.normalizePath(filepath)

      const reads = (yield* InstanceState.get(state)).reads
      const time = reads.get(sessionID)?.get(filepath)
      if (!time) throw new Error(`You must read file ${filepath} before overwriting it. Use the Read tool first`)

      // Change detection compares mtime and size only, not content.
      const next = yield* stamp(filepath)
      const changed = next.mtime !== time.mtime || next.size !== time.size
      if (!changed) return

      throw new Error(
        `File ${filepath} has been modified since it was last read.\nLast modification: ${new Date(next.mtime ?? next.read.getTime()).toISOString()}\nLast read: ${time.read.toISOString()}\n\nPlease read the file again before modifying it.`,
      )
    })

    // Serialize `fn` against other holders of the same file's lock.
    const withLock = Effect.fn("FileTime.withLock")(function* <T>(filepath: string, fn: () => Effect.Effect<T>) {
      return yield* fn().pipe((yield* getLock(filepath)).withPermits(1))
    })

    return Service.of({ read, get, assert, withLock })
  }),
).pipe(Layer.orDie)
|
||||
|
||||
// FileTime layer wired with the default real-filesystem implementation.
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
|
||||
|
||||
export * as FileTime from "./time"
|
||||
|
||||
@@ -19,145 +19,145 @@ import { Log } from "../util"
|
||||
|
||||
// Compile-time constant selecting the linux libc flavor for the native
// watcher binding (code below falls back to "glibc" when unset) —
// presumably injected by the bundler; TODO confirm where it is defined.
declare const OPENCODE_LIBC: string | undefined
|
||||
|
||||
export namespace FileWatcher {
|
||||
const log = Log.create({ service: "file.watcher" })
|
||||
const SUBSCRIBE_TIMEOUT_MS = 10_000
|
||||
const log = Log.create({ service: "file.watcher" })
|
||||
const SUBSCRIBE_TIMEOUT_MS = 10_000
|
||||
|
||||
// Bus events published by the watcher.
export const Event = {
  // Emitted for every observed filesystem change on a watched path.
  Updated: BusEvent.define(
    "file.watcher.updated",
    z.object({
      file: z.string(),
      // parcel "create"/"update"/"delete" are mapped to these names by the layer.
      event: z.union([z.literal("add"), z.literal("change"), z.literal("unlink")]),
    }),
  ),
}
|
||||
|
||||
// Lazily load the platform-specific @parcel/watcher native binding.
// Returns undefined (after logging) when the binding cannot be loaded,
// so callers degrade gracefully instead of crashing.
const watcher = lazy((): typeof import("@parcel/watcher") | undefined => {
  try {
    // e.g. "@parcel/watcher-linux-x64-glibc"; linux also selects a libc flavor.
    const binding = require(
      `@parcel/watcher-${process.platform}-${process.arch}${process.platform === "linux" ? `-${OPENCODE_LIBC || "glibc"}` : ""}`,
    )
    return createWrapper(binding) as typeof import("@parcel/watcher")
  } catch (error) {
    log.error("failed to load watcher binding", { error })
    return
  }
})
|
||||
|
||||
// Map the current platform to a @parcel/watcher backend name.
// Falls through (returns undefined) on unsupported platforms.
function getBackend() {
  if (process.platform === "win32") return "windows"
  if (process.platform === "darwin") return "fs-events"
  if (process.platform === "linux") return "inotify"
}
|
||||
|
||||
// Of all protected paths, keep only those strictly inside `dir`:
// the relative path must be non-empty, not escape via "..", and not absolute.
function protecteds(dir: string) {
  return Protected.paths().filter((item) => {
    const rel = path.relative(dir, item)
    return rel !== "" && !rel.startsWith("..") && !path.isAbsolute(rel)
  })
}
|
||||
|
||||
// True when the native @parcel/watcher binding loaded successfully.
export const hasNativeBinding = () => !!watcher()
|
||||
|
||||
// Public surface of the file watcher service.
export interface Interface {
  // Idempotently start watching (forces lazy state initialization).
  readonly init: () => Effect.Effect<void>
}
|
||||
|
||||
// Effect context tag for the FileWatcher service implementation.
export class Service extends Context.Service<Service, Interface>()("@opencode/FileWatcher") {}
|
||||
|
||||
export const layer = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const config = yield* Config.Service
|
||||
const git = yield* Git.Service
|
||||
|
||||
const state = yield* InstanceState.make(
|
||||
Effect.fn("FileWatcher.state")(
|
||||
function* () {
|
||||
if (yield* Flag.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER) return
|
||||
|
||||
log.info("init", { directory: Instance.directory })
|
||||
|
||||
const backend = getBackend()
|
||||
if (!backend) {
|
||||
log.error("watcher backend not supported", { directory: Instance.directory, platform: process.platform })
|
||||
return
|
||||
}
|
||||
|
||||
const w = watcher()
|
||||
if (!w) return
|
||||
|
||||
log.info("watcher backend", { directory: Instance.directory, platform: process.platform, backend })
|
||||
|
||||
const subs: ParcelWatcher.AsyncSubscription[] = []
|
||||
yield* Effect.addFinalizer(() =>
|
||||
Effect.promise(() => Promise.allSettled(subs.map((sub) => sub.unsubscribe()))),
|
||||
)
|
||||
|
||||
const cb: ParcelWatcher.SubscribeCallback = Instance.bind((err, evts) => {
|
||||
if (err) return
|
||||
for (const evt of evts) {
|
||||
if (evt.type === "create") void Bus.publish(Event.Updated, { file: evt.path, event: "add" })
|
||||
if (evt.type === "update") void Bus.publish(Event.Updated, { file: evt.path, event: "change" })
|
||||
if (evt.type === "delete") void Bus.publish(Event.Updated, { file: evt.path, event: "unlink" })
|
||||
}
|
||||
})
|
||||
|
||||
const subscribe = (dir: string, ignore: string[]) => {
|
||||
const pending = w.subscribe(dir, cb, { ignore, backend })
|
||||
return Effect.gen(function* () {
|
||||
const sub = yield* Effect.promise(() => pending)
|
||||
subs.push(sub)
|
||||
}).pipe(
|
||||
Effect.timeout(SUBSCRIBE_TIMEOUT_MS),
|
||||
Effect.catchCause((cause) => {
|
||||
log.error("failed to subscribe", { dir, cause: Cause.pretty(cause) })
|
||||
pending.then((s) => s.unsubscribe()).catch(() => {})
|
||||
return Effect.void
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
const cfg = yield* config.get()
|
||||
const cfgIgnores = cfg.watcher?.ignore ?? []
|
||||
|
||||
if (yield* Flag.OPENCODE_EXPERIMENTAL_FILEWATCHER) {
|
||||
yield* subscribe(Instance.directory, [
|
||||
...FileIgnore.PATTERNS,
|
||||
...cfgIgnores,
|
||||
...protecteds(Instance.directory),
|
||||
])
|
||||
}
|
||||
|
||||
if (Instance.project.vcs === "git") {
|
||||
const result = yield* git.run(["rev-parse", "--git-dir"], {
|
||||
cwd: Instance.project.worktree,
|
||||
})
|
||||
const vcsDir =
|
||||
result.exitCode === 0 ? path.resolve(Instance.project.worktree, result.text().trim()) : undefined
|
||||
if (vcsDir && !cfgIgnores.includes(".git") && !cfgIgnores.includes(vcsDir)) {
|
||||
const ignore = (yield* Effect.promise(() => readdir(vcsDir).catch(() => []))).filter(
|
||||
(entry) => entry !== "HEAD",
|
||||
)
|
||||
yield* subscribe(vcsDir, ignore)
|
||||
}
|
||||
}
|
||||
},
|
||||
Effect.catchCause((cause) => {
|
||||
log.error("failed to init watcher service", { cause: Cause.pretty(cause) })
|
||||
return Effect.void
|
||||
}),
|
||||
),
|
||||
)
|
||||
|
||||
return Service.of({
|
||||
init: Effect.fn("FileWatcher.init")(function* () {
|
||||
yield* InstanceState.get(state)
|
||||
}),
|
||||
})
|
||||
export const Event = {
|
||||
Updated: BusEvent.define(
|
||||
"file.watcher.updated",
|
||||
z.object({
|
||||
file: z.string(),
|
||||
event: z.union([z.literal("add"), z.literal("change"), z.literal("unlink")]),
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer), Layer.provide(Git.defaultLayer))
|
||||
),
|
||||
}
|
||||
|
||||
// Lazily load the platform-specific @parcel/watcher native binding.
// Returns undefined (after logging) when the binding cannot be loaded,
// so callers degrade gracefully instead of crashing.
const watcher = lazy((): typeof import("@parcel/watcher") | undefined => {
  try {
    // e.g. "@parcel/watcher-linux-x64-glibc"; linux also selects a libc flavor.
    const binding = require(
      `@parcel/watcher-${process.platform}-${process.arch}${process.platform === "linux" ? `-${OPENCODE_LIBC || "glibc"}` : ""}`,
    )
    return createWrapper(binding) as typeof import("@parcel/watcher")
  } catch (error) {
    log.error("failed to load watcher binding", { error })
    return
  }
})
|
||||
|
||||
function getBackend() {
|
||||
if (process.platform === "win32") return "windows"
|
||||
if (process.platform === "darwin") return "fs-events"
|
||||
if (process.platform === "linux") return "inotify"
|
||||
}
|
||||
|
||||
function protecteds(dir: string) {
|
||||
return Protected.paths().filter((item) => {
|
||||
const rel = path.relative(dir, item)
|
||||
return rel !== "" && !rel.startsWith("..") && !path.isAbsolute(rel)
|
||||
})
|
||||
}
|
||||
|
||||
// True when the native @parcel/watcher binding loaded successfully.
export const hasNativeBinding = () => !!watcher()
|
||||
|
||||
// Public surface of the file watcher service.
export interface Interface {
  // Idempotently start watching (forces lazy state initialization).
  readonly init: () => Effect.Effect<void>
}
|
||||
|
||||
// Effect context tag for the FileWatcher service implementation.
export class Service extends Context.Service<Service, Interface>()("@opencode/FileWatcher") {}
|
||||
|
||||
/**
 * Live FileWatcher layer.
 *
 * Watches the project directory (behind an experimental flag) and the git
 * dir's HEAD, publishing `Event.Updated` bus events for observed changes.
 */
export const layer = Layer.effect(
  Service,
  Effect.gen(function* () {
    const config = yield* Config.Service
    const git = yield* Git.Service

    // Setup is deferred into instance state so it runs at most once per
    // instance, on first `init`. All failures are logged and swallowed so a
    // broken watcher never takes down the app.
    const state = yield* InstanceState.make(
      Effect.fn("FileWatcher.state")(
        function* () {
          if (yield* Flag.OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER) return

          log.info("init", { directory: Instance.directory })

          const backend = getBackend()
          if (!backend) {
            log.error("watcher backend not supported", { directory: Instance.directory, platform: process.platform })
            return
          }

          // Bail out quietly when the native binding failed to load.
          const w = watcher()
          if (!w) return

          log.info("watcher backend", { directory: Instance.directory, platform: process.platform, backend })

          // Track live subscriptions so the finalizer tears them all down.
          const subs: ParcelWatcher.AsyncSubscription[] = []
          yield* Effect.addFinalizer(() =>
            Effect.promise(() => Promise.allSettled(subs.map((sub) => sub.unsubscribe()))),
          )

          // Translate parcel events into bus events; bound via Instance so
          // the callback runs with the right instance context.
          const cb: ParcelWatcher.SubscribeCallback = Instance.bind((err, evts) => {
            if (err) return
            for (const evt of evts) {
              if (evt.type === "create") void Bus.publish(Event.Updated, { file: evt.path, event: "add" })
              if (evt.type === "update") void Bus.publish(Event.Updated, { file: evt.path, event: "change" })
              if (evt.type === "delete") void Bus.publish(Event.Updated, { file: evt.path, event: "unlink" })
            }
          })

          // Subscribe with a timeout; on failure, log and make sure any
          // late-arriving subscription is unsubscribed so nothing leaks.
          const subscribe = (dir: string, ignore: string[]) => {
            const pending = w.subscribe(dir, cb, { ignore, backend })
            return Effect.gen(function* () {
              const sub = yield* Effect.promise(() => pending)
              subs.push(sub)
            }).pipe(
              Effect.timeout(SUBSCRIBE_TIMEOUT_MS),
              Effect.catchCause((cause) => {
                log.error("failed to subscribe", { dir, cause: Cause.pretty(cause) })
                pending.then((s) => s.unsubscribe()).catch(() => {})
                return Effect.void
              }),
            )
          }

          const cfg = yield* config.get()
          const cfgIgnores = cfg.watcher?.ignore ?? []

          if (yield* Flag.OPENCODE_EXPERIMENTAL_FILEWATCHER) {
            yield* subscribe(Instance.directory, [
              ...FileIgnore.PATTERNS,
              ...cfgIgnores,
              ...protecteds(Instance.directory),
            ])
          }

          // Watch the git dir, ignoring every entry except HEAD, so only
          // HEAD changes (e.g. branch switches) generate events.
          if (Instance.project.vcs === "git") {
            const result = yield* git.run(["rev-parse", "--git-dir"], {
              cwd: Instance.project.worktree,
            })
            const vcsDir =
              result.exitCode === 0 ? path.resolve(Instance.project.worktree, result.text().trim()) : undefined
            if (vcsDir && !cfgIgnores.includes(".git") && !cfgIgnores.includes(vcsDir)) {
              const ignore = (yield* Effect.promise(() => readdir(vcsDir).catch(() => []))).filter(
                (entry) => entry !== "HEAD",
              )
              yield* subscribe(vcsDir, ignore)
            }
          }
        },
        Effect.catchCause((cause) => {
          log.error("failed to init watcher service", { cause: Cause.pretty(cause) })
          return Effect.void
        }),
      ),
    )

    return Service.of({
      // Force the lazy state, kicking off watcher setup exactly once.
      init: Effect.fn("FileWatcher.init")(function* () {
        yield* InstanceState.get(state)
      }),
    })
  }),
)
|
||||
|
||||
// FileWatcher layer wired with the default Config and Git implementations.
export const defaultLayer = layer.pipe(Layer.provide(Config.defaultLayer), Layer.provide(Git.defaultLayer))
|
||||
|
||||
export * as FileWatcher from "./watcher"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user