fix(merge): resolve dev conflicts for openclaw branch

This commit is contained in:
GeonWoo Jeon (Jay)
2026-04-09 12:23:38 +09:00
177 changed files with 5633 additions and 483 deletions

View File

@@ -47,7 +47,7 @@ jobs:
BUN_INSTALL_ALLOW_SCRIPTS: "@ast-grep/napi"
- name: Run tests
run: bun test
run: bun run script/run-ci-tests.ts
typecheck:
runs-on: ubuntu-latest

View File

@@ -46,7 +46,7 @@ jobs:
BUN_INSTALL_ALLOW_SCRIPTS: "@ast-grep/napi"
- name: Run tests
run: bun test
run: bun run script/run-ci-tests.ts
typecheck:
runs-on: ubuntu-latest

View File

@@ -1,6 +1,6 @@
# oh-my-opencode — O P E N C O D E Plugin
**Generated:** 2026-04-05 | **Commit:** c9be5bb51 | **Branch:** dev
**Generated:** 2026-04-08 | **Commit:** 4f196f49 | **Branch:** dev
## OVERVIEW

View File

@@ -1,5 +1,8 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://raw.githubusercontent.com/code-yeongyu/oh-my-openagent/dev/assets/oh-my-opencode.schema.json",
"title": "Oh My OpenCode Configuration",
"description": "Configuration schema for oh-my-opencode plugin",
"type": "object",
"properties": {
"$schema": {
@@ -6068,8 +6071,5 @@
"required": [
"git_master"
],
"additionalProperties": false,
"$id": "https://raw.githubusercontent.com/code-yeongyu/oh-my-openagent/dev/assets/oh-my-opencode.schema.json",
"title": "Oh My OpenCode Configuration",
"description": "Configuration schema for oh-my-opencode plugin"
"additionalProperties": false
}

View File

@@ -10,8 +10,8 @@
"@clack/prompts": "^0.11.0",
"@code-yeongyu/comment-checker": "^0.7.0",
"@modelcontextprotocol/sdk": "^1.25.2",
"@opencode-ai/plugin": "^1.2.24",
"@opencode-ai/sdk": "^1.2.24",
"@opencode-ai/plugin": "^1.4.0",
"@opencode-ai/sdk": "^1.4.0",
"commander": "^14.0.2",
"detect-libc": "^2.0.0",
"diff": "^8.0.3",
@@ -20,8 +20,7 @@
"picocolors": "^1.1.1",
"picomatch": "^4.0.2",
"vscode-jsonrpc": "^8.2.0",
"zod": "^3.24.0",
"zod-to-json-schema": "^3.25.1",
"zod": "^4.3.0",
},
"devDependencies": {
"@types/js-yaml": "^4.0.9",
@@ -30,17 +29,17 @@
"typescript": "^5.7.3",
},
"optionalDependencies": {
"oh-my-opencode-darwin-arm64": "3.15.3",
"oh-my-opencode-darwin-x64": "3.15.3",
"oh-my-opencode-darwin-x64-baseline": "3.15.3",
"oh-my-opencode-linux-arm64": "3.15.3",
"oh-my-opencode-linux-arm64-musl": "3.15.3",
"oh-my-opencode-linux-x64": "3.15.3",
"oh-my-opencode-linux-x64-baseline": "3.15.3",
"oh-my-opencode-linux-x64-musl": "3.15.3",
"oh-my-opencode-linux-x64-musl-baseline": "3.15.3",
"oh-my-opencode-windows-x64": "3.15.3",
"oh-my-opencode-windows-x64-baseline": "3.15.3",
"oh-my-opencode-darwin-arm64": "3.16.0",
"oh-my-opencode-darwin-x64": "3.16.0",
"oh-my-opencode-darwin-x64-baseline": "3.16.0",
"oh-my-opencode-linux-arm64": "3.16.0",
"oh-my-opencode-linux-arm64-musl": "3.16.0",
"oh-my-opencode-linux-x64": "3.16.0",
"oh-my-opencode-linux-x64-baseline": "3.16.0",
"oh-my-opencode-linux-x64-musl": "3.16.0",
"oh-my-opencode-linux-x64-musl-baseline": "3.16.0",
"oh-my-opencode-windows-x64": "3.16.0",
"oh-my-opencode-windows-x64-baseline": "3.16.0",
},
},
},
@@ -49,9 +48,6 @@
"@ast-grep/napi",
"@code-yeongyu/comment-checker",
],
"overrides": {
"@opencode-ai/sdk": "^1.2.24",
},
"packages": {
"@ast-grep/cli": ["@ast-grep/cli@0.41.1", "", { "dependencies": { "detect-libc": "2.1.2" }, "optionalDependencies": { "@ast-grep/cli-darwin-arm64": "0.41.1", "@ast-grep/cli-darwin-x64": "0.41.1", "@ast-grep/cli-linux-arm64-gnu": "0.41.1", "@ast-grep/cli-linux-x64-gnu": "0.41.1", "@ast-grep/cli-win32-arm64-msvc": "0.41.1", "@ast-grep/cli-win32-ia32-msvc": "0.41.1", "@ast-grep/cli-win32-x64-msvc": "0.41.1" }, "bin": { "sg": "sg", "ast-grep": "ast-grep" } }, "sha512-6oSuzF1Ra0d9jdcmflRIR1DHcicI7TYVxaaV/hajV51J49r6C+1BA2H9G+e47lH4sDEXUS9KWLNGNvXa/Gqs5A=="],
@@ -99,9 +95,9 @@
"@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.27.1", "", { "dependencies": { "@hono/node-server": "^1.19.9", "ajv": "^8.17.1", "ajv-formats": "^3.0.1", "content-type": "^1.0.5", "cors": "^2.8.5", "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "eventsource-parser": "^3.0.0", "express": "^5.2.1", "express-rate-limit": "^8.2.1", "hono": "^4.11.4", "jose": "^6.1.3", "json-schema-typed": "^8.0.2", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.25 || ^4.0", "zod-to-json-schema": "^3.25.1" }, "peerDependencies": { "@cfworker/json-schema": "^4.1.1" }, "optionalPeers": ["@cfworker/json-schema"] }, "sha512-sr6GbP+4edBwFndLbM60gf07z0FQ79gaExpnsjMGePXqFcSSb7t6iscpjk9DhFhwd+mTEQrzNafGP8/iGGFYaA=="],
"@opencode-ai/plugin": ["@opencode-ai/plugin@1.2.24", "", { "dependencies": { "@opencode-ai/sdk": "1.2.24", "zod": "4.1.8" } }, "sha512-B3hw415D+2w6AtdRdvKWkuQVT0LXDWTdnAZhZC6gbd+UHh5O5DMmnZTe/YM8yK8ZZO9Dvo5rnV78TdDDYunJiw=="],
"@opencode-ai/plugin": ["@opencode-ai/plugin@1.4.0", "", { "dependencies": { "@opencode-ai/sdk": "1.4.0", "zod": "4.1.8" }, "peerDependencies": { "@opentui/core": ">=0.1.97", "@opentui/solid": ">=0.1.97" }, "optionalPeers": ["@opentui/core", "@opentui/solid"] }, "sha512-VFIff6LHp/RVaJdrK3EQ1ijx0K1tV5i1DY5YJ+pRqwC6trunPHbvqSN0GHSTZX39RdnSc+XuzCTZQCy1W2qNOg=="],
"@opencode-ai/sdk": ["@opencode-ai/sdk@1.2.24", "", {}, "sha512-MQamFkRl4B/3d6oIRLNpkYR2fcwet1V/ffKyOKJXWjtP/CT9PDJMtLpu6olVHjXKQi8zMNltwuMhv1QsNtRlZg=="],
"@opencode-ai/sdk": ["@opencode-ai/sdk@1.4.0", "", { "dependencies": { "cross-spawn": "7.0.6" } }, "sha512-mfa3MzhqNM+Az4bgPDDXL3NdG+aYOHClXmT6/4qLxf2ulyfPpMNHqb9Dfmo4D8UfmrDsPuJHmbune73/nUQnuw=="],
"@types/js-yaml": ["@types/js-yaml@4.0.9", "", {}, "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="],
@@ -239,28 +235,6 @@
"object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="],
"oh-my-opencode-darwin-arm64": ["oh-my-opencode-darwin-arm64@3.15.3", "", { "os": "darwin", "cpu": "arm64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-FApyQE45gv3VFwS/7iLS1/84v4iTX6BIVNcYYU2faqPazcZkvenkMbtxuWRfohQyZ1lhADopnjUcqOdcKjLDGQ=="],
"oh-my-opencode-darwin-x64": ["oh-my-opencode-darwin-x64@3.15.3", "", { "os": "darwin", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-h4fr0/myoyvvytdizfLNQgRAWK+hw+1tW32rgL7ENLv1JQ8ChXHnHKEQ2saEqGfn1SuXvA0xUTsFMYR8q3mnbA=="],
"oh-my-opencode-darwin-x64-baseline": ["oh-my-opencode-darwin-x64-baseline@3.15.3", "", { "os": "darwin", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-Zhi5xGcEhirHcx95kZtABYlIdSt6a5L5+T+exR4Kcnu+KR1mJ6li9n3UBIiW8eVgDz2ls7W25ePD78xRlqnxlg=="],
"oh-my-opencode-linux-arm64": ["oh-my-opencode-linux-arm64@3.15.3", "", { "os": "linux", "cpu": "arm64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-+lDsQMPfXGCrwe9vqHdmp1tCJ8PV+5OkKueVorRwXNfiZNOW3848TKxtW3QdkKopiBKejEaDfyu/IGSgWQ/iyQ=="],
"oh-my-opencode-linux-arm64-musl": ["oh-my-opencode-linux-arm64-musl@3.15.3", "", { "os": "linux", "cpu": "arm64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-cokhNYK+dBVPRmZ2bYd3ZNp7dSGZdko77qUaeb0jjALFWkNzzmFgOV0spgOGZ3iS+yMS1XjAheTo5Qswh0capQ=="],
"oh-my-opencode-linux-x64": ["oh-my-opencode-linux-x64@3.15.3", "", { "os": "linux", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-8+57NMUwdcc2DZGX6KlNb1EchTB6xmwiiHcRhFZpYiAB1GCUFNeWihq3D7r5GUtOs0zQYWUT/F1Rj2nzBxuy+A=="],
"oh-my-opencode-linux-x64-baseline": ["oh-my-opencode-linux-x64-baseline@3.15.3", "", { "os": "linux", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-1awTpjU8m1cLF+GiiT7BuK5+y+WvTZwAaBZzYrJBzldiqdqMGJVYaH/uLiKt6CdZ0T6jh0zR/v85VFZIaXRusQ=="],
"oh-my-opencode-linux-x64-musl": ["oh-my-opencode-linux-x64-musl@3.15.3", "", { "os": "linux", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-WhmJ9ZwXxe3Nv0sVnFN3ibykie1JDiXthOmErhtKbcAL9V25IDYSbTcjxY2jUq0rNr4PeTvBva+WkMW4k9438w=="],
"oh-my-opencode-linux-x64-musl-baseline": ["oh-my-opencode-linux-x64-musl-baseline@3.15.3", "", { "os": "linux", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode" } }, "sha512-Gx2YitS/Ydg1XdwZMAH186ABvHGPlnuVA/1j7nGdARIwNM/xz6bZRq+kaeMmlj2N1U63unMOHe1ibE6nL1oZSw=="],
"oh-my-opencode-windows-x64": ["oh-my-opencode-windows-x64@3.15.3", "", { "os": "win32", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode.exe" } }, "sha512-Q6xskcBlBqUT77OK+7oHID9McrHu6t5+P/YCaDU/zLvr1T8M0Z5WgakM5hRsqCI8e4P1NEX6wHtwQNbVfUgo1w=="],
"oh-my-opencode-windows-x64-baseline": ["oh-my-opencode-windows-x64-baseline@3.15.3", "", { "os": "win32", "cpu": "x64", "bin": { "oh-my-opencode": "bin/oh-my-opencode.exe" } }, "sha512-2BlXtH+DrSRPFGEOtfY1mlROOXFeWQbG/EpDw0JD27s7QQOkShaDff8Vc48PnmD1H8vW4d7/o/eP8jJPPjGQ0w=="],
"on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="],
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
@@ -331,12 +305,10 @@
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
"zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="],
"zod-to-json-schema": ["zod-to-json-schema@3.25.1", "", { "peerDependencies": { "zod": "^3.25 || ^4" } }, "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA=="],
"@modelcontextprotocol/sdk/zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="],
"@opencode-ai/plugin/zod": ["zod@4.1.8", "", {}, "sha512-5R1P+WwQqmmMIEACyzSvo4JXHY5WiAFHRMg+zBZKgKS+Q1viRa0C1hmUKtHltoIFKtIdki3pRxkmpP74jnNYHQ=="],
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode",
"version": "3.15.3",
"version": "3.16.0",
"description": "The Best AI Agent Harness - Batteries-Included OpenCode Plugin with Multi-Model Orchestration, Parallel Background Agents, and Crafted LSP/AST Tools",
"main": "./dist/index.js",
"types": "dist/index.d.ts",
@@ -59,8 +59,8 @@
"@clack/prompts": "^0.11.0",
"@code-yeongyu/comment-checker": "^0.7.0",
"@modelcontextprotocol/sdk": "^1.25.2",
"@opencode-ai/plugin": "^1.2.24",
"@opencode-ai/sdk": "^1.2.24",
"@opencode-ai/plugin": "^1.4.0",
"@opencode-ai/sdk": "^1.4.0",
"commander": "^14.0.2",
"detect-libc": "^2.0.0",
"diff": "^8.0.3",
@@ -69,8 +69,7 @@
"picocolors": "^1.1.1",
"picomatch": "^4.0.2",
"vscode-jsonrpc": "^8.2.0",
"zod-to-json-schema": "^3.25.1",
"zod": "^3.24.0"
"zod": "^4.3.0"
},
"devDependencies": {
"@types/js-yaml": "^4.0.9",
@@ -79,21 +78,19 @@
"typescript": "^5.7.3"
},
"optionalDependencies": {
"oh-my-opencode-darwin-arm64": "3.15.3",
"oh-my-opencode-darwin-x64": "3.15.3",
"oh-my-opencode-darwin-x64-baseline": "3.15.3",
"oh-my-opencode-linux-arm64": "3.15.3",
"oh-my-opencode-linux-arm64-musl": "3.15.3",
"oh-my-opencode-linux-x64": "3.15.3",
"oh-my-opencode-linux-x64-baseline": "3.15.3",
"oh-my-opencode-linux-x64-musl": "3.15.3",
"oh-my-opencode-linux-x64-musl-baseline": "3.15.3",
"oh-my-opencode-windows-x64": "3.15.3",
"oh-my-opencode-windows-x64-baseline": "3.15.3"
},
"overrides": {
"@opencode-ai/sdk": "^1.2.24"
"oh-my-opencode-darwin-arm64": "3.16.0",
"oh-my-opencode-darwin-x64": "3.16.0",
"oh-my-opencode-darwin-x64-baseline": "3.16.0",
"oh-my-opencode-linux-arm64": "3.16.0",
"oh-my-opencode-linux-arm64-musl": "3.16.0",
"oh-my-opencode-linux-x64": "3.16.0",
"oh-my-opencode-linux-x64-baseline": "3.16.0",
"oh-my-opencode-linux-x64-musl": "3.16.0",
"oh-my-opencode-linux-x64-musl-baseline": "3.16.0",
"oh-my-opencode-windows-x64": "3.16.0",
"oh-my-opencode-windows-x64-baseline": "3.16.0"
},
"overrides": {},
"trustedDependencies": [
"@ast-grep/cli",
"@ast-grep/napi",

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-darwin-arm64",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (darwin-arm64)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-darwin-x64-baseline",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (darwin-x64-baseline, no AVX2)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-darwin-x64",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (darwin-x64)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-arm64-musl",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (linux-arm64-musl)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-arm64",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (linux-arm64)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-x64-baseline",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (linux-x64-baseline, no AVX2)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-x64-musl-baseline",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (linux-x64-musl-baseline, no AVX2)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-x64-musl",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (linux-x64-musl)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-linux-x64",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (linux-x64)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-windows-x64-baseline",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (windows-x64-baseline, no AVX2)",
"license": "MIT",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "oh-my-opencode-windows-x64",
"version": "3.15.3",
"version": "3.16.0",
"description": "Platform-specific binary for oh-my-opencode (windows-x64)",
"license": "MIT",
"repository": {

View File

@@ -7,6 +7,60 @@ import { getPlatformPackageCandidates, getBinaryPath } from "./bin/platform.js";
const require = createRequire(import.meta.url);
const MIN_OPENCODE_VERSION = "1.4.0";
/**
* Parse version string into numeric parts
* @param {string} version
* @returns {number[]}
*/
/**
 * Parse a version string into its numeric components.
 *
 * Strips a leading "v" and any semver pre-release ("-beta.1") or build
 * metadata ("+sha") suffix before splitting on dots, so "v1.4.0-beta.1"
 * and "1.4.0+build.5" both parse as [1, 4, 0].
 *
 * @param {string} version - version string, e.g. "v1.4.0-beta.1"
 * @returns {number[]} numeric parts; any non-numeric part becomes 0
 */
function parseVersion(version) {
  return version
    .replace(/^v/, "")
    .split(/[-+]/)[0]
    .split(".")
    .map((part) => Number.parseInt(part, 10) || 0);
}
/**
* Compare two version strings
* @param {string} current
* @param {string} minimum
* @returns {boolean} true if current >= minimum
*/
/**
 * Determine whether `current` satisfies the `minimum` version.
 *
 * Compares the numeric components pairwise from most to least
 * significant; missing components are treated as 0 (so "1.4" == "1.4.0").
 *
 * @param {string} current - version detected at runtime
 * @param {string} minimum - required minimum version
 * @returns {boolean} true if current >= minimum
 */
function compareVersions(current, minimum) {
  const a = parseVersion(current);
  const b = parseVersion(minimum);
  const width = Math.max(a.length, b.length);
  for (let i = 0; i < width; i++) {
    const delta = (a[i] ?? 0) - (b[i] ?? 0);
    if (delta !== 0) return delta > 0;
  }
  // All components equal: current meets the minimum exactly.
  return true;
}
/**
* Check if opencode version meets minimum requirement
* @returns {{ok: boolean, version: string | null}}
*/
/**
 * Probe the locally installed `opencode` CLI and check it against
 * MIN_OPENCODE_VERSION.
 *
 * Best-effort: if `opencode` is not on PATH (or the probe fails for any
 * reason) the check passes with a null version rather than blocking.
 *
 * @returns {{ok: boolean, version: string | null}} ok=true when the
 *   detected version meets the minimum or no version could be detected;
 *   version is the trimmed CLI output, or null when detection failed.
 */
function checkOpenCodeVersion() {
  let output;
  try {
    output = require("child_process").execSync("opencode --version", {
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "ignore"],
    });
  } catch {
    // CLI missing or not runnable: treat as "unknown", do not block install.
    return { ok: true, version: null };
  }
  const detected = output.trim();
  return { ok: compareVersions(detected, MIN_OPENCODE_VERSION), version: detected };
}
/**
* Detect libc family on Linux
*/
@@ -36,7 +90,15 @@ function main() {
const { platform, arch } = process;
const libcFamily = getLibcFamily();
const packageBaseName = getPackageBaseName();
// Check opencode version requirement
const versionCheck = checkOpenCodeVersion();
if (versionCheck.version && !versionCheck.ok) {
console.warn(`⚠ oh-my-opencode requires OpenCode >= ${MIN_OPENCODE_VERSION}`);
console.warn(` Detected: ${versionCheck.version}`);
console.warn(` Please update OpenCode to avoid compatibility issues.`);
}
try {
const packageCandidates = getPlatformPackageCandidates({
platform,

View File

@@ -1,4 +1,4 @@
import * as z from "zod"
import { z } from "zod"
import { OhMyOpenCodeConfigSchema } from "../src/config/schema"
export function createOhMyOpenCodeJsonSchema(): Record<string, unknown> {
@@ -8,10 +8,10 @@ export function createOhMyOpenCodeJsonSchema(): Record<string, unknown> {
}) as Record<string, unknown>
return {
...jsonSchema,
$schema: "http://json-schema.org/draft-07/schema#",
$id: "https://raw.githubusercontent.com/code-yeongyu/oh-my-openagent/dev/assets/oh-my-opencode.schema.json",
title: "Oh My OpenCode Configuration",
description: "Configuration schema for oh-my-opencode plugin",
...jsonSchema,
}
}

View File

@@ -15,7 +15,7 @@ describe("test workflows", () => {
const workflow = readFileSync(workflowPath, "utf8")
expect(workflow).toContain("- name: Run tests")
expect(workflow).toContain("run: bun test")
expect(workflow).toMatch(/run: bun (test|run script\/run-ci-tests\.ts)/)
}
})
})

View File

@@ -2607,6 +2607,54 @@
"created_at": "2026-04-07T13:06:07Z",
"repoId": 1108837393,
"pullRequestNo": 3203
},
{
"name": "dhruvkej9",
"id": 96516827,
"comment_id": 4204071246,
"created_at": "2026-04-08T05:36:52Z",
"repoId": 1108837393,
"pullRequestNo": 3217
},
{
"name": "dhruvkej9",
"id": 96516827,
"comment_id": 4204084942,
"created_at": "2026-04-08T05:40:40Z",
"repoId": 1108837393,
"pullRequestNo": 3217
},
{
"name": "FrancoStino",
"id": 32127923,
"comment_id": 4205715582,
"created_at": "2026-04-08T10:52:39Z",
"repoId": 1108837393,
"pullRequestNo": 3234
},
{
"name": "sen7971",
"id": 193416996,
"comment_id": 4207621925,
"created_at": "2026-04-08T15:57:15Z",
"repoId": 1108837393,
"pullRequestNo": 3248
},
{
"name": "NikkeTryHard",
"id": 111729769,
"comment_id": 4210843488,
"created_at": "2026-04-09T01:34:03Z",
"repoId": 1108837393,
"pullRequestNo": 3261
},
{
"name": "gwegwe1234",
"id": 43298107,
"comment_id": 4211103484,
"created_at": "2026-04-09T02:46:26Z",
"repoId": 1108837393,
"pullRequestNo": 3264
}
]
}

View File

@@ -114,6 +114,8 @@ describe("delegation trust prompt rules", () => {
expect(prompt).toContain("do only non-overlapping work simultaneously")
expect(prompt).toContain("Continue only with non-overlapping work")
expect(prompt).toContain("DO NOT perform the same search yourself")
expect(prompt).toContain("Do not use `apply_patch`")
expect(prompt).toContain("`edit` and `write`")
})
test("Sisyphus-Junior GPT-5.4 prompt forbids duplicate delegated exploration", () => {

View File

@@ -192,6 +192,8 @@ describe("createHephaestusAgent", () => {
expect(config.prompt).toContain("You build context by examining");
expect(config.prompt).toContain("Never chain together bash commands");
expect(config.prompt).toContain("<tool_usage_rules>");
expect(config.prompt).toContain("Do not use `apply_patch`");
expect(config.prompt).toContain("`edit` and `write`");
});
test("GPT 5.3-codex model includes GPT-5.3 specific prompt content", () => {
@@ -205,6 +207,8 @@ describe("createHephaestusAgent", () => {
expect(config.prompt).toContain("Senior Staff Engineer");
expect(config.prompt).toContain("Hard Constraints");
expect(config.prompt).toContain("<tool_usage_rules>");
expect(config.prompt).toContain("Do not use `apply_patch`");
expect(config.prompt).toContain("`edit` and `write`");
});
test("includes Hephaestus identity in prompt", () => {
@@ -219,6 +223,35 @@ describe("createHephaestusAgent", () => {
expect(config.prompt).toContain("autonomous deep worker");
});
test("generic GPT model includes apply_patch workaround guidance", () => {
// given
const model = "openai/gpt-4o";
// when
const config = createHephaestusAgent(model);
// then
expect(config.prompt).toContain("Do not use `apply_patch`");
expect(config.prompt).toContain("`edit` and `write`");
});
test("GPT models deny apply_patch while non-GPT models do not", () => {
// given
const gpt54Model = "openai/gpt-5.4";
const gptGenericModel = "openai/gpt-4o";
const claudeModel = "anthropic/claude-opus-4-6";
// when
const gpt54Config = createHephaestusAgent(gpt54Model);
const gptGenericConfig = createHephaestusAgent(gptGenericModel);
const claudeConfig = createHephaestusAgent(claudeModel);
// then
expect(gpt54Config.permission ?? {}).toHaveProperty("apply_patch", "deny");
expect(gptGenericConfig.permission ?? {}).toHaveProperty("apply_patch", "deny");
expect(claudeConfig.permission ?? {}).not.toHaveProperty("apply_patch");
});
test("useTaskSystem=true produces Task Discipline prompt", () => {
// given
const model = "openai/gpt-5.4";

View File

@@ -1,6 +1,6 @@
import type { AgentConfig } from "@opencode-ai/sdk";
import type { AgentMode, AgentPromptMetadata } from "../types";
import { isGpt5_4Model, isGpt5_3CodexModel } from "../types";
import { isGptModel, isGpt5_4Model, isGpt5_3CodexModel } from "../types";
import type {
AvailableAgent,
AvailableTool,
@@ -125,6 +125,7 @@ export function createHephaestusAgent(
permission: {
question: "allow",
call_omo_agent: "deny",
...(isGptModel(model) ? { apply_patch: "deny" as const } : {}),
} as AgentConfig["permission"],
reasoningEffort: "medium",
};

View File

@@ -448,6 +448,7 @@ ${oracleSection}
1. SEARCH existing codebase for similar patterns/styles
2. Match naming, indentation, import styles, error handling conventions
3. Default to ASCII. Add comments only for non-obvious blocks
4. Use the \`edit\` and \`write\` tools for file changes. Do not use \`apply_patch\` on GPT models - it is unreliable here and can hang during verification.
### After Implementation (MANDATORY - DO NOT SKIP)

View File

@@ -252,7 +252,7 @@ ${antiPatterns}
1. **Explore**: Fire 2-5 explore/librarian agents in parallel + direct tool reads. Goal: complete understanding, not just enough context.
2. **Plan**: List files to modify, specific changes, dependencies, complexity estimate.
3. **Decide**: Trivial (<10 lines, single file) -> self. Complex (multi-file, >100 lines) -> delegate.
4. **Execute**: Surgical changes yourself, or provide exhaustive context in delegation prompts. Match existing patterns. Minimal diff. Search the codebase for similar patterns before writing code. Default to ASCII. Add comments only for non-obvious blocks.
4. **Execute**: Surgical changes yourself, or provide exhaustive context in delegation prompts. Match existing patterns. Minimal diff. Search the codebase for similar patterns before writing code. Default to ASCII. Add comments only for non-obvious blocks. Use the \`edit\` and \`write\` tools for file changes. Do not use \`apply_patch\` on GPT models - it is unreliable here and can hang during verification.
5. **Verify**: \`lsp_diagnostics\` on all modified files (zero errors) -> run related tests (\`foo.ts\` -> \`foo.test.ts\`) -> typecheck -> build if applicable (exit 0). Fix only issues your changes caused.
If verification fails, return to step 1 with a materially different approach. After three attempts: stop, revert to last working state, document what you tried, consult Oracle. If Oracle cannot resolve, ask the user.

View File

@@ -311,6 +311,7 @@ ${oracleSection}
1. SEARCH existing codebase for similar patterns/styles
2. Match naming, indentation, import styles, error handling conventions
3. Default to ASCII. Add comments only for non-obvious blocks
4. Use the \`edit\` and \`write\` tools for file changes. Do not use \`apply_patch\` on GPT models - it is unreliable here and can hang during verification.
### After Implementation (MANDATORY - DO NOT SKIP)

View File

@@ -30,6 +30,7 @@ const MODE: AgentMode = "subagent"
// Core tools that Sisyphus-Junior must NEVER have access to
// Note: call_omo_agent is ALLOWED so subagents can spawn explore/librarian
const BLOCKED_TOOLS = ["task"]
const GPT_BLOCKED_TOOLS = ["task", "apply_patch"]
export const SISYPHUS_JUNIOR_DEFAULTS = {
model: "anthropic/claude-sonnet-4-6",
@@ -91,13 +92,14 @@ export function createSisyphusJuniorAgentWithOverrides(
const promptAppend = override?.prompt_append
const prompt = buildSisyphusJuniorPrompt(model, useTaskSystem, promptAppend)
const blockedTools = isGptModel(model) ? GPT_BLOCKED_TOOLS : BLOCKED_TOOLS
const baseRestrictions = createAgentToolRestrictions(BLOCKED_TOOLS)
const baseRestrictions = createAgentToolRestrictions(blockedTools)
const userPermission = (override?.permission ?? {}) as Record<string, PermissionValue>
const basePermission = baseRestrictions.permission
const merged: Record<string, PermissionValue> = { ...userPermission }
for (const tool of BLOCKED_TOOLS) {
for (const tool of blockedTools) {
merged[tool] = "deny"
}
merged.call_omo_agent = "allow"

View File

@@ -92,6 +92,7 @@ Style:
1. SEARCH existing codebase for similar patterns/styles
2. Match naming, indentation, import styles, error handling conventions
3. Default to ASCII. Add comments only for non-obvious blocks
4. Use the \`edit\` and \`write\` tools for file changes. Do not use \`apply_patch\` on GPT models - it is unreliable here and can hang during verification.
### After Implementation (MANDATORY - DO NOT SKIP)

View File

@@ -96,7 +96,7 @@ Style:
1. SEARCH existing codebase for similar patterns/styles
2. Match naming, indentation, import styles, error handling conventions
3. Default to ASCII. Add comments only for non-obvious blocks
4. Always use apply_patch for manual code edits. Do not use cat or echo for file creation/editing. Formatting commands or bulk edits don't need apply_patch
4. Use the \`edit\` and \`write\` tools for file changes. Do not use \`apply_patch\` on GPT models - it is unreliable here and can hang during verification.
5. Do not chain bash commands with separators - each command should be a separate tool call
### After Implementation (MANDATORY - DO NOT SKIP)

View File

@@ -93,6 +93,7 @@ Style:
1. SEARCH existing codebase for similar patterns/styles
2. Match naming, indentation, import styles, error handling conventions
3. Default to ASCII. Add comments only for non-obvious blocks
4. Use the \`edit\` and \`write\` tools for file changes. Do not use \`apply_patch\` on GPT models - it is unreliable here and can hang during verification.
### After Implementation (MANDATORY - DO NOT SKIP)

View File

@@ -350,6 +350,8 @@ describe("createSisyphusJuniorAgentWithOverrides", () => {
expect(result.prompt).toContain("Scope Discipline")
expect(result.prompt).toContain("<tool_usage_rules>")
expect(result.prompt).toContain("Progress Updates")
expect(result.prompt).toContain("Do not use `apply_patch`")
expect(result.prompt).toContain("`edit` and `write`")
})
test("GPT 5.4 model uses GPT-5.4 specific prompt", () => {
@@ -362,6 +364,9 @@ describe("createSisyphusJuniorAgentWithOverrides", () => {
// then
expect(result.prompt).toContain("expert coding agent")
expect(result.prompt).toContain("<tool_usage_rules>")
expect(result.prompt).toContain("Do not use `apply_patch`")
expect(result.prompt).toContain("`edit` and `write`")
expect(result.prompt).not.toContain("Always use apply_patch")
})
test("GPT 5.3 Codex model uses GPT-5.3-codex specific prompt", () => {
@@ -374,6 +379,28 @@ describe("createSisyphusJuniorAgentWithOverrides", () => {
// then
expect(result.prompt).toContain("Senior Engineer")
expect(result.prompt).toContain("<tool_usage_rules>")
expect(result.prompt).toContain("Do not use `apply_patch`")
expect(result.prompt).toContain("`edit` and `write`")
})
test("GPT variants deny apply_patch while Claude variants do not", () => {
// given
const gpt54Override = { model: "openai/gpt-5.4" }
const gpt53Override = { model: "openai/gpt-5.3-codex" }
const gptGenericOverride = { model: "openai/gpt-4o" }
const claudeOverride = { model: "anthropic/claude-sonnet-4-6" }
// when
const gpt54Result = createSisyphusJuniorAgentWithOverrides(gpt54Override)
const gpt53Result = createSisyphusJuniorAgentWithOverrides(gpt53Override)
const gptGenericResult = createSisyphusJuniorAgentWithOverrides(gptGenericOverride)
const claudeResult = createSisyphusJuniorAgentWithOverrides(claudeOverride)
// then
expect(gpt54Result.permission ?? {}).toHaveProperty("apply_patch", "deny")
expect(gpt53Result.permission ?? {}).toHaveProperty("apply_patch", "deny")
expect(gptGenericResult.permission ?? {}).toHaveProperty("apply_patch", "deny")
expect(claudeResult.permission ?? {}).not.toHaveProperty("apply_patch")
})
test("prompt_append is added after base prompt", () => {
@@ -494,6 +521,7 @@ describe("buildSisyphusJuniorPrompt", () => {
expect(prompt).toContain("expert coding agent")
expect(prompt).toContain("Scope Discipline")
expect(prompt).toContain("<tool_usage_rules>")
expect(prompt).toContain("Do not use `apply_patch`")
})
test("GPT 5.3 Codex model uses GPT-5.3-codex prompt", () => {
@@ -507,6 +535,7 @@ describe("buildSisyphusJuniorPrompt", () => {
expect(prompt).toContain("Senior Engineer")
expect(prompt).toContain("Scope Discipline")
expect(prompt).toContain("<tool_usage_rules>")
expect(prompt).toContain("Do not use `apply_patch`")
})
test("generic GPT model uses generic GPT prompt", () => {
@@ -521,6 +550,7 @@ describe("buildSisyphusJuniorPrompt", () => {
expect(prompt).toContain("Scope Discipline")
expect(prompt).toContain("<tool_usage_rules>")
expect(prompt).toContain("Progress Updates")
expect(prompt).toContain("Do not use `apply_patch`")
})
test("Claude model prompt contains Claude-specific sections", () => {

View File

@@ -499,6 +499,7 @@ export function createSisyphusAgent(
permission: {
question: "allow",
call_omo_agent: "deny",
apply_patch: "deny",
} as AgentConfig["permission"],
reasoningEffort: "medium",
};
@@ -538,6 +539,7 @@ export function createSisyphusAgent(
const permission = {
question: "allow",
call_omo_agent: "deny",
...(isGptModel(model) ? { apply_patch: "deny" as const } : {}),
} as AgentConfig["permission"];
const base = {
description:

View File

@@ -310,7 +310,7 @@ Every implementation task follows this cycle. No exceptions.
Skills: if ANY available skill's domain overlaps with the task, load it NOW via \`skill\` tool and include it in \`load_skills\`. When the connection is even remotely plausible, load the skill - the cost of loading an irrelevant skill is near zero, the cost of missing a relevant one is high.
4. EXECUTE_OR_SUPERVISE -
If self: surgical changes, match existing patterns, minimal diff. Never suppress type errors. Never commit unless asked. Bugfix rule: fix minimally, never refactor while fixing.
If self: surgical changes, match existing patterns, minimal diff. Never suppress type errors. Never commit unless asked. Bugfix rule: fix minimally, never refactor while fixing. Use the \`edit\` and \`write\` tools for file changes. Do not use \`apply_patch\` on GPT models - it is unreliable here and can hang during verification.
If delegated: exhaustive 6-section prompt per \`<delegation>\` protocol. Session continuity for follow-ups.
5. VERIFY -

View File

@@ -5,6 +5,7 @@ import { createExploreAgent } from "./explore"
import { createMomusAgent } from "./momus"
import { createMetisAgent } from "./metis"
import { createAtlasAgent } from "./atlas"
import { createSisyphusAgent } from "./sisyphus"
const TEST_MODEL = "anthropic/claude-sonnet-4-5"
@@ -111,4 +112,23 @@ describe("read-only agent tool restrictions", () => {
expect(permission["call_omo_agent"]).toBeUndefined()
})
})
// Sisyphus permission matrix for GPT-family models: createSisyphusAgent denies
// apply_patch on GPT models, while Claude models leave it unset so the default
// permission applies.
describe("Sisyphus GPT variants", () => {
  test("deny apply_patch for GPT models but not Claude models", () => {
    // given — one agent per model family
    const gpt54Agent = createSisyphusAgent("openai/gpt-5.4")
    const gptGenericAgent = createSisyphusAgent("openai/gpt-5.2")
    const claudeAgent = createSisyphusAgent(TEST_MODEL)
    // when — read back each agent's permission table
    const gpt54Permission = (gpt54Agent.permission ?? {}) as Record<string, string>
    const gptGenericPermission = (gptGenericAgent.permission ?? {}) as Record<string, string>
    const claudePermission = (claudeAgent.permission ?? {}) as Record<string, string>
    // then — GPT variants are denied; Claude has no apply_patch entry at all
    expect(gpt54Permission["apply_patch"]).toBe("deny")
    expect(gptGenericPermission["apply_patch"]).toBe("deny")
    expect(claudePermission["apply_patch"]).toBeUndefined()
  })
})
})

View File

@@ -21,11 +21,12 @@ describe("runCliInstaller", () => {
console.error = originalConsoleError
})
it("completes installation without auth plugin or provider config steps", async () => {
//#given
it("blocks installation when OpenCode is below the minimum version", async () => {
// given
const restoreSpies = [
spyOn(configManager, "detectCurrentConfig").mockReturnValue({
isInstalled: false,
installedVersion: null,
hasClaude: false,
isMax20: false,
hasOpenAI: false,
@@ -34,9 +35,56 @@ describe("runCliInstaller", () => {
hasOpencodeZen: false,
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
}),
spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true),
spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.0.200"),
spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.3.9"),
]
const addPluginSpy = spyOn(configManager, "addPluginToOpenCodeConfig")
const args: InstallArgs = {
tui: false,
claude: "no",
openai: "no",
gemini: "no",
copilot: "no",
opencodeZen: "no",
zaiCodingPlan: "no",
kimiForCoding: "no",
opencodeGo: "no",
}
// when
const result = await runCliInstaller(args, "3.16.0")
// then
expect(result).toBe(1)
expect(addPluginSpy).not.toHaveBeenCalled()
for (const spy of restoreSpies) {
spy.mockRestore()
}
addPluginSpy.mockRestore()
})
it("completes installation without auth plugin or provider config steps", async () => {
// given
const restoreSpies = [
spyOn(configManager, "detectCurrentConfig").mockReturnValue({
isInstalled: false,
installedVersion: null,
hasClaude: false,
isMax20: false,
hasOpenAI: false,
hasGemini: false,
hasCopilot: false,
hasOpencodeZen: false,
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
}),
spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true),
spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.4.0"),
spyOn(configManager, "addPluginToOpenCodeConfig").mockResolvedValue({
success: true,
configPath: "/tmp/opencode.jsonc",
@@ -56,12 +104,13 @@ describe("runCliInstaller", () => {
opencodeZen: "no",
zaiCodingPlan: "no",
kimiForCoding: "no",
opencodeGo: "no",
}
//#when
// when
const result = await runCliInstaller(args, "3.4.0")
//#then
// then
expect(result).toBe(0)
for (const spy of restoreSpies) {

View File

@@ -22,6 +22,7 @@ import {
printWarning,
validateNonTuiArgs,
} from "./install-validators"
import { getUnsupportedOpenCodeVersionMessage } from "./minimum-opencode-version"
export async function runCliInstaller(args: InstallArgs, version: string): Promise<number> {
const validation = validateNonTuiArgs(args)
@@ -57,6 +58,12 @@ export async function runCliInstaller(args: InstallArgs, version: string): Promi
printInfo("Visit https://opencode.ai/docs for installation instructions")
} else {
printSuccess(`OpenCode ${openCodeVersion ?? ""} detected`)
const unsupportedVersionMessage = getUnsupportedOpenCodeVersionMessage(openCodeVersion)
if (unsupportedVersionMessage) {
printWarning(unsupportedVersionMessage)
return 1
}
}
if (isUpdate) {

View File

@@ -18,3 +18,12 @@ export { detectCurrentConfig } from "./config-manager/detect-current-config"
export type { BunInstallResult } from "./config-manager/bun-install"
export { runBunInstall, runBunInstallWithDetails } from "./config-manager/bun-install"
export type { VersionCompatibility } from "./config-manager/version-compatibility"
export {
checkVersionCompatibility,
extractVersionFromPluginEntry,
} from "./config-manager/version-compatibility"
export type { BackupResult } from "./config-manager/backup-config"
export { backupConfigFile } from "./config-manager/backup-config"

View File

@@ -1,12 +1,14 @@
import { readFileSync, writeFileSync } from "node:fs"
import type { ConfigMergeResult } from "../types"
import { PLUGIN_NAME, LEGACY_PLUGIN_NAME } from "../../shared"
import { backupConfigFile } from "./backup-config"
import { getConfigDir } from "./config-context"
import { ensureConfigDirectoryExists } from "./ensure-config-directory-exists"
import { formatErrorWithSuggestion } from "./format-error-with-suggestion"
import { detectConfigFormat } from "./opencode-config-format"
import { parseOpenCodeConfigFileWithError, type OpenCodeConfig } from "./parse-opencode-config-file"
import { getPluginNameWithVersion } from "./plugin-name-with-version"
import { checkVersionCompatibility, extractVersionFromPluginEntry } from "./version-compatibility"
export async function addPluginToOpenCodeConfig(currentVersion: string): Promise<ConfigMergeResult> {
try {
@@ -52,14 +54,33 @@ export async function addPluginToOpenCodeConfig(currentVersion: string): Promise
&& !(plugin === LEGACY_PLUGIN_NAME || plugin.startsWith(`${LEGACY_PLUGIN_NAME}@`))
)
const existingEntry = canonicalEntries[0] ?? legacyEntries[0]
if (existingEntry) {
const installedVersion = extractVersionFromPluginEntry(existingEntry)
const compatibility = checkVersionCompatibility(installedVersion, currentVersion)
if (!compatibility.canUpgrade) {
return {
success: false,
configPath: path,
error: compatibility.reason ?? "Version compatibility check failed",
}
}
const backupResult = backupConfigFile(path)
if (!backupResult.success) {
return {
success: false,
configPath: path,
error: `Failed to create backup: ${backupResult.error}`,
}
}
}
const normalizedPlugins = [...otherPlugins]
if (canonicalEntries.length > 0) {
normalizedPlugins.push(canonicalEntries[0])
} else if (legacyEntries.length > 0) {
const versionMatch = legacyEntries[0].match(/@(.+)$/)
const preservedVersion = versionMatch ? versionMatch[1] : null
normalizedPlugins.push(preservedVersion ? `${PLUGIN_NAME}@${preservedVersion}` : pluginEntry)
if (canonicalEntries.length > 0 || legacyEntries.length > 0) {
normalizedPlugins.push(pluginEntry)
} else {
normalizedPlugins.push(pluginEntry)
}

View File

@@ -0,0 +1,32 @@
import { copyFileSync, existsSync, mkdirSync } from "node:fs"
import { dirname } from "node:path"
/** Outcome of a config-file backup attempt. */
export interface BackupResult {
  success: boolean
  backupPath?: string
  error?: string
}

/**
 * Copy an existing config file to a timestamped ".backup-…" sibling before it
 * is rewritten, so a bad write can be undone by hand.
 *
 * A missing source file counts as success (there is nothing to back up), and
 * every I/O failure is reported through the result instead of being thrown.
 */
export function backupConfigFile(configPath: string): BackupResult {
  if (!existsSync(configPath)) {
    return { success: true }
  }
  // ISO timestamp with ":" and "." swapped out so the name is filesystem-safe.
  const stamp = new Date().toISOString().replace(/[:.]/g, "-")
  const destination = `${configPath}.backup-${stamp}`
  try {
    // Defensive: the destination lives next to configPath (whose directory
    // must exist), but create the parent anyway in case of odd paths.
    const parentDir = dirname(destination)
    if (!existsSync(parentDir)) {
      mkdirSync(parentDir, { recursive: true })
    }
    copyFileSync(configPath, destination)
    return { success: true, backupPath: destination }
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to create backup"
    return { success: false, error: message }
  }
}

View File

@@ -4,6 +4,7 @@ import type { DetectedConfig } from "../types"
import { getOmoConfigPath } from "./config-context"
import { detectConfigFormat } from "./opencode-config-format"
import { parseOpenCodeConfigFileWithError } from "./parse-opencode-config-file"
import { extractVersionFromPluginEntry } from "./version-compatibility"
function detectProvidersFromOmoConfig(): {
hasOpenAI: boolean
@@ -60,9 +61,14 @@ function isOurPlugin(plugin: string): boolean {
plugin === LEGACY_PLUGIN_NAME || plugin.startsWith(`${LEGACY_PLUGIN_NAME}@`)
}
function findOurPluginEntry(plugins: string[]): string | null {
return plugins.find(isOurPlugin) ?? null
}
export function detectCurrentConfig(): DetectedConfig {
const result: DetectedConfig = {
isInstalled: false,
installedVersion: null,
hasClaude: true,
isMax20: true,
hasOpenAI: true,
@@ -86,7 +92,12 @@ export function detectCurrentConfig(): DetectedConfig {
const openCodeConfig = parseResult.config
const plugins = openCodeConfig.plugin ?? []
result.isInstalled = plugins.some(isOurPlugin)
const ourPluginEntry = findOurPluginEntry(plugins)
result.isInstalled = !!ourPluginEntry
if (ourPluginEntry) {
result.installedVersion = extractVersionFromPluginEntry(ourPluginEntry)
}
if (!result.isInstalled) {
return result

View File

@@ -1,4 +1,4 @@
import { afterEach, beforeEach, describe, expect, it } from "bun:test"
import { afterEach, beforeEach, describe, expect, it, spyOn } from "bun:test"
import { mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs"
import { tmpdir } from "node:os"
import { join } from "node:path"
@@ -6,6 +6,7 @@ import { join } from "node:path"
import { resetConfigContext } from "./config-context"
import { detectCurrentConfig } from "./detect-current-config"
import { addPluginToOpenCodeConfig } from "./add-plugin-to-opencode-config"
import * as pluginNameWithVersion from "./plugin-name-with-version"
describe("detectCurrentConfig - single package detection", () => {
let testConfigDir = ""
@@ -109,17 +110,19 @@ describe("addPluginToOpenCodeConfig - single package writes", () => {
expect(savedConfig.plugin).toEqual(["oh-my-openagent"])
})
it("upgrades a version-pinned legacy entry to canonical", async () => {
it("updates a version-pinned legacy entry to the requested version", async () => {
// given
writeFileSync(testConfigPath, JSON.stringify({ plugin: ["oh-my-opencode@3.10.0"] }, null, 2) + "\n", "utf-8")
const getPluginNameWithVersionSpy = spyOn(pluginNameWithVersion, "getPluginNameWithVersion").mockResolvedValue("oh-my-openagent@3.16.0")
writeFileSync(testConfigPath, JSON.stringify({ plugin: ["oh-my-opencode@3.15.0"] }, null, 2) + "\n", "utf-8")
// when
const result = await addPluginToOpenCodeConfig("3.11.0")
const result = await addPluginToOpenCodeConfig("3.16.0")
// then
expect(result.success).toBe(true)
const savedConfig = JSON.parse(readFileSync(testConfigPath, "utf-8"))
expect(savedConfig.plugin).toEqual(["oh-my-openagent@3.10.0"])
expect(savedConfig.plugin).toEqual(["oh-my-openagent@3.16.0"])
getPluginNameWithVersionSpy.mockRestore()
})
it("removes stale legacy entry when canonical and legacy entries both exist", async () => {
@@ -135,17 +138,36 @@ describe("addPluginToOpenCodeConfig - single package writes", () => {
expect(savedConfig.plugin).toEqual(["oh-my-openagent"])
})
it("preserves a canonical entry when it already exists", async () => {
it("preserves a canonical entry when the same version is re-installed", async () => {
// given
const getPluginNameWithVersionSpy = spyOn(pluginNameWithVersion, "getPluginNameWithVersion").mockResolvedValue("oh-my-openagent@3.10.0")
writeFileSync(testConfigPath, JSON.stringify({ plugin: ["oh-my-openagent@3.10.0"] }, null, 2) + "\n", "utf-8")
// when
const result = await addPluginToOpenCodeConfig("3.11.0")
const result = await addPluginToOpenCodeConfig("3.10.0")
// then
expect(result.success).toBe(true)
const savedConfig = JSON.parse(readFileSync(testConfigPath, "utf-8"))
expect(savedConfig.plugin).toEqual(["oh-my-openagent@3.10.0"])
getPluginNameWithVersionSpy.mockRestore()
})
it("blocks a downgrade for a version-pinned canonical entry", async () => {
// given
const getPluginNameWithVersionSpy = spyOn(pluginNameWithVersion, "getPluginNameWithVersion").mockResolvedValue("oh-my-openagent@3.15.0")
writeFileSync(testConfigPath, JSON.stringify({ plugin: ["oh-my-openagent@3.16.0"] }, null, 2) + "\n", "utf-8")
// when
const result = await addPluginToOpenCodeConfig("3.15.0")
// then
expect(result.success).toBe(false)
expect(result.error).toContain("Downgrade")
const savedConfig = JSON.parse(readFileSync(testConfigPath, "utf-8"))
expect(savedConfig.plugin).toEqual(["oh-my-openagent@3.16.0"])
getPluginNameWithVersionSpy.mockRestore()
})
it("rewrites quoted jsonc plugin field in place", async () => {

View File

@@ -0,0 +1,82 @@
import { describe, expect, it } from "bun:test"
import {
checkVersionCompatibility,
extractVersionFromPluginEntry,
} from "./version-compatibility"
// Behavior matrix for checkVersionCompatibility: fresh installs and upgrades
// are allowed, downgrades are blocked, a major bump is flagged as needing
// migration, and "v" prefixes are tolerated on either argument.
describe("checkVersionCompatibility", () => {
  it("allows fresh install when no current version", () => {
    const result = checkVersionCompatibility(null, "3.15.0")
    expect(result.canUpgrade).toBe(true)
    expect(result.isDowngrade).toBe(false)
    expect(result.requiresMigration).toBe(false)
  })
  it("detects same version as already installed", () => {
    const result = checkVersionCompatibility("3.15.0", "3.15.0")
    // Re-installing the same version is allowed but explained via `reason`.
    expect(result.canUpgrade).toBe(true)
    expect(result.reason).toContain("already installed")
  })
  it("blocks downgrade from higher to lower version", () => {
    const result = checkVersionCompatibility("3.15.0", "3.14.0")
    expect(result.canUpgrade).toBe(false)
    expect(result.isDowngrade).toBe(true)
    expect(result.reason).toContain("Downgrade")
  })
  it("allows patch version upgrade", () => {
    const result = checkVersionCompatibility("3.15.0", "3.15.1")
    expect(result.canUpgrade).toBe(true)
    expect(result.isMajorBump).toBe(false)
    expect(result.requiresMigration).toBe(false)
  })
  it("allows minor version upgrade", () => {
    const result = checkVersionCompatibility("3.15.0", "3.16.0")
    expect(result.canUpgrade).toBe(true)
    expect(result.isMajorBump).toBe(false)
    expect(result.requiresMigration).toBe(false)
  })
  it("detects major version bump requiring migration", () => {
    const result = checkVersionCompatibility("3.15.0", "4.0.0")
    // Major bumps are still allowed, but callers must surface the warning.
    expect(result.canUpgrade).toBe(true)
    expect(result.isMajorBump).toBe(true)
    expect(result.requiresMigration).toBe(true)
    expect(result.reason).toContain("Major version upgrade")
  })
  it("handles v prefix in versions", () => {
    const result = checkVersionCompatibility("v3.15.0", "v3.16.0")
    expect(result.canUpgrade).toBe(true)
    expect(result.isDowngrade).toBe(false)
  })
  it("handles mixed v prefix", () => {
    const result = checkVersionCompatibility("3.15.0", "v3.16.0")
    expect(result.canUpgrade).toBe(true)
  })
})
// extractVersionFromPluginEntry pulls the "@<version>" suffix off a plugin
// entry; bare (unpinned) entries yield null.
describe("extractVersionFromPluginEntry", () => {
  it("extracts version from canonical plugin entry", () => {
    const version = extractVersionFromPluginEntry("oh-my-openagent@3.15.0")
    expect(version).toBe("3.15.0")
  })
  it("extracts version from legacy plugin entry", () => {
    // The pre-rename package name must keep working for existing installs.
    const version = extractVersionFromPluginEntry("oh-my-opencode@3.14.0")
    expect(version).toBe("3.14.0")
  })
  it("returns null for bare plugin entry", () => {
    const version = extractVersionFromPluginEntry("oh-my-openagent")
    expect(version).toBeNull()
  })
  it("handles prerelease versions", () => {
    // Everything after the first "@" is kept, including prerelease suffixes.
    const version = extractVersionFromPluginEntry("oh-my-openagent@3.16.0-beta.1")
    expect(version).toBe("3.16.0-beta.1")
  })
})

View File

@@ -0,0 +1,103 @@
/** Result of comparing an installed plugin version against a candidate version. */
export interface VersionCompatibility {
  // Whether the install/upgrade may proceed at all (false only for downgrades).
  canUpgrade: boolean
  // Human-readable explanation when the outcome needs context: blocked
  // downgrade, same version already installed, major bump, or an
  // uncomparable version pair.
  reason?: string
  // True when the candidate version is older than the installed one.
  isDowngrade: boolean
  // True when the major version segment changes between the two versions.
  isMajorBump: boolean
  // True when a major bump means user configuration may need migration.
  requiresMigration: boolean
}
// Reduce a version string such as "v1.2.3-beta.1" to its numeric core parts
// ([1, 2, 3]): a leading "v" and any prerelease suffix after "-" are dropped.
function parseVersion(version: string): number[] {
  const withoutPrefix = version.replace(/^v/, "")
  const core = withoutPrefix.split("-")[0]
  return core.split(".").map((part) => Number(part))
}
/**
 * Numerically compare two version strings, part by part. Leading "v" prefixes
 * and prerelease suffixes ("-beta.1") are ignored; missing parts compare as 0
 * (so "3.15" === "3.15.0").
 *
 * Returns a negative number when a < b, zero when equal, positive when a > b.
 *
 * @throws TypeError when either version contains a non-numeric core part.
 *   Previously Number("abc") produced NaN, every NaN comparison evaluated to
 *   false, and malformed versions silently fell through as "upgrade allowed";
 *   throwing makes checkVersionCompatibility's existing catch branch (the
 *   "proceeding with caution" result) reachable instead.
 */
function compareVersions(a: string, b: string): number {
  const toParts = (version: string): number[] => {
    // Strip the "v" prefix and anything after the first "-" (prerelease tag).
    const core = version.replace(/^v/, "").split("-")[0]
    return core.split(".").map((part) => {
      const parsed = Number(part)
      if (Number.isNaN(parsed)) {
        throw new TypeError(`Unparsable version part "${part}" in "${version}"`)
      }
      return parsed
    })
  }
  const partsA = toParts(a)
  const partsB = toParts(b)
  const maxLen = Math.max(partsA.length, partsB.length)
  for (let i = 0; i < maxLen; i++) {
    const diff = (partsA[i] ?? 0) - (partsB[i] ?? 0)
    if (diff !== 0) {
      return diff
    }
  }
  return 0
}
/**
 * Decide whether moving from `currentVersion` to `newVersion` is allowed.
 *
 * Rules: a fresh install (no current version) and any upgrade are allowed;
 * a downgrade is blocked; re-installing the same version is allowed with an
 * explanatory reason; a major-version bump is allowed but flagged as
 * requiring configuration migration. If the versions cannot be compared,
 * the install proceeds with a cautionary reason.
 */
export function checkVersionCompatibility(
  currentVersion: string | null,
  newVersion: string
): VersionCompatibility {
  // Small factory: every outcome is the permissive default plus overrides.
  const outcome = (overrides: Partial<VersionCompatibility> = {}): VersionCompatibility => ({
    canUpgrade: true,
    isDowngrade: false,
    isMajorBump: false,
    requiresMigration: false,
    ...overrides,
  })
  if (!currentVersion) {
    return outcome()
  }
  const strippedCurrent = currentVersion.replace(/^v/, "")
  const strippedNew = newVersion.replace(/^v/, "")
  try {
    const ordering = compareVersions(strippedNew, strippedCurrent)
    if (ordering < 0) {
      return outcome({
        canUpgrade: false,
        isDowngrade: true,
        reason: `Downgrade from ${currentVersion} to ${newVersion} is not allowed`,
      })
    }
    if (ordering === 0) {
      return outcome({ reason: `Version ${newVersion} is already installed` })
    }
    // Compare only the leading (major) segment to detect a breaking bump.
    const majorChanged = strippedCurrent.split(".")[0] !== strippedNew.split(".")[0]
    if (majorChanged) {
      return outcome({
        isMajorBump: true,
        requiresMigration: true,
        reason: `Major version upgrade from ${currentVersion} to ${newVersion} - configuration migration may be required`,
      })
    }
    return outcome()
  } catch {
    // Unparsable versions: do not block the install, just warn.
    return outcome({
      reason: `Unable to compare versions ${currentVersion} and ${newVersion} - proceeding with caution`,
    })
  }
}
/**
 * Pull the "@<version>" suffix off a plugin entry such as
 * "oh-my-openagent@3.15.0"; returns null for a bare (unpinned) entry.
 */
export function extractVersionFromPluginEntry(entry: string): string | null {
  const separatorIndex = entry.indexOf("@")
  if (separatorIndex === -1) {
    return null
  }
  const version = entry.slice(separatorIndex + 1)
  // An entry ending in a lone "@" carries no version.
  return version.length > 0 ? version : null
}

View File

@@ -18,6 +18,7 @@ const installConfig: InstallConfig = {
hasOpencodeZen: false,
hasZaiCodingPlan: false,
hasKimiForCoding: false,
hasOpencodeGo: false,
}
function getRecord(value: unknown): Record<string, unknown> {

View File

@@ -1,6 +1,7 @@
import { existsSync, readFileSync, statSync, writeFileSync } from "node:fs"
import { parseJsonc } from "../../shared"
import type { ConfigMergeResult, InstallConfig } from "../types"
import { backupConfigFile } from "./backup-config"
import { getConfigDir, getOmoConfigPath } from "./config-context"
import { deepMergeRecord } from "./deep-merge-record"
import { ensureConfigDirectoryExists } from "./ensure-config-directory-exists"
@@ -28,6 +29,15 @@ export function writeOmoConfig(installConfig: InstallConfig): ConfigMergeResult
const newConfig = generateOmoConfig(installConfig)
if (existsSync(omoConfigPath)) {
const backupResult = backupConfigFile(omoConfigPath)
if (!backupResult.success) {
return {
success: false,
configPath: omoConfigPath,
error: `Failed to create backup: ${backupResult.error}`,
}
}
try {
const stat = statSync(omoConfigPath)
const content = readFileSync(omoConfigPath, "utf-8")

View File

@@ -0,0 +1,150 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
import { mkdirSync, writeFileSync, rmSync } from "node:fs"
import { join } from "node:path"
import { loadAvailableModelsFromCache } from "./model-resolution-cache"
// Tests for loadAvailableModelsFromCache, which reports the providers and
// model count the doctor command considers available. Providers come from two
// sources — the OpenCode model cache (models.json) and custom providers
// declared in the user's opencode.json(c) — and these tests pin the
// merge/dedupe behavior between them.
describe("loadAvailableModelsFromCache", () => {
  // Saved so afterEach can restore the real XDG environment.
  const originalXDGCache = process.env.XDG_CACHE_HOME
  const originalXDGConfig = process.env.XDG_CONFIG_HOME
  let tempDir: string
  beforeEach(() => {
    // Redirect both XDG roots into a per-test temp dir containing empty
    // cache/opencode and config/opencode directories.
    tempDir = join("/tmp", `doctor-cache-test-${Date.now()}`)
    mkdirSync(join(tempDir, "cache", "opencode"), { recursive: true })
    mkdirSync(join(tempDir, "config", "opencode"), { recursive: true })
    process.env.XDG_CACHE_HOME = join(tempDir, "cache")
    process.env.XDG_CONFIG_HOME = join(tempDir, "config")
  })
  afterEach(() => {
    // NOTE(review): if the saved value was undefined, assigning it back stores
    // the string "undefined" in process.env rather than unsetting the key —
    // consider `delete process.env.X` for the undefined case. Confirm before
    // relying on these vars being unset after the suite.
    process.env.XDG_CACHE_HOME = originalXDGCache
    process.env.XDG_CONFIG_HOME = originalXDGConfig
    rmSync(tempDir, { recursive: true, force: true })
  })
  test("returns cacheExists: false when no models.json and no custom providers", () => {
    const result = loadAvailableModelsFromCache()
    expect(result.cacheExists).toBe(false)
    expect(result.providers).toEqual([])
    expect(result.modelCount).toBe(0)
  })
  test("reads providers from models.json cache", () => {
    writeFileSync(
      join(tempDir, "cache", "opencode", "models.json"),
      JSON.stringify({
        openai: { models: { "gpt-5.4": {} } },
        anthropic: { models: { "claude-opus-4-6": {}, "claude-sonnet-4-6": {} } },
      })
    )
    const result = loadAvailableModelsFromCache()
    expect(result.cacheExists).toBe(true)
    expect(result.providers).toContain("openai")
    expect(result.providers).toContain("anthropic")
    // Model count is summed across providers: 1 (openai) + 2 (anthropic).
    expect(result.modelCount).toBe(3)
  })
  test("includes custom providers from opencode.json even if not in cache", () => {
    writeFileSync(
      join(tempDir, "cache", "opencode", "models.json"),
      JSON.stringify({
        openai: { models: { "gpt-5.4": {} } },
      })
    )
    writeFileSync(
      join(tempDir, "config", "opencode", "opencode.json"),
      JSON.stringify({
        provider: {
          "openai-custom": {
            npm: "@ai-sdk/openai-compatible",
            models: { "gpt-5.4": {} },
          },
          "my-local-llm": {
            npm: "@ai-sdk/openai-compatible",
            models: { "local-model": {} },
          },
        },
      })
    )
    const result = loadAvailableModelsFromCache()
    expect(result.cacheExists).toBe(true)
    expect(result.providers).toContain("openai")
    expect(result.providers).toContain("openai-custom")
    expect(result.providers).toContain("my-local-llm")
  })
  test("deduplicates providers that appear in both cache and opencode.json", () => {
    writeFileSync(
      join(tempDir, "cache", "opencode", "models.json"),
      JSON.stringify({
        openai: { models: { "gpt-5.4": {} } },
      })
    )
    writeFileSync(
      join(tempDir, "config", "opencode", "opencode.json"),
      JSON.stringify({
        provider: {
          openai: { models: { "custom-model": {} } },
        },
      })
    )
    const result = loadAvailableModelsFromCache()
    // "openai" exists in both sources but must be listed exactly once.
    const openaiCount = result.providers.filter((p) => p === "openai").length
    expect(openaiCount).toBe(1)
  })
  test("returns custom providers even without models.json cache", () => {
    // No models.json exists
    writeFileSync(
      join(tempDir, "config", "opencode", "opencode.json"),
      JSON.stringify({
        provider: {
          "openai-custom": {
            npm: "@ai-sdk/openai-compatible",
            models: { "gpt-5.4": {} },
          },
        },
      })
    )
    const result = loadAvailableModelsFromCache()
    expect(result.cacheExists).toBe(true) // custom providers make it effectively "exists"
    expect(result.providers).toContain("openai-custom")
  })
  test("reads from opencode.jsonc (JSONC variant)", () => {
    writeFileSync(
      join(tempDir, "config", "opencode", "opencode.jsonc"),
      `{
  // This is a comment
  "provider": {
    "my-provider": {
      "models": { "test-model": {} }
    }
  }
}`
    )
    const result = loadAvailableModelsFromCache()
    expect(result.providers).toContain("my-provider")
  })
  test("ignores malformed opencode.json gracefully", () => {
    writeFileSync(
      join(tempDir, "cache", "opencode", "models.json"),
      JSON.stringify({ openai: { models: { "gpt-5.4": {} } } })
    )
    writeFileSync(
      join(tempDir, "config", "opencode", "opencode.json"),
      "this is not valid json {{{",
    )
    const result = loadAvailableModelsFromCache()
    expect(result.cacheExists).toBe(true)
    expect(result.providers).toContain("openai")
    // Should not crash, just skip the config
  })
})

View File

@@ -10,10 +10,51 @@ function getOpenCodeCacheDir(): string {
return join(homedir(), ".cache", "opencode")
}
// Resolve the OpenCode config directory: a non-empty XDG_CONFIG_HOME wins,
// otherwise fall back to ~/.config/opencode. (Truthiness check is deliberate
// so an empty env var behaves like an unset one.)
function getOpenCodeConfigDir(): string {
  const xdgBase = process.env.XDG_CONFIG_HOME
  return xdgBase ? join(xdgBase, "opencode") : join(homedir(), ".config", "opencode")
}
/**
 * Read custom provider names from the user's opencode.json / opencode.jsonc.
 *
 * Providers declared under the config's "provider" key are valid at runtime
 * but never appear in the model cache (models.json), which only lists
 * built-in providers from models.dev — without this, doctor would emit
 * false-positive "unknown provider" warnings for them.
 */
function loadCustomProviderNames(): string[] {
  const configDir = getOpenCodeConfigDir()
  for (const fileName of ["opencode.json", "opencode.jsonc"]) {
    const configPath = join(configDir, fileName)
    if (!existsSync(configPath)) continue
    try {
      const parsed = parseJsonc<{ provider?: Record<string, unknown> }>(
        readFileSync(configPath, "utf-8")
      )
      const providerMap = parsed?.provider
      if (providerMap && typeof providerMap === "object") {
        return Object.keys(providerMap)
      }
    } catch {
      // Malformed config: fall through to the next candidate file.
    }
  }
  return []
}
export function loadAvailableModelsFromCache(): AvailableModelsInfo {
const cacheFile = join(getOpenCodeCacheDir(), "models.json")
const customProviders = loadCustomProviderNames()
if (!existsSync(cacheFile)) {
// Even without the cache, custom providers are valid
if (customProviders.length > 0) {
return { providers: customProviders, modelCount: 0, cacheExists: true }
}
return { providers: [], modelCount: 0, cacheExists: false }
}
@@ -21,16 +62,19 @@ export function loadAvailableModelsFromCache(): AvailableModelsInfo {
const content = readFileSync(cacheFile, "utf-8")
const data = parseJsonc<Record<string, { models?: Record<string, unknown> }>>(content)
const providers = Object.keys(data)
const cacheProviders = Object.keys(data)
let modelCount = 0
for (const providerId of providers) {
for (const providerId of cacheProviders) {
const models = data[providerId]?.models
if (models && typeof models === "object") {
modelCount += Object.keys(models).length
}
}
return { providers, modelCount, cacheExists: true }
// Merge cache providers with custom providers from opencode.json
const allProviders = [...new Set([...cacheProviders, ...customProviders])]
return { providers: allProviders, modelCount, cacheExists: true }
} catch {
return { providers: [], modelCount: 0, cacheExists: false }
}

View File

@@ -37,7 +37,7 @@ export const EXIT_CODES = {
FAILURE: 1,
} as const
export const MIN_OPENCODE_VERSION = "1.0.150"
export const MIN_OPENCODE_VERSION = "1.4.0"
export const PACKAGE_NAME = PLUGIN_NAME

View File

@@ -128,7 +128,7 @@ describe("install CLI - binary check behavior", () => {
test("non-TUI mode: should still succeed and complete all steps when binary exists", async () => {
// given OpenCode binary IS installed
isOpenCodeInstalledSpy = spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true)
getOpenCodeVersionSpy = spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.0.200")
getOpenCodeVersionSpy = spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.4.0")
// given mock npm fetch
globalThis.fetch = mock(() =>
@@ -157,6 +157,6 @@ describe("install CLI - binary check behavior", () => {
// then should have printed success (OK symbol)
const allCalls = mockConsoleLog.mock.calls.flat().join("\n")
expect(allCalls).toContain("[OK]")
expect(allCalls).toContain("OpenCode 1.0.200")
expect(allCalls).toContain("OpenCode 1.4.0")
})
})

View File

@@ -0,0 +1,14 @@
import { MIN_OPENCODE_VERSION } from "./doctor/constants"
import { compareVersions } from "../shared/opencode-version"
/**
 * Build the warning shown when the detected OpenCode binary is older than
 * MIN_OPENCODE_VERSION. Returns null when the version is unknown (nothing to
 * check) or when it meets the minimum, i.e. a non-null result blocks install.
 */
export function getUnsupportedOpenCodeVersionMessage(openCodeVersion: string | null): string | null {
  if (!openCodeVersion) {
    return null
  }
  const meetsMinimum = compareVersions(openCodeVersion, MIN_OPENCODE_VERSION) >= 0
  if (meetsMinimum) {
    return null
  }
  return `Detected OpenCode ${openCodeVersion}, but ${MIN_OPENCODE_VERSION}+ is required. Update OpenCode, then rerun the installer.`
}

View File

@@ -0,0 +1,129 @@
import { afterEach, beforeEach, describe, expect, it, spyOn } from "bun:test"
import * as p from "@clack/prompts"
import * as configManager from "./config-manager"
import * as tuiInstallPrompts from "./tui-install-prompts"
import { runTuiInstaller } from "./tui-installer"
/** A silent stand-in for @clack/prompts' spinner so tests produce no output. */
function createMockSpinner(): ReturnType<typeof p.spinner> {
  return {
    start: () => {},
    stop: () => {},
    message: () => {},
  }
}
// Tests for runTuiInstaller with every prompt, spinner, and config write
// stubbed out. TTY flags are forced on so the TUI code path runs under the
// non-interactive test runner.
describe("runTuiInstaller", () => {
  // Saved so afterEach can restore the real TTY state.
  const originalIsStdinTty = process.stdin.isTTY
  const originalIsStdoutTty = process.stdout.isTTY
  beforeEach(() => {
    // isTTY is not a plain writable property; defineProperty overrides it.
    Object.defineProperty(process.stdin, "isTTY", { configurable: true, value: true })
    Object.defineProperty(process.stdout, "isTTY", { configurable: true, value: true })
  })
  afterEach(() => {
    Object.defineProperty(process.stdin, "isTTY", { configurable: true, value: originalIsStdinTty })
    Object.defineProperty(process.stdout, "isTTY", { configurable: true, value: originalIsStdoutTty })
  })
  it("blocks installation when OpenCode is below the minimum version", async () => {
    // given — OpenCode 1.3.9 detected, below the required minimum
    const restoreSpies = [
      spyOn(p, "spinner").mockReturnValue(createMockSpinner()),
      spyOn(p, "intro").mockImplementation(() => undefined),
      spyOn(p.log, "warn").mockImplementation(() => undefined),
      spyOn(configManager, "detectCurrentConfig").mockReturnValue({
        isInstalled: false,
        installedVersion: null,
        hasClaude: false,
        isMax20: false,
        hasOpenAI: false,
        hasGemini: false,
        hasCopilot: false,
        hasOpencodeZen: false,
        hasZaiCodingPlan: false,
        hasKimiForCoding: false,
        hasOpencodeGo: false,
      }),
      spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true),
      spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.3.9"),
    ]
    const promptSpy = spyOn(tuiInstallPrompts, "promptInstallConfig")
    const addPluginSpy = spyOn(configManager, "addPluginToOpenCodeConfig")
    const outroSpy = spyOn(p, "outro").mockImplementation(() => undefined)
    // when
    const result = await runTuiInstaller({ tui: true }, "3.16.0")
    // then — exits 1 before prompting the user or touching any config
    expect(result).toBe(1)
    expect(promptSpy).not.toHaveBeenCalled()
    expect(addPluginSpy).not.toHaveBeenCalled()
    expect(outroSpy).toHaveBeenCalled()
    for (const spy of restoreSpies) {
      spy.mockRestore()
    }
    promptSpy.mockRestore()
    addPluginSpy.mockRestore()
    outroSpy.mockRestore()
  })
  it("proceeds when OpenCode meets the minimum version", async () => {
    // given — OpenCode 1.4.0 detected; prompts and config writes all stubbed
    const restoreSpies = [
      spyOn(p, "spinner").mockReturnValue(createMockSpinner()),
      spyOn(p, "intro").mockImplementation(() => undefined),
      spyOn(p.log, "info").mockImplementation(() => undefined),
      spyOn(p.log, "warn").mockImplementation(() => undefined),
      spyOn(p.log, "success").mockImplementation(() => undefined),
      spyOn(p.log, "message").mockImplementation(() => undefined),
      spyOn(p, "note").mockImplementation(() => undefined),
      spyOn(p, "outro").mockImplementation(() => undefined),
      spyOn(configManager, "detectCurrentConfig").mockReturnValue({
        isInstalled: false,
        installedVersion: null,
        hasClaude: false,
        isMax20: false,
        hasOpenAI: false,
        hasGemini: false,
        hasCopilot: false,
        hasOpencodeZen: false,
        hasZaiCodingPlan: false,
        hasKimiForCoding: false,
        hasOpencodeGo: false,
      }),
      spyOn(configManager, "isOpenCodeInstalled").mockResolvedValue(true),
      spyOn(configManager, "getOpenCodeVersion").mockResolvedValue("1.4.0"),
      spyOn(tuiInstallPrompts, "promptInstallConfig").mockResolvedValue({
        hasClaude: false,
        isMax20: false,
        hasOpenAI: false,
        hasGemini: false,
        hasCopilot: false,
        hasOpencodeZen: false,
        hasZaiCodingPlan: false,
        hasKimiForCoding: false,
        hasOpencodeGo: false,
      }),
      spyOn(configManager, "addPluginToOpenCodeConfig").mockResolvedValue({
        success: true,
        configPath: "/tmp/opencode.jsonc",
      }),
      spyOn(configManager, "writeOmoConfig").mockReturnValue({
        success: true,
        configPath: "/tmp/oh-my-opencode.jsonc",
      }),
    ]
    // when
    const result = await runTuiInstaller({ tui: true }, "3.16.0")
    // then — full happy path completes with exit code 0
    expect(result).toBe(0)
    for (const spy of restoreSpies) {
      spy.mockRestore()
    }
  })
})

View File

@@ -10,6 +10,7 @@ import {
writeOmoConfig,
} from "./config-manager"
import { detectedToInitialValues, formatConfigSummary, SYMBOLS } from "./install-validators"
import { getUnsupportedOpenCodeVersionMessage } from "./minimum-opencode-version"
import { promptInstallConfig } from "./tui-install-prompts"
export async function runTuiInstaller(args: InstallArgs, version: string): Promise<number> {
@@ -39,6 +40,13 @@ export async function runTuiInstaller(args: InstallArgs, version: string): Promi
p.note("Visit https://opencode.ai/docs for installation instructions", "Installation Guide")
} else {
spinner.stop(`OpenCode ${openCodeVersion ?? "installed"} ${color.green("[OK]")}`)
const unsupportedVersionMessage = getUnsupportedOpenCodeVersionMessage(openCodeVersion)
if (unsupportedVersionMessage) {
p.log.warn(unsupportedVersionMessage)
p.outro(color.red("Installation blocked."))
return 1
}
}
const config = await promptInstallConfig(detected)

View File

@@ -34,6 +34,7 @@ export interface ConfigMergeResult {
export interface DetectedConfig {
isInstalled: boolean
installedVersion: string | null
hasClaude: boolean
isMax20: boolean
hasOpenAI: boolean

View File

@@ -1,5 +1,5 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
import { mkdtempSync, writeFileSync, rmSync } from "node:fs"
import { mkdtempSync, mkdirSync, writeFileSync, rmSync } from "node:fs"
import { join } from "node:path"
import { tmpdir } from "node:os"
import {
@@ -11,6 +11,7 @@ import {
clearCompactionAgentConfigCheckpoint,
setCompactionAgentConfigCheckpoint,
} from "../../shared/compaction-agent-config-checkpoint"
import { PART_STORAGE } from "../../shared"
describe("isCompactionAgent", () => {
describe("#given agent name variations", () => {
@@ -73,6 +74,7 @@ describe("findNearestMessageExcludingCompaction", () => {
afterEach(() => {
rmSync(tempDir, { force: true, recursive: true })
rmSync(join(PART_STORAGE, "msg_test_background_compaction_marker"), { force: true, recursive: true })
clearCompactionAgentConfigCheckpoint("ses_checkpoint")
})
@@ -116,6 +118,30 @@ describe("findNearestMessageExcludingCompaction", () => {
expect(result?.agent).toBe("sisyphus")
})
test("skips JSON messages whose part storage contains a compaction marker", () => {
// given
const compactionMessageID = "msg_test_background_compaction_marker"
const partDir = join(PART_STORAGE, compactionMessageID)
writeFileSync(join(tempDir, "002.json"), JSON.stringify({
id: compactionMessageID,
agent: "atlas",
model: { providerID: "anthropic", modelID: "claude-opus-4-6" },
}))
writeFileSync(join(tempDir, "001.json"), JSON.stringify({
id: "msg_001",
agent: "sisyphus",
model: { providerID: "anthropic", modelID: "claude-opus-4-6" },
}))
mkdirSync(partDir, { recursive: true })
writeFileSync(join(partDir, "prt_0001.json"), JSON.stringify({ type: "compaction" }))
// when
const result = findNearestMessageExcludingCompaction(tempDir)
// then
expect(result?.agent).toBe("sisyphus")
})
test("falls back to partial agent/model match", () => {
// given
const messageWithAgentOnly = {
@@ -256,4 +282,28 @@ describe("resolvePromptContextFromSessionMessages", () => {
tools: { bash: true },
})
})
test("skips SDK messages that only exist to mark compaction", () => {
// given
const messages = [
{
id: "msg_compaction",
info: { agent: "atlas", model: { providerID: "openai", modelID: "gpt-5" } },
parts: [{ type: "compaction" }],
},
{ info: { agent: "sisyphus" } },
{ info: { model: { providerID: "anthropic", modelID: "claude-opus-4-1" } } },
{ info: { tools: { bash: true } } },
]
// when
const result = resolvePromptContextFromSessionMessages(messages)
// then
expect(result).toEqual({
agent: "sisyphus",
model: { providerID: "anthropic", modelID: "claude-opus-4-1" },
tools: { bash: true },
})
})
})

View File

@@ -2,8 +2,16 @@ import { readdirSync, readFileSync } from "node:fs"
import { join } from "node:path"
import type { StoredMessage } from "../hook-message-injector"
import { getCompactionAgentConfigCheckpoint } from "../../shared/compaction-agent-config-checkpoint"
import {
hasCompactionPartInStorage,
isCompactionAgent,
isCompactionMessage,
} from "../../shared/compaction-marker"
export { isCompactionAgent } from "../../shared/compaction-marker"
type SessionMessage = {
id?: string
info?: {
agent?: string
model?: {
@@ -15,10 +23,7 @@ type SessionMessage = {
modelID?: string
tools?: StoredMessage["tools"]
}
}
export function isCompactionAgent(agent: string | undefined): boolean {
return agent?.trim().toLowerCase() === "compaction"
parts?: Array<{ type?: string }>
}
function hasFullAgentAndModel(message: StoredMessage): boolean {
@@ -35,6 +40,10 @@ function hasPartialAgentOrModel(message: StoredMessage): boolean {
}
function convertSessionMessageToStoredMessage(message: SessionMessage): StoredMessage | null {
if (isCompactionMessage(message)) {
return null
}
const info = message.info
if (!info) {
return null
@@ -138,7 +147,11 @@ export function findNearestMessageExcludingCompaction(
for (const file of files) {
try {
const content = readFileSync(join(messageDir, file), "utf-8")
messages.push(JSON.parse(content) as StoredMessage)
const parsed = JSON.parse(content) as StoredMessage & { id?: string }
if (hasCompactionPartInStorage(parsed.id) || isCompactionAgent(parsed.agent)) {
continue
}
messages.push(parsed)
} catch {
continue
}

View File

@@ -218,6 +218,10 @@ function getRootDescendantCounts(manager: BackgroundManager): Map<string, number
return (manager as unknown as { rootDescendantCounts: Map<string, number> }).rootDescendantCounts
}
function getPreStartDescendantReservations(manager: BackgroundManager): Set<string> {
return (manager as unknown as { preStartDescendantReservations: Set<string> }).preStartDescendantReservations
}
function getQueuesByKey(
manager: BackgroundManager
): Map<string, Array<{ task: BackgroundTask; input: import("./types").LaunchInput }>> {
@@ -1144,7 +1148,18 @@ describe("BackgroundManager.notifyParentSession - notifications toggle", () => {
prompt: promptMock,
promptAsync: promptMock,
abort: async () => ({}),
messages: async () => ({ data: [] }),
messages: async () => ({
data: [{
info: {
agent: "explore",
model: {
providerID: "anthropic",
modelID: "claude-opus-4-6",
variant: "high",
},
},
}],
}),
},
}
const manager = new BackgroundManager(
@@ -1177,6 +1192,101 @@ describe("BackgroundManager.notifyParentSession - notifications toggle", () => {
})
})
describe("BackgroundManager.notifyParentSession - variant propagation", () => {
test("should prefer parent session variant over child task variant in parent notification promptAsync body", async () => {
//#given
const promptCalls: Array<{ body: Record<string, unknown> }> = []
const client = {
session: {
prompt: async () => ({}),
promptAsync: async (args: { path: { id: string }; body: Record<string, unknown> }) => {
promptCalls.push({ body: args.body })
return {}
},
abort: async () => ({}),
messages: async () => ({
data: [{
info: {
agent: "explore",
model: {
providerID: "anthropic",
modelID: "claude-opus-4-6",
variant: "max",
},
},
}],
}),
},
}
const manager = new BackgroundManager({ client, directory: tmpdir() } as unknown as PluginInput)
const task: BackgroundTask = {
id: "task-parent-variant-wins",
sessionID: "session-child",
parentSessionID: "session-parent",
parentMessageID: "msg-parent",
description: "task with mismatched variant",
prompt: "test",
agent: "explore",
status: "completed",
startedAt: new Date(),
completedAt: new Date(),
model: { providerID: "anthropic", modelID: "claude-opus-4-6", variant: "high" },
}
getPendingByParent(manager).set("session-parent", new Set([task.id]))
//#when
await (manager as unknown as { notifyParentSession: (task: BackgroundTask) => Promise<void> })
.notifyParentSession(task)
//#then
expect(promptCalls).toHaveLength(1)
expect(promptCalls[0].body.variant).toBe("max")
manager.shutdown()
})
test("should not include variant in promptAsync body when task has no variant", async () => {
//#given
const promptCalls: Array<{ body: Record<string, unknown> }> = []
const client = {
session: {
prompt: async () => ({}),
promptAsync: async (args: { path: { id: string }; body: Record<string, unknown> }) => {
promptCalls.push({ body: args.body })
return {}
},
abort: async () => ({}),
messages: async () => ({ data: [] }),
},
}
const manager = new BackgroundManager({ client, directory: tmpdir() } as unknown as PluginInput)
const task: BackgroundTask = {
id: "task-no-variant",
sessionID: "session-child",
parentSessionID: "session-parent",
parentMessageID: "msg-parent",
description: "task without variant",
prompt: "test",
agent: "explore",
status: "completed",
startedAt: new Date(),
completedAt: new Date(),
model: { providerID: "anthropic", modelID: "claude-opus-4-6" },
}
getPendingByParent(manager).set("session-parent", new Set([task.id]))
//#when
await (manager as unknown as { notifyParentSession: (task: BackgroundTask) => Promise<void> })
.notifyParentSession(task)
//#then
expect(promptCalls).toHaveLength(1)
expect(promptCalls[0].body.variant).toBeUndefined()
manager.shutdown()
})
})
describe("BackgroundManager.injectPendingNotificationsIntoChatMessage", () => {
test("should prepend queued notifications to first text part and clear queue", () => {
// given
@@ -1437,6 +1547,7 @@ describe("BackgroundManager.tryCompleteTask", () => {
const task = createMockTask({
id: "task-zombie-session",
sessionID: "session-zombie-placeholder",
parentSessionID: "parent-zombie",
status: "pending",
agent: "explore",
@@ -1779,10 +1890,10 @@ describe("BackgroundManager.resume model persistence", () => {
expect(getSessionPromptParams("session-advanced")).toEqual({
temperature: 0.25,
topP: 0.55,
maxOutputTokens: 8192,
options: {
reasoningEffort: "high",
thinking: { type: "disabled" },
maxTokens: 8192,
},
})
})
@@ -2379,6 +2490,46 @@ describe("BackgroundManager - Non-blocking Queue Integration", () => {
expect(retryTask.status).toBe("pending")
})
test("should only roll back the failed task reservation once when siblings still exist", async () => {
// given
const concurrencyKey = "test-agent"
const task = createMockTask({
id: "task-single-reservation-rollback",
sessionID: "session-single-reservation-rollback",
parentSessionID: "session-root",
status: "pending",
agent: "test-agent",
rootSessionID: "session-root",
})
delete (task as Partial<BackgroundTask>).sessionID
const input = {
description: task.description,
prompt: task.prompt,
agent: task.agent,
parentSessionID: task.parentSessionID,
parentMessageID: task.parentMessageID,
}
getTaskMap(manager).set(task.id, task)
getQueuesByKey(manager).set(concurrencyKey, [{ task, input }])
getRootDescendantCounts(manager).set("session-root", 2)
getPreStartDescendantReservations(manager).add(task.id)
stubNotifyParentSession(manager)
;(manager as unknown as {
startTask: (item: { task: BackgroundTask; input: typeof input }) => Promise<void>
}).startTask = async () => {
throw new Error("session create failed")
}
// when
await processKeyForTest(manager, concurrencyKey)
// then
expect(getRootDescendantCounts(manager).get("session-root")).toBe(1)
})
test("should keep the next queued task when the first task is cancelled during session creation", async () => {
// given
const firstSessionID = "ses-first-cancelled-during-create"

View File

@@ -422,10 +422,6 @@ export class BackgroundManager {
this.concurrencyManager.release(key)
}
if (item.task.rootSessionID) {
this.unregisterRootDescendant(item.task.rootSessionID)
}
removeTaskToastTracking(item.task.id)
// Abort the orphaned session if one was created before the error
@@ -1783,6 +1779,7 @@ export class BackgroundManager {
let agent: string | undefined = task.parentAgent
let model: { providerID: string; modelID: string } | undefined
let tools: Record<string, boolean> | undefined = task.parentTools
let promptContext: ReturnType<typeof resolvePromptContextFromSessionMessages> = null
if (this.enableParentSessionNotifications) {
try {
@@ -1796,7 +1793,7 @@ export class BackgroundManager {
tools?: Record<string, boolean | "allow" | "deny" | "ask">
}
}>)
const promptContext = resolvePromptContextFromSessionMessages(
promptContext = resolvePromptContextFromSessionMessages(
messages,
task.parentSessionID,
)
@@ -1840,6 +1837,8 @@ export class BackgroundManager {
const isTaskFailure = task.status === "error" || task.status === "cancelled" || task.status === "interrupt"
const shouldReply = allComplete || isTaskFailure
const variant = promptContext?.model?.variant
try {
await this.client.session.promptAsync({
path: { id: task.parentSessionID },
@@ -1847,6 +1846,7 @@ export class BackgroundManager {
noReply: !shouldReply,
...(agent !== undefined ? { agent } : {}),
...(model !== undefined ? { model } : {}),
...(variant !== undefined ? { variant } : {}),
...(resolvedTools ? { tools: resolvedTools } : {}),
parts: [createInternalAgentTextPart(notification)],
},

View File

@@ -400,10 +400,10 @@ describe("background-agent spawner fallback model promotion", () => {
expect(getSessionPromptParams("session-123")).toEqual({
temperature: 0.4,
topP: 0.7,
maxOutputTokens: 4096,
options: {
reasoningEffort: "high",
thinking: { type: "disabled" },
maxTokens: 4096,
},
})
})
@@ -466,4 +466,58 @@ describe("background-agent spawner fallback model promotion", () => {
})
expect(promptCalls[0]?.body?.variant).toBe("medium")
})
test("strips leading zwsp from prompt body agent before promptAsync", async () => {
//#given
const promptCalls: Array<{ body?: { agent?: string } }> = []
const client = {
session: {
get: async () => ({ data: { directory: "/parent/dir" } }),
create: async () => ({ data: { id: "ses_child_clean_agent" } }),
promptAsync: async (args?: { body?: { agent?: string } }) => {
promptCalls.push(args ?? {})
return {}
},
},
}
const task = createTask({
description: "Test task",
prompt: "Do work",
agent: "\u200Bsisyphus-junior",
parentSessionID: "ses_parent",
parentMessageID: "msg_parent",
})
const item = {
task,
input: {
description: task.description,
prompt: task.prompt,
agent: task.agent,
parentSessionID: task.parentSessionID,
parentMessageID: task.parentMessageID,
parentModel: task.parentModel,
parentAgent: task.parentAgent,
model: task.model,
},
}
const ctx = {
client,
directory: "/fallback",
concurrencyManager: { release: () => {} },
tmuxEnabled: false,
onTaskError: () => {},
}
//#when
await startTask(item as any, ctx as any)
await new Promise((resolve) => setTimeout(resolve, 0))
//#then
expect(promptCalls).toHaveLength(1)
expect(promptCalls[0]?.body?.agent).toBe("sisyphus-junior")
})
})

View File

@@ -6,6 +6,7 @@ import { applySessionPromptParams } from "../../shared/session-prompt-params-hel
import { subagentSessions } from "../claude-code-session-state"
import { getTaskToastManager } from "../task-toast-manager"
import { isInsideTmux } from "../../shared/tmux"
import { stripAgentListSortPrefix } from "../../shared/agent-display-names"
import type { ConcurrencyManager } from "./concurrency"
export const FALLBACK_AGENT = "general"
@@ -168,11 +169,12 @@ export async function startTask(
}
: undefined
const launchVariant = input.model?.variant
const normalizedAgent = stripAgentListSortPrefix(input.agent)
applySessionPromptParams(sessionID, input.model)
const promptBody = {
agent: input.agent,
agent: normalizedAgent,
...(launchModel ? { model: launchModel } : {}),
...(launchVariant ? { variant: launchVariant } : {}),
system: input.skillContent,
@@ -180,7 +182,7 @@ export async function startTask(
task: false,
call_omo_agent: true,
question: false,
...getAgentToolRestrictions(input.agent),
...getAgentToolRestrictions(normalizedAgent),
},
parts: [createInternalAgentTextPart(input.prompt)],
}

View File

@@ -1,6 +1,14 @@
import { describe, expect, test } from "bun:test"
import type { OpencodeClient } from "./constants"
import { resolveSubagentSpawnContext } from "./subagent-spawn-limits"
import {
resolveSubagentSpawnContext,
getMaxSubagentDepth,
DEFAULT_MAX_SUBAGENT_DEPTH,
createSubagentDepthLimitError,
createSubagentDescendantLimitError,
getMaxRootSessionSpawnBudget,
DEFAULT_MAX_ROOT_SESSION_SPAWN_BUDGET,
} from "./subagent-spawn-limits"
function createMockClient(sessionGet: OpencodeClient["session"]["get"]): OpencodeClient {
return {
@@ -41,4 +49,177 @@ describe("resolveSubagentSpawnContext", () => {
await expect(result).rejects.toThrow(/background_task\.maxDescendants cannot be enforced safely.*No session data returned/)
})
})
describe("depth calculation smoke tests (regression guard)", () => {
test("root session (no parentID) reports depth 0 and childDepth 1", async () => {
// given - a root session with no parent
const client = createMockClient(async (opts) => {
if (opts.path.id === "root-session") {
return { data: { id: "root-session", parentID: undefined } }
}
return { error: "not found", data: undefined }
})
// when
const result = await resolveSubagentSpawnContext(client, "root-session")
// then
expect(result.rootSessionID).toBe("root-session")
expect(result.parentDepth).toBe(0)
expect(result.childDepth).toBe(1)
})
test("depth-1 child reports childDepth 2", async () => {
// given - child -> root chain
const client = createMockClient(async (opts) => {
if (opts.path.id === "child-1") {
return { data: { id: "child-1", parentID: "root-session" } }
}
if (opts.path.id === "root-session") {
return { data: { id: "root-session", parentID: undefined } }
}
return { error: "not found", data: undefined }
})
// when
const result = await resolveSubagentSpawnContext(client, "child-1")
// then
expect(result.rootSessionID).toBe("root-session")
expect(result.parentDepth).toBe(1)
expect(result.childDepth).toBe(2)
})
test("depth-2 grandchild reports childDepth 3", async () => {
// given - grandchild -> child -> root chain
const client = createMockClient(async (opts) => {
const sessions: Record<string, { id: string; parentID?: string }> = {
"grandchild": { id: "grandchild", parentID: "child" },
"child": { id: "child", parentID: "root" },
"root": { id: "root", parentID: undefined },
}
const session = sessions[opts.path.id]
if (session) return { data: session }
return { error: "not found", data: undefined }
})
// when
const result = await resolveSubagentSpawnContext(client, "grandchild")
// then
expect(result.rootSessionID).toBe("root")
expect(result.parentDepth).toBe(2)
expect(result.childDepth).toBe(3)
})
test("depth at DEFAULT_MAX_SUBAGENT_DEPTH reports exact max childDepth", async () => {
// given - chain of exactly DEFAULT_MAX_SUBAGENT_DEPTH depth
// With default=3: session-3 -> session-2 -> session-1 -> root
const sessions: Record<string, { id: string; parentID?: string }> = {
"root": { id: "root" },
}
for (let i = 1; i <= DEFAULT_MAX_SUBAGENT_DEPTH; i++) {
sessions[`session-${i}`] = {
id: `session-${i}`,
parentID: i === 1 ? "root" : `session-${i - 1}`,
}
}
const client = createMockClient(async (opts) => {
const session = sessions[opts.path.id]
if (session) return { data: session }
return { error: "not found", data: undefined }
})
// when - resolve from the deepest session
const deepest = `session-${DEFAULT_MAX_SUBAGENT_DEPTH}`
const result = await resolveSubagentSpawnContext(client, deepest)
// then - childDepth should be DEFAULT_MAX_SUBAGENT_DEPTH + 1 (exceeds limit)
expect(result.childDepth).toBe(DEFAULT_MAX_SUBAGENT_DEPTH + 1)
expect(result.parentDepth).toBe(DEFAULT_MAX_SUBAGENT_DEPTH)
})
test("detects parent cycle and throws", async () => {
// given - A -> B -> A (cycle)
const client = createMockClient(async (opts) => {
const sessions: Record<string, { id: string; parentID?: string }> = {
"session-a": { id: "session-a", parentID: "session-b" },
"session-b": { id: "session-b", parentID: "session-a" },
}
const session = sessions[opts.path.id]
if (session) return { data: session }
return { error: "not found", data: undefined }
})
// when
const result = resolveSubagentSpawnContext(client, "session-a")
// then
await expect(result).rejects.toThrow(/session parent cycle/)
})
})
})
describe("getMaxSubagentDepth", () => {
test("returns DEFAULT_MAX_SUBAGENT_DEPTH when no config", () => {
expect(getMaxSubagentDepth()).toBe(DEFAULT_MAX_SUBAGENT_DEPTH)
expect(getMaxSubagentDepth(undefined)).toBe(DEFAULT_MAX_SUBAGENT_DEPTH)
})
test("returns config.maxDepth when provided", () => {
expect(getMaxSubagentDepth({ maxDepth: 5 })).toBe(5)
expect(getMaxSubagentDepth({ maxDepth: 1 })).toBe(1)
expect(getMaxSubagentDepth({ maxDepth: 0 })).toBe(0)
})
test("default is 3", () => {
expect(DEFAULT_MAX_SUBAGENT_DEPTH).toBe(3)
})
})
describe("getMaxRootSessionSpawnBudget", () => {
test("returns DEFAULT_MAX_ROOT_SESSION_SPAWN_BUDGET when no config", () => {
expect(getMaxRootSessionSpawnBudget()).toBe(DEFAULT_MAX_ROOT_SESSION_SPAWN_BUDGET)
})
test("returns config.maxDescendants when provided", () => {
expect(getMaxRootSessionSpawnBudget({ maxDescendants: 10 })).toBe(10)
})
test("default is 50", () => {
expect(DEFAULT_MAX_ROOT_SESSION_SPAWN_BUDGET).toBe(50)
})
})
describe("createSubagentDepthLimitError", () => {
test("includes childDepth, maxDepth, and session IDs in message", () => {
const error = createSubagentDepthLimitError({
childDepth: 4,
maxDepth: 3,
parentSessionID: "parent-123",
rootSessionID: "root-456",
})
expect(error.message).toContain("child depth 4")
expect(error.message).toContain("maxDepth=3")
expect(error.message).toContain("parent-123")
expect(error.message).toContain("root-456")
expect(error.message).toContain("spawn blocked")
})
})
describe("createSubagentDescendantLimitError", () => {
test("includes descendant count, max, and root session ID", () => {
const error = createSubagentDescendantLimitError({
rootSessionID: "root-789",
descendantCount: 50,
maxDescendants: 50,
})
expect(error.message).toContain("root-789")
expect(error.message).toContain("50")
expect(error.message).toContain("maxDescendants=50")
expect(error.message).toContain("spawn blocked")
})
})

View File

@@ -650,6 +650,65 @@ describe("boulder-state", () => {
expect(progress.completed).toBe(1)
expect(progress.isComplete).toBe(false)
})
test("should count only top-level checkboxes for simple plans with nested tasks", () => {
// given
const planPath = join(TEST_DIR, "simple-nested-plan.md")
writeFileSync(planPath, `# Plan
- [ ] Top-level task 1
- [x] Nested task ignored
- [x] Top-level task 2
* [ ] Another nested task ignored
`)
// when
const progress = getPlanProgress(planPath)
// then
expect(progress.total).toBe(2)
expect(progress.completed).toBe(1)
expect(progress.isComplete).toBe(false)
})
test("should treat final-wave-only plans as structured mode", () => {
// given
const planPath = join(TEST_DIR, "final-wave-only-plan.md")
writeFileSync(planPath, `# Plan
## Final Verification Wave
- [ ] F1. Top-level final review
- [x] Nested verification detail ignored
`)
// when
const progress = getPlanProgress(planPath)
// then
expect(progress.total).toBe(1)
expect(progress.completed).toBe(0)
expect(progress.isComplete).toBe(false)
})
test("should ignore mixed indentation levels in simple plans", () => {
// given
const planPath = join(TEST_DIR, "simple-mixed-indentation-plan.md")
writeFileSync(planPath, `# Plan
* [x] Top-level star task
- [ ] Indented task ignored
- [x] Tab-indented task ignored
- [ ] Top-level dash task
`)
// when
const progress = getPlanProgress(planPath)
// then
expect(progress.total).toBe(2)
expect(progress.completed).toBe(1)
expect(progress.isComplete).toBe(false)
})
})
describe("getPlanName", () => {

View File

@@ -226,7 +226,9 @@ export function getPlanProgress(planPath: string): PlanProgress {
const lines = content.split(/\r?\n/)
// Check if the plan has structured sections (## TODOs / ## Final Verification Wave)
const hasStructuredSections = lines.some((line) => TODO_HEADING_PATTERN.test(line))
const hasStructuredSections = lines.some(
(line) => TODO_HEADING_PATTERN.test(line) || FINAL_VERIFICATION_HEADING_PATTERN.test(line),
)
if (hasStructuredSections) {
// Structured plan: only count top-level checkboxes with numbered labels
@@ -291,8 +293,8 @@ function getStructuredPlanProgress(lines: string[]): PlanProgress {
}
function getSimplePlanProgress(content: string): PlanProgress {
const uncheckedMatches = content.match(/^\s*[-*]\s*\[\s*\]/gm) || []
const checkedMatches = content.match(/^\s*[-*]\s*\[[xX]\]/gm) || []
const uncheckedMatches = content.match(/^[-*]\s*\[\s*\]/gm) || []
const checkedMatches = content.match(/^[-*]\s*\[[xX]\]/gm) || []
const total = uncheckedMatches.length + checkedMatches.length
const completed = checkedMatches.length

View File

@@ -0,0 +1,15 @@
import { describe, expect, test } from "bun:test"
import { ULW_LOOP_TEMPLATE } from "./ralph-loop"
describe("ULW_LOOP_TEMPLATE", () => {
test("returns the documented iteration caps for ultrawork and normal modes", () => {
// given
const expectedIterationCaps = "The iteration limit is 500 for ultrawork mode, 100 for normal mode"
// when
const template = ULW_LOOP_TEMPLATE
// then
expect(template).toContain(expectedIterationCaps)
})
})

View File

@@ -36,7 +36,7 @@ export const ULW_LOOP_TEMPLATE = `You are starting an ULTRAWORK Loop - a self-re
2. When you believe the work is complete, output: \`<promise>{{COMPLETION_PROMISE}}</promise>\`
3. That does NOT finish the loop yet. The system will require Oracle verification
4. The loop only ends after the system confirms Oracle verified the result
5. There is no iteration limit
5. The iteration limit is 500 for ultrawork mode, 100 for normal mode
## Rules

View File

@@ -10,6 +10,7 @@ import { join } from "node:path"
const originalClaudePluginsHome = process.env.CLAUDE_PLUGINS_HOME
const temporaryDirectories: string[] = []
const originalCwd = process.cwd()
function createTemporaryDirectory(prefix: string): string {
const directory = mkdtempSync(join(tmpdir(), prefix))
@@ -17,6 +18,14 @@ function createTemporaryDirectory(prefix: string): string {
return directory
}
function writeDatabase(pluginsHome: string, database: unknown): void {
writeFileSync(join(pluginsHome, "installed_plugins.json"), JSON.stringify(database), "utf-8")
}
function createInstallPath(prefix: string): string {
return createTemporaryDirectory(prefix)
}
describe("discoverInstalledPlugins", () => {
beforeEach(() => {
mock.module("../../shared/logger", () => ({
@@ -36,6 +45,10 @@ describe("discoverInstalledPlugins", () => {
process.env.CLAUDE_PLUGINS_HOME = originalClaudePluginsHome
}
if (process.cwd() !== originalCwd) {
process.chdir(originalCwd)
}
for (const directory of temporaryDirectories.splice(0)) {
rmSync(directory, { recursive: true, force: true })
}
@@ -156,4 +169,488 @@ describe("discoverInstalledPlugins", () => {
expect(discovered.plugins).toHaveLength(1)
expect(discovered.plugins[0]?.name).toBe("oh-my-openagent")
})
describe("#given project-scoped entries in v1 format", () => {
it("#when cwd matches projectPath #then the plugin loads", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-v1-project-match-")
const installPath = createInstallPath("omo-v1-install-")
writeDatabase(pluginsHome, {
version: 1,
plugins: {
"project-plugin@market": {
scope: "project",
projectPath: projectDirectory,
installPath,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
},
})
process.chdir(projectDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v1-match`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(1)
expect(discovered.plugins[0]?.name).toBe("project-plugin")
})
it("#when cwd is a subdirectory of projectPath #then the plugin loads", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-v1-project-sub-")
const subdirectory = join(projectDirectory, "packages", "app")
mkdirSync(subdirectory, { recursive: true })
const installPath = createInstallPath("omo-v1-install-")
writeDatabase(pluginsHome, {
version: 1,
plugins: {
"sub-plugin@market": {
scope: "project",
projectPath: projectDirectory,
installPath,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
},
})
process.chdir(subdirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v1-sub`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(1)
expect(discovered.plugins[0]?.name).toBe("sub-plugin")
})
it("#when cwd does not match projectPath #then the plugin is skipped", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-v1-project-miss-")
const otherDirectory = createTemporaryDirectory("omo-v1-other-")
const installPath = createInstallPath("omo-v1-install-")
writeDatabase(pluginsHome, {
version: 1,
plugins: {
"outside-plugin@market": {
scope: "project",
projectPath: projectDirectory,
installPath,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
},
})
process.chdir(otherDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v1-miss`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(0)
})
it("#when projectPath is missing #then the plugin is skipped", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const installPath = createInstallPath("omo-v1-install-")
writeDatabase(pluginsHome, {
version: 1,
plugins: {
"no-path-plugin@market": {
scope: "project",
installPath,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
},
})
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v1-noproj`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(0)
})
it("#when scope is user #then it always loads regardless of cwd", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const unrelatedDirectory = createTemporaryDirectory("omo-v1-unrelated-")
const installPath = createInstallPath("omo-v1-install-")
writeDatabase(pluginsHome, {
version: 1,
plugins: {
"user-plugin@market": {
scope: "user",
installPath,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
},
})
process.chdir(unrelatedDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v1-user`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(1)
expect(discovered.plugins[0]?.name).toBe("user-plugin")
})
})
describe("#given project and local scoped entries in v2 format", () => {
it("#when cwd matches project-scoped projectPath #then it loads while non-matching entries are dropped", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-v2-project-")
const otherDirectory = createTemporaryDirectory("omo-v2-other-")
const matchingInstall = createInstallPath("omo-v2-match-install-")
const missingInstall = createInstallPath("omo-v2-miss-install-")
const userInstall = createInstallPath("omo-v2-user-install-")
writeDatabase(pluginsHome, {
version: 2,
plugins: {
"matching-project@market": [
{
scope: "project",
projectPath: projectDirectory,
installPath: matchingInstall,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
],
"other-project@market": [
{
scope: "project",
projectPath: otherDirectory,
installPath: missingInstall,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
],
"global-user@market": [
{
scope: "user",
installPath: userInstall,
version: "2.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
],
},
})
process.chdir(projectDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v2-mix`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
const names = discovered.plugins.map((plugin) => plugin.name).sort()
expect(names).toEqual(["global-user", "matching-project"])
})
it("#when scope is local and cwd matches projectPath #then it loads", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-v2-local-match-")
const installPath = createInstallPath("omo-v2-local-install-")
writeDatabase(pluginsHome, {
version: 2,
plugins: {
"local-plugin@market": [
{
scope: "local",
projectPath: projectDirectory,
installPath,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
],
},
})
process.chdir(projectDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v2-local-match`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(1)
expect(discovered.plugins[0]?.name).toBe("local-plugin")
})
it("#when scope is local and cwd does not match projectPath #then it is skipped", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-v2-local-miss-")
const otherDirectory = createTemporaryDirectory("omo-v2-local-other-")
const installPath = createInstallPath("omo-v2-local-install-")
writeDatabase(pluginsHome, {
version: 2,
plugins: {
"local-plugin@market": [
{
scope: "local",
projectPath: projectDirectory,
installPath,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
],
},
})
process.chdir(otherDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v2-local-miss`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(0)
})
it("#when multiple installations are present #then only the first is considered and scope filtering still applies", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-v2-multi-")
const otherDirectory = createTemporaryDirectory("omo-v2-multi-other-")
const primaryInstall = createInstallPath("omo-v2-multi-primary-")
const secondaryInstall = createInstallPath("omo-v2-multi-secondary-")
writeDatabase(pluginsHome, {
version: 2,
plugins: {
"multi-plugin@market": [
{
scope: "project",
projectPath: otherDirectory,
installPath: primaryInstall,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
{
scope: "project",
projectPath: projectDirectory,
installPath: secondaryInstall,
version: "2.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
],
},
})
process.chdir(projectDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v2-multi`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then — existing behavior keeps only the first entry; with scope filter it is
// (correctly) skipped because the first entry points at a different project.
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(0)
})
})
describe("#given project and local scoped entries in v3 flat-array format", () => {
it("#when cwd matches projectPath #then projectPath flows through and the plugin loads", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-v3-match-")
const installPath = createInstallPath("omo-v3-install-")
writeDatabase(pluginsHome, [
{
name: "v3-project-plugin",
marketplace: "market",
scope: "project",
projectPath: projectDirectory,
installPath,
version: "1.0.0",
lastUpdated: "2026-03-25T00:00:00Z",
},
])
process.chdir(projectDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v3-match`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(1)
expect(discovered.plugins[0]?.name).toBe("v3-project-plugin")
})
it("#when cwd does not match projectPath #then the plugin is skipped", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-v3-miss-")
const otherDirectory = createTemporaryDirectory("omo-v3-miss-other-")
const installPath = createInstallPath("omo-v3-install-")
writeDatabase(pluginsHome, [
{
name: "v3-skipped-plugin",
marketplace: "market",
scope: "project",
projectPath: projectDirectory,
installPath,
version: "1.0.0",
lastUpdated: "2026-03-25T00:00:00Z",
},
{
name: "v3-user-plugin",
marketplace: "market",
scope: "user",
installPath: createInstallPath("omo-v3-user-install-"),
version: "2.0.0",
lastUpdated: "2026-03-25T00:00:00Z",
},
])
process.chdir(otherDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-v3-miss`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(1)
expect(discovered.plugins[0]?.name).toBe("v3-user-plugin")
})
})
describe("#given enabledPluginsOverride combined with scope filtering", () => {
it("#when a project-scoped plugin is disabled via override #then it is still skipped even if cwd would match", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-enabled-proj-")
const installPath = createInstallPath("omo-enabled-install-")
writeDatabase(pluginsHome, {
version: 2,
plugins: {
"gated-plugin@market": [
{
scope: "project",
projectPath: projectDirectory,
installPath,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
],
},
})
process.chdir(projectDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-enabled-off`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
enabledPluginsOverride: { "gated-plugin@market": false },
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(0)
})
it("#when a project-scoped plugin is enabled and cwd matches #then it loads", async () => {
//#given
const pluginsHome = process.env.CLAUDE_PLUGINS_HOME as string
const projectDirectory = createTemporaryDirectory("omo-enabled-match-")
const installPath = createInstallPath("omo-enabled-match-install-")
writeDatabase(pluginsHome, {
version: 2,
plugins: {
"enabled-plugin@market": [
{
scope: "project",
projectPath: projectDirectory,
installPath,
version: "1.0.0",
installedAt: "2026-03-25T00:00:00Z",
lastUpdated: "2026-03-25T00:00:00Z",
},
],
},
})
process.chdir(projectDirectory)
//#when
const { discoverInstalledPlugins } = await import(`./discovery?t=${Date.now()}-enabled-on`)
const discovered = discoverInstalledPlugins({
pluginsHomeOverride: pluginsHome,
loadPluginManifestOverride: () => null,
enabledPluginsOverride: { "enabled-plugin@market": true },
})
//#then
expect(discovered.errors).toHaveLength(0)
expect(discovered.plugins).toHaveLength(1)
expect(discovered.plugins[0]?.name).toBe("enabled-plugin")
})
})
})

View File

@@ -3,6 +3,7 @@ import { homedir } from "os"
import { basename, join } from "path"
import { fileURLToPath } from "url"
import { log } from "../../shared/logger"
import { shouldLoadPluginForCwd } from "./scope-filter"
import type {
InstalledPluginsDatabase,
InstalledPluginEntryV3,
@@ -132,6 +133,7 @@ function v3EntryToInstallation(entry: InstalledPluginEntryV3): PluginInstallatio
installedAt: entry.lastUpdated,
lastUpdated: entry.lastUpdated,
gitCommitSha: entry.gitCommitSha,
projectPath: entry.projectPath,
}
}
@@ -177,6 +179,7 @@ export function discoverInstalledPlugins(options?: PluginLoaderOptions): PluginL
const settingsEnabledPlugins = settings?.enabledPlugins
const overrideEnabledPlugins = options?.enabledPluginsOverride
const pluginManifestLoader = options?.loadPluginManifestOverride ?? loadPluginManifest
const cwd = process.cwd()
for (const [pluginKey, installation] of extractPluginEntries(db)) {
if (!installation) continue
@@ -186,6 +189,14 @@ export function discoverInstalledPlugins(options?: PluginLoaderOptions): PluginL
continue
}
if (!shouldLoadPluginForCwd(installation, cwd)) {
log(`Skipping ${installation.scope}-scoped plugin outside current cwd: ${pluginKey}`, {
projectPath: installation.projectPath,
cwd,
})
continue
}
const { installPath, scope, version } = installation
if (!existsSync(installPath)) {

View File

@@ -0,0 +1,244 @@
import { afterEach, beforeEach, describe, expect, it, mock } from "bun:test"
import { mkdtempSync, rmSync } from "node:fs"
import { tmpdir } from "node:os"
import { join } from "node:path"
import { shouldLoadPluginForCwd } from "./scope-filter"
const temporaryDirectories: string[] = []

// Creates a unique temp directory under the OS temp root and records it so the
// afterEach hook can remove it after each test.
function createTemporaryDirectory(prefix: string): string {
  const createdDirectory = mkdtempSync(join(tmpdir(), prefix))
  temporaryDirectories.push(createdDirectory)
  return createdDirectory
}
describe("shouldLoadPluginForCwd", () => {
afterEach(() => {
mock.restore()
for (const directory of temporaryDirectories.splice(0)) {
rmSync(directory, { recursive: true, force: true })
}
})
describe("#given a user-scoped plugin", () => {
it("#when called with any cwd #then it loads", () => {
//#given
const installation = { scope: "user" as const }
//#when
const result = shouldLoadPluginForCwd(installation, "/tmp/anywhere")
//#then
expect(result).toBe(true)
})
})
describe("#given a managed-scoped plugin", () => {
it("#when called with any cwd #then it loads", () => {
//#given
const installation = { scope: "managed" as const }
//#when
const result = shouldLoadPluginForCwd(installation, "/tmp/anywhere")
//#then
expect(result).toBe(true)
})
})
describe("#given a project-scoped plugin without projectPath", () => {
it("#when called with any cwd #then it is skipped", () => {
//#given
const installation = { scope: "project" as const }
//#when
const result = shouldLoadPluginForCwd(installation, "/tmp/anywhere")
//#then
expect(result).toBe(false)
})
})
describe("#given a local-scoped plugin without projectPath", () => {
it("#when called with any cwd #then it is skipped", () => {
//#given
const installation = { scope: "local" as const }
//#when
const result = shouldLoadPluginForCwd(installation, "/tmp/anywhere")
//#then
expect(result).toBe(false)
})
})
describe("#given a project-scoped plugin with matching projectPath", () => {
it("#when cwd exactly matches projectPath #then it loads", () => {
//#given
const projectDirectory = createTemporaryDirectory("omo-scope-")
const installation = {
scope: "project" as const,
projectPath: projectDirectory,
}
//#when
const result = shouldLoadPluginForCwd(installation, projectDirectory)
//#then
expect(result).toBe(true)
})
it("#when cwd is a subdirectory of projectPath #then it loads", () => {
//#given
const projectDirectory = createTemporaryDirectory("omo-scope-")
const installation = {
scope: "project" as const,
projectPath: projectDirectory,
}
//#when
const result = shouldLoadPluginForCwd(installation, join(projectDirectory, "packages", "app"))
//#then
expect(result).toBe(true)
})
})
describe("#given a project-scoped plugin with non-matching projectPath", () => {
it("#when cwd is unrelated #then it is skipped", () => {
//#given
const projectDirectory = createTemporaryDirectory("omo-scope-")
const otherDirectory = createTemporaryDirectory("omo-other-")
const installation = {
scope: "project" as const,
projectPath: projectDirectory,
}
//#when
const result = shouldLoadPluginForCwd(installation, otherDirectory)
//#then
expect(result).toBe(false)
})
it("#when cwd is the parent of projectPath #then it is skipped", () => {
//#given
const projectDirectory = createTemporaryDirectory("omo-scope-")
const installation = {
scope: "project" as const,
projectPath: join(projectDirectory, "nested"),
}
//#when
const result = shouldLoadPluginForCwd(installation, projectDirectory)
//#then
expect(result).toBe(false)
})
})
describe("#given a local-scoped plugin with matching projectPath", () => {
it("#when cwd matches projectPath #then it loads", () => {
//#given
const projectDirectory = createTemporaryDirectory("omo-scope-")
const installation = {
scope: "local" as const,
projectPath: projectDirectory,
}
//#when
const result = shouldLoadPluginForCwd(installation, projectDirectory)
//#then
expect(result).toBe(true)
})
})
describe("#given a local-scoped plugin with non-matching projectPath", () => {
it("#when cwd is unrelated #then it is skipped", () => {
//#given
const projectDirectory = createTemporaryDirectory("omo-scope-")
const otherDirectory = createTemporaryDirectory("omo-other-")
const installation = {
scope: "local" as const,
projectPath: projectDirectory,
}
//#when
const result = shouldLoadPluginForCwd(installation, otherDirectory)
//#then
expect(result).toBe(false)
})
})
describe("#given a project-scoped plugin with a tilde-prefixed projectPath", () => {
let fakeHome: string
beforeEach(() => {
fakeHome = createTemporaryDirectory("omo-home-")
mock.module("node:os", () => ({
homedir: () => fakeHome,
tmpdir,
}))
mock.module("os", () => ({
homedir: () => fakeHome,
tmpdir,
}))
})
it("#when the expanded home matches cwd #then it loads", async () => {
//#given
const { shouldLoadPluginForCwd: freshShouldLoad } = await import(
`./scope-filter?t=${Date.now()}-tilde-match`
)
const installation = {
scope: "project" as const,
projectPath: "~/workspace/proj-a",
}
const cwd = join(fakeHome, "workspace", "proj-a")
//#when
const result = freshShouldLoad(installation, cwd)
//#then
expect(result).toBe(true)
})
it("#when the expanded home does not match cwd #then it is skipped", async () => {
//#given
const { shouldLoadPluginForCwd: freshShouldLoad } = await import(
`./scope-filter?t=${Date.now()}-tilde-mismatch`
)
const installation = {
scope: "project" as const,
projectPath: "~/workspace/proj-a",
}
const cwd = join(fakeHome, "workspace", "proj-b")
//#when
const result = freshShouldLoad(installation, cwd)
//#then
expect(result).toBe(false)
})
it("#when projectPath is exactly ~ and cwd equals fake home #then it loads", async () => {
//#given
const { shouldLoadPluginForCwd: freshShouldLoad } = await import(
`./scope-filter?t=${Date.now()}-tilde-root`
)
const installation = {
scope: "project" as const,
projectPath: "~",
}
//#when
const result = freshShouldLoad(installation, fakeHome)
//#then
expect(result).toBe(true)
})
})
})

View File

@@ -0,0 +1,29 @@
import { homedir } from "os"
import { join } from "path"
import { containsPath } from "../../shared/contains-path"
import type { PluginInstallation } from "./types"
/**
 * Expands a leading `~` in a path to the current user's home directory.
 * A bare `~` becomes the home directory itself; `~/…` (or `~\…` on Windows)
 * is joined onto it. Any other path is returned unchanged.
 */
function expandTilde(inputPath: string): string {
  if (inputPath === "~") return homedir()
  const hasTildePrefix = inputPath.startsWith("~/") || inputPath.startsWith("~\\")
  return hasTildePrefix ? join(homedir(), inputPath.slice(2)) : inputPath
}
/**
 * Decides whether a plugin installation applies to the current working directory.
 *
 * Scopes other than "project" and "local" are global, so they always load.
 * A project/local installation loads only when `cwd` sits inside the recorded
 * `projectPath` (after `~` expansion); entries missing a projectPath are skipped.
 */
export function shouldLoadPluginForCwd(
  installation: Pick<PluginInstallation, "scope" | "projectPath">,
  cwd: string = process.cwd(),
): boolean {
  const isDirectoryScoped = installation.scope === "project" || installation.scope === "local"
  if (!isDirectoryScoped) return true
  const { projectPath } = installation
  return projectPath ? containsPath(expandTilde(projectPath), cwd) : false
}

View File

@@ -18,6 +18,12 @@ export interface PluginInstallation {
lastUpdated: string
gitCommitSha?: string
isLocal?: boolean
/**
* Claude Code records this on project/local-scoped installations.
* Absolute path (or `~`-prefixed) of the project the plugin was installed for.
* Used to filter project/local plugins that do not belong to the current cwd.
*/
projectPath?: string
}
/**
@@ -51,6 +57,11 @@ export interface InstalledPluginEntryV3 {
installPath: string
lastUpdated: string
gitCommitSha?: string
/**
* Claude Code records this on project/local-scoped installations.
* Absolute path (or `~`-prefixed) of the project the plugin was installed for.
*/
projectPath?: string
}
/**

View File

@@ -10,6 +10,7 @@ import {
getMainSessionID,
registerAgentName,
isAgentRegistered,
resolveRegisteredAgentName,
_resetForTesting,
} from "./state"
@@ -140,6 +141,15 @@ describe("claude-code-session-state", () => {
expect(isAgentRegistered("Atlas - Plan Executor")).toBe(true)
})
test("should resolve config keys back to the registered raw agent name", () => {
// given
registerAgentName("\u200B\u200B\u200B\u200BAtlas - Plan Executor")
// when / then
expect(resolveRegisteredAgentName("atlas")).toBe("\u200B\u200B\u200B\u200BAtlas - Plan Executor")
expect(resolveRegisteredAgentName("Atlas - Plan Executor")).toBe("\u200B\u200B\u200B\u200BAtlas - Plan Executor")
})
describe("#given atlas display name with zero-width prefix", () => {
describe("#when checking registration without the zero-width prefix", () => {
test("#then it treats the display name as registered", () => {

View File

@@ -14,6 +14,7 @@ export function getMainSessionID(): string | undefined {
}
const registeredAgentNames = new Set<string>()
const registeredAgentAliases = new Map<string, string>()
const ZERO_WIDTH_CHARACTERS_REGEX = /[\u200B\u200C\u200D\uFEFF]/g
@@ -28,10 +29,16 @@ function normalizeStoredAgentName(name: string): string {
export function registerAgentName(name: string): void {
const normalizedName = normalizeRegisteredAgentName(name)
registeredAgentNames.add(normalizedName)
if (!registeredAgentAliases.has(normalizedName)) {
registeredAgentAliases.set(normalizedName, name)
}
const configKey = normalizeRegisteredAgentName(getAgentConfigKey(name))
if (configKey !== normalizedName) {
registeredAgentNames.add(configKey)
if (!registeredAgentAliases.has(configKey)) {
registeredAgentAliases.set(configKey, name)
}
}
}
@@ -39,6 +46,15 @@ export function isAgentRegistered(name: string): boolean {
return registeredAgentNames.has(normalizeRegisteredAgentName(name))
}
export function resolveRegisteredAgentName(name: string | undefined): string | undefined {
if (typeof name !== "string") {
return undefined
}
const normalizedName = normalizeRegisteredAgentName(name)
return registeredAgentAliases.get(normalizedName) ?? normalizeStoredAgentName(name)
}
/** @internal For testing only */
export function _resetForTesting(): void {
_mainSessionID = undefined
@@ -46,6 +62,7 @@ export function _resetForTesting(): void {
syncSubagentSessions.clear()
sessionAgentMap.clear()
registeredAgentNames.clear()
registeredAgentAliases.clear()
}
const sessionAgentMap = new Map<string, string>()

View File

@@ -11,6 +11,7 @@ import {
generatePartId,
injectHookMessage,
} from "./injector"
import { PART_STORAGE } from "../../shared"
import { isSqliteBackend, resetSqliteBackendCache } from "../../shared/opencode-storage-detection"
//#region Mocks
@@ -53,6 +54,7 @@ function createMockClient(messages: Array<{
tools?: Record<string, boolean>
time?: { created?: number }
}
parts?: Array<{ type?: string }>
}>): {
session: {
messages: (opts: { path: { id: string } }) => Promise<{ data: typeof messages }>
@@ -176,6 +178,24 @@ describe("findNearestMessageWithFieldsFromSDK", () => {
expect(result?.agent).toBe("newest-by-time")
})
it("skips compaction marker user messages when resolving nearest message", async () => {
const mockClient = createMockClient([
{
id: "msg_compaction",
info: { agent: "atlas", model: { providerID: "openai", modelID: "gpt-5" }, time: { created: 200 } },
parts: [{ type: "compaction" }],
},
{
id: "msg_real",
info: { agent: "sisyphus", model: { providerID: "anthropic", modelID: "claude-opus-4" }, time: { created: 100 } },
},
])
const result = await findNearestMessageWithFieldsFromSDK(mockClient as any, "ses_123")
expect(result?.agent).toBe("sisyphus")
})
})
describe("findNearestMessageWithFields JSON backend ordering", () => {
@@ -197,6 +217,34 @@ describe("findNearestMessageWithFields JSON backend ordering", () => {
expect(result?.agent).toBe("newest-by-time")
})
it("skips JSON messages whose parts contain a compaction marker", () => {
mockIsSqliteBackend.mockReturnValue(false)
const messageDir = createMessageDir()
const compactionMessageID = "msg_test_injector_compaction_marker"
const partDir = join(PART_STORAGE, compactionMessageID)
tempDirs.push(partDir)
writeFileSync(join(messageDir, "msg_0001.json"), JSON.stringify({
id: compactionMessageID,
agent: "atlas",
model: { providerID: "openai", modelID: "gpt-5" },
time: { created: 200 },
}))
mkdirSync(partDir, { recursive: true })
writeFileSync(join(partDir, "prt_0001.json"), JSON.stringify({ type: "compaction" }))
writeFileSync(join(messageDir, "msg_0002.json"), JSON.stringify({
id: "msg_0002",
agent: "sisyphus",
model: { providerID: "anthropic", modelID: "claude-opus-4" },
time: { created: 100 },
}))
const result = findNearestMessageWithFields(messageDir)
expect(result?.agent).toBe("sisyphus")
})
})
describe("findFirstMessageWithAgentFromSDK", () => {
@@ -222,6 +270,17 @@ describe("findFirstMessageWithAgentFromSDK", () => {
expect(result).toBe("earliest-agent")
})
it("skips compaction marker user messages when resolving first agent", async () => {
const mockClient = createMockClient([
{ id: "msg_compaction", info: { agent: "atlas", time: { created: 10 } }, parts: [{ type: "compaction" }] },
{ id: "msg_real", info: { agent: "sisyphus", time: { created: 20 } } },
])
const result = await findFirstMessageWithAgentFromSDK(mockClient as any, "ses_123")
expect(result).toBe("sisyphus")
})
it("skips messages without agent field", async () => {
const mockClient = createMockClient([
{ info: {} },

View File

@@ -7,6 +7,7 @@ import type { MessageMeta, OriginalMessageContext, TextPart, ToolPermission } fr
import { log } from "../../shared/logger"
import { isSqliteBackend } from "../../shared/opencode-storage-detection"
import { createInternalAgentTextPart, normalizeSDKResponse } from "../../shared"
import { hasCompactionPartInStorage, isCompactionMessage } from "../../shared/compaction-marker"
export interface StoredMessage {
agent?: string
@@ -32,6 +33,7 @@ interface SDKMessage {
created?: number
}
}
parts?: Array<{ type?: string }>
}
const processPrefix = randomBytes(4).toString("hex")
@@ -39,6 +41,10 @@ let messageCounter = 0
let partCounter = 0
function convertSDKMessageToStoredMessage(msg: SDKMessage): StoredMessage | null {
if (isCompactionMessage(msg)) {
return null
}
const info = msg.info
if (!info) return null
@@ -164,22 +170,38 @@ export function findNearestMessageWithFields(messageDir: string): StoredMessage
return {
fileName,
msg,
hasCompactionMarker: hasCompactionPartInStorage(
typeof (msg as { id?: unknown }).id === "string" ? (msg as { id?: string }).id : undefined,
),
createdAt: typeof msg.time?.created === "number" ? msg.time.created : Number.NEGATIVE_INFINITY,
}
} catch {
return null
}
})
.filter((entry): entry is { fileName: string; msg: StoredMessage & { time?: { created?: number } }; createdAt: number } => entry !== null)
.filter((entry): entry is {
fileName: string
msg: StoredMessage & { time?: { created?: number } }
hasCompactionMarker: boolean
createdAt: number
} => entry !== null)
.sort((left, right) => right.createdAt - left.createdAt || right.fileName.localeCompare(left.fileName))
for (const entry of messages) {
if (entry.hasCompactionMarker || isCompactionMessage({ agent: entry.msg.agent })) {
continue
}
if (entry.msg.agent && entry.msg.model?.providerID && entry.msg.model?.modelID) {
return entry.msg
}
}
for (const entry of messages) {
if (entry.hasCompactionMarker || isCompactionMessage({ agent: entry.msg.agent })) {
continue
}
if (entry.msg.agent || (entry.msg.model?.providerID && entry.msg.model?.modelID)) {
return entry.msg
}
@@ -216,16 +238,28 @@ export function findFirstMessageWithAgent(messageDir: string): string | null {
return {
fileName,
msg,
hasCompactionMarker: hasCompactionPartInStorage(
typeof (msg as { id?: unknown }).id === "string" ? (msg as { id?: string }).id : undefined,
),
createdAt: typeof msg.time?.created === "number" ? msg.time.created : Number.POSITIVE_INFINITY,
}
} catch {
return null
}
})
.filter((entry): entry is { fileName: string; msg: StoredMessage & { time?: { created?: number } }; createdAt: number } => entry !== null)
.filter((entry): entry is {
fileName: string
msg: StoredMessage & { time?: { created?: number } }
hasCompactionMarker: boolean
createdAt: number
} => entry !== null)
.sort((left, right) => left.createdAt - right.createdAt || left.fileName.localeCompare(right.fileName))
for (const entry of messages) {
if (entry.hasCompactionMarker || isCompactionMessage({ agent: entry.msg.agent })) {
continue
}
if (entry.msg.agent) {
return entry.msg.agent
}

View File

@@ -0,0 +1,58 @@
import type { OAuthTokenData } from "./storage"
/**
 * Per-server OAuth refresh mutex to prevent concurrent refresh race conditions.
 *
 * When multiple operations need to refresh a token for the same server,
 * this ensures only one refresh request is made and all waiters receive
 * the same result.
 */
const ongoingRefreshes = new Map<string, Promise<unknown>>()

/**
 * Execute a token refresh with per-server mutual exclusion.
 *
 * If a refresh is already in progress for the given server, this will
 * return the same promise to all concurrent callers. Once the refresh
 * completes (success or failure), the lock is released.
 *
 * Generalized over the result type `T` (e.g. `OAuthTokenData`) so the mutex is
 * not coupled to one token shape; existing callers infer `T` from `refreshFn`
 * and are unaffected.
 *
 * @param serverUrl - The OAuth server URL (used as mutex key)
 * @param refreshFn - The actual refresh operation to execute
 * @returns Promise that resolves to the new token data
 */
export async function withRefreshMutex<T>(
  serverUrl: string,
  refreshFn: () => Promise<T>,
): Promise<T> {
  const existing = ongoingRefreshes.get(serverUrl)
  if (existing) {
    // Concurrent callers for the same server all await the one in-flight refresh.
    // The cast is safe: only withRefreshMutex writes to the map, keyed per server.
    return existing as Promise<T>
  }
  const refreshPromise = refreshFn().finally(() => {
    // Release the lock whether the refresh succeeded or failed.
    ongoingRefreshes.delete(serverUrl)
  })
  ongoingRefreshes.set(serverUrl, refreshPromise)
  return refreshPromise
}

/**
 * Check if a refresh is currently in progress for a server.
 *
 * @param serverUrl - The OAuth server URL
 * @returns true if a refresh operation is active
 */
export function isRefreshInProgress(serverUrl: string): boolean {
  return ongoingRefreshes.has(serverUrl)
}

/**
 * Get the number of servers currently undergoing token refresh.
 *
 * @returns Number of active refresh operations
 */
export function getActiveRefreshCount(): number {
  return ongoingRefreshes.size
}

View File

@@ -1,4 +1,4 @@
import { chmodSync, existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "node:fs"
import { chmodSync, existsSync, mkdirSync, readFileSync, renameSync, unlinkSync, writeFileSync } from "node:fs"
import { dirname, join } from "node:path"
import { getOpenCodeConfigDir } from "../../shared"
@@ -82,8 +82,10 @@ function writeStore(store: TokenStore): boolean {
mkdirSync(dir, { recursive: true })
}
writeFileSync(filePath, JSON.stringify(store, null, 2), { encoding: "utf-8", mode: 0o600 })
chmodSync(filePath, 0o600)
const tempPath = `${filePath}.tmp.${Date.now()}`
writeFileSync(tempPath, JSON.stringify(store, null, 2), { encoding: "utf-8", mode: 0o600 })
chmodSync(tempPath, 0o600)
renameSync(tempPath, filePath)
return true
} catch {
return false

View File

@@ -1,4 +1,4 @@
import { afterAll, afterEach, beforeEach, describe, expect, it, mock } from "bun:test"
import { afterAll, afterEach, beforeEach, describe, expect, it, mock, test } from "bun:test"
import type { ClaudeCodeMcpServer } from "../claude-code-mcp-loader/types"
import type { SkillMcpClientInfo, SkillMcpManagerState } from "./types"
@@ -89,11 +89,15 @@ function createState(): SkillMcpManagerState {
return state
}
function createClientInfo(serverName: string): SkillMcpClientInfo {
function createClientInfo(
serverName: string,
scope?: SkillMcpClientInfo["scope"],
): SkillMcpClientInfo {
return {
serverName,
skillName: "env-skill",
sessionID: "session-env",
...(scope !== undefined ? { scope } : {}),
}
}
@@ -125,6 +129,68 @@ afterEach(async () => {
})
describe("getOrCreateClient env var expansion", () => {
describe("#given a scope-sensitive stdio skill MCP config", () => {
test.each([
["opencode-project", "Authorization:Bearer "],
["local", "Authorization:Bearer "],
["user", "Authorization:Bearer xoxp-scope-token"],
["builtin", "Authorization:Bearer xoxp-scope-token"],
] satisfies Array<[NonNullable<SkillMcpClientInfo["scope"]>, string]>) (
"#when creating the client for %s scope #then args expand to %s",
async (scope, expectedAuthorizationHeader) => {
// given
process.env.SLACK_USER_TOKEN = "xoxp-scope-token"
const state = createState()
const info = createClientInfo(`scope-${scope}`, scope)
const clientKey = createClientKey(info)
const config: ClaudeCodeMcpServer = {
command: "npx",
args: [
"-y",
"mcp-remote",
"https://mcp.slack.com/mcp",
"--header",
"Authorization:Bearer ${SLACK_USER_TOKEN}",
],
}
// when
await getOrCreateClient({ state, clientKey, info, config })
// then
expect(createdStdioTransports).toHaveLength(1)
expect(createdStdioTransports[0]?.options.args?.[4]).toBe(expectedAuthorizationHeader)
},
)
it("#when creating the client without scope #then env vars remain trusted for backward compatibility", async () => {
// given
process.env.SLACK_USER_TOKEN = "xoxp-undefined-scope-token"
const state = createState()
const info = createClientInfo("scope-undefined")
const clientKey = createClientKey(info)
const config: ClaudeCodeMcpServer = {
command: "npx",
args: [
"-y",
"mcp-remote",
"https://mcp.slack.com/mcp",
"--header",
"Authorization:Bearer ${SLACK_USER_TOKEN}",
],
}
// when
await getOrCreateClient({ state, clientKey, info, config })
// then
expect(createdStdioTransports).toHaveLength(1)
expect(createdStdioTransports[0]?.options.args?.[4]).toBe(
"Authorization:Bearer xoxp-undefined-scope-token",
)
})
})
describe("#given a stdio skill MCP config with sensitive env vars in args", () => {
it("#when creating the client #then sensitive env vars in args are expanded", async () => {
// given

View File

@@ -95,6 +95,7 @@ function createClientInfo(sessionID: string): SkillMcpClientInfo {
serverName: "race-server",
skillName: "race-skill",
sessionID,
scope: "builtin",
}
}

View File

@@ -14,6 +14,8 @@ function removeClientIfCurrent(state: SkillMcpManagerState, clientKey: string, c
}
}
const PROJECT_SCOPES = new Set(["project", "opencode-project", "local"])
export async function getOrCreateClient(params: {
state: SkillMcpManagerState
clientKey: string
@@ -38,7 +40,8 @@ export async function getOrCreateClient(params: {
return pending
}
const expandedConfig = expandEnvVarsInObject(config, { trusted: true })
const isTrusted = !PROJECT_SCOPES.has(info.scope ?? "")
const expandedConfig = expandEnvVarsInObject(config, { trusted: isTrusted })
let currentConnectionPromise!: Promise<Client>
state.inFlightConnections.set(info.sessionID, (state.inFlightConnections.get(info.sessionID) ?? 0) + 1)
currentConnectionPromise = (async () => {

View File

@@ -0,0 +1,47 @@
// Redacts sensitive tokens from error messages to prevent credential exposure
// Follows same patterns as env-cleaner.ts for consistency

const SENSITIVE_PATTERNS: RegExp[] = [
  // API keys and tokens in key[:=]value form (the whole key=value span is removed)
  /[a-zA-Z0-9_-]*(?:api[_-]?key|apikey)["\s]*[:=]["\s]*([a-zA-Z0-9_-]{16,})/gi,
  /[a-zA-Z0-9_-]*(?:auth[_-]?token|authtoken)["\s]*[:=]["\s]*([a-zA-Z0-9_-]{16,})/gi,
  /[a-zA-Z0-9_-]*(?:access[_-]?token|accesstoken)["\s]*[:=]["\s]*([a-zA-Z0-9_-]{16,})/gi,
  /[a-zA-Z0-9_-]*(?:secret)["\s]*[:=]["\s]*([a-zA-Z0-9_-]{16,})/gi,
  /[a-zA-Z0-9_-]*(?:password)["\s]*[:=]["\s]*([a-zA-Z0-9_-]{8,})/gi,
  // Bearer tokens
  /bearer\s+([a-zA-Z0-9_-]{20,})/gi,
  // Common token prefixes
  /sk-[a-zA-Z0-9]{20,}/g, // OpenAI-style secret keys
  /gh[pousr]_[a-zA-Z0-9]{20,}/gi, // GitHub tokens
  /glpat-[a-zA-Z0-9_-]{20,}/gi, // GitLab tokens
  /[A-Za-z0-9_]{20,}-[A-Za-z0-9_]{10,}-[A-Za-z0-9_]{10,}/g, // long dash-separated token-like strings (e.g. Slack-style); note: NOT JWTs, which are dot-separated
]

const REDACTION_MARKER = "[REDACTED]"

/**
 * Redacts sensitive tokens from a string.
 * Used for error messages that may contain command-line arguments or environment info.
 *
 * @param input - Arbitrary text that may embed credentials
 * @returns The text with every SENSITIVE_PATTERNS match replaced by "[REDACTED]"
 */
export function redactSensitiveData(input: string): string {
  let result = input
  for (const pattern of SENSITIVE_PATTERNS) {
    result = result.replace(pattern, REDACTION_MARKER)
  }
  return result
}

/**
 * Redacts sensitive data from an Error object, returning a new Error.
 * Preserves the error's name and stack trace but redacts the message
 * (and the stack text, which repeats the message).
 *
 * @param error - The error whose message/stack may contain credentials
 * @returns A new Error with redacted message and stack
 */
export function redactErrorSensitiveData(error: Error): Error {
  const redactedMessage = redactSensitiveData(error.message)
  const redactedError = new Error(redactedMessage)
  // Keep the original name so subclass identity (e.g. "TypeError") survives
  // redaction instead of collapsing to a plain "Error" in logs.
  redactedError.name = error.name
  redactedError.stack = error.stack ? redactSensitiveData(error.stack) : undefined
  return redactedError
}

View File

@@ -0,0 +1,162 @@
import { afterAll, beforeEach, describe, expect, it, mock } from "bun:test"
import type { ClaudeCodeMcpServer } from "../claude-code-mcp-loader/types"
import type { OAuthTokenData } from "../mcp-oauth/storage"
import type { SkillMcpClientInfo, SkillMcpServerContext } from "./types"
// Connection-layer mocks: the retry path under test must never fall back to
// the plain (non-retry) client factory, so it throws if touched.
const mockGetOrCreateClient = mock(async () => {
  throw new Error("not used")
})
// Default retry-capable client; individual tests override it via mockResolvedValue.
const mockGetOrCreateClientWithRetryImpl = mock(async () => ({
  callTool: mock(async () => ({ content: [{ type: "text", text: "unused" }] })),
  close: mock(async () => {}),
}))
type ManagerModule = typeof import("./manager")

// Re-imports ./manager with a unique cache-busting query string so each test
// gets a fresh module instance with the mocks below already registered.
// NOTE: mock.module must run before the dynamic import for the mocks to apply.
async function importFreshManagerModule(): Promise<ManagerModule> {
  mock.module("./connection", () => ({
    getOrCreateClient: mockGetOrCreateClient,
    getOrCreateClientWithRetryImpl: mockGetOrCreateClientWithRetryImpl,
  }))
  mock.module("../mcp-oauth/provider", () => ({
    McpOAuthProvider: class MockMcpOAuthProvider {},
  }))
  return await import(new URL(`./manager.ts?oauth-retry-test=${Date.now()}-${Math.random()}`, import.meta.url).href)
}
/** Builds the canonical client identity shared by every OAuth retry test case. */
function createInfo(): SkillMcpClientInfo {
  const serverName = "oauth-server"
  const skillName = "oauth-skill"
  return { serverName, skillName, sessionID: "session-1", scope: "builtin" }
}
/** Builds an OAuth-enabled HTTP server context for the OAuth retry tests. */
function createContext(): SkillMcpServerContext {
  const config = {
    url: "https://mcp.example.com/mcp",
    oauth: { clientId: "test-client" },
  } satisfies ClaudeCodeMcpServer
  return { skillName: "oauth-skill", config }
}
// Restore all module mocks once this file finishes so other test files
// importing ./connection or ../mcp-oauth/provider are unaffected.
afterAll(() => {
  mock.restore()
})
// Verifies the manager's post-request OAuth retry loop: a 401/403 raised by a
// tool call should trigger exactly one token refresh, followed by exactly one
// retry of the failed operation.
describe("SkillMcpManager post-request OAuth retry", () => {
  beforeEach(() => {
    // Reset shared connection mocks so call counts are per-test.
    mockGetOrCreateClient.mockClear()
    mockGetOrCreateClientWithRetryImpl.mockClear()
  })

  it("retries the operation after a 401 refresh succeeds", async () => {
    // given
    const { SkillMcpManager } = await importFreshManagerModule()
    const refresh = mock(async () => ({ accessToken: "refreshed-token" } satisfies OAuthTokenData))
    const manager = new SkillMcpManager({
      createOAuthProvider: () => ({
        tokens: () => ({ accessToken: "stale-token", refreshToken: "refresh-token" }),
        login: mock(async () => ({ accessToken: "login-token" } satisfies OAuthTokenData)),
        refresh,
      }),
    })
    // Fails with 401 on the first invocation only.
    // NOTE(review): assumes bun records a call before running its implementation,
    // so calls.length === 1 inside the body means "first invocation" — confirm.
    const callTool = mock(async () => {
      if (callTool.mock.calls.length === 1) {
        throw new Error("401 Unauthorized")
      }
      return { content: [{ type: "text", text: "success" }] }
    })
    mockGetOrCreateClientWithRetryImpl.mockResolvedValue({ callTool, close: mock(async () => {}) })
    // when
    const result = await manager.callTool(createInfo(), createContext(), "test-tool", {})
    // then — refresh ran once and the retried call succeeded
    expect(result).toEqual([{ type: "text", text: "success" }])
    expect(refresh).toHaveBeenCalledTimes(1)
    expect(callTool).toHaveBeenCalledTimes(2)
  })

  it("retries the operation after a 403 refresh succeeds without step-up scope", async () => {
    // given
    const { SkillMcpManager } = await importFreshManagerModule()
    const refresh = mock(async () => ({ accessToken: "refreshed-token" } satisfies OAuthTokenData))
    const manager = new SkillMcpManager({
      createOAuthProvider: () => ({
        tokens: () => ({ accessToken: "stale-token", refreshToken: "refresh-token" }),
        login: mock(async () => ({ accessToken: "login-token" } satisfies OAuthTokenData)),
        refresh,
      }),
    })
    // A 403 without a step-up challenge follows the same refresh-and-retry path.
    const callTool = mock(async () => {
      if (callTool.mock.calls.length === 1) {
        throw new Error("403 Forbidden")
      }
      return { content: [{ type: "text", text: "success" }] }
    })
    mockGetOrCreateClientWithRetryImpl.mockResolvedValue({ callTool, close: mock(async () => {}) })
    // when
    const result = await manager.callTool(createInfo(), createContext(), "test-tool", {})
    // then
    expect(result).toEqual([{ type: "text", text: "success" }])
    expect(refresh).toHaveBeenCalledTimes(1)
    expect(callTool).toHaveBeenCalledTimes(2)
  })

  it("propagates the auth error without retry when refresh fails", async () => {
    // given
    const { SkillMcpManager } = await importFreshManagerModule()
    const refresh = mock(async () => {
      throw new Error("refresh failed")
    })
    const manager = new SkillMcpManager({
      createOAuthProvider: () => ({
        tokens: () => ({ accessToken: "stale-token", refreshToken: "refresh-token" }),
        login: mock(async () => ({ accessToken: "login-token" } satisfies OAuthTokenData)),
        refresh,
      }),
    })
    const callTool = mock(async () => {
      throw new Error("401 Unauthorized")
    })
    mockGetOrCreateClientWithRetryImpl.mockResolvedValue({ callTool, close: mock(async () => {}) })
    // when / then — a failed refresh surfaces the original 401; no second call
    await expect(manager.callTool(createInfo(), createContext(), "test-tool", {})).rejects.toThrow("401 Unauthorized")
    expect(refresh).toHaveBeenCalledTimes(1)
    expect(callTool).toHaveBeenCalledTimes(1)
  })

  it("only attempts one refresh when the retried operation returns 401 again", async () => {
    // given
    const { SkillMcpManager } = await importFreshManagerModule()
    const refresh = mock(async () => ({ accessToken: "refreshed-token" } satisfies OAuthTokenData))
    const manager = new SkillMcpManager({
      createOAuthProvider: () => ({
        tokens: () => ({ accessToken: "stale-token", refreshToken: "refresh-token" }),
        login: mock(async () => ({ accessToken: "login-token" } satisfies OAuthTokenData)),
        refresh,
      }),
    })
    // Always 401: the refresh must not loop; exactly one retry happens.
    const callTool = mock(async () => {
      throw new Error("401 Unauthorized")
    })
    mockGetOrCreateClientWithRetryImpl.mockResolvedValue({ callTool, close: mock(async () => {}) })
    // when / then
    await expect(manager.callTool(createInfo(), createContext(), "test-tool", {})).rejects.toThrow("401 Unauthorized")
    expect(refresh).toHaveBeenCalledTimes(1)
    expect(callTool).toHaveBeenCalledTimes(2)
  })
})

View File

@@ -65,6 +65,7 @@ describe("SkillMcpManager", () => {
serverName: "test-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {}
@@ -80,6 +81,7 @@ describe("SkillMcpManager", () => {
serverName: "my-mcp",
skillName: "data-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {}
@@ -95,6 +97,7 @@ describe("SkillMcpManager", () => {
serverName: "custom-server",
skillName: "custom-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {}
@@ -112,6 +115,7 @@ describe("SkillMcpManager", () => {
serverName: "http-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
type: "http",
@@ -130,6 +134,7 @@ describe("SkillMcpManager", () => {
serverName: "sse-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
type: "sse",
@@ -148,6 +153,7 @@ describe("SkillMcpManager", () => {
serverName: "inferred-http",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://example.com/mcp",
@@ -165,6 +171,7 @@ describe("SkillMcpManager", () => {
serverName: "stdio-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
type: "stdio",
@@ -184,6 +191,7 @@ describe("SkillMcpManager", () => {
serverName: "inferred-stdio",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
command: "node",
@@ -202,6 +210,7 @@ describe("SkillMcpManager", () => {
serverName: "mixed-config",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
type: "stdio",
@@ -224,6 +233,7 @@ describe("SkillMcpManager", () => {
serverName: "bad-url-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
type: "http",
@@ -242,6 +252,7 @@ describe("SkillMcpManager", () => {
serverName: "http-error-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://nonexistent.example.com/mcp",
@@ -259,6 +270,7 @@ describe("SkillMcpManager", () => {
serverName: "hint-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://nonexistent.example.com/mcp",
@@ -276,6 +288,7 @@ describe("SkillMcpManager", () => {
serverName: "mock-test-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://example.com/mcp",
@@ -302,6 +315,7 @@ describe("SkillMcpManager", () => {
serverName: "missing-command",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
type: "stdio",
@@ -320,6 +334,7 @@ describe("SkillMcpManager", () => {
serverName: "test-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
command: "nonexistent-command-xyz",
@@ -338,6 +353,7 @@ describe("SkillMcpManager", () => {
serverName: "test-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
command: "nonexistent-command",
@@ -358,11 +374,13 @@ describe("SkillMcpManager", () => {
serverName: "server1",
skillName: "skill1",
sessionID: "session-1",
scope: "builtin",
}
const session2Info: SkillMcpClientInfo = {
serverName: "server1",
skillName: "skill1",
sessionID: "session-2",
scope: "builtin",
}
// when
@@ -396,6 +414,7 @@ describe("SkillMcpManager", () => {
serverName: "signal-server",
skillName: "signal-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://example.com/mcp",
@@ -423,11 +442,12 @@ describe("SkillMcpManager", () => {
describe("isConnected", () => {
it("returns false for unconnected server", () => {
// given
const info: SkillMcpClientInfo = {
serverName: "unknown",
skillName: "test",
sessionID: "session-1",
}
const info: SkillMcpClientInfo = {
  serverName: "unknown",
  skillName: "test",
  sessionID: "session-1",
  scope: "builtin",
}
// when / then
expect(manager.isConnected(info)).toBe(false)
@@ -448,6 +468,7 @@ describe("SkillMcpManager", () => {
serverName: "test-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const configWithoutEnv: ClaudeCodeMcpServer = {
command: "node",
@@ -471,6 +492,7 @@ describe("SkillMcpManager", () => {
serverName: "test-server",
skillName: "test-skill",
sessionID: "session-2",
scope: "builtin",
}
const configWithEnv: ClaudeCodeMcpServer = {
command: "node",
@@ -498,6 +520,7 @@ describe("SkillMcpManager", () => {
serverName: "auth-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://example.com/mcp",
@@ -526,6 +549,7 @@ describe("SkillMcpManager", () => {
serverName: "no-auth-server",
skillName: "test-skill",
sessionID: "session-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://example.com/mcp",
@@ -546,6 +570,7 @@ describe("SkillMcpManager", () => {
serverName: "retry-server",
skillName: "retry-skill",
sessionID: "session-retry-1",
scope: "builtin",
}
const context: SkillMcpServerContext = {
config: {
@@ -584,6 +609,7 @@ describe("SkillMcpManager", () => {
serverName: "fail-server",
skillName: "fail-skill",
sessionID: "session-fail-1",
scope: "builtin",
}
const context: SkillMcpServerContext = {
config: {
@@ -615,6 +641,7 @@ describe("SkillMcpManager", () => {
serverName: "error-server",
skillName: "error-skill",
sessionID: "session-error-1",
scope: "builtin",
}
const context: SkillMcpServerContext = {
config: {
@@ -653,6 +680,7 @@ describe("SkillMcpManager", () => {
serverName: "oauth-server",
skillName: "oauth-skill",
sessionID: "session-oauth-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://mcp.example.com/mcp",
@@ -679,6 +707,7 @@ describe("SkillMcpManager", () => {
serverName: "oauth-no-token",
skillName: "oauth-skill",
sessionID: "session-oauth-2",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://mcp.example.com/mcp",
@@ -705,6 +734,7 @@ describe("SkillMcpManager", () => {
serverName: "oauth-with-headers",
skillName: "oauth-skill",
sessionID: "session-oauth-3",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://mcp.example.com/mcp",
@@ -734,6 +764,7 @@ describe("SkillMcpManager", () => {
serverName: "oauth-refresh",
skillName: "oauth-skill",
sessionID: "session-oauth-refresh",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://mcp.example.com/mcp",
@@ -766,6 +797,7 @@ describe("SkillMcpManager", () => {
serverName: "oauth-refresh-fallback",
skillName: "oauth-skill",
sessionID: "session-oauth-refresh-fallback",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://mcp.example.com/mcp",
@@ -799,6 +831,7 @@ describe("SkillMcpManager", () => {
serverName: "no-oauth-server",
skillName: "test-skill",
sessionID: "session-no-oauth",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://mcp.example.com/mcp",
@@ -824,6 +857,7 @@ describe("SkillMcpManager", () => {
serverName: "stepup-server",
skillName: "stepup-skill",
sessionID: "session-stepup-1",
scope: "builtin",
}
const config: ClaudeCodeMcpServer = {
url: "https://mcp.example.com/mcp",
@@ -869,6 +903,7 @@ describe("SkillMcpManager", () => {
serverName: "no-stepup-server",
skillName: "no-stepup-skill",
sessionID: "session-no-stepup",
scope: "builtin",
}
const context: SkillMcpServerContext = {
config: {

View File

@@ -4,7 +4,7 @@ import type { ClaudeCodeMcpServer } from "../claude-code-mcp-loader/types"
import { McpOAuthProvider } from "../mcp-oauth/provider"
import { disconnectAll, disconnectSession, forceReconnect } from "./cleanup"
import { getOrCreateClient, getOrCreateClientWithRetryImpl } from "./connection"
import { handleStepUpIfNeeded } from "./oauth-handler"
import { handlePostRequestAuthError, handleStepUpIfNeeded } from "./oauth-handler"
import type {
OAuthProviderFactory,
SkillMcpClientInfo,
@@ -110,6 +110,7 @@ export class SkillMcpManager {
): Promise<T> {
const maxRetries = 3
let lastError: Error | null = null
const refreshAttempted = new Set<string>()
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
@@ -130,6 +131,17 @@ export class SkillMcpManager {
continue
}
const postRequestRefreshHandled = await handlePostRequestAuthError({
error: lastError,
config,
authProviders: this.state.authProviders,
createOAuthProvider: this.state.createOAuthProvider,
refreshAttempted,
})
if (postRequestRefreshHandled) {
continue
}
if (!errorMessage.includes("not connected")) {
throw lastError
}

View File

@@ -0,0 +1,141 @@
import { describe, expect, it, mock } from "bun:test"
import type { ClaudeCodeMcpServer } from "../claude-code-mcp-loader/types"
import type { OAuthTokenData } from "../mcp-oauth/storage"
import type { OAuthProviderFactory, OAuthProviderLike } from "./types"
type OAuthHandlerModule = typeof import("./oauth-handler")

// Re-imports ./oauth-handler with a cache-busting query so every test sees a
// fresh module (and therefore fresh refresh-mutex state) with the OAuth
// provider module mocked out.
async function importFreshOAuthHandlerModule(): Promise<OAuthHandlerModule> {
  mock.module("../mcp-oauth/provider", () => ({
    McpOAuthProvider: class MockMcpOAuthProvider {},
  }))
  return await import(new URL(`./oauth-handler.ts?oauth-handler-test=${Date.now()}-${Math.random()}`, import.meta.url).href)
}
// Minimal deferred: exposes a promise together with its resolve function so a
// test can hold a refresh in flight and complete it on demand.
type Deferred<TValue> = {
  promise: Promise<TValue>
  resolve: (value: TValue) => void
}

function createDeferred<TValue>(): Deferred<TValue> {
  let capturedResolve: ((value: TValue) => void) | null = null
  const promise = new Promise<TValue>((executorResolve) => {
    capturedResolve = executorResolve
  })
  // The Promise executor runs synchronously, so this branch can never fire; it
  // exists to satisfy strict null checking.
  if (!capturedResolve) {
    throw new Error("Failed to create deferred promise")
  }
  return { promise, resolve: capturedResolve }
}
/** Builds a minimal OAuth-enabled server config pointing at `serverUrl`. */
function createConfig(serverUrl: string): ClaudeCodeMcpServer {
  const oauth = { clientId: "test-client" }
  return { url: serverUrl, oauth }
}
// Verifies that the refresh mutex is wired into both the pre-request token
// check (buildHttpRequestInit) and the post-request 401/403 handler: same-URL
// refreshes are deduplicated, different URLs run independently, and the lock
// is released after completion.
describe("oauth-handler refresh mutex wiring", () => {
  it("deduplicates concurrent pre-request refresh attempts for the same server", async () => {
    // given
    const { buildHttpRequestInit } = await importFreshOAuthHandlerModule()
    const deferred = createDeferred<OAuthTokenData>()
    const refresh = mock(() => deferred.promise)
    // Provider with an already-expired token so buildHttpRequestInit must refresh.
    const provider: OAuthProviderLike = {
      tokens: () => ({
        accessToken: "expired-token",
        refreshToken: "refresh-token",
        expiresAt: Math.floor(Date.now() / 1000) - 60,
      }),
      login: mock(async () => ({ accessToken: "login-token" } satisfies OAuthTokenData)),
      refresh,
    }
    const authProviders = new Map<string, OAuthProviderLike>()
    const createOAuthProvider: OAuthProviderFactory = () => provider
    // when — two concurrent requests for the SAME server URL
    const firstRequest = buildHttpRequestInit(createConfig("https://same.example.com/mcp"), authProviders, createOAuthProvider)
    const secondRequest = buildHttpRequestInit(createConfig("https://same.example.com/mcp"), authProviders, createOAuthProvider)
    // then — the mutex collapses both into a single refresh call
    // NOTE(review): this assertion runs before awaiting either request, so it
    // assumes the refresh is reached in the synchronous prefix of
    // buildHttpRequestInit — confirm if this ever flakes.
    expect(refresh).toHaveBeenCalledTimes(1)
    deferred.resolve({ accessToken: "refreshed-token" })
    await expect(firstRequest).resolves.toEqual({ headers: { Authorization: "Bearer refreshed-token" } })
    await expect(secondRequest).resolves.toEqual({ headers: { Authorization: "Bearer refreshed-token" } })
  })

  it("allows different servers to refresh independently after request auth errors", async () => {
    // given
    const { handlePostRequestAuthError } = await importFreshOAuthHandlerModule()
    const firstDeferred = createDeferred<OAuthTokenData>()
    const secondDeferred = createDeferred<OAuthTokenData>()
    const firstProvider: OAuthProviderLike = {
      tokens: () => ({ accessToken: "expired-a", refreshToken: "refresh-a" }),
      login: mock(async () => ({ accessToken: "login-a" } satisfies OAuthTokenData)),
      refresh: mock(() => firstDeferred.promise),
    }
    const secondProvider: OAuthProviderLike = {
      tokens: () => ({ accessToken: "expired-b", refreshToken: "refresh-b" }),
      login: mock(async () => ({ accessToken: "login-b" } satisfies OAuthTokenData)),
      refresh: mock(() => secondDeferred.promise),
    }
    const providers = new Map([
      ["https://server-a.example.com/mcp", firstProvider],
      ["https://server-b.example.com/mcp", secondProvider],
    ])
    // when — auth errors for two DIFFERENT server URLs
    const firstAttempt = handlePostRequestAuthError({
      error: new Error("401 Unauthorized"),
      config: createConfig("https://server-a.example.com/mcp"),
      authProviders: providers,
    })
    const secondAttempt = handlePostRequestAuthError({
      error: new Error("403 Forbidden"),
      config: createConfig("https://server-b.example.com/mcp"),
      authProviders: providers,
    })
    // then — neither refresh blocks the other
    expect(firstProvider.refresh).toHaveBeenCalledTimes(1)
    expect(secondProvider.refresh).toHaveBeenCalledTimes(1)
    firstDeferred.resolve({ accessToken: "refreshed-a" })
    secondDeferred.resolve({ accessToken: "refreshed-b" })
    await expect(firstAttempt).resolves.toBe(true)
    await expect(secondAttempt).resolves.toBe(true)
  })

  it("allows a new refresh after the previous same-server refresh completes", async () => {
    // given
    const { handlePostRequestAuthError } = await importFreshOAuthHandlerModule()
    const refresh = mock(async () => ({ accessToken: `refreshed-${refresh.mock.calls.length + 1}` } satisfies OAuthTokenData))
    const provider: OAuthProviderLike = {
      tokens: () => ({ accessToken: "expired-token", refreshToken: "refresh-token" }),
      login: mock(async () => ({ accessToken: "login-token" } satisfies OAuthTokenData)),
      refresh,
    }
    const authProviders = new Map<string, OAuthProviderLike>([["https://same.example.com/mcp", provider]])
    // when — sequential (awaited) auth errors for the same server; each call
    // uses a fresh default refreshAttempted set, so both may refresh
    const firstResult = await handlePostRequestAuthError({
      error: new Error("401 Unauthorized"),
      config: createConfig("https://same.example.com/mcp"),
      authProviders,
    })
    const secondResult = await handlePostRequestAuthError({
      error: new Error("401 Unauthorized"),
      config: createConfig("https://same.example.com/mcp"),
      authProviders,
    })
    // then — the mutex releases between attempts
    expect(firstResult).toBe(true)
    expect(secondResult).toBe(true)
    expect(refresh).toHaveBeenCalledTimes(2)
  })
})

View File

@@ -1,5 +1,6 @@
import type { ClaudeCodeMcpServer } from "../claude-code-mcp-loader/types"
import { McpOAuthProvider } from "../mcp-oauth/provider"
import { withRefreshMutex } from "../mcp-oauth/refresh-mutex"
import type { OAuthTokenData } from "../mcp-oauth/storage"
import { isStepUpRequired, mergeScopes } from "../mcp-oauth/step-up"
import type { OAuthProviderFactory, OAuthProviderLike } from "./types"
@@ -52,14 +53,15 @@ export async function buildHttpRequestInit(
}
}
if (tokenData && isTokenExpired(tokenData)) {
try {
tokenData = tokenData.refreshToken
? await provider.refresh(tokenData.refreshToken)
: await provider.login()
} catch {
if (tokenData && isTokenExpired(tokenData)) {
try {
tokenData = await provider.login()
const refreshToken = tokenData.refreshToken
tokenData = refreshToken
? await withRefreshMutex(config.url, () => provider.refresh(refreshToken))
: await provider.login()
} catch {
try {
tokenData = await provider.login()
} catch {
tokenData = null
}
@@ -116,3 +118,43 @@ export async function handleStepUpIfNeeded(params: {
return false
}
}
/**
 * Handles a 401/403 raised AFTER a request went through: performs at most one
 * token refresh per server URL (tracked in `refreshAttempted`) so the caller
 * can retry the failed operation with fresh credentials.
 *
 * Returns true when the refresh succeeded (caller should retry); false when
 * the error is not an auth error, OAuth is not configured, no refresh token
 * is available, a refresh was already attempted for this URL, or the refresh
 * itself failed.
 */
export async function handlePostRequestAuthError(params: {
  error: Error
  config: ClaudeCodeMcpServer
  authProviders: Map<string, OAuthProviderLike>
  createOAuthProvider?: OAuthProviderFactory
  refreshAttempted?: Set<string>
}): Promise<boolean> {
  const { error, config, authProviders, createOAuthProvider, refreshAttempted = new Set() } = params
  const serverUrl = config.url
  // Only applicable to OAuth-enabled servers with a known URL.
  if (!config.oauth || !serverUrl) return false
  // Only HTTP auth failures qualify for refresh-and-retry.
  if (!/\b(401|403)\b/.test(error.message)) return false
  const provider = getOrCreateAuthProvider(authProviders, serverUrl, config.oauth, createOAuthProvider)
  const refreshToken = provider.tokens()?.refreshToken
  if (!refreshToken) return false
  // At most one refresh attempt per server within a single retry loop.
  if (refreshAttempted.has(serverUrl)) return false
  refreshAttempted.add(serverUrl)
  try {
    // Serialize with any concurrent refresh for the same server URL.
    await withRefreshMutex(serverUrl, () => provider.refresh(refreshToken))
    return true
  } catch {
    return false
  }
}

View File

@@ -3,6 +3,7 @@ import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js"
import type { ClaudeCodeMcpServer } from "../claude-code-mcp-loader/types"
import { createCleanMcpEnvironment } from "./env-cleaner"
import { registerProcessCleanup, startCleanupTimer } from "./cleanup"
import { redactSensitiveData } from "./error-redaction"
import type { ManagedClient, SkillMcpClientConnectionParams } from "./types"
function getStdioCommand(config: ClaudeCodeMcpServer, serverName: string): string {
@@ -45,10 +46,13 @@ export async function createStdioClient(params: SkillMcpClientConnectionParams):
}
const errorMessage = error instanceof Error ? error.message : String(error)
const fullCommand = `${command} ${args.join(" ")}`
const safeCommand = redactSensitiveData(fullCommand)
const safeErrorMessage = redactSensitiveData(errorMessage)
throw new Error(
`Failed to connect to MCP server "${info.serverName}".\n\n` +
`Command: ${command} ${args.join(" ")}\n` +
`Reason: ${errorMessage}\n\n` +
`Command: ${safeCommand}\n` +
`Reason: ${safeErrorMessage}\n\n` +
`Hints:\n` +
` - Ensure the command is installed and available in PATH\n` +
` - Check if the MCP server package exists\n` +

View File

@@ -3,6 +3,7 @@ import type { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdi
import type { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js"
import type { ClaudeCodeMcpServer } from "../claude-code-mcp-loader/types"
import type { McpOAuthProvider } from "../mcp-oauth/provider"
import type { SkillScope } from "../opencode-skill-loader/types"
export type SkillMcpConfig = Record<string, ClaudeCodeMcpServer>
@@ -10,6 +11,7 @@ export interface SkillMcpClientInfo {
serverName: string
skillName: string
sessionID: string
scope?: SkillScope | "local"
}
export interface SkillMcpServerContext {

View File

@@ -44,12 +44,22 @@ mock.module("./action-executor", () => ({
mock.module("../../shared/tmux", () => ({
isInsideTmux: mockIsInsideTmux,
getCurrentPaneId: mockGetCurrentPaneId,
isServerRunning: mock(async () => true),
resetServerCheck: mock(() => {}),
markServerRunningInProcess: mock(() => {}),
getPaneDimensions: mock(async () => ({ width: 220, height: 44 })),
spawnTmuxPane: mock(async () => ({ success: true, paneId: "%1" })),
closeTmuxPane: mock(async () => ({ success: true })),
replaceTmuxPane: mock(async () => ({ success: true, paneId: "%1" })),
applyLayout: mock(async () => ({ success: true })),
enforceMainPaneWidth: mock(async () => ({ success: true })),
POLL_INTERVAL_BACKGROUND_MS: 10,
SESSION_READY_POLL_INTERVAL_MS: 10,
SESSION_READY_TIMEOUT_MS: 50,
SESSION_MISSING_GRACE_MS: 1_000,
spawnTmuxWindow: mockSpawnTmuxWindow,
spawnTmuxSession: mockSpawnTmuxSession,
SESSION_TIMEOUT_MS: 600_000,
}))
afterAll(() => { mock.restore() })

View File

@@ -1,6 +1,9 @@
import type { PluginInput } from "@opencode-ai/plugin"
import type { BackgroundManager } from "../../features/background-agent"
import { isAgentRegistered } from "../../features/claude-code-session-state"
import {
isAgentRegistered,
resolveRegisteredAgentName,
} from "../../features/claude-code-session-state"
import { log } from "../../shared/logger"
import { createInternalAgentTextPart, resolveInheritedPromptTools } from "../../shared"
import { HOOK_NAME } from "./hook-name"
@@ -55,7 +58,9 @@ export async function injectBoulderContinuation(input: {
`\n\n[Status: ${total - remaining}/${total} completed, ${remaining} remaining]` +
preferredSessionContext +
worktreeContext
const continuationAgent = agent ?? (isAgentRegistered("atlas") ? "atlas" : undefined)
const continuationAgent = resolveRegisteredAgentName(
agent ?? (isAgentRegistered("atlas") ? "atlas" : undefined),
)
if (!continuationAgent || !isAgentRegistered(continuationAgent)) {
log(`[${HOOK_NAME}] Skipped injection: continuation agent unavailable`, {

View File

@@ -3,6 +3,7 @@ const { afterEach, describe, expect, mock, test, afterAll } = require("bun:test"
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs"
import { join } from "node:path"
import { tmpdir } from "node:os"
import { PART_STORAGE } from "../../shared"
const testDirs: string[] = []
const TEST_STORAGE_ROOT = join(tmpdir(), `atlas-session-last-agent-${Date.now()}`)
@@ -64,4 +65,36 @@ describe("getLastAgentFromSession JSON backend", () => {
// then
expect(result).toBe("atlas")
})
test("skips JSON messages whose part storage contains a compaction marker", async () => {
  // given
  const sessionID = "ses_json_compaction_marker"
  const messageDir = createTempMessageDir(sessionID)
  const compactionMessageID = "msg_test_atlas_compaction_marker"
  const partDir = join(PART_STORAGE, compactionMessageID)
  testDirs.push(partDir)
  // Newest message (created: 200) names "atlas" but carries a compaction part
  // on disk, so it must be skipped in favor of the older real message.
  writeFileSync(join(messageDir, "msg_0001.json"), JSON.stringify({
    id: compactionMessageID,
    agent: "atlas",
    time: { created: 200 },
  }), "utf-8")
  mkdirSync(partDir, { recursive: true })
  writeFileSync(join(partDir, "prt_0001.json"), JSON.stringify({
    type: "compaction",
  }), "utf-8")
  writeFileSync(join(messageDir, "msg_0002.json"), JSON.stringify({
    id: "msg_0002",
    agent: "sisyphus-junior",
    time: { created: 100 },
  }), "utf-8")
  const { getLastAgentFromSession } = await import("./session-last-agent")
  // when
  const result = await getLastAgentFromSession(sessionID)
  // then — the compaction-marked message is ignored
  expect(result).toBe("sisyphus-junior")
})
})

View File

@@ -52,6 +52,30 @@ describe("getLastAgentFromSession SQLite backend ordering", () => {
expect(result).toBe("sisyphus-junior")
})
test("skips compaction marker user messages that retain the original agent", async () => {
  // given
  // The newer message (created: 200) is a compaction marker (its parts contain
  // type: "compaction") and must be ignored even though it names an agent.
  const client = {
    session: {
      messages: async () => ({
        data: [
          { id: "msg_real", info: { agent: "sisyphus", time: { created: 100 } } },
          {
            id: "msg_compaction",
            info: { agent: "atlas", time: { created: 200 } },
            parts: [{ type: "compaction" }],
          },
        ],
      }),
    },
  }
  // when
  const result = await getLastAgentFromSession("ses_sqlite_compaction_marker", client as never)
  // then — falls back to the most recent non-compaction message's agent
  expect(result).toBe("sisyphus")
})
test("returns null instead of throwing when SQLite message lookup fails", async () => {
// given
const client = {

View File

@@ -2,6 +2,7 @@ import { readFileSync, readdirSync } from "node:fs"
import { join } from "node:path"
import { getMessageDir, isSqliteBackend, normalizeSDKResponse } from "../../shared"
import { hasCompactionPartInStorage, isCompactionMessage } from "../../shared/compaction-marker"
type SessionMessagesClient = {
session: {
@@ -9,10 +10,6 @@ type SessionMessagesClient = {
}
}
function isCompactionAgent(agent: unknown): boolean {
return typeof agent === "string" && agent.toLowerCase() === "compaction"
}
function getLastAgentFromMessageDir(messageDir: string): string | null {
try {
const messages = readdirSync(messageDir)
@@ -20,9 +17,10 @@ function getLastAgentFromMessageDir(messageDir: string): string | null {
.map((fileName) => {
try {
const content = readFileSync(join(messageDir, fileName), "utf-8")
const parsed = JSON.parse(content) as { agent?: unknown; time?: { created?: unknown } }
const parsed = JSON.parse(content) as { id?: string; agent?: unknown; time?: { created?: unknown } }
return {
fileName,
id: parsed.id,
agent: parsed.agent,
createdAt: typeof parsed.time?.created === "number" ? parsed.time.created : Number.NEGATIVE_INFINITY,
}
@@ -30,11 +28,16 @@ function getLastAgentFromMessageDir(messageDir: string): string | null {
return null
}
})
.filter((message): message is { fileName: string; agent: unknown; createdAt: number } => message !== null)
.sort((left, right) => right.createdAt - left.createdAt || right.fileName.localeCompare(left.fileName))
.filter((message): message is { fileName: string; id: string | undefined; agent: unknown; createdAt: number } => message !== null)
.sort((left, right) => (right?.createdAt ?? 0) - (left?.createdAt ?? 0) || (right?.fileName ?? "").localeCompare(left?.fileName ?? ""))
for (const message of messages) {
if (typeof message.agent === "string" && !isCompactionAgent(message.agent)) {
if (!message) continue
if (isCompactionMessage({ agent: message.agent }) || hasCompactionPartInStorage(message?.id)) {
continue
}
if (typeof message.agent === "string") {
return message.agent.toLowerCase()
}
}
@@ -52,7 +55,11 @@ export async function getLastAgentFromSession(
if (isSqliteBackend() && client) {
try {
const response = await client.session.messages({ path: { id: sessionID } })
const messages = normalizeSDKResponse(response, [] as Array<{ id?: string; info?: { agent?: string; time?: { created?: number } } }>, {
const messages = normalizeSDKResponse(response, [] as Array<{
id?: string
info?: { agent?: string; time?: { created?: number } }
parts?: Array<{ type?: string }>
}>, {
preferResponseOnMissingData: true,
}).sort((left, right) => {
const leftTime = (left as { info?: { time?: { created?: number } } }).info?.time?.created ?? Number.NEGATIVE_INFINITY
@@ -67,8 +74,12 @@ export async function getLastAgentFromSession(
})
for (const message of messages) {
if (isCompactionMessage(message)) {
continue
}
const agent = message.info?.agent
if (typeof agent === "string" && !isCompactionAgent(agent)) {
if (typeof agent === "string") {
return agent.toLowerCase()
}
}

View File

@@ -0,0 +1,80 @@
import { afterEach, beforeEach, describe, expect, it, mock } from "bun:test"
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"
import { tmpdir } from "node:os"
import { join } from "node:path"
// Hold mutable mock state so beforeEach can swap the cache root for each test.
const mockState: { candidates: string[] } = { candidates: [] }

// The constants module exports a fixed array; wrap the mock in a Proxy so
// every property access reads the CURRENT mockState.candidates list instead
// of a snapshot captured at mock-registration time.
mock.module("../constants", () => ({
  INSTALLED_PACKAGE_JSON_CANDIDATES: new Proxy([], {
    get(_, prop) {
      const current = mockState.candidates
      // Forward array methods/properties to the mutable candidates list
      // so getCachedVersion's `for (... of ...)` sees fresh data per test.
      const value = (current as unknown as Record<PropertyKey, unknown>)[prop]
      if (typeof value === "function") {
        return (value as (...args: unknown[]) => unknown).bind(current)
      }
      return value
    },
  }),
}))

// Disable the upward package.json search so only the candidate paths matter.
mock.module("./package-json-locator", () => ({
  findPackageJsonUp: () => null,
}))

// NOTE(review): this static import is hoisted above the mock.module calls in
// ESM evaluation order; this relies on bun patching already-evaluated modules
// in place — confirm if these tests ever run under a different runner.
import { getCachedVersion } from "./cached-version"
// GH-3257: the cached-version resolver must find the installed package.json
// under either the canonical or the npm-aliased package name.
describe("getCachedVersion (GH-3257)", () => {
  let cacheRoot: string

  beforeEach(() => {
    // Fresh temp cache root per test; the mocked candidate paths point inside it.
    cacheRoot = mkdtempSync(join(tmpdir(), "omo-cached-version-"))
    mockState.candidates = [
      join(cacheRoot, "node_modules", "oh-my-opencode", "package.json"),
      join(cacheRoot, "node_modules", "oh-my-openagent", "package.json"),
    ]
  })

  afterEach(() => {
    rmSync(cacheRoot, { recursive: true, force: true })
    mockState.candidates = []
  })

  it("returns the version when the package is installed under oh-my-opencode", () => {
    const pkgDir = join(cacheRoot, "node_modules", "oh-my-opencode")
    mkdirSync(pkgDir, { recursive: true })
    writeFileSync(join(pkgDir, "package.json"), JSON.stringify({ name: "oh-my-opencode", version: "3.16.0" }))
    expect(getCachedVersion()).toBe("3.16.0")
  })

  it("returns the version when the package is installed under oh-my-openagent", () => {
    // GH-3257: npm users who install the aliased `oh-my-openagent` package get
    // node_modules/oh-my-openagent/package.json, not the canonical oh-my-opencode
    // path. The cached version resolver must check both.
    const pkgDir = join(cacheRoot, "node_modules", "oh-my-openagent")
    mkdirSync(pkgDir, { recursive: true })
    writeFileSync(join(pkgDir, "package.json"), JSON.stringify({ name: "oh-my-openagent", version: "3.16.0" }))
    expect(getCachedVersion()).toBe("3.16.0")
  })

  it("prefers oh-my-opencode when both are installed", () => {
    // Candidate order matters: the canonical package wins when both exist.
    const legacyDir = join(cacheRoot, "node_modules", "oh-my-opencode")
    mkdirSync(legacyDir, { recursive: true })
    writeFileSync(join(legacyDir, "package.json"), JSON.stringify({ name: "oh-my-opencode", version: "3.16.0" }))
    const aliasDir = join(cacheRoot, "node_modules", "oh-my-openagent")
    mkdirSync(aliasDir, { recursive: true })
    writeFileSync(join(aliasDir, "package.json"), JSON.stringify({ name: "oh-my-openagent", version: "3.15.0" }))
    expect(getCachedVersion()).toBe("3.16.0")
  })

  it("returns null when neither candidate exists and fallbacks find nothing", () => {
    expect(getCachedVersion()).toBeNull()
  })
})

View File

@@ -3,18 +3,20 @@ import * as path from "node:path"
import { fileURLToPath } from "node:url"
import { log } from "../../../shared/logger"
import type { PackageJson } from "../types"
import { INSTALLED_PACKAGE_JSON } from "../constants"
import { INSTALLED_PACKAGE_JSON_CANDIDATES } from "../constants"
import { findPackageJsonUp } from "./package-json-locator"
export function getCachedVersion(): string | null {
try {
if (fs.existsSync(INSTALLED_PACKAGE_JSON)) {
const content = fs.readFileSync(INSTALLED_PACKAGE_JSON, "utf-8")
const pkg = JSON.parse(content) as PackageJson
if (pkg.version) return pkg.version
for (const candidate of INSTALLED_PACKAGE_JSON_CANDIDATES) {
try {
if (fs.existsSync(candidate)) {
const content = fs.readFileSync(candidate, "utf-8")
const pkg = JSON.parse(content) as PackageJson
if (pkg.version) return pkg.version
}
} catch {
// ignore; try next candidate
}
} catch {
// ignore
}
try {

View File

@@ -1,7 +1,7 @@
import * as fs from "node:fs"
import { fileURLToPath } from "node:url"
import type { OpencodeConfig } from "../types"
import { PACKAGE_NAME } from "../constants"
import { ACCEPTED_PACKAGE_NAMES } from "../constants"
import { getConfigPaths } from "./config-paths"
import { stripJsonComments } from "./jsonc-strip"
@@ -18,12 +18,12 @@ export function getLocalDevPath(directory: string): string | null {
const plugins = config.plugin ?? []
for (const entry of plugins) {
if (entry.startsWith("file://") && entry.includes(PACKAGE_NAME)) {
try {
return fileURLToPath(entry)
} catch {
return entry.replace("file://", "")
}
if (!entry.startsWith("file://")) continue
if (!ACCEPTED_PACKAGE_NAMES.some(name => entry.includes(name))) continue
try {
return fileURLToPath(entry)
} catch {
return entry.replace("file://", "")
}
}
} catch {

View File

@@ -0,0 +1,65 @@
// Tests for findPackageJsonUp: locating the plugin's own package.json by
// walking upward from a start directory, accepting both the canonical
// `oh-my-opencode` name and the aliased `oh-my-openagent` npm name (GH-3257).
import { afterEach, beforeEach, describe, expect, it } from "bun:test"
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"
import { tmpdir } from "node:os"
import { join } from "node:path"
import { findPackageJsonUp } from "./package-json-locator"
describe("findPackageJsonUp", () => {
let sandbox: string
// Drop a minimal package.json with the given name/version into `dir`
// and return its absolute path for assertions.
const seedPackageJson = (dir: string, name: string, version: string): string => {
const manifestPath = join(dir, "package.json")
writeFileSync(manifestPath, JSON.stringify({ name, version }))
return manifestPath
}
beforeEach(() => {
sandbox = mkdtempSync(join(tmpdir(), "omo-pkg-locator-"))
})
afterEach(() => {
rmSync(sandbox, { recursive: true, force: true })
})
it("finds a package.json whose name is the canonical oh-my-opencode", () => {
const manifestPath = seedPackageJson(sandbox, "oh-my-opencode", "3.16.0")
expect(findPackageJsonUp(sandbox)).toBe(manifestPath)
})
it("finds a package.json whose name is the aliased oh-my-openagent (GH-3257)", () => {
// A user who installed `oh-my-openagent` from npm gets a node_modules entry
// whose package.json has `name: "oh-my-openagent"`. The auto-update-checker
// must still resolve it so the startup toast shows a real version instead
// of "unknown".
const manifestPath = seedPackageJson(sandbox, "oh-my-openagent", "3.16.0")
expect(findPackageJsonUp(sandbox)).toBe(manifestPath)
})
it("walks up directories to find the matching package.json", () => {
// Start from a nested directory; the manifest lives two levels above it.
const deepDir = join(sandbox, "dist", "checker")
mkdirSync(deepDir, { recursive: true })
const manifestPath = seedPackageJson(sandbox, "oh-my-openagent", "3.16.0")
expect(findPackageJsonUp(deepDir)).toBe(manifestPath)
})
it("ignores unrelated package.json files", () => {
seedPackageJson(sandbox, "some-other-package", "1.0.0")
expect(findPackageJsonUp(sandbox)).toBeNull()
})
it("returns null when no package.json exists", () => {
expect(findPackageJsonUp(sandbox)).toBeNull()
})
})

View File

@@ -1,7 +1,9 @@
import * as fs from "node:fs"
import * as path from "node:path"
import type { PackageJson } from "../types"
import { PACKAGE_NAME } from "../constants"
import { ACCEPTED_PACKAGE_NAMES } from "../constants"
const ACCEPTED_NAME_SET = new Set<string>(ACCEPTED_PACKAGE_NAMES)
export function findPackageJsonUp(startPath: string): string | null {
try {
@@ -14,7 +16,7 @@ export function findPackageJsonUp(startPath: string): string | null {
try {
const content = fs.readFileSync(pkgPath, "utf-8")
const pkg = JSON.parse(content) as PackageJson
if (pkg.name === PACKAGE_NAME) return pkgPath
if (pkg.name && ACCEPTED_NAME_SET.has(pkg.name)) return pkgPath
} catch {
// ignore
}

View File

@@ -4,6 +4,7 @@ import * as fs from "node:fs"
import * as os from "node:os"
import * as path from "node:path"
import { PACKAGE_NAME } from "../constants"
import { LEGACY_PLUGIN_NAME, PLUGIN_NAME } from "../../../shared/plugin-identity"
type PluginEntryResult = {
entry: string
@@ -120,6 +121,64 @@ describe("findPluginEntry", () => {
expect(pluginInfo?.pinnedVersion).toBe("3.5.2")
})
// The following cases cover plugin-entry matching for both the preferred
// PLUGIN_NAME and LEGACY_PLUGIN_NAME identifiers (imported from
// shared/plugin-identity), plus pinned-version and non-matching entries.
test("finds preferred plugin entry", async () => {
// #given preferred plugin entry is configured
fs.writeFileSync(configPath, JSON.stringify({ plugin: [PLUGIN_NAME] }))
// #when plugin entry is detected
const execution = runFindPluginEntry(temporaryDirectory)
// #then preferred entry is returned (bare name => not pinned, no version)
expect(execution.status).toBe(0)
const pluginInfo = JSON.parse(execution.stdout.trim()) as PluginEntryResult
expect(pluginInfo?.entry).toBe(PLUGIN_NAME)
expect(pluginInfo?.isPinned).toBe(false)
expect(pluginInfo?.pinnedVersion).toBeNull()
})
test("finds legacy plugin entry", async () => {
// #given legacy plugin entry is configured
fs.writeFileSync(configPath, JSON.stringify({ plugin: [LEGACY_PLUGIN_NAME] }))
// #when plugin entry is detected
const execution = runFindPluginEntry(temporaryDirectory)
// #then legacy entry is returned
expect(execution.status).toBe(0)
const pluginInfo = JSON.parse(execution.stdout.trim()) as PluginEntryResult
expect(pluginInfo?.entry).toBe(LEGACY_PLUGIN_NAME)
expect(pluginInfo?.isPinned).toBe(false)
expect(pluginInfo?.pinnedVersion).toBeNull()
})
test("finds preferred plugin entry with pinned version", async () => {
// #given preferred plugin entry includes semver version
fs.writeFileSync(configPath, JSON.stringify({ plugin: [`${PLUGIN_NAME}@3.15.0`] }))
// #when plugin entry is detected
const execution = runFindPluginEntry(temporaryDirectory)
// #then preferred versioned entry is returned with the version split off
expect(execution.status).toBe(0)
const pluginInfo = JSON.parse(execution.stdout.trim()) as PluginEntryResult
expect(pluginInfo?.entry).toBe(`${PLUGIN_NAME}@3.15.0`)
expect(pluginInfo?.isPinned).toBe(true)
expect(pluginInfo?.pinnedVersion).toBe("3.15.0")
})
test("returns null for unrelated plugin entry", async () => {
// #given unrelated plugin entry is configured
fs.writeFileSync(configPath, JSON.stringify({ plugin: ["some-other-plugin"] }))
// #when plugin entry is detected
const execution = runFindPluginEntry(temporaryDirectory)
// #then no matching entry is returned (stdout is the JSON literal null)
expect(execution.status).toBe(0)
const pluginInfo = JSON.parse(execution.stdout.trim()) as PluginEntryResult
expect(pluginInfo).toBeNull()
})
test("reads user config from profile dir even when OPENCODE_CONFIG_DIR changes after import", async () => {
// #given profile-specific user config after module import
const profileConfigDir = path.join(temporaryDirectory, "profiles", "today")

View File

@@ -3,6 +3,7 @@ import type { OpencodeConfig } from "../types"
import { PACKAGE_NAME } from "../constants"
import { getConfigPaths } from "./config-paths"
import { stripJsonComments } from "./jsonc-strip"
import { LEGACY_PLUGIN_NAME, PLUGIN_NAME } from "../../../shared/plugin-identity"
export interface PluginEntryInfo {
entry: string
@@ -12,6 +13,7 @@ export interface PluginEntryInfo {
}
const EXACT_SEMVER_REGEX = /^\d+\.\d+\.\d+(-[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$/
const MATCH_PLUGIN_NAMES = [PACKAGE_NAME, PLUGIN_NAME, LEGACY_PLUGIN_NAME]
export function findPluginEntry(directory: string): PluginEntryInfo | null {
for (const configPath of getConfigPaths(directory)) {
@@ -22,13 +24,15 @@ export function findPluginEntry(directory: string): PluginEntryInfo | null {
const plugins = config.plugin ?? []
for (const entry of plugins) {
if (entry === PACKAGE_NAME) {
return { entry, isPinned: false, pinnedVersion: null, configPath }
}
if (entry.startsWith(`${PACKAGE_NAME}@`)) {
const pinnedVersion = entry.slice(PACKAGE_NAME.length + 1)
const isPinned = EXACT_SEMVER_REGEX.test(pinnedVersion.trim())
return { entry, isPinned, pinnedVersion, configPath }
for (const pluginName of MATCH_PLUGIN_NAMES) {
if (entry === pluginName) {
return { entry, isPinned: false, pinnedVersion: null, configPath }
}
if (entry.startsWith(`${pluginName}@`)) {
const pinnedVersion = entry.slice(pluginName.length + 1)
const isPinned = EXACT_SEMVER_REGEX.test(pinnedVersion.trim())
return { entry, isPinned, pinnedVersion, configPath }
}
}
}
} catch {

View File

@@ -26,4 +26,24 @@ describe("auto-update-checker constants", () => {
// then PACKAGE_NAME equals the actually published package name
expect(PACKAGE_NAME).toBe(repoPackageJson.name)
})
it("ACCEPTED_PACKAGE_NAMES contains both the canonical and aliased npm names (GH-3257)", async () => {
// Dynamic import with a unique query string busts the module cache so the
// constants module is re-evaluated fresh for this test.
const { ACCEPTED_PACKAGE_NAMES } = await import(`./constants?test=${Date.now()}`)
expect(ACCEPTED_PACKAGE_NAMES).toContain("oh-my-opencode")
expect(ACCEPTED_PACKAGE_NAMES).toContain("oh-my-openagent")
})
it("INSTALLED_PACKAGE_JSON_CANDIDATES covers every accepted package name (GH-3257)", async () => {
const { ACCEPTED_PACKAGE_NAMES, INSTALLED_PACKAGE_JSON_CANDIDATES, CACHE_DIR } = await import(
`./constants?test=${Date.now()}`
)
// One candidate path per accepted name, each under CACHE_DIR/node_modules/<name>.
expect(INSTALLED_PACKAGE_JSON_CANDIDATES).toHaveLength(ACCEPTED_PACKAGE_NAMES.length)
for (const name of ACCEPTED_PACKAGE_NAMES) {
expect(INSTALLED_PACKAGE_JSON_CANDIDATES).toContain(
join(CACHE_DIR, "node_modules", name, "package.json")
)
}
})
})

Some files were not shown because too many files have changed in this diff Show More