Compare commits

..

24 Commits

Author SHA1 Message Date
Kit Langton
3c272cd6a8 Merge branch 'dev' into kit/ripgrep-schema-source 2026-04-13 21:18:30 -04:00
Aiden Cline
34e2429c49 feat: add experimental.compaction.autocontinue hook to disable auto continuing after compaction (#22361) 2026-04-13 20:14:53 -05:00
opencode-agent[bot]
10ba68c772 chore: update nix node_modules hashes 2026-04-14 00:23:25 +00:00
Kit Langton
e8471256f2 refactor(session): move llm stream into layer (#22358) 2026-04-13 19:53:30 -04:00
Kit Langton
43b37346b6 feat: add interactive burst to the TUI logo (#22098) 2026-04-13 19:36:28 -04:00
Kit Langton
d199648aeb refactor(permission): remove async facade exports (#22342) 2026-04-13 19:33:58 -04:00
Kit Langton
a06f40297b fix grep exact file path searches (#22356) 2026-04-13 19:26:50 -04:00
Kit Langton
5adba445bc Merge branch 'dev' into kit/ripgrep-schema-source 2026-04-13 19:15:27 -04:00
Dax Raad
59c0fc28ee ignore: v2 thoughts 2026-04-13 17:33:34 -04:00
James Long
b22add292c refactor(core): publish sync events to global event stream (#22347) 2026-04-13 16:51:59 -04:00
Kit Langton
67aaecacac refactor(session): remove revert async facade exports (#22339) 2026-04-13 16:16:13 -04:00
Kit Langton
29c202e6ab refactor(mcp): remove mcp auth async facade exports (#22338) 2026-04-13 15:36:12 -04:00
Kit Langton
dcbf11f41a refactor(session): remove summary async facades (#22337) 2026-04-13 15:35:38 -04:00
Kit Langton
14ccff4037 refactor(agent): remove async facade exports (#22341) 2026-04-13 14:54:01 -04:00
Kit Langton
5b8b874732 update effect docs (#22340) 2026-04-13 14:07:59 -04:00
Kit Langton
c9261dbd2a Merge branch 'dev' into kit/ripgrep-schema-source 2026-04-13 14:05:02 -04:00
opencode-agent[bot]
1d81c0266c chore: generate 2026-04-13 18:02:12 +00:00
Dax Raad
913120759a session entry 2026-04-13 14:00:49 -04:00
Kit Langton
19dbeea71c Merge branch 'dev' into kit/ripgrep-schema-source 2026-04-13 13:45:51 -04:00
Kit Langton
a7576a78ae Merge branch 'dev' into kit/ripgrep-schema-source 2026-04-13 12:35:05 -04:00
Kit Langton
573a10e2f4 refactor(file): derive ripgrep zod from effect schema 2026-04-13 12:34:13 -04:00
Kit Langton
75a87ffc5e refactor(file): decode ripgrep chunks synchronously 2026-04-13 12:34:13 -04:00
Kit Langton
2a10e4e89e refactor(file): decode ripgrep rows with schema 2026-04-13 12:34:13 -04:00
Kit Langton
f2a83a0a00 refactor(file): stream ripgrep search parsing 2026-04-13 12:34:10 -04:00
66 changed files with 5045 additions and 2354 deletions

View File

@@ -371,6 +371,7 @@
"bonjour-service": "1.3.0",
"bun-pty": "0.4.8",
"chokidar": "4.0.3",
"cli-sound": "1.1.3",
"clipboardy": "4.0.0",
"cross-spawn": "catalog:",
"decimal.js": "10.5.0",
@@ -2668,6 +2669,8 @@
"cli-cursor": ["cli-cursor@3.1.0", "", { "dependencies": { "restore-cursor": "^3.1.0" } }, "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw=="],
"cli-sound": ["cli-sound@1.1.3", "", { "dependencies": { "find-exec": "^1.0.3" }, "bin": { "cli-sound": "dist/esm/cli.js" } }, "sha512-dpdF3KS3wjo1fobKG5iU9KyKqzQWAqueymHzZ9epus/dZ40487gAvS6aXFeBul+GiQAQYUTAtUWgQvw6Jftbyg=="],
"cli-spinners": ["cli-spinners@3.4.0", "", {}, "sha512-bXfOC4QcT1tKXGorxL3wbJm6XJPDqEnij2gQ2m7ESQuE+/z9YFIWnl/5RpTiKWbMq3EVKR4fRLJGn6DVfu0mpw=="],
"cli-truncate": ["cli-truncate@4.0.0", "", { "dependencies": { "slice-ansi": "^5.0.0", "string-width": "^7.0.0" } }, "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA=="],
@@ -3092,6 +3095,8 @@
"find-babel-config": ["find-babel-config@2.1.2", "", { "dependencies": { "json5": "^2.2.3" } }, "sha512-ZfZp1rQyp4gyuxqt1ZqjFGVeVBvmpURMqdIWXbPRfB97Bf6BzdK/xSIbylEINzQ0kB5tlDQfn9HkNXXWsqTqLg=="],
"find-exec": ["find-exec@1.0.3", "", { "dependencies": { "shell-quote": "^1.8.1" } }, "sha512-gnG38zW90mS8hm5smNcrBnakPEt+cGJoiMkJwCU0IYnEb0H2NQk0NIljhNW+48oniCriFek/PH6QXbwsJo/qug=="],
"find-my-way": ["find-my-way@9.5.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-querystring": "^1.0.0", "safe-regex2": "^5.0.0" } }, "sha512-VW2RfnmscZO5KgBY5XVyKREMW5nMZcxDy+buTOsL+zIPnBlbKm+00sgzoQzq1EVh4aALZLfKdwv6atBGcjvjrQ=="],
"find-my-way-ts": ["find-my-way-ts@0.1.6", "", {}, "sha512-a85L9ZoXtNAey3Y6Z+eBWW658kO/MwR7zIafkIUPUMf3isZG0NCs2pjW2wtjxAKuJPxMAsHUIP4ZPGv0o5gyTA=="],
@@ -4412,6 +4417,8 @@
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
"shell-quote": ["shell-quote@1.8.3", "", {}, "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw=="],
"shiki": ["shiki@3.20.0", "", { "dependencies": { "@shikijs/core": "3.20.0", "@shikijs/engine-javascript": "3.20.0", "@shikijs/engine-oniguruma": "3.20.0", "@shikijs/langs": "3.20.0", "@shikijs/themes": "3.20.0", "@shikijs/types": "3.20.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-kgCOlsnyWb+p0WU+01RjkCH+eBVsjL1jOwUYWv0YDWkM2/A46+LDKVs5yZCUXjJG6bj4ndFoAg5iLIIue6dulg=="],
"shikiji": ["shikiji@0.6.13", "", { "dependencies": { "hast-util-to-html": "^9.0.0" } }, "sha512-4T7X39csvhT0p7GDnq9vysWddf2b6BeioiN3Ymhnt3xcy9tXmDcnsEFVxX18Z4YcQgEE/w48dLJ4pPPUcG9KkA=="],

View File

@@ -1,8 +1,8 @@
{
"nodeModules": {
"x86_64-linux": "sha256-g29OM3dy+sZ3ioTs8zjQOK1N+KnNr9ptP9xtdPcdr64=",
"aarch64-linux": "sha256-Iu91KwDcV5omkf4Ngny1aYpyCkPLjuoWOVUDOJUhW1k=",
"aarch64-darwin": "sha256-bk3G6m+Yo60Ea3Kyglc37QZf5Vm7MLMFcxemjc7HnL0=",
"x86_64-darwin": "sha256-y3hooQw13Z3Cu0KFfXYdpkTEeKTyuKd+a/jsXHQLdqA="
"x86_64-linux": "sha256-fiMi8VxyMhNTaZf0ButrMEwT/ZmfeEg1T3c6HwUz8p4=",
"aarch64-linux": "sha256-1Mzjijq/INZGGEm4EerYN3hu1VxiQ8wuGg6t+XPDf6w=",
"aarch64-darwin": "sha256-3SH8Q2kK/F2kM29FmFUMR1aA23rSei+mPJliRIGfvCM=",
"x86_64-darwin": "sha256-RPsyoNXn84K93gunRFLsBvkZIQilfmUXdwkeieQjbd8="
}
}

View File

@@ -155,7 +155,12 @@ export const { use: useGlobalSDK, provider: GlobalSDKProvider } = createSimpleCo
resetHeartbeat()
streamErrorLogged = false
const directory = event.directory ?? "global"
const payload = event.payload
if (event.payload.type === "sync") {
continue
}
const payload = event.payload as Event
const k = key(directory, payload)
if (k) {
const i = coalesced.get(k)

View File

@@ -0,0 +1,13 @@
CREATE TABLE `session_entry` (
`id` text PRIMARY KEY,
`session_id` text NOT NULL,
`type` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_session_entry_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE INDEX `session_entry_session_idx` ON `session_entry` (`session_id`);--> statement-breakpoint
CREATE INDEX `session_entry_session_type_idx` ON `session_entry` (`session_id`,`type`);--> statement-breakpoint
CREATE INDEX `session_entry_time_created_idx` ON `session_entry` (`time_created`);

File diff suppressed because it is too large Load Diff

View File

@@ -128,6 +128,7 @@
"bonjour-service": "1.3.0",
"bun-pty": "0.4.8",
"chokidar": "4.0.3",
"cli-sound": "1.1.3",
"clipboardy": "4.0.0",
"cross-spawn": "catalog:",
"decimal.js": "10.5.0",

View File

@@ -0,0 +1,238 @@
# Facade removal checklist
Concrete inventory of the remaining `makeRuntime(...)`-backed service facades in `packages/opencode`.
As of 2026-04-13, latest `origin/dev`:
- `src/` still has 15 `makeRuntime(...)` call sites.
- 13 of those are still in scope for facade removal.
- 2 are excluded from this checklist: `bus/index.ts` and `effect/cross-spawn-spawner.ts`.
Recent progress:
- Wave 1 is merged: `Pty`, `Skill`, `Vcs`, `ToolRegistry`, `Auth`.
- Wave 2 is merged: `Config`, `Provider`, `File`, `LSP`, `MCP`.
## Priority hotspots
- `server/instance/session.ts` still depends on `Session`, `SessionPrompt`, `SessionRevert`, `SessionCompaction`, `SessionSummary`, `ShareSession`, `Agent`, and `Permission` facades.
- `src/effect/app-runtime.ts` still references many facade namespaces directly, so it should stay in view during each deletion.
## Completed Batches
Low-risk batch, all merged:
1. `src/pty/index.ts`
2. `src/skill/index.ts`
3. `src/project/vcs.ts`
4. `src/tool/registry.ts`
5. `src/auth/index.ts`
Caller-heavy batch, all merged:
1. `src/config/config.ts`
2. `src/provider/provider.ts`
3. `src/file/index.ts`
4. `src/lsp/index.ts`
5. `src/mcp/index.ts`
Shared pattern:
- one service file still exports `makeRuntime(...)` + async facades
- one or two route or CLI entrypoints call those facades directly
- tests call the facade directly and need to switch to `yield* svc.method(...)`
- once callers are gone, delete `makeRuntime(...)`, remove async facade exports, and drop the `makeRuntime` import
## Done means
For each service in the low-risk batch, the work is complete only when all of these are true:
1. all production callers stop using `Namespace.method(...)` facade calls
2. all direct test callers stop using the facade and instead yield the service from context
3. the service file no longer has `makeRuntime(...)`
4. the service file no longer exports runtime-backed facade helpers
5. `grep` for the migrated facade methods only finds the service implementation itself or unrelated names
## Caller templates
### Route handlers
Use one `AppRuntime.runPromise(Effect.gen(...))` body and yield the service inside it.
```ts
const value = await AppRuntime.runPromise(
Effect.gen(function* () {
const pty = yield* Pty.Service
return yield* pty.list()
}),
)
```
If two service calls are independent, keep them in the same effect body and use `Effect.all(...)`.
### Plain async CLI or script entrypoints
If the caller is not itself an Effect service yet, still prefer one contiguous `AppRuntime.runPromise(Effect.gen(...))` block for the whole unit of work.
```ts
const skills = await AppRuntime.runPromise(
Effect.gen(function* () {
const auth = yield* Auth.Service
const skill = yield* Skill.Service
yield* auth.set(key, info)
return yield* skill.all()
}),
)
```
Only fall back to `AppRuntime.runPromise(Service.use(...))` for truly isolated one-off calls or awkward callback boundaries. Do not stack multiple tiny `runPromise(...)` calls in the same contiguous workflow.
This is the right intermediate state. Do not block facade removal on effectifying the whole CLI file.
### Bootstrap or fire-and-forget startup code
If the old facade call existed only to kick off initialization, call the service through the existing runtime for that file.
```ts
void BootstrapRuntime.runPromise(Vcs.Service.use((svc) => svc.init()))
```
Do not reintroduce a dedicated runtime in the service just for bootstrap.
### Tests
Convert facade tests to full effect style.
```ts
it.effect("does the thing", () =>
Effect.gen(function* () {
const svc = yield* Pty.Service
const info = yield* svc.create({ command: "cat", title: "a" })
yield* svc.remove(info.id)
}).pipe(Effect.provide(Pty.defaultLayer)),
)
```
If the repo test already uses `testEffect(...)`, prefer `testEffect(Service.defaultLayer)` and `yield* Service.Service` inside the test body.
Do not route tests through `AppRuntime` unless the test is explicitly exercising the app runtime. For facade removal, tests should usually provide the specific service layer they need.
If the test uses `provideTmpdirInstance(...)`, remember that fixture needs a live `ChildProcessSpawner` layer. For services whose `defaultLayer` does not already provide that infra, prefer the repo-standard cross-spawn layer:
```ts
const infra = CrossSpawnSpawner.defaultLayer
const it = testEffect(Layer.mergeAll(MyService.defaultLayer, infra))
```
Without that extra layer, tests fail at runtime with `Service not found: effect/process/ChildProcessSpawner`.
## Questions already answered
### Do we need to effectify the whole caller first?
No.
- route files: compose the handler with `AppRuntime.runPromise(Effect.gen(...))`
- CLI and scripts: use `AppRuntime.runPromise(Service.use(...))`
- bootstrap: use the existing bootstrap runtime
Facade removal does not require a bigger refactor than that.
### Should tests keep calling the namespace from async test bodies?
No. Convert them now.
The end state is `yield* svc.method(...)`, not `await Namespace.method(...)` inside `async` tests.
### Should we keep `runPromise` exported for convenience?
No. For this batch the goal is to delete the service-local runtime entirely.
### What if a route has websocket callbacks or nested async handlers?
Keep the route shape, but replace each facade call with `AppRuntime.runPromise(Service.use(...))` or wrap the surrounding async section in one `Effect.gen(...)` when practical. Do not keep the service facade just because the route has callback-shaped code.
### Should we use one `runPromise` per service call?
No.
Default to one contiguous `AppRuntime.runPromise(Effect.gen(...))` block per handler, command, or workflow. Yield every service you need inside that block.
Multiple tiny `runPromise(...)` calls are only acceptable when the caller structure forces it, such as websocket lifecycle callbacks, external callback APIs, or genuinely unrelated one-off operations.
### Should we wrap a single service expression in `Effect.gen(...)`?
Usually no.
Prefer the direct form when there is only one expression:
```ts
await AppRuntime.runPromise(File.Service.use((svc) => svc.read(path)))
```
Use `Effect.gen(...)` when the workflow actually needs multiple yielded values or branching.
## Learnings
These were the recurring mistakes and useful corrections from the first two batches:
1. Tests should usually provide the specific service layer, not `AppRuntime`.
2. If a test uses `provideTmpdirInstance(...)` and needs child processes, prefer `CrossSpawnSpawner.defaultLayer`.
3. Instance-scoped services may need both the service layer and the right instance fixture. `File` tests, for example, needed `provideInstance(...)` plus `File.defaultLayer`.
4. Do not wrap a single `Service.use(...)` call in `Effect.gen(...)` just to return it. Use the direct form.
5. For CLI readability, extract file-local preload helpers when the handler starts doing config load + service load + batched effect fanout inline.
6. When rebasing a facade branch after nearby merges, prefer the already-cleaned service/test version over older inline facade-era code.
## Next batch
Recommended next five, in order:
1. `src/permission/index.ts`
2. `src/agent/agent.ts`
3. `src/session/summary.ts`
4. `src/session/revert.ts`
5. `src/mcp/auth.ts`
Why this batch:
- It keeps pushing the session-adjacent cleanup without jumping straight into `session/index.ts` or `session/prompt.ts`.
- `Permission`, `Agent`, `SessionSummary`, and `SessionRevert` all reduce fanout in `server/instance/session.ts`.
- `McpAuth` is small and closely related to the just-landed `MCP` cleanup.
After that batch, the expected follow-up is the main session cluster:
1. `src/session/index.ts`
2. `src/session/prompt.ts`
3. `src/session/compaction.ts`
## Checklist
- [ ] `src/session/index.ts` (`Session`) - facades: `create`, `fork`, `get`, `setTitle`, `setArchived`, `setPermission`, `setRevert`, `messages`, `children`, `remove`, `updateMessage`, `removeMessage`, `removePart`, `updatePart`; main callers: `server/instance/session.ts`, `cli/cmd/session.ts`, `cli/cmd/export.ts`, `cli/cmd/github.ts`; tests: `test/server/session-actions.test.ts`, `test/server/session-list.test.ts`, `test/server/global-session-list.test.ts`
- [ ] `src/session/prompt.ts` (`SessionPrompt`) - facades: `prompt`, `resolvePromptParts`, `cancel`, `loop`, `shell`, `command`; main callers: `server/instance/session.ts`, `cli/cmd/github.ts`; tests: `test/session/prompt.test.ts`, `test/session/prompt-effect.test.ts`, `test/session/structured-output-integration.test.ts`
- [ ] `src/session/revert.ts` (`SessionRevert`) - facades: `revert`, `unrevert`, `cleanup`; main callers: `server/instance/session.ts`; tests: `test/session/revert-compact.test.ts`
- [ ] `src/session/compaction.ts` (`SessionCompaction`) - facades: `isOverflow`, `prune`, `create`; main callers: `server/instance/session.ts`; tests: `test/session/compaction.test.ts`
- [ ] `src/session/summary.ts` (`SessionSummary`) - facades: `summarize`, `diff`; main callers: `session/prompt.ts`, `session/processor.ts`, `server/instance/session.ts`; tests: `test/session/snapshot-tool-race.test.ts`
- [ ] `src/share/session.ts` (`ShareSession`) - facades: `create`, `share`, `unshare`; main callers: `server/instance/session.ts`, `cli/cmd/github.ts`
- [ ] `src/agent/agent.ts` (`Agent`) - facades: `get`, `list`, `defaultAgent`, `generate`; main callers: `cli/cmd/agent.ts`, `server/instance/session.ts`, `server/instance/experimental.ts`; tests: `test/agent/agent.test.ts`
- [ ] `src/permission/index.ts` (`Permission`) - facades: `ask`, `reply`, `list`; main callers: `server/instance/permission.ts`, `server/instance/session.ts`, `session/llm.ts`; tests: `test/permission/next.test.ts`
- [x] `src/file/index.ts` (`File`) - facades removed and merged.
- [x] `src/lsp/index.ts` (`LSP`) - facades removed and merged.
- [x] `src/mcp/index.ts` (`MCP`) - facades removed and merged.
- [x] `src/config/config.ts` (`Config`) - facades removed and merged.
- [x] `src/provider/provider.ts` (`Provider`) - facades removed and merged.
- [x] `src/pty/index.ts` (`Pty`) - facades removed and merged.
- [x] `src/skill/index.ts` (`Skill`) - facades removed and merged.
- [x] `src/project/vcs.ts` (`Vcs`) - facades removed and merged.
- [x] `src/tool/registry.ts` (`ToolRegistry`) - facades removed and merged.
- [ ] `src/worktree/index.ts` (`Worktree`) - facades: `makeWorktreeInfo`, `createFromInfo`, `create`, `remove`, `reset`; main callers: `control-plane/adaptors/worktree.ts`, `server/instance/experimental.ts`; tests: `test/project/worktree.test.ts`, `test/project/worktree-remove.test.ts`
- [x] `src/auth/index.ts` (`Auth`) - facades removed and merged.
- [ ] `src/mcp/auth.ts` (`McpAuth`) - facades: `get`, `getForUrl`, `all`, `set`, `remove`, `updateTokens`, `updateClientInfo`, `updateCodeVerifier`, `updateOAuthState`; main callers: `mcp/oauth-provider.ts`, `cli/cmd/mcp.ts`; tests: `test/mcp/oauth-auto-connect.test.ts`
- [ ] `src/plugin/index.ts` (`Plugin`) - facades: `trigger`, `list`, `init`; main callers: `agent/agent.ts`, `session/llm.ts`, `project/bootstrap.ts`; tests: `test/plugin/trigger.test.ts`, `test/provider/provider.test.ts`
- [ ] `src/project/project.ts` (`Project`) - facades: `fromDirectory`, `discover`, `initGit`, `update`, `sandboxes`, `addSandbox`, `removeSandbox`; main callers: `project/instance.ts`, `server/instance/project.ts`, `server/instance/experimental.ts`; tests: `test/project/project.test.ts`, `test/project/migrate-global.test.ts`
- [ ] `src/snapshot/index.ts` (`Snapshot`) - facades: `init`, `track`, `patch`, `restore`, `revert`, `diff`, `diffFull`; main callers: `project/bootstrap.ts`, `cli/cmd/debug/snapshot.ts`; tests: `test/snapshot/snapshot.test.ts`, `test/session/revert-compact.test.ts`
## Excluded `makeRuntime(...)` sites
- `src/bus/index.ts` - core bus plumbing, not a normal facade-removal target.
- `src/effect/cross-spawn-spawner.ts` - runtime helper for `ChildProcessSpawner`, not a service namespace facade.

View File

@@ -180,7 +180,7 @@ That is fine for leaf files like `schema.ts`. Keep the service surface in the ow
Service-shape migrated (single namespace, traced methods, `InstanceState` where needed).
This checklist is only about the service shape migration. Many of these services still keep `makeRuntime(...)` plus async facade exports; that facade-removal phase is tracked separately in [Destroying the facades](#destroying-the-facades).
This checklist is only about the service shape migration. Many of these services still keep `makeRuntime(...)` plus async facade exports; that facade-removal phase is tracked separately in `facades.md`.
- [x] `Account``account/index.ts`
- [x] `Agent``agent/agent.ts`
@@ -263,7 +263,7 @@ Tool-specific filesystem cleanup notes live in `tools.md`.
## Destroying the facades
This phase is still broadly open. As of 2026-04-11 there are still 31 `makeRuntime(...)` call sites under `src/`, and many service namespaces still export async facade helpers like `export async function read(...) { return runPromise(...) }`.
This phase is still broadly open. As of 2026-04-13 there are still 15 `makeRuntime(...)` call sites under `src/`, with 13 still in scope for facade removal. The live checklist now lives in `facades.md`.
These facades exist because cyclic imports used to force each service to build its own independent runtime. Now that the layer DAG is acyclic and `AppRuntime` (`src/effect/app-runtime.ts`) composes everything into one `ManagedRuntime`, we're removing them.

View File

@@ -40,6 +40,7 @@ import type { ACPConfig } from "./types"
import { Provider } from "../provider/provider"
import { ModelID, ProviderID } from "../provider/schema"
import { Agent as AgentModule } from "../agent/agent"
import { AppRuntime } from "@/effect/app-runtime"
import { Installation } from "@/installation"
import { MessageV2 } from "@/session/message-v2"
import { Config } from "@/config/config"
@@ -1166,7 +1167,7 @@ export namespace ACP {
this.sessionManager.get(sessionId).modeId ||
(await (async () => {
if (!availableModes.length) return undefined
const defaultAgentName = await AgentModule.defaultAgent()
const defaultAgentName = await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent()))
const resolvedModeId =
availableModes.find((mode) => mode.name === defaultAgentName)?.id ?? availableModes[0].id
this.sessionManager.setMode(sessionId, resolvedModeId)
@@ -1367,7 +1368,8 @@ export namespace ACP {
if (!current) {
this.sessionManager.setModel(session.id, model)
}
const agent = session.modeId ?? (await AgentModule.defaultAgent())
const agent =
session.modeId ?? (await AppRuntime.runPromise(AgentModule.Service.use((svc) => svc.defaultAgent())))
const parts: Array<
| { type: "text"; text: string; synthetic?: boolean; ignored?: boolean }

View File

@@ -21,7 +21,6 @@ import { Plugin } from "@/plugin"
import { Skill } from "../skill"
import { Effect, Context, Layer } from "effect"
import { InstanceState } from "@/effect/instance-state"
import { makeRuntime } from "@/effect/run-service"
export namespace Agent {
export const Info = z
@@ -404,22 +403,4 @@ export namespace Agent {
Layer.provide(Config.defaultLayer),
Layer.provide(Skill.defaultLayer),
)
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function get(agent: string) {
return runPromise((svc) => svc.get(agent))
}
export async function list() {
return runPromise((svc) => svc.list())
}
export async function defaultAgent() {
return runPromise((svc) => svc.defaultAgent())
}
export async function generate(input: { description: string; model?: { providerID: ProviderID; modelID: ModelID } }) {
return runPromise((svc) => svc.generate(input))
}
}

4
packages/opencode/src/audio.d.ts vendored Normal file
View File

@@ -0,0 +1,4 @@
// Ambient module declaration so TypeScript accepts `import sound from "./x.wav"`.
// The imported value is the bundled asset's path as a string.
declare module "*.wav" {
const file: string
export default file
}

View File

@@ -16,25 +16,18 @@ export namespace BusEvent {
}
export function payloads() {
return z
.discriminatedUnion(
"type",
registry
.entries()
.map(([type, def]) => {
return z
.object({
type: z.literal(type),
properties: def.properties,
})
.meta({
ref: "Event" + "." + def.type,
})
return registry
.entries()
.map(([type, def]) => {
return z
.object({
type: z.literal(type),
properties: def.properties,
})
.meta({
ref: "Event" + "." + def.type,
})
.toArray() as any,
)
.meta({
ref: "Event",
})
.toArray()
}
}

View File

@@ -1,5 +1,6 @@
import { cmd } from "./cmd"
import * as prompts from "@clack/prompts"
import { AppRuntime } from "@/effect/app-runtime"
import { UI } from "../ui"
import { Global } from "../../global"
import { Agent } from "../../agent/agent"
@@ -110,7 +111,9 @@ const AgentCreateCommand = cmd({
const spinner = prompts.spinner()
spinner.start("Generating agent configuration...")
const model = args.model ? Provider.parseModel(args.model) : undefined
const generated = await Agent.generate({ description, model }).catch((error) => {
const generated = await AppRuntime.runPromise(
Agent.Service.use((svc) => svc.generate({ description, model })),
).catch((error) => {
spinner.stop(`LLM failed to generate agent: ${error.message}`, 1)
if (isFullyNonInteractive) process.exit(1)
throw new UI.CancelledError()
@@ -220,7 +223,7 @@ const AgentListCommand = cmd({
await Instance.provide({
directory: process.cwd(),
async fn() {
const agents = await Agent.list()
const agents = await AppRuntime.runPromise(Agent.Service.use((svc) => svc.list()))
const sortedAgents = agents.sort((a, b) => {
if (a.native !== b.native) {
return a.native ? -1 : 1

View File

@@ -35,7 +35,7 @@ export const AgentCommand = cmd({
async handler(args) {
await bootstrap(process.cwd(), async () => {
const agentName = args.name as string
const agent = await Agent.get(agentName)
const agent = await AppRuntime.runPromise(Agent.Service.use((svc) => svc.get(agentName)))
if (!agent) {
process.stderr.write(
`Agent ${agentName} not found, run '${basename(process.execPath)} agent list' to get an agent list` + EOL,

View File

@@ -361,7 +361,6 @@ export const McpLogoutCommand = cmd({
UI.empty()
prompts.intro("MCP OAuth Logout")
const authPath = path.join(Global.Path.data, "mcp-auth.json")
const credentials = await AppRuntime.runPromise(McpAuth.Service.use((auth) => auth.all()))
const serverNames = Object.keys(credentials)
@@ -717,6 +716,11 @@ export const McpDebugCommand = cmd({
// Try to discover OAuth metadata
const oauthConfig = typeof serverConfig.oauth === "object" ? serverConfig.oauth : undefined
const auth = await AppRuntime.runPromise(
Effect.gen(function* () {
return yield* McpAuth.Service
}),
)
const authProvider = new McpOAuthProvider(
serverName,
serverConfig.url,
@@ -729,6 +733,7 @@ export const McpDebugCommand = cmd({
{
onRedirect: async () => {},
},
auth,
)
prompts.log.info("Testing OAuth flow (without completing authorization)...")

View File

@@ -27,6 +27,7 @@ import { SkillTool } from "../../tool/skill"
import { BashTool } from "../../tool/bash"
import { TodoWriteTool } from "../../tool/todo"
import { Locale } from "../../util/locale"
import { AppRuntime } from "@/effect/app-runtime"
type ToolProps<T> = {
input: Tool.InferParameters<T>
@@ -573,6 +574,7 @@ export const RunCommand = cmd({
// Validate agent if specified
const agent = await (async () => {
if (!args.agent) return undefined
const name = args.agent
// When attaching, validate against the running server instead of local Instance state.
if (args.attach) {
@@ -590,12 +592,12 @@ export const RunCommand = cmd({
return undefined
}
const agent = modes.find((a) => a.name === args.agent)
const agent = modes.find((a) => a.name === name)
if (!agent) {
UI.println(
UI.Style.TEXT_WARNING_BOLD + "!",
UI.Style.TEXT_NORMAL,
`agent "${args.agent}" not found. Falling back to default agent`,
`agent "${name}" not found. Falling back to default agent`,
)
return undefined
}
@@ -604,20 +606,20 @@ export const RunCommand = cmd({
UI.println(
UI.Style.TEXT_WARNING_BOLD + "!",
UI.Style.TEXT_NORMAL,
`agent "${args.agent}" is a subagent, not a primary agent. Falling back to default agent`,
`agent "${name}" is a subagent, not a primary agent. Falling back to default agent`,
)
return undefined
}
return args.agent
return name
}
const entry = await Agent.get(args.agent)
const entry = await AppRuntime.runPromise(Agent.Service.use((svc) => svc.get(name)))
if (!entry) {
UI.println(
UI.Style.TEXT_WARNING_BOLD + "!",
UI.Style.TEXT_NORMAL,
`agent "${args.agent}" not found. Falling back to default agent`,
`agent "${name}" not found. Falling back to default agent`,
)
return undefined
}
@@ -625,11 +627,11 @@ export const RunCommand = cmd({
UI.println(
UI.Style.TEXT_WARNING_BOLD + "!",
UI.Style.TEXT_NORMAL,
`agent "${args.agent}" is a subagent, not a primary agent. Falling back to default agent`,
`agent "${name}" is a subagent, not a primary agent. Falling back to default agent`,
)
return undefined
}
return args.agent
return name
})()
const sessionID = await session(sdk)

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -1,82 +1,630 @@
import { TextAttributes, RGBA } from "@opentui/core"
import { For, type JSX } from "solid-js"
import { BoxRenderable, MouseButton, MouseEvent, RGBA, TextAttributes } from "@opentui/core"
import { For, createMemo, createSignal, onCleanup, type JSX } from "solid-js"
import { useTheme, tint } from "@tui/context/theme"
import { logo, marks } from "@/cli/logo"
import { Sound } from "@tui/util/sound"
import { logo } from "@/cli/logo"
// Shadow markers (rendered chars in parens):
// _ = full shadow cell (space with bg=shadow)
// ^ = letter top, shadow bottom (▀ with fg=letter, bg=shadow)
// ~ = shadow top only (▀ with fg=shadow)
const SHADOW_MARKER = new RegExp(`[${marks}]`)
const GAP = 1
const WIDTH = 0.76
const GAIN = 2.3
const FLASH = 2.15
const TRAIL = 0.28
const SWELL = 0.24
const WIDE = 1.85
const DRIFT = 1.45
const EXPAND = 1.62
const LIFE = 1020
const CHARGE = 3000
const HOLD = 90
const SINK = 40
const ARC = 2.2
const FORK = 1.2
const DIM = 1.04
const KICK = 0.86
const LAG = 60
const SUCK = 0.34
const SHIMMER_IN = 60
const SHIMMER_OUT = 2.8
const TRACE = 0.033
const TAIL = 1.8
const TRACE_IN = 200
const GLOW_OUT = 1600
const PEAK = RGBA.fromInts(255, 255, 255)
type Ring = {
x: number
y: number
at: number
force: number
kick: number
}
type Hold = {
x: number
y: number
at: number
glyph: number | undefined
}
type Release = {
x: number
y: number
at: number
glyph: number | undefined
level: number
rise: number
}
type Glow = {
glyph: number
at: number
force: number
}
type Frame = {
t: number
list: Ring[]
hold: Hold | undefined
release: Release | undefined
glow: Glow | undefined
spark: number
}
const LEFT = logo.left[0]?.length ?? 0
const FULL = logo.left.map((line, i) => line + " ".repeat(GAP) + logo.right[i])
const SPAN = Math.hypot(FULL[0]?.length ?? 0, FULL.length * 2) * 0.94
const NEAR = [
[1, 0],
[1, 1],
[0, 1],
[-1, 1],
[-1, 0],
[-1, -1],
[0, -1],
[1, -1],
] as const
// One cell's place along a traced glyph outline: `glyph` is the connected
// component id, `i` the cell's index within that glyph's routed path, and
// `l` the total path length (both assigned in mapGlyphs via route).
type Trace = {
glyph: number
i: number
l: number
}
// Constrain a value to the unit interval [0, 1].
function clamp(value: number) {
  return Math.min(1, Math.max(0, value))
}
// Linear interpolation from `from` to `to`, with t clamped to [0, 1].
function lerp(from: number, to: number, t: number) {
  const p = clamp(t)
  return from + (to - from) * p
}
// Smoothstep easing: clamp to [0, 1], then apply 3p^2 - 2p^3.
function ease(t: number) {
  const p = clamp(t)
  return (3 - 2 * p) * (p * p)
}
// Ease the squared progress so the curve starts even more slowly.
function push(t: number) {
  const progress = clamp(t)
  return ease(progress * progress)
}
// Eased progress of t across the [start, end) window.
// A degenerate window (end <= start) collapses to a step function at `end`.
function ramp(t: number, start: number, end: number) {
  if (end <= start) return ease(t >= end ? 1 : 0)
  const span = end - start
  return ease((t - start) / span)
}
// Map a brightness level n onto a colour ramp from `base` through a
// primary-tinted mid tone toward a near-white peak.
// n in [0, 1] eases toward the mid tone (sqrt curve, slightly overdriven);
// n > 1 approaches the top colour asymptotically via 1 - e^(-2.4(n-1)).
function glow(base: RGBA, theme: ReturnType<typeof useTheme>["theme"], n: number) {
const mid = tint(base, theme.primary, 0.84)
const top = tint(theme.primary, PEAK, 0.96)
if (n <= 1) return tint(base, mid, Math.min(1, Math.sqrt(Math.max(0, n)) * 1.14))
return tint(mid, top, Math.min(1, 1 - Math.exp(-2.4 * (n - 1))))
}
// Signed brightness: non-negative n brightens via glow(); negative n darkens
// toward the theme background, capped at an 82% blend.
function shade(base: RGBA, theme: ReturnType<typeof useTheme>["theme"], n: number) {
if (n >= 0) return glow(base, theme, n)
return tint(base, theme.background, Math.min(0.82, -n * 0.64))
}
// Attenuate only non-negative intensities; negative (darkening) values pass
// through unchanged.
function ghost(n: number, scale: number) {
  return n < 0 ? n : n * scale
}
// Deterministic sin-hash pseudo-random noise in [0, 1) for a (x, y, t) cell.
function noise(x: number, y: number, t: number) {
  const raw = Math.sin(x * 12.9898 + y * 78.233 + t * 0.043) * 43758.5453
  const whole = Math.floor(raw)
  return raw - whole
}
// A cell is "lit" unless it is blank or one of the shadow-only markers.
function lit(char: string) {
  const blank = char === " " || char === "_" || char === "~"
  return !blank
}
// Stable map key for a cell coordinate, e.g. "3,4".
function key(x: number, y: number) {
  return [x, y].join(",")
}
// Order a connected set of cells into a single drawing path.
// Greedy walk: start at the top-most (then left-most) cell and repeatedly step
// to the unvisited 8-neighbour whose direction best matches the current
// heading (dot product), breaking ties by Manhattan distance. When no
// neighbour remains, jump to the nearest unvisited cell and reset the heading
// to rightward. Returns the cells in visit order.
function route(list: Array<{ x: number; y: number }>) {
const left = new Map(list.map((item) => [key(item.x, item.y), item]))
const path: Array<{ x: number; y: number }> = []
let cur = [...left.values()].sort((a, b) => a.y - b.y || a.x - b.x)[0]
let dir = { x: 1, y: 0 }
while (cur) {
path.push(cur)
left.delete(key(cur.x, cur.y))
if (!left.size) return path
// Prefer the neighbour most aligned with the current direction of travel.
const next = NEAR.map(([dx, dy]) => left.get(key(cur.x + dx, cur.y + dy)))
.filter((item): item is { x: number; y: number } => !!item)
.sort((a, b) => {
const ax = a.x - cur.x
const ay = a.y - cur.y
const bx = b.x - cur.x
const by = b.y - cur.y
const adot = ax * dir.x + ay * dir.y
const bdot = bx * dir.x + by * dir.y
if (adot !== bdot) return bdot - adot
return Math.abs(ax) + Math.abs(ay) - (Math.abs(bx) + Math.abs(by))
})[0]
if (!next) {
// Dead end: hop to the closest remaining cell and restart the heading.
cur = [...left.values()].sort((a, b) => {
const da = (a.x - cur.x) ** 2 + (a.y - cur.y) ** 2
const db = (b.x - cur.x) ** 2 + (b.y - cur.y) ** 2
return da - db
})[0]
dir = { x: 1, y: 0 }
continue
}
dir = { x: next.x - cur.x, y: next.y - cur.y }
cur = next
}
return path
}
/**
 * Partition the logo art (FULL) into connected glyphs and precompute lookup
 * tables used by the animation:
 *   glyph  — cell key -> glyph id (8-connected flood fill)
 *   trace  — cell key -> position along the glyph's route() path
 *   center — glyph id -> centroid in screen space (x + 0.5, y doubled + 1,
 *            matching the projection used throughout the effect functions)
 */
function mapGlyphs() {
  // Collect every visible ("lit") cell of the art.
  const cells = [] as Array<{ x: number; y: number }>
  for (let y = 0; y < FULL.length; y++) {
    for (let x = 0; x < (FULL[y]?.length ?? 0); x++) {
      if (lit(FULL[y]?.[x] ?? " ")) cells.push({ x, y })
    }
  }
  const all = new Map(cells.map((item) => [key(item.x, item.y), item]))
  const seen = new Set<string>()
  const glyph = new Map<string, number>()
  const trace = new Map<string, Trace>()
  const center = new Map<number, { x: number; y: number }>()
  let id = 0
  for (const item of cells) {
    const start = key(item.x, item.y)
    if (seen.has(start)) continue
    // Iterative DFS flood fill over 8-connected neighbors.
    const stack = [item]
    const part = [] as Array<{ x: number; y: number }>
    seen.add(start)
    while (stack.length) {
      const cur = stack.pop()!
      part.push(cur)
      glyph.set(key(cur.x, cur.y), id)
      for (const [dx, dy] of NEAR) {
        const next = all.get(key(cur.x + dx, cur.y + dy))
        if (!next) continue
        const mark = key(next.x, next.y)
        if (seen.has(mark)) continue
        seen.add(mark)
        stack.push(next)
      }
    }
    // Order the component's cells into a drawable outline.
    const path = route(part)
    path.forEach((cell, i) => trace.set(key(cell.x, cell.y), { glyph: id, i, l: path.length }))
    center.set(id, {
      x: part.reduce((sum, item) => sum + item.x, 0) / part.length + 0.5,
      y: (part.reduce((sum, item) => sum + item.y, 0) / part.length) * 2 + 1,
    })
    id++
  }
  return { glyph, trace, center }
}
// Computed once at module load; FULL is static art.
const MAP = mapGlyphs()
/**
 * Sparkle intensity left just behind each expanding ring at cell (x, y).
 * Scans all live rings and keeps the strongest contribution: a gaussian band
 * trailing the ring front, modulated by a slow sine wobble and fading with age.
 */
function shimmer(x: number, y: number, frame: Frame) {
  let best = 0
  for (const item of frame.list) {
    const age = frame.t - item.at
    if (age < SHIMMER_IN || age > LIFE) continue
    const dx = x + 0.5 - item.x
    const dy = y * 2 + 1 - item.y
    const dist = Math.hypot(dx, dy)
    const p = age / LIFE
    // Ring radius eases out toward SPAN over the ring's lifetime.
    const r = SPAN * (1 - (1 - p) ** EXPAND)
    const lag = r - dist
    // Only cells trailing the front within the shimmer window contribute.
    if (lag < 0.18 || lag > SHIMMER_OUT) continue
    const band = Math.exp(-(((lag - 1.05) / 0.68) ** 2))
    const wobble = 0.5 + 0.5 * Math.sin(frame.t * 0.035 + x * 0.9 + y * 1.7)
    const glint = band * wobble * (1 - p) ** 1.45
    if (glint > best) best = glint
  }
  return best
}
/**
 * Visibility of the charge afterglow at (x, y) as a released ring sweeps by:
 * 1 ahead of the ring front, fading linearly to 0 over 1.35 units behind it,
 * and 0 once the ring has expired.
 */
function remain(x: number, y: number, item: Release, t: number) {
  const age = t - item.at
  if (age < 0 || age > LIFE) return 0
  const p = age / LIFE
  // item holds raw cell coords; project the same way as the cell (offsets cancel).
  const dx = x + 0.5 - item.x - 0.5
  const dy = y * 2 + 1 - item.y * 2 - 1
  const dist = Math.hypot(dx, dy)
  const r = SPAN * (1 - (1 - p) ** EXPAND)
  // Front has not reached this cell yet: fully visible.
  if (dist > r) return 1
  // Behind the front: linear fade over 1.35 units. clamp() already maps
  // (r - dist) / 1.35 >= 1 to 0, so the previous inline ternary was redundant.
  return clamp(1 - (r - dist) / 1.35)
}
/**
 * Total ripple intensity at cell (x, y), summed over all live rings.
 * Each ring contributes several additive terms (edge band, swell, inner
 * trail, initial flash, wake) minus two suppressive terms (kick, suck)
 * centered on the origin. `live` enables the wake term for lit glyph cells.
 */
function wave(x: number, y: number, frame: Frame, live: boolean) {
  return frame.list.reduce((sum, item) => {
    const age = frame.t - item.at
    if (age < 0 || age > LIFE) return sum
    const p = age / LIFE
    const dx = x + 0.5 - item.x
    const dy = y * 2 + 1 - item.y
    const dist = Math.hypot(dx, dy)
    // Ring radius eases out toward SPAN; overall fade with age.
    const r = SPAN * (1 - (1 - p) ** EXPAND)
    const fade = (1 - p) ** 1.32
    // Per-cell jitter factor in [1.02, 1.54) to roughen the ring edge.
    const j = 1.02 + noise(x + item.x * 0.7, y + item.y * 0.7, item.at * 0.002 + age * 0.06) * 0.52
    // Gaussian band at the ring front.
    const edge = Math.exp(-(((dist - r) / WIDTH) ** 2)) * GAIN * fade * item.force * j
    // Wider, softer band lagging DRIFT behind the front.
    const swell = Math.exp(-(((dist - Math.max(0, r - DRIFT)) / WIDE) ** 2)) * SWELL * fade * item.force
    // Exponential trail inside the ring.
    const trail = dist < r ? Math.exp(-(r - dist) / 2.4) * TRAIL * fade * item.force * lerp(0.92, 1.22, j) : 0
    // Short-lived flash at the origin (first ~140ms).
    const flash = Math.exp(-(dist * dist) / 3.2) * FLASH * item.force * Math.max(0, 1 - age / 140) * lerp(0.95, 1.18, j)
    // Brief darkening at and just around the origin (first ~100-110ms).
    const kick = Math.exp(-(dist * dist) / 2) * item.kick * Math.max(0, 1 - age / 100)
    const suck = Math.exp(-(((dist - 1.25) / 0.75) ** 2)) * item.kick * SUCK * Math.max(0, 1 - age / 110)
    // Soft glow just inside the front, only for lit glyph cells.
    const wake = live && dist < r ? Math.exp(-(r - dist) / 1.25) * 0.32 * fade : 0
    return sum + edge + swell + trail + flash + wake - kick - suck
  }, 0)
}
/**
 * Charge-field intensity at cell (x, y) while a press is held (or decaying
 * after release). Sums a gaussian core, two expanding shells, rotating arc
 * and seam patterns, noise sparks, and a flicker at the origin, minus a
 * global dimming term; everything is faded out by remain() after release.
 */
function field(x: number, y: number, frame: Frame) {
  const held = frame.hold
  const rest = frame.release
  const item = held ?? rest
  if (!item) return 0
  // While held: progress from HOLD to CHARGE; after release: frozen values.
  const rise = held ? ramp(frame.t - held.at, HOLD, CHARGE) : rest!.rise
  const level = held ? push(rise) : rest!.level
  const body = rise
  const storm = level * level
  const sink = held ? ramp(frame.t - held.at, SINK, CHARGE) : rest!.rise
  // Offsets cancel: item holds raw cell coords, projected like the cell.
  const dx = x + 0.5 - item.x - 0.5
  const dy = y * 2 + 1 - item.y * 2 - 1
  const dist = Math.hypot(dx, dy)
  const angle = Math.atan2(dy, dx)
  // Rotation speed grows with charge; dimming breathes with a slow sine.
  const spin = frame.t * lerp(0.008, 0.018, storm)
  const dim = lerp(0, DIM, sink) * lerp(0.99, 1.01, 0.5 + 0.5 * Math.sin(frame.t * 0.014))
  // Central glow, widening and brightening as the charge builds.
  const core = Math.exp(-(dist * dist) / Math.max(0.22, lerp(0.22, 3.2, body))) * lerp(0.42, 2.45, body)
  const shell =
    Math.exp(-(((dist - lerp(0.16, 2.05, body)) / Math.max(0.18, lerp(0.18, 0.82, body))) ** 2)) * lerp(0.1, 0.95, body)
  const ember =
    Math.exp(-(((dist - lerp(0.45, 2.65, body)) / Math.max(0.14, lerp(0.14, 0.62, body))) ** 2)) *
    lerp(0.02, 0.78, body)
  // Angular lobes: 3-fold rotating arc and 5-fold counter-rotating seam.
  const arc = Math.max(0, Math.cos(angle * 3 - spin + frame.spark * 2.2)) ** 8
  const seam = Math.max(0, Math.cos(angle * 5 + spin * 1.55)) ** 12
  const ring = Math.exp(-(((dist - lerp(1.05, 3, level)) / 0.48) ** 2)) * arc * lerp(0.03, 0.5 + ARC, storm)
  const fork = Math.exp(-(((dist - (1.55 + storm * 2.1)) / 0.36) ** 2)) * seam * storm * FORK
  // Random sparks near the origin, increasingly likely at high charge.
  const spark = Math.max(0, noise(x, y, frame.t) - lerp(0.94, 0.66, storm)) * lerp(0, 5.4, storm)
  const glitch = spark * Math.exp(-dist / Math.max(1.2, 3.1 - storm))
  // Diagonal "crack" streaks at a charge-dependent radius.
  const crack = Math.max(0, Math.cos((dx - dy) * 1.6 + spin * 2.1)) ** 18
  const lash = crack * Math.exp(-(((dist - (1.95 + storm * 2)) / 0.28) ** 2)) * storm * 1.1
  // Occasional brightness flicker right at the press point.
  const flicker =
    Math.max(0, noise(item.x * 3.1, item.y * 2.7, frame.t * 1.7) - 0.72) *
    Math.exp(-(dist * dist) / 0.15) *
    lerp(0.08, 0.42, body)
  // After release (no new hold), the sweeping ring erases the field.
  const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1
  return (core + shell + ember + ring + fork + glitch + lash + flicker - dim) * fade
}
/**
 * Highlight on glyph cells near the press point: a tight gaussian around the
 * hold/release origin, scaled by the charge progress and swept away by the
 * ring (via remain) after release.
 */
function pick(x: number, y: number, frame: Frame) {
  const held = frame.hold
  const rest = frame.release
  const item = held ?? rest
  if (!item) return 0
  const rise = held ? ramp(frame.t - held.at, HOLD, CHARGE) : rest!.rise
  const fade = rest && !held ? remain(x, y, rest, frame.t) : 1
  const dx = x + 0.5 - item.x - 0.5
  const dy = y * 2 + 1 - item.y * 2 - 1
  const dist = Math.hypot(dx, dy)
  const falloff = Math.exp(-(dist * dist) / 1.7)
  return falloff * lerp(0.2, 0.96, rise) * fade
}
function select(x: number, y: number) {
const direct = MAP.glyph.get(key(x, y))
if (direct !== undefined) return direct
const near = NEAR.map(([dx, dy]) => MAP.glyph.get(key(x + dx, y + dy))).find(
(item): item is number => item !== undefined,
)
return near
}
/**
 * Intensity of the outline tracer running around the pressed glyph.
 * A bright head (with soft glow and a trailing tail) cycles along the
 * glyph's route() path, speeding up as the charge rises; it appears over
 * TRACE_IN ms on press and fades via remain() after release.
 */
function trace(x: number, y: number, frame: Frame) {
  const held = frame.hold
  const rest = frame.release
  const item = held ?? rest
  if (!item || item.glyph === undefined) return 0
  const step = MAP.trace.get(key(x, y))
  // Only cells on the pressed glyph's path (of length >= 2) light up.
  if (!step || step.glyph !== item.glyph || step.l < 2) return 0
  const age = frame.t - item.at
  const rise = held ? ramp(age, HOLD, CHARGE) : rest!.rise
  const appear = held ? ramp(age, 0, TRACE_IN) : 1
  const speed = lerp(TRACE * 0.48, TRACE * 0.88, rise)
  // Head position wraps around the path; distances are circular.
  const head = (age * speed) % step.l
  const dist = Math.min(Math.abs(step.i - head), step.l - Math.abs(step.i - head))
  const tail = (head - TAIL + step.l) % step.l
  const lag = Math.min(Math.abs(step.i - tail), step.l - Math.abs(step.i - tail))
  const fade = frame.release && !frame.hold ? remain(x, y, frame.release, frame.t) : 1
  const core = Math.exp(-((dist / 1.05) ** 2)) * lerp(0.8, 2.35, rise)
  const glow = Math.exp(-((dist / 1.85) ** 2)) * lerp(0.08, 0.34, rise)
  const trail = Math.exp(-((lag / 1.45) ** 2)) * lerp(0.04, 0.42, rise)
  return (core + glow + trail) * appear * fade
}
/**
 * Whole-glyph flash triggered on release: every cell of the released glyph
 * lights up at once, biased toward the glyph centroid, then decays
 * quadratically over GLOW_OUT milliseconds.
 */
function bloom(x: number, y: number, frame: Frame) {
  const item = frame.glow
  if (!item) return 0
  const glyph = MAP.glyph.get(key(x, y))
  if (glyph !== item.glyph) return 0
  const age = frame.t - item.at
  if (age < 0 || age > GLOW_OUT) return 0
  const p = age / GLOW_OUT
  const flash = (1 - p) ** 2
  // Look up the glyph centroid once instead of twice per cell.
  const mid = MAP.center.get(item.glyph)!
  const dx = x + 0.5 - mid.x
  const dy = y * 2 + 1 - mid.y
  const bias = Math.exp(-((Math.hypot(dx, dy) / 2.8) ** 2))
  return lerp(item.force, item.force * 0.18, p) * lerp(0.72, 1.1, bias) * flash
}
export function Logo() {
const { theme } = useTheme()
const [rings, setRings] = createSignal<Ring[]>([])
const [hold, setHold] = createSignal<Hold>()
const [release, setRelease] = createSignal<Release>()
const [glow, setGlow] = createSignal<Glow>()
const [now, setNow] = createSignal(0)
let box: BoxRenderable | undefined
let timer: ReturnType<typeof setInterval> | undefined
let hum = false
const renderLine = (line: string, fg: RGBA, bold: boolean): JSX.Element[] => {
const shadow = tint(theme.background, fg, 0.25)
const stop = () => {
if (!timer) return
clearInterval(timer)
timer = undefined
}
const tick = () => {
const t = performance.now()
setNow(t)
const item = hold()
if (item && !hum && t - item.at >= HOLD) {
hum = true
Sound.start()
}
if (item && t - item.at >= CHARGE) {
burst(item.x, item.y)
}
let live = false
setRings((list) => {
const next = list.filter((item) => t - item.at < LIFE)
live = next.length > 0
return next
})
const flash = glow()
if (flash && t - flash.at >= GLOW_OUT) {
setGlow(undefined)
}
if (!live) setRelease(undefined)
if (live || hold() || release() || glow()) return
stop()
}
const start = () => {
if (timer) return
timer = setInterval(tick, 16)
}
const hit = (x: number, y: number) => {
const char = FULL[y]?.[x]
return char !== undefined && char !== " "
}
const press = (x: number, y: number, t: number) => {
const last = hold()
if (last) burst(last.x, last.y)
setNow(t)
if (!last) setRelease(undefined)
setHold({ x, y, at: t, glyph: select(x, y) })
hum = false
start()
}
const burst = (x: number, y: number) => {
const item = hold()
if (!item) return
hum = false
const t = performance.now()
const age = t - item.at
const rise = ramp(age, HOLD, CHARGE)
const level = push(rise)
setHold(undefined)
setRelease({ x, y, at: t, glyph: item.glyph, level, rise })
if (item.glyph !== undefined) {
setGlow({ glyph: item.glyph, at: t, force: lerp(0.18, 1.5, rise * level) })
}
setRings((list) => [
...list,
{
x: x + 0.5,
y: y * 2 + 1,
at: t,
force: lerp(0.82, 2.55, level),
kick: lerp(0.32, 0.32 + KICK, level),
},
])
setNow(t)
start()
Sound.pulse(lerp(0.8, 1, level))
}
const frame = createMemo(() => {
const t = now()
const item = hold()
return {
t,
list: rings(),
hold: item,
release: release(),
glow: glow(),
spark: item ? noise(item.x, item.y, t) : 0,
}
})
const dusk = createMemo(() => {
const base = frame()
const t = base.t - LAG
const item = base.hold
return {
t,
list: base.list,
hold: item,
release: base.release,
glow: base.glow,
spark: item ? noise(item.x, item.y, t) : 0,
}
})
const renderLine = (
line: string,
y: number,
ink: RGBA,
bold: boolean,
off: number,
frame: Frame,
dusk: Frame,
): JSX.Element[] => {
const shadow = tint(theme.background, ink, 0.25)
const attrs = bold ? TextAttributes.BOLD : undefined
const elements: JSX.Element[] = []
let i = 0
while (i < line.length) {
const rest = line.slice(i)
const markerIndex = rest.search(SHADOW_MARKER)
return [...line].map((char, i) => {
const h = field(off + i, y, frame)
const n = wave(off + i, y, frame, lit(char)) + h
const s = wave(off + i, y, dusk, false) + h
const p = lit(char) ? pick(off + i, y, frame) : 0
const e = lit(char) ? trace(off + i, y, frame) : 0
const b = lit(char) ? bloom(off + i, y, frame) : 0
const q = shimmer(off + i, y, frame)
if (markerIndex === -1) {
elements.push(
<text fg={fg} attributes={attrs} selectable={false}>
{rest}
</text>,
)
break
}
if (markerIndex > 0) {
elements.push(
<text fg={fg} attributes={attrs} selectable={false}>
{rest.slice(0, markerIndex)}
</text>,
if (char === "_") {
return (
<text
fg={shade(ink, theme, s * 0.08)}
bg={shade(shadow, theme, ghost(s, 0.24) + ghost(q, 0.06))}
attributes={attrs}
selectable={false}
>
{" "}
</text>
)
}
const marker = rest[markerIndex]
switch (marker) {
case "_":
elements.push(
<text fg={fg} bg={shadow} attributes={attrs} selectable={false}>
{" "}
</text>,
)
break
case "^":
elements.push(
<text fg={fg} bg={shadow} attributes={attrs} selectable={false}>
</text>,
)
break
case "~":
elements.push(
<text fg={shadow} attributes={attrs} selectable={false}>
</text>,
)
break
if (char === "^") {
return (
<text
fg={shade(ink, theme, n + p + e + b)}
bg={shade(shadow, theme, ghost(s, 0.18) + ghost(q, 0.05) + ghost(b, 0.08))}
attributes={attrs}
selectable={false}
>
</text>
)
}
i += markerIndex + 1
if (char === "~") {
return (
<text fg={shade(shadow, theme, ghost(s, 0.22) + ghost(q, 0.05))} attributes={attrs} selectable={false}>
</text>
)
}
if (char === " ") {
return (
<text fg={ink} attributes={attrs} selectable={false}>
{char}
</text>
)
}
return (
<text fg={shade(ink, theme, n + p + e + b)} attributes={attrs} selectable={false}>
{char}
</text>
)
})
}
onCleanup(() => {
stop()
hum = false
Sound.dispose()
})
const mouse = (evt: MouseEvent) => {
if (!box) return
if ((evt.type === "down" || evt.type === "drag") && evt.button === MouseButton.LEFT) {
const x = evt.x - box.x
const y = evt.y - box.y
if (!hit(x, y)) return
if (evt.type === "drag" && hold()) return
evt.preventDefault()
evt.stopPropagation()
const t = performance.now()
press(x, y, t)
return
}
return elements
if (!hold()) return
if (evt.type === "up") {
const item = hold()
if (!item) return
burst(item.x, item.y)
}
}
return (
<box>
<box ref={(item: BoxRenderable) => (box = item)}>
<box
position="absolute"
top={0}
left={0}
width={FULL[0]?.length ?? 0}
height={FULL.length}
zIndex={1}
onMouse={mouse}
/>
<For each={logo.left}>
{(line, index) => (
<box flexDirection="row" gap={1}>
<box flexDirection="row">{renderLine(line, theme.textMuted, false)}</box>
<box flexDirection="row">{renderLine(logo.right[index()], theme.text, true)}</box>
<box flexDirection="row">{renderLine(line, index(), theme.textMuted, false, 0, frame(), dusk())}</box>
<box flexDirection="row">
{renderLine(logo.right[index()], index(), theme.text, true, LEFT + GAP, frame(), dusk())}
</box>
</box>
)}
</For>

View File

@@ -8,6 +8,10 @@ export function useEvent() {
function subscribe(handler: (event: Event) => void) {
return sdk.event.on("event", (event) => {
if (event.payload.type === "sync") {
return
}
// Special hack for truly global events
if (event.directory === "global") {
handler(event.payload)

View File

@@ -0,0 +1,156 @@
import { Player } from "cli-sound"
import { mkdirSync } from "node:fs"
import { tmpdir } from "node:os"
import { basename, join } from "node:path"
import { Process } from "@/util/process"
import { which } from "@/util/which"
import pulseA from "../asset/pulse-a.wav" with { type: "file" }
import pulseB from "../asset/pulse-b.wav" with { type: "file" }
import pulseC from "../asset/pulse-c.wav" with { type: "file" }
import charge from "../asset/charge.wav" with { type: "file" }
// Bundled pulse samples, cycled round-robin by Sound.pulse().
const FILE = [pulseA, pulseB, pulseC]
// Looping charge hum played while a press is held.
const HUM = charge
// Temp directory the bundled assets are copied into before playback.
const DIR = join(tmpdir(), "opencode-sfx")
// CLI audio players to probe for, in preference order.
const LIST = [
  "ffplay",
  "mpv",
  "mpg123",
  "mpg321",
  "mplayer",
  "afplay",
  "play",
  "omxplayer",
  "aplay",
  "cmdmp3",
  "cvlc",
  "powershell.exe",
] as const
type Kind = (typeof LIST)[number]
/**
 * Build the argv for a given CLI audio player, using whatever volume flag
 * that tool expects (0-1 factor, 0-100 percentage, or none at all).
 */
function args(kind: Kind, file: string, volume: number) {
  const percent = String(Math.round(volume * 100))
  switch (kind) {
    case "ffplay":
      return [kind, "-autoexit", "-nodisp", "-af", `volume=${volume}`, file]
    case "mpv":
      return [kind, "--no-video", "--audio-display=no", "--volume", percent, file]
    case "mpg123":
    case "mpg321":
      return [kind, "-g", percent, file]
    case "mplayer":
      return [kind, "-vo", "null", "-volume", percent, file]
    case "afplay":
    case "omxplayer":
    case "aplay":
    case "cmdmp3":
      return [kind, file]
    case "play":
      return [kind, "-v", String(volume), file]
    case "cvlc":
      return [kind, `--gain=${volume}`, "--play-and-exit", file]
    default:
      // powershell.exe: double up single quotes for the embedded PS string literal.
      return [kind, "-c", `(New-Object Media.SoundPlayer '${file.replace(/'/g, "''")}').PlaySync()`]
  }
}
/**
 * Best-effort sound effects for the logo interaction: a looping hum while a
 * press charges and one-shot pulses on release. Playback degrades gracefully:
 * the cli-sound Player is tried first, then whichever CLI player is on PATH,
 * and silently does nothing if neither exists.
 */
export namespace Sound {
  // cli-sound Player: undefined = not yet tried, null = construction failed.
  let item: Player | null | undefined
  // Detected CLI player binary: undefined = not yet probed, null = none found.
  let kind: Kind | null | undefined
  // Currently running hum process, if any.
  let proc: Process.Child | undefined
  // Pending delayed-stop timer (see stop(delay)).
  let tail: ReturnType<typeof setTimeout> | undefined
  // Memoized promise resolving to on-disk copies of the bundled assets.
  let cache: Promise<{ hum: string; pulse: string[] }> | undefined
  // Generation counter: bumping it cancels any in-flight start().
  let seq = 0
  // Round-robin index into the pulse samples.
  let shot = 0
  // Create the cli-sound Player once; remember failure as null.
  function load() {
    if (item !== undefined) return item
    try {
      item = new Player({ volume: 0.35 })
    } catch {
      item = null
    }
    return item
  }
  // Copy a bundled asset into DIR (idempotent) and return its path.
  async function file(path: string) {
    mkdirSync(DIR, { recursive: true })
    const next = join(DIR, basename(path))
    const out = Bun.file(next)
    if (await out.exists()) return next
    await Bun.write(out, Bun.file(path))
    return next
  }
  // Materialize all assets exactly once.
  function asset() {
    cache ??= Promise.all([file(HUM), Promise.all(FILE.map(file))]).then(([hum, pulse]) => ({ hum, pulse }))
    return cache
  }
  // Probe PATH for the first available CLI player (memoized; null = none).
  function pick() {
    if (kind !== undefined) return kind
    kind = LIST.find((item) => which(item)) ?? null
    return kind
  }
  // Spawn the detected CLI player with all stdio ignored; undefined if none found.
  function run(file: string, volume: number) {
    const kind = pick()
    if (!kind) return
    return Process.spawn(args(kind, file, volume), {
      stdin: "ignore",
      stdout: "ignore",
      stderr: "ignore",
    })
  }
  // Cancel a pending delayed stop.
  function clear() {
    if (!tail) return
    clearTimeout(tail)
    tail = undefined
  }
  // Play via the in-process Player when available, else fall back to a CLI player.
  function play(file: string, volume: number) {
    const item = load()
    if (!item) return run(file, volume)?.exited
    return item.play(file, { volume }).catch(() => run(file, volume)?.exited)
  }
  /** Begin the charge hum, stopping any previous playback first. */
  export function start() {
    stop()
    const id = ++seq
    void asset().then(({ hum }) => {
      // A newer start()/stop() superseded this one while assets loaded.
      if (id !== seq) return
      const next = run(hum, 0.24)
      if (!next) return
      proc = next
      // Clear `proc` when the hum process exits (success or failure),
      // unless a newer generation has already taken over.
      void next.exited.then(
        () => {
          if (id !== seq) return
          if (proc === next) proc = undefined
        },
        () => {
          if (id !== seq) return
          if (proc === next) proc = undefined
        },
      )
    })
  }
  /** Stop the hum, immediately or after `delay` ms; always invalidates in-flight start(). */
  export function stop(delay = 0) {
    seq++
    clear()
    if (!proc) return
    const next = proc
    if (delay <= 0) {
      proc = undefined
      void Process.stop(next).catch(() => undefined)
      return
    }
    tail = setTimeout(() => {
      tail = undefined
      if (proc === next) proc = undefined
      void Process.stop(next).catch(() => undefined)
    }, delay)
  }
  /** Fire a one-shot pulse (cycling samples); the hum fades out 140ms later. */
  export function pulse(scale = 1) {
    stop(140)
    const index = shot++ % FILE.length
    void asset()
      .then(({ pulse }) => play(pulse[index], 0.26 + 0.14 * scale))
      .catch(() => undefined)
  }
  /** Release everything on component teardown. */
  export function dispose() {
    stop()
  }
}

View File

@@ -3,7 +3,7 @@ import path from "path"
import { Global } from "../global"
import fs from "fs/promises"
import z from "zod"
import { Effect, Layer, Context, Schema } from "effect"
import { Array as Arr, Effect, Layer, Context, Schema } from "effect"
import * as Stream from "effect/Stream"
import { ChildProcess } from "effect/unstable/process"
import { ChildProcessSpawner } from "effect/unstable/process/ChildProcessSpawner"
@@ -20,117 +20,99 @@ import { text } from "node:stream/consumers"
import { ZipReader, BlobReader, BlobWriter } from "@zip.js/zip.js"
import { Log } from "@/util/log"
import { zod } from "@/util/effect-zod"
export namespace Ripgrep {
const log = Log.create({ service: "ripgrep" })
const Stats = z.object({
elapsed: z.object({
secs: z.number(),
nanos: z.number(),
human: z.string(),
const stats = Schema.Struct({
elapsed: Schema.Struct({
secs: Schema.Number,
nanos: Schema.Number,
human: Schema.String,
}),
searches: z.number(),
searches_with_match: z.number(),
bytes_searched: z.number(),
bytes_printed: z.number(),
matched_lines: z.number(),
matches: z.number(),
searches: Schema.Number,
searches_with_match: Schema.Number,
bytes_searched: Schema.Number,
bytes_printed: Schema.Number,
matched_lines: Schema.Number,
matches: Schema.Number,
})
const Begin = z.object({
type: z.literal("begin"),
data: z.object({
path: z.object({
text: z.string(),
}),
}),
})
export const Match = z.object({
type: z.literal("match"),
data: z.object({
path: z.object({
text: z.string(),
}),
lines: z.object({
text: z.string(),
}),
line_number: z.number(),
absolute_offset: z.number(),
submatches: z.array(
z.object({
match: z.object({
text: z.string(),
}),
start: z.number(),
end: z.number(),
}),
),
}),
})
const End = z.object({
type: z.literal("end"),
data: z.object({
path: z.object({
text: z.string(),
}),
binary_offset: z.number().nullable(),
stats: Stats,
}),
})
const Summary = z.object({
type: z.literal("summary"),
data: z.object({
elapsed_total: z.object({
human: z.string(),
nanos: z.number(),
secs: z.number(),
}),
stats: Stats,
}),
})
const Result = z.union([Begin, Match, End, Summary])
const Hit = Schema.Struct({
type: Schema.Literal("match"),
const begin = Schema.Struct({
type: Schema.Literal("begin"),
data: Schema.Struct({
path: Schema.Struct({
text: Schema.String,
}),
lines: Schema.Struct({
text: Schema.String,
}),
line_number: Schema.Number,
absolute_offset: Schema.Number,
submatches: Schema.mutable(
Schema.Array(
Schema.Struct({
match: Schema.Struct({
text: Schema.String,
}),
start: Schema.Number,
end: Schema.Number,
}),
),
),
}),
})
const Row = Schema.Union([
Schema.Struct({ type: Schema.Literal("begin"), data: Schema.Unknown }),
Hit,
Schema.Struct({ type: Schema.Literal("end"), data: Schema.Unknown }),
Schema.Struct({ type: Schema.Literal("summary"), data: Schema.Unknown }),
])
const item = Schema.Struct({
path: Schema.Struct({
text: Schema.String,
}),
lines: Schema.Struct({
text: Schema.String,
}),
line_number: Schema.Number,
absolute_offset: Schema.Number,
submatches: Schema.mutable(
Schema.Array(
Schema.Struct({
match: Schema.Struct({
text: Schema.String,
}),
start: Schema.Number,
end: Schema.Number,
}),
),
),
})
const decode = Schema.decodeUnknownEffect(Schema.fromJsonString(Row))
const match = Schema.Struct({
type: Schema.Literal("match"),
data: item,
})
const end = Schema.Struct({
type: Schema.Literal("end"),
data: Schema.Struct({
path: Schema.Struct({
text: Schema.String,
}),
binary_offset: Schema.NullOr(Schema.Number),
stats,
}),
})
const summary = Schema.Struct({
type: Schema.Literal("summary"),
data: Schema.Struct({
elapsed_total: Schema.Struct({
human: Schema.String,
nanos: Schema.Number,
secs: Schema.Number,
}),
stats,
}),
})
const row = Schema.Union([begin, match, end, summary])
const decode = Schema.decodeUnknownSync(Schema.fromJsonString(row))
export const Stats = zod(stats)
export const Begin = zod(begin)
export const Item = zod(item)
export const Match = zod(match)
export const End = zod(end)
export const Summary = zod(summary)
export const Result = zod(row)
export type Stats = z.infer<typeof Stats>
export type Result = z.infer<typeof Result>
export type Match = z.infer<typeof Match>
export type Item = Match["data"]
export type Item = z.infer<typeof Item>
export type Begin = z.infer<typeof Begin>
export type End = z.infer<typeof End>
export type Summary = z.infer<typeof Summary>
@@ -330,6 +312,7 @@ export namespace Ripgrep {
glob?: string[]
limit?: number
follow?: boolean
file?: string[]
}) => Effect.Effect<{ items: Item[]; partial: boolean }, PlatformError | Error>
}
@@ -351,6 +334,7 @@ export namespace Ripgrep {
maxDepth?: number
limit?: number
pattern?: string
file?: string[]
}) {
const out = [yield* bin(), input.mode === "search" ? "--json" : "--files", "--glob=!.git/*"]
if (input.follow) out.push("--follow")
@@ -363,7 +347,7 @@ export namespace Ripgrep {
}
if (input.limit) out.push(`--max-count=${input.limit}`)
if (input.mode === "search") out.push("--no-messages")
if (input.pattern) out.push("--", input.pattern)
if (input.pattern) out.push("--", input.pattern, ...(input.file ?? []))
return out
})
@@ -405,6 +389,7 @@ export namespace Ripgrep {
glob?: string[]
limit?: number
follow?: boolean
file?: string[]
}) {
return yield* Effect.scoped(
Effect.gen(function* () {
@@ -414,6 +399,7 @@ export namespace Ripgrep {
follow: input.follow,
limit: input.limit,
pattern: input.pattern,
file: input.file,
})
const handle = yield* spawner.spawn(
@@ -428,10 +414,13 @@ export namespace Ripgrep {
Stream.decodeText(handle.stdout).pipe(
Stream.splitLines,
Stream.filter((line) => line.length > 0),
Stream.mapEffect((line) =>
decode(line).pipe(Effect.mapError((cause) => new Error("invalid ripgrep output", { cause }))),
Stream.mapArrayEffect((lines) =>
Effect.try({
try: () => Arr.map(lines, (line) => decode(line)),
catch: (cause) => new Error("invalid ripgrep output", { cause }),
}),
),
Stream.filter((row): row is Schema.Schema.Type<typeof Hit> => row.type === "match"),
Stream.filter((row): row is Schema.Schema.Type<typeof match> => row.type === "match"),
Stream.map((row): Item => row.data),
Stream.runCollect,
Effect.map((chunk) => [...chunk]),

View File

@@ -3,7 +3,6 @@ import z from "zod"
import { Global } from "../global"
import { Effect, Layer, Context } from "effect"
import { AppFileSystem } from "@/filesystem"
import { makeRuntime } from "@/effect/run-service"
export namespace McpAuth {
export const Tokens = z.object({
@@ -142,32 +141,4 @@ export namespace McpAuth {
)
export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer))
const { runPromise } = makeRuntime(Service, defaultLayer)
// Async facades for backward compat (used by McpOAuthProvider, CLI)
export const get = async (mcpName: string) => runPromise((svc) => svc.get(mcpName))
export const getForUrl = async (mcpName: string, serverUrl: string) =>
runPromise((svc) => svc.getForUrl(mcpName, serverUrl))
export const all = async () => runPromise((svc) => svc.all())
export const set = async (mcpName: string, entry: Entry, serverUrl?: string) =>
runPromise((svc) => svc.set(mcpName, entry, serverUrl))
export const remove = async (mcpName: string) => runPromise((svc) => svc.remove(mcpName))
export const updateTokens = async (mcpName: string, tokens: Tokens, serverUrl?: string) =>
runPromise((svc) => svc.updateTokens(mcpName, tokens, serverUrl))
export const updateClientInfo = async (mcpName: string, clientInfo: ClientInfo, serverUrl?: string) =>
runPromise((svc) => svc.updateClientInfo(mcpName, clientInfo, serverUrl))
export const updateCodeVerifier = async (mcpName: string, codeVerifier: string) =>
runPromise((svc) => svc.updateCodeVerifier(mcpName, codeVerifier))
export const updateOAuthState = async (mcpName: string, oauthState: string) =>
runPromise((svc) => svc.updateOAuthState(mcpName, oauthState))
}

View File

@@ -293,6 +293,7 @@ export namespace MCP {
log.info("oauth redirect requested", { key, url: url.toString() })
},
},
auth,
)
}
@@ -744,6 +745,7 @@ export namespace MCP {
capturedUrl = url
},
},
auth,
)
const transport = new StreamableHTTPClientTransport(new URL(mcpConfig.url), { authProvider })

View File

@@ -5,6 +5,7 @@ import type {
OAuthClientInformation,
OAuthClientInformationFull,
} from "@modelcontextprotocol/sdk/shared/auth.js"
import { Effect } from "effect"
import { McpAuth } from "./auth"
import { Log } from "../util/log"
@@ -30,6 +31,7 @@ export class McpOAuthProvider implements OAuthClientProvider {
private serverUrl: string,
private config: McpOAuthConfig,
private callbacks: McpOAuthCallbacks,
private auth: McpAuth.Interface,
) {}
get redirectUrl(): string {
@@ -61,7 +63,7 @@ export class McpOAuthProvider implements OAuthClientProvider {
// Check stored client info (from dynamic registration)
// Use getForUrl to validate credentials are for the current server URL
const entry = await McpAuth.getForUrl(this.mcpName, this.serverUrl)
const entry = await Effect.runPromise(this.auth.getForUrl(this.mcpName, this.serverUrl))
if (entry?.clientInfo) {
// Check if client secret has expired
if (entry.clientInfo.clientSecretExpiresAt && entry.clientInfo.clientSecretExpiresAt < Date.now() / 1000) {
@@ -79,15 +81,17 @@ export class McpOAuthProvider implements OAuthClientProvider {
}
async saveClientInformation(info: OAuthClientInformationFull): Promise<void> {
await McpAuth.updateClientInfo(
this.mcpName,
{
clientId: info.client_id,
clientSecret: info.client_secret,
clientIdIssuedAt: info.client_id_issued_at,
clientSecretExpiresAt: info.client_secret_expires_at,
},
this.serverUrl,
await Effect.runPromise(
this.auth.updateClientInfo(
this.mcpName,
{
clientId: info.client_id,
clientSecret: info.client_secret,
clientIdIssuedAt: info.client_id_issued_at,
clientSecretExpiresAt: info.client_secret_expires_at,
},
this.serverUrl,
),
)
log.info("saved dynamically registered client", {
mcpName: this.mcpName,
@@ -97,7 +101,7 @@ export class McpOAuthProvider implements OAuthClientProvider {
async tokens(): Promise<OAuthTokens | undefined> {
// Use getForUrl to validate tokens are for the current server URL
const entry = await McpAuth.getForUrl(this.mcpName, this.serverUrl)
const entry = await Effect.runPromise(this.auth.getForUrl(this.mcpName, this.serverUrl))
if (!entry?.tokens) return undefined
return {
@@ -112,15 +116,17 @@ export class McpOAuthProvider implements OAuthClientProvider {
}
async saveTokens(tokens: OAuthTokens): Promise<void> {
await McpAuth.updateTokens(
this.mcpName,
{
accessToken: tokens.access_token,
refreshToken: tokens.refresh_token,
expiresAt: tokens.expires_in ? Date.now() / 1000 + tokens.expires_in : undefined,
scope: tokens.scope,
},
this.serverUrl,
await Effect.runPromise(
this.auth.updateTokens(
this.mcpName,
{
accessToken: tokens.access_token,
refreshToken: tokens.refresh_token,
expiresAt: tokens.expires_in ? Date.now() / 1000 + tokens.expires_in : undefined,
scope: tokens.scope,
},
this.serverUrl,
),
)
log.info("saved oauth tokens", { mcpName: this.mcpName })
}
@@ -131,11 +137,11 @@ export class McpOAuthProvider implements OAuthClientProvider {
}
async saveCodeVerifier(codeVerifier: string): Promise<void> {
await McpAuth.updateCodeVerifier(this.mcpName, codeVerifier)
await Effect.runPromise(this.auth.updateCodeVerifier(this.mcpName, codeVerifier))
}
async codeVerifier(): Promise<string> {
const entry = await McpAuth.get(this.mcpName)
const entry = await Effect.runPromise(this.auth.get(this.mcpName))
if (!entry?.codeVerifier) {
throw new Error(`No code verifier saved for MCP server: ${this.mcpName}`)
}
@@ -143,11 +149,11 @@ export class McpOAuthProvider implements OAuthClientProvider {
}
async saveState(state: string): Promise<void> {
await McpAuth.updateOAuthState(this.mcpName, state)
await Effect.runPromise(this.auth.updateOAuthState(this.mcpName, state))
}
async state(): Promise<string> {
const entry = await McpAuth.get(this.mcpName)
const entry = await Effect.runPromise(this.auth.get(this.mcpName))
if (entry?.oauthState) {
return entry.oauthState
}
@@ -159,28 +165,28 @@ export class McpOAuthProvider implements OAuthClientProvider {
const newState = Array.from(crypto.getRandomValues(new Uint8Array(32)))
.map((b) => b.toString(16).padStart(2, "0"))
.join("")
await McpAuth.updateOAuthState(this.mcpName, newState)
await Effect.runPromise(this.auth.updateOAuthState(this.mcpName, newState))
return newState
}
async invalidateCredentials(type: "all" | "client" | "tokens"): Promise<void> {
log.info("invalidating credentials", { mcpName: this.mcpName, type })
const entry = await McpAuth.get(this.mcpName)
const entry = await Effect.runPromise(this.auth.get(this.mcpName))
if (!entry) {
return
}
switch (type) {
case "all":
await McpAuth.remove(this.mcpName)
await Effect.runPromise(this.auth.remove(this.mcpName))
break
case "client":
delete entry.clientInfo
await McpAuth.set(this.mcpName, entry)
await Effect.runPromise(this.auth.set(this.mcpName, entry))
break
case "tokens":
delete entry.tokens
await McpAuth.set(this.mcpName, entry)
await Effect.runPromise(this.auth.set(this.mcpName, entry))
break
}
}

View File

@@ -2,7 +2,6 @@ import { Bus } from "@/bus"
import { BusEvent } from "@/bus/bus-event"
import { Config } from "@/config/config"
import { InstanceState } from "@/effect/instance-state"
import { makeRuntime } from "@/effect/run-service"
import { ProjectID } from "@/project/schema"
import { Instance } from "@/project/instance"
import { MessageID, SessionID } from "@/session/schema"
@@ -308,18 +307,4 @@ export namespace Permission {
}
export const defaultLayer = layer.pipe(Layer.provide(Bus.layer))
export const { runPromise } = makeRuntime(Service, defaultLayer)
export async function ask(input: z.infer<typeof AskInput>) {
return runPromise((s) => s.ask(input))
}
export async function reply(input: z.infer<typeof ReplyInput>) {
return runPromise((s) => s.reply(input))
}
export async function list() {
return runPromise((s) => s.list())
}
}

View File

@@ -21,8 +21,6 @@ const disposal = {
all: undefined as Promise<void> | undefined,
}
function emitDisposed(directory: string) {}
function boot(input: { directory: string; init?: () => Promise<any>; worktree?: string; project?: Project.Info }) {
return iife(async () => {
const ctx =

View File

@@ -1,8 +1,10 @@
import z from "zod"
import { Hono } from "hono"
import { describeRoute, resolver } from "hono-openapi"
import { streamSSE } from "hono/streaming"
import { Log } from "@/util/log"
import { BusEvent } from "@/bus/bus-event"
import { SyncEvent } from "@/sync"
import { Bus } from "@/bus"
import { AsyncQueue } from "../../util/queue"
@@ -20,7 +22,11 @@ export const EventRoutes = () =>
description: "Event stream",
content: {
"text/event-stream": {
schema: resolver(BusEvent.payloads()),
schema: resolver(
z.union(BusEvent.payloads()).meta({
ref: "Event",
}),
),
},
},
},

View File

@@ -22,7 +22,7 @@ export const FileRoutes = lazy(() =>
description: "Matches",
content: {
"application/json": {
schema: resolver(Ripgrep.Match.shape.data.array()),
schema: resolver(Ripgrep.Item.array()),
},
},
},

View File

@@ -109,7 +109,7 @@ export const GlobalRoutes = lazy(() =>
directory: z.string(),
project: z.string().optional(),
workspace: z.string().optional(),
payload: BusEvent.payloads(),
payload: z.union([...BusEvent.payloads(), ...SyncEvent.payloads()]),
})
.meta({
ref: "GlobalEvent",
@@ -135,52 +135,6 @@ export const GlobalRoutes = lazy(() =>
})
},
)
.get(
"/sync-event",
describeRoute({
summary: "Subscribe to global sync events",
description: "Get global sync events",
operationId: "global.sync-event.subscribe",
responses: {
200: {
description: "Event stream",
content: {
"text/event-stream": {
schema: resolver(
z
.object({
payload: SyncEvent.payloads(),
})
.meta({
ref: "SyncEvent",
}),
),
},
},
},
},
}),
async (c) => {
log.info("global sync event connected")
c.header("Cache-Control", "no-cache, no-transform")
c.header("X-Accel-Buffering", "no")
c.header("X-Content-Type-Options", "nosniff")
return streamEvents(c, (q) => {
return SyncEvent.subscribeAll(({ def, event }) => {
// TODO: don't pass def, just pass the type (and it should
// be versioned)
q.push(
JSON.stringify({
payload: {
...event,
type: SyncEvent.versionedType(def.type, def.version),
},
}),
)
})
})
},
)
.get(
"/config",
describeRoute({

View File

@@ -207,7 +207,7 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono =>
},
}),
async (c) => {
const modes = await Agent.list()
const modes = await AppRuntime.runPromise(Agent.Service.use((svc) => svc.list()))
return c.json(modes)
},
)

View File

@@ -1,6 +1,7 @@
import { Hono } from "hono"
import { describeRoute, validator, resolver } from "hono-openapi"
import z from "zod"
import { AppRuntime } from "@/effect/app-runtime"
import { Permission } from "@/permission"
import { PermissionID } from "@/permission/schema"
import { errors } from "../error"
@@ -36,11 +37,15 @@ export const PermissionRoutes = lazy(() =>
async (c) => {
const params = c.req.valid("param")
const json = c.req.valid("json")
await Permission.reply({
requestID: params.requestID,
reply: json.reply,
message: json.message,
})
await AppRuntime.runPromise(
Permission.Service.use((svc) =>
svc.reply({
requestID: params.requestID,
reply: json.reply,
message: json.message,
}),
),
)
return c.json(true)
},
)
@@ -62,7 +67,7 @@ export const PermissionRoutes = lazy(() =>
},
}),
async (c) => {
const permissions = await Permission.list()
const permissions = await AppRuntime.runPromise(Permission.Service.use((svc) => svc.list()))
return c.json(permissions)
},
),

View File

@@ -474,10 +474,14 @@ export const SessionRoutes = lazy(() =>
async (c) => {
const query = c.req.valid("query")
const params = c.req.valid("param")
const result = await SessionSummary.diff({
sessionID: params.sessionID,
messageID: query.messageID,
})
const result = await AppRuntime.runPromise(
SessionSummary.Service.use((summary) =>
summary.diff({
sessionID: params.sessionID,
messageID: query.messageID,
}),
),
)
return c.json(result)
},
)
@@ -547,27 +551,38 @@ export const SessionRoutes = lazy(() =>
async (c) => {
const sessionID = c.req.valid("param").sessionID
const body = c.req.valid("json")
const session = await Session.get(sessionID)
await SessionRevert.cleanup(session)
const msgs = await Session.messages({ sessionID })
let currentAgent = await Agent.defaultAgent()
for (let i = msgs.length - 1; i >= 0; i--) {
const info = msgs[i].info
if (info.role === "user") {
currentAgent = info.agent || (await Agent.defaultAgent())
break
}
}
await SessionCompaction.create({
sessionID,
agent: currentAgent,
model: {
providerID: body.providerID,
modelID: body.modelID,
},
auto: body.auto,
})
await SessionPrompt.loop({ sessionID })
await AppRuntime.runPromise(
Effect.gen(function* () {
const session = yield* Session.Service
const revert = yield* SessionRevert.Service
const compact = yield* SessionCompaction.Service
const prompt = yield* SessionPrompt.Service
const agent = yield* Agent.Service
yield* revert.cleanup(yield* session.get(sessionID))
const msgs = yield* session.messages({ sessionID })
const defaultAgent = yield* agent.defaultAgent()
let currentAgent = defaultAgent
for (let i = msgs.length - 1; i >= 0; i--) {
const info = msgs[i].info
if (info.role === "user") {
currentAgent = info.agent || defaultAgent
break
}
}
yield* compact.create({
sessionID,
agent: currentAgent,
model: {
providerID: body.providerID,
modelID: body.modelID,
},
auto: body.auto,
})
yield* prompt.loop({ sessionID })
}),
)
return c.json(true)
},
)
@@ -985,10 +1000,14 @@ export const SessionRoutes = lazy(() =>
async (c) => {
const sessionID = c.req.valid("param").sessionID
log.info("revert", c.req.valid("json"))
const session = await SessionRevert.revert({
sessionID,
...c.req.valid("json"),
})
const session = await AppRuntime.runPromise(
SessionRevert.Service.use((svc) =>
svc.revert({
sessionID,
...c.req.valid("json"),
}),
),
)
return c.json(session)
},
)
@@ -1018,7 +1037,7 @@ export const SessionRoutes = lazy(() =>
),
async (c) => {
const sessionID = c.req.valid("param").sessionID
const session = await SessionRevert.unrevert({ sessionID })
const session = await AppRuntime.runPromise(SessionRevert.Service.use((svc) => svc.unrevert({ sessionID })))
return c.json(session)
},
)
@@ -1051,10 +1070,14 @@ export const SessionRoutes = lazy(() =>
validator("json", z.object({ response: Permission.Reply })),
async (c) => {
const params = c.req.valid("param")
Permission.reply({
requestID: params.permissionID,
reply: c.req.valid("json").response,
})
await AppRuntime.runPromise(
Permission.Service.use((svc) =>
svc.reply({
requestID: params.permissionID,
reply: c.req.valid("json").response,
}),
),
)
return c.json(true)
},
),

View File

@@ -310,31 +310,51 @@ When constructing the summary, try to stick to this template:
}
if (!replay) {
const continueMsg = yield* session.updateMessage({
id: MessageID.ascending(),
role: "user",
sessionID: input.sessionID,
time: { created: Date.now() },
agent: userMessage.agent,
model: userMessage.model,
})
const text =
(input.overflow
? "The previous request exceeded the provider's size limit due to large media attachments. The conversation was compacted and media files were removed from context. If the user was asking about attached images or files, explain that the attachments were too large to process and suggest they try again with smaller or fewer files.\n\n"
: "") +
"Continue if you have next steps, or stop and ask for clarification if you are unsure how to proceed."
yield* session.updatePart({
id: PartID.ascending(),
messageID: continueMsg.id,
sessionID: input.sessionID,
type: "text",
synthetic: true,
text,
time: {
start: Date.now(),
end: Date.now(),
},
})
const info = yield* provider.getProvider(userMessage.model.providerID)
if (
(yield* plugin.trigger(
"experimental.compaction.autocontinue",
{
sessionID: input.sessionID,
agent: userMessage.agent,
model: yield* provider.getModel(userMessage.model.providerID, userMessage.model.modelID),
provider: {
source: info.source,
info,
options: info.options,
},
message: userMessage,
overflow: input.overflow === true,
},
{ enabled: true },
)).enabled
) {
const continueMsg = yield* session.updateMessage({
id: MessageID.ascending(),
role: "user",
sessionID: input.sessionID,
time: { created: Date.now() },
agent: userMessage.agent,
model: userMessage.model,
})
const text =
(input.overflow
? "The previous request exceeded the provider's size limit due to large media attachments. The conversation was compacted and media files were removed from context. If the user was asking about attached images or files, explain that the attachments were too large to process and suggest they try again with smaller or fewer files.\n\n"
: "") +
"Continue if you have next steps, or stop and ask for clarification if you are unsure how to proceed."
yield* session.updatePart({
id: PartID.ascending(),
messageID: continueMsg.id,
sessionID: input.sessionID,
type: "text",
synthetic: true,
text,
time: {
start: Date.now(),
end: Date.now(),
},
})
}
}
}

View File

@@ -1,7 +1,6 @@
import { Provider } from "@/provider/provider"
import { Log } from "@/util/log"
import { Cause, Effect, Layer, Record, Context } from "effect"
import * as Queue from "effect/Queue"
import { Context, Effect, Layer, Record } from "effect"
import * as Stream from "effect/Stream"
import { streamText, wrapLanguageModel, type ModelMessage, type Tool, tool, jsonSchema } from "ai"
import { mergeDeep, pipe } from "remeda"
@@ -21,10 +20,13 @@ import { Wildcard } from "@/util/wildcard"
import { SessionID } from "@/session/schema"
import { Auth } from "@/auth"
import { Installation } from "@/installation"
import { makeRuntime } from "@/effect/run-service"
export namespace LLM {
const log = Log.create({ service: "llm" })
const perms = makeRuntime(Permission.Service, Permission.defaultLayer)
export const OUTPUT_TOKEN_MAX = ProviderTransform.OUTPUT_TOKEN_MAX
type Result = Awaited<ReturnType<typeof streamText>>
export type StreamInput = {
user: MessageV2.User
@@ -45,7 +47,7 @@ export namespace LLM {
abort: AbortSignal
}
export type Event = Awaited<ReturnType<typeof stream>>["fullStream"] extends AsyncIterable<infer T> ? T : never
export type Event = Result["fullStream"] extends AsyncIterable<infer T> ? T : never
export interface Interface {
readonly stream: (input: StreamInput) => Stream.Stream<Event, unknown>
@@ -53,12 +55,340 @@ export namespace LLM {
export class Service extends Context.Service<Service, Interface>()("@opencode/LLM") {}
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
return Service.of({
stream(input) {
return Stream.scoped(
export const layer: Layer.Layer<Service, never, Auth.Service | Config.Service | Provider.Service | Plugin.Service> =
Layer.effect(
Service,
Effect.gen(function* () {
const auth = yield* Auth.Service
const config = yield* Config.Service
const provider = yield* Provider.Service
const plugin = yield* Plugin.Service
const run = Effect.fn("LLM.run")(function* (input: StreamRequest) {
const l = log
.clone()
.tag("providerID", input.model.providerID)
.tag("modelID", input.model.id)
.tag("sessionID", input.sessionID)
.tag("small", (input.small ?? false).toString())
.tag("agent", input.agent.name)
.tag("mode", input.agent.mode)
l.info("stream", {
modelID: input.model.id,
providerID: input.model.providerID,
})
const [language, cfg, item, info] = yield* Effect.all(
[
provider.getLanguage(input.model),
config.get(),
provider.getProvider(input.model.providerID),
auth.get(input.model.providerID),
],
{ concurrency: "unbounded" },
)
// TODO: move this to a proper hook
const isOpenaiOauth = item.id === "openai" && info?.type === "oauth"
const system: string[] = []
system.push(
[
// use agent prompt otherwise provider prompt
...(input.agent.prompt ? [input.agent.prompt] : SystemPrompt.provider(input.model)),
// any custom prompt passed into this call
...input.system,
// any custom prompt from last user message
...(input.user.system ? [input.user.system] : []),
]
.filter((x) => x)
.join("\n"),
)
const header = system[0]
yield* plugin.trigger(
"experimental.chat.system.transform",
{ sessionID: input.sessionID, model: input.model },
{ system },
)
// rejoin to maintain 2-part structure for caching if header unchanged
if (system.length > 2 && system[0] === header) {
const rest = system.slice(1)
system.length = 0
system.push(header, rest.join("\n"))
}
const variant =
!input.small && input.model.variants && input.user.model.variant
? input.model.variants[input.user.model.variant]
: {}
const base = input.small
? ProviderTransform.smallOptions(input.model)
: ProviderTransform.options({
model: input.model,
sessionID: input.sessionID,
providerOptions: item.options,
})
const options: Record<string, any> = pipe(
base,
mergeDeep(input.model.options),
mergeDeep(input.agent.options),
mergeDeep(variant),
)
if (isOpenaiOauth) {
options.instructions = system.join("\n")
}
const isWorkflow = language instanceof GitLabWorkflowLanguageModel
const messages = isOpenaiOauth
? input.messages
: isWorkflow
? input.messages
: [
...system.map(
(x): ModelMessage => ({
role: "system",
content: x,
}),
),
...input.messages,
]
const params = yield* plugin.trigger(
"chat.params",
{
sessionID: input.sessionID,
agent: input.agent.name,
model: input.model,
provider: item,
message: input.user,
},
{
temperature: input.model.capabilities.temperature
? (input.agent.temperature ?? ProviderTransform.temperature(input.model))
: undefined,
topP: input.agent.topP ?? ProviderTransform.topP(input.model),
topK: ProviderTransform.topK(input.model),
maxOutputTokens: ProviderTransform.maxOutputTokens(input.model),
options,
},
)
const { headers } = yield* plugin.trigger(
"chat.headers",
{
sessionID: input.sessionID,
agent: input.agent.name,
model: input.model,
provider: item,
message: input.user,
},
{
headers: {},
},
)
const tools = resolveTools(input)
// LiteLLM and some Anthropic proxies require the tools parameter to be present
// when message history contains tool calls, even if no tools are being used.
// Add a dummy tool that is never called to satisfy this validation.
// This is enabled for:
// 1. Providers with "litellm" in their ID or API ID (auto-detected)
// 2. Providers with explicit "litellmProxy: true" option (opt-in for custom gateways)
const isLiteLLMProxy =
item.options?.["litellmProxy"] === true ||
input.model.providerID.toLowerCase().includes("litellm") ||
input.model.api.id.toLowerCase().includes("litellm")
// LiteLLM/Bedrock rejects requests where the message history contains tool
// calls but no tools param is present. When there are no active tools (e.g.
// during compaction), inject a stub tool to satisfy the validation requirement.
// The stub description explicitly tells the model not to call it.
if (isLiteLLMProxy && Object.keys(tools).length === 0 && hasToolCalls(input.messages)) {
tools["_noop"] = tool({
description: "Do not call this tool. It exists only for API compatibility and must never be invoked.",
inputSchema: jsonSchema({
type: "object",
properties: {
reason: { type: "string", description: "Unused" },
},
}),
execute: async () => ({ output: "", title: "", metadata: {} }),
})
}
// Wire up toolExecutor for DWS workflow models so that tool calls
// from the workflow service are executed via opencode's tool system
// and results sent back over the WebSocket.
if (language instanceof GitLabWorkflowLanguageModel) {
const workflowModel = language as GitLabWorkflowLanguageModel & {
sessionID?: string
sessionPreapprovedTools?: string[]
approvalHandler?: (approvalTools: { name: string; args: string }[]) => Promise<{ approved: boolean }>
}
workflowModel.sessionID = input.sessionID
workflowModel.systemPrompt = system.join("\n")
workflowModel.toolExecutor = async (toolName, argsJson, _requestID) => {
const t = tools[toolName]
if (!t || !t.execute) {
return { result: "", error: `Unknown tool: ${toolName}` }
}
try {
const result = await t.execute!(JSON.parse(argsJson), {
toolCallId: _requestID,
messages: input.messages,
abortSignal: input.abort,
})
const output = typeof result === "string" ? result : (result?.output ?? JSON.stringify(result))
return {
result: output,
metadata: typeof result === "object" ? result?.metadata : undefined,
title: typeof result === "object" ? result?.title : undefined,
}
} catch (e: any) {
return { result: "", error: e.message ?? String(e) }
}
}
const ruleset = Permission.merge(input.agent.permission ?? [], input.permission ?? [])
workflowModel.sessionPreapprovedTools = Object.keys(tools).filter((name) => {
const match = ruleset.findLast((rule) => Wildcard.match(name, rule.permission))
return !match || match.action !== "ask"
})
const approvedToolsForSession = new Set<string>()
workflowModel.approvalHandler = Instance.bind(async (approvalTools) => {
const uniqueNames = [...new Set(approvalTools.map((t: { name: string }) => t.name))] as string[]
// Auto-approve tools that were already approved in this session
// (prevents infinite approval loops for server-side MCP tools)
if (uniqueNames.every((name) => approvedToolsForSession.has(name))) {
return { approved: true }
}
const id = PermissionID.ascending()
let reply: Permission.Reply | undefined
let unsub: (() => void) | undefined
try {
unsub = Bus.subscribe(Permission.Event.Replied, (evt) => {
if (evt.properties.requestID === id) reply = evt.properties.reply
})
const toolPatterns = approvalTools.map((t: { name: string; args: string }) => {
try {
const parsed = JSON.parse(t.args) as Record<string, unknown>
const title = (parsed?.title ?? parsed?.name ?? "") as string
return title ? `${t.name}: ${title}` : t.name
} catch {
return t.name
}
})
const uniquePatterns = [...new Set(toolPatterns)] as string[]
await perms.runPromise((svc) =>
svc.ask({
id,
sessionID: SessionID.make(input.sessionID),
permission: "workflow_tool_approval",
patterns: uniquePatterns,
metadata: { tools: approvalTools },
always: uniquePatterns,
ruleset: [],
}),
)
for (const name of uniqueNames) approvedToolsForSession.add(name)
workflowModel.sessionPreapprovedTools = [
...(workflowModel.sessionPreapprovedTools ?? []),
...uniqueNames,
]
return { approved: true }
} catch {
return { approved: false }
} finally {
unsub?.()
}
})
}
return streamText({
onError(error) {
l.error("stream error", {
error,
})
},
async experimental_repairToolCall(failed) {
const lower = failed.toolCall.toolName.toLowerCase()
if (lower !== failed.toolCall.toolName && tools[lower]) {
l.info("repairing tool call", {
tool: failed.toolCall.toolName,
repaired: lower,
})
return {
...failed.toolCall,
toolName: lower,
}
}
return {
...failed.toolCall,
input: JSON.stringify({
tool: failed.toolCall.toolName,
error: failed.error.message,
}),
toolName: "invalid",
}
},
temperature: params.temperature,
topP: params.topP,
topK: params.topK,
providerOptions: ProviderTransform.providerOptions(input.model, params.options),
activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
tools,
toolChoice: input.toolChoice,
maxOutputTokens: params.maxOutputTokens,
abortSignal: input.abort,
headers: {
...(input.model.providerID.startsWith("opencode")
? {
"x-opencode-project": Instance.project.id,
"x-opencode-session": input.sessionID,
"x-opencode-request": input.user.id,
"x-opencode-client": Flag.OPENCODE_CLIENT,
}
: {
"x-session-affinity": input.sessionID,
...(input.parentSessionID ? { "x-parent-session-id": input.parentSessionID } : {}),
"User-Agent": `opencode/${Installation.VERSION}`,
}),
...input.model.headers,
...headers,
},
maxRetries: input.retries ?? 0,
messages,
model: wrapLanguageModel({
model: language,
middleware: [
{
specificationVersion: "v3" as const,
async transformParams(args) {
if (args.type === "stream") {
// @ts-expect-error
args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options)
}
return args.params
},
},
],
}),
experimental_telemetry: {
isEnabled: cfg.experimental?.openTelemetry,
metadata: {
userId: cfg.username ?? "unknown",
sessionId: input.sessionID,
},
},
})
})
const stream: Interface["stream"] = (input) =>
Stream.scoped(
Stream.unwrap(
Effect.gen(function* () {
const ctrl = yield* Effect.acquireRelease(
@@ -66,7 +396,7 @@ export namespace LLM {
(ctrl) => Effect.sync(() => ctrl.abort()),
)
const result = yield* Effect.promise(() => LLM.stream({ ...input, abort: ctrl.signal }))
const result = yield* run({ ...input, abort: ctrl.signal })
return Stream.fromAsyncIterable(result.fullStream, (e) =>
e instanceof Error ? e : new Error(String(e)),
@@ -74,335 +404,19 @@ export namespace LLM {
}),
),
)
},
})
}),
)
export const defaultLayer = layer
export async function stream(input: StreamRequest) {
const l = log
.clone()
.tag("providerID", input.model.providerID)
.tag("modelID", input.model.id)
.tag("sessionID", input.sessionID)
.tag("small", (input.small ?? false).toString())
.tag("agent", input.agent.name)
.tag("mode", input.agent.mode)
l.info("stream", {
modelID: input.model.id,
providerID: input.model.providerID,
})
const [language, cfg, provider, info] = await Effect.runPromise(
Effect.gen(function* () {
const auth = yield* Auth.Service
const cfg = yield* Config.Service
const provider = yield* Provider.Service
return yield* Effect.all(
[
provider.getLanguage(input.model),
cfg.get(),
provider.getProvider(input.model.providerID),
auth.get(input.model.providerID),
],
{ concurrency: "unbounded" },
)
}).pipe(Effect.provide(Layer.mergeAll(Auth.defaultLayer, Config.defaultLayer, Provider.defaultLayer))),
)
// TODO: move this to a proper hook
const isOpenaiOauth = provider.id === "openai" && info?.type === "oauth"
const system: string[] = []
system.push(
[
// use agent prompt otherwise provider prompt
...(input.agent.prompt ? [input.agent.prompt] : SystemPrompt.provider(input.model)),
// any custom prompt passed into this call
...input.system,
// any custom prompt from last user message
...(input.user.system ? [input.user.system] : []),
]
.filter((x) => x)
.join("\n"),
)
const header = system[0]
await Plugin.trigger(
"experimental.chat.system.transform",
{ sessionID: input.sessionID, model: input.model },
{ system },
)
// rejoin to maintain 2-part structure for caching if header unchanged
if (system.length > 2 && system[0] === header) {
const rest = system.slice(1)
system.length = 0
system.push(header, rest.join("\n"))
}
const variant =
!input.small && input.model.variants && input.user.model.variant
? input.model.variants[input.user.model.variant]
: {}
const base = input.small
? ProviderTransform.smallOptions(input.model)
: ProviderTransform.options({
model: input.model,
sessionID: input.sessionID,
providerOptions: provider.options,
})
const options: Record<string, any> = pipe(
base,
mergeDeep(input.model.options),
mergeDeep(input.agent.options),
mergeDeep(variant),
)
if (isOpenaiOauth) {
options.instructions = system.join("\n")
}
const isWorkflow = language instanceof GitLabWorkflowLanguageModel
const messages = isOpenaiOauth
? input.messages
: isWorkflow
? input.messages
: [
...system.map(
(x): ModelMessage => ({
role: "system",
content: x,
}),
),
...input.messages,
]
const params = await Plugin.trigger(
"chat.params",
{
sessionID: input.sessionID,
agent: input.agent.name,
model: input.model,
provider,
message: input.user,
},
{
temperature: input.model.capabilities.temperature
? (input.agent.temperature ?? ProviderTransform.temperature(input.model))
: undefined,
topP: input.agent.topP ?? ProviderTransform.topP(input.model),
topK: ProviderTransform.topK(input.model),
maxOutputTokens: ProviderTransform.maxOutputTokens(input.model),
options,
},
)
const { headers } = await Plugin.trigger(
"chat.headers",
{
sessionID: input.sessionID,
agent: input.agent.name,
model: input.model,
provider,
message: input.user,
},
{
headers: {},
},
)
const tools = resolveTools(input)
// LiteLLM and some Anthropic proxies require the tools parameter to be present
// when message history contains tool calls, even if no tools are being used.
// Add a dummy tool that is never called to satisfy this validation.
// This is enabled for:
// 1. Providers with "litellm" in their ID or API ID (auto-detected)
// 2. Providers with explicit "litellmProxy: true" option (opt-in for custom gateways)
const isLiteLLMProxy =
provider.options?.["litellmProxy"] === true ||
input.model.providerID.toLowerCase().includes("litellm") ||
input.model.api.id.toLowerCase().includes("litellm")
// LiteLLM/Bedrock rejects requests where the message history contains tool
// calls but no tools param is present. When there are no active tools (e.g.
// during compaction), inject a stub tool to satisfy the validation requirement.
// The stub description explicitly tells the model not to call it.
if (isLiteLLMProxy && Object.keys(tools).length === 0 && hasToolCalls(input.messages)) {
tools["_noop"] = tool({
description: "Do not call this tool. It exists only for API compatibility and must never be invoked.",
inputSchema: jsonSchema({
type: "object",
properties: {
reason: { type: "string", description: "Unused" },
},
}),
execute: async () => ({ output: "", title: "", metadata: {} }),
})
}
// Wire up toolExecutor for DWS workflow models so that tool calls
// from the workflow service are executed via opencode's tool system
// and results sent back over the WebSocket.
if (language instanceof GitLabWorkflowLanguageModel) {
const workflowModel = language as GitLabWorkflowLanguageModel & {
sessionID?: string
sessionPreapprovedTools?: string[]
approvalHandler?: (approvalTools: { name: string; args: string }[]) => Promise<{ approved: boolean }>
}
workflowModel.sessionID = input.sessionID
workflowModel.systemPrompt = system.join("\n")
workflowModel.toolExecutor = async (toolName, argsJson, _requestID) => {
const t = tools[toolName]
if (!t || !t.execute) {
return { result: "", error: `Unknown tool: ${toolName}` }
}
try {
const result = await t.execute!(JSON.parse(argsJson), {
toolCallId: _requestID,
messages: input.messages,
abortSignal: input.abort,
})
const output = typeof result === "string" ? result : (result?.output ?? JSON.stringify(result))
return {
result: output,
metadata: typeof result === "object" ? result?.metadata : undefined,
title: typeof result === "object" ? result?.title : undefined,
}
} catch (e: any) {
return { result: "", error: e.message ?? String(e) }
}
}
const ruleset = Permission.merge(input.agent.permission ?? [], input.permission ?? [])
workflowModel.sessionPreapprovedTools = Object.keys(tools).filter((name) => {
const match = ruleset.findLast((rule) => Wildcard.match(name, rule.permission))
return !match || match.action !== "ask"
})
const approvedToolsForSession = new Set<string>()
workflowModel.approvalHandler = Instance.bind(async (approvalTools) => {
const uniqueNames = [...new Set(approvalTools.map((t: { name: string }) => t.name))] as string[]
// Auto-approve tools that were already approved in this session
// (prevents infinite approval loops for server-side MCP tools)
if (uniqueNames.every((name) => approvedToolsForSession.has(name))) {
return { approved: true }
}
const id = PermissionID.ascending()
let reply: Permission.Reply | undefined
let unsub: (() => void) | undefined
try {
unsub = Bus.subscribe(Permission.Event.Replied, (evt) => {
if (evt.properties.requestID === id) reply = evt.properties.reply
})
const toolPatterns = approvalTools.map((t: { name: string; args: string }) => {
try {
const parsed = JSON.parse(t.args) as Record<string, unknown>
const title = (parsed?.title ?? parsed?.name ?? "") as string
return title ? `${t.name}: ${title}` : t.name
} catch {
return t.name
}
})
const uniquePatterns = [...new Set(toolPatterns)] as string[]
await Permission.ask({
id,
sessionID: SessionID.make(input.sessionID),
permission: "workflow_tool_approval",
patterns: uniquePatterns,
metadata: { tools: approvalTools },
always: uniquePatterns,
ruleset: [],
})
for (const name of uniqueNames) approvedToolsForSession.add(name)
workflowModel.sessionPreapprovedTools = [...(workflowModel.sessionPreapprovedTools ?? []), ...uniqueNames]
return { approved: true }
} catch {
return { approved: false }
} finally {
unsub?.()
}
})
}
return streamText({
onError(error) {
l.error("stream error", {
error,
})
},
async experimental_repairToolCall(failed) {
const lower = failed.toolCall.toolName.toLowerCase()
if (lower !== failed.toolCall.toolName && tools[lower]) {
l.info("repairing tool call", {
tool: failed.toolCall.toolName,
repaired: lower,
})
return {
...failed.toolCall,
toolName: lower,
}
}
return {
...failed.toolCall,
input: JSON.stringify({
tool: failed.toolCall.toolName,
error: failed.error.message,
}),
toolName: "invalid",
}
},
temperature: params.temperature,
topP: params.topP,
topK: params.topK,
providerOptions: ProviderTransform.providerOptions(input.model, params.options),
activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
tools,
toolChoice: input.toolChoice,
maxOutputTokens: params.maxOutputTokens,
abortSignal: input.abort,
headers: {
...(input.model.providerID.startsWith("opencode")
? {
"x-opencode-project": Instance.project.id,
"x-opencode-session": input.sessionID,
"x-opencode-request": input.user.id,
"x-opencode-client": Flag.OPENCODE_CLIENT,
}
: {
"x-session-affinity": input.sessionID,
...(input.parentSessionID ? { "x-parent-session-id": input.parentSessionID } : {}),
"User-Agent": `opencode/${Installation.VERSION}`,
}),
...input.model.headers,
...headers,
},
maxRetries: input.retries ?? 0,
messages,
model: wrapLanguageModel({
model: language,
middleware: [
{
specificationVersion: "v3" as const,
async transformParams(args) {
if (args.type === "stream") {
// @ts-expect-error
args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options)
}
return args.params
},
},
],
return Service.of({ stream })
}),
experimental_telemetry: {
isEnabled: cfg.experimental?.openTelemetry,
metadata: {
userId: cfg.username ?? "unknown",
sessionId: input.sessionID,
},
},
})
}
)
export const defaultLayer = Layer.suspend(() =>
layer.pipe(
Layer.provide(Auth.defaultLayer),
Layer.provide(Config.defaultLayer),
Layer.provide(Provider.defaultLayer),
Layer.provide(Plugin.defaultLayer),
),
)
function resolveTools(input: Pick<StreamInput, "tools" | "agent" | "permission" | "user">) {
const disabled = Permission.disabled(

View File

@@ -1,4 +1,4 @@
import { Cause, Deferred, Effect, Layer, Context } from "effect"
import { Cause, Deferred, Effect, Layer, Context, Scope } from "effect"
import * as Stream from "effect/Stream"
import { Agent } from "@/agent/agent"
import { Bus } from "@/bus"
@@ -89,6 +89,7 @@ export namespace SessionProcessor {
| LLM.Service
| Permission.Service
| Plugin.Service
| SessionSummary.Service
| SessionStatus.Service
> = Layer.effect(
Service,
@@ -101,6 +102,8 @@ export namespace SessionProcessor {
const llm = yield* LLM.Service
const permission = yield* Permission.Service
const plugin = yield* Plugin.Service
const summary = yield* SessionSummary.Service
const scope = yield* Scope.Scope
const status = yield* SessionStatus.Service
const create = Effect.fn("SessionProcessor.create")(function* (input: Input) {
@@ -385,10 +388,12 @@ export namespace SessionProcessor {
}
ctx.snapshot = undefined
}
SessionSummary.summarize({
sessionID: ctx.sessionID,
messageID: ctx.assistantMessage.parentID,
})
yield* summary
.summarize({
sessionID: ctx.sessionID,
messageID: ctx.assistantMessage.parentID,
})
.pipe(Effect.ignore, Effect.forkIn(scope))
if (
!ctx.assistantMessage.summary &&
isOverflow({ cfg: yield* config.get(), tokens: usage.tokens, model: ctx.model })
@@ -603,6 +608,7 @@ export namespace SessionProcessor {
Layer.provide(LLM.defaultLayer),
Layer.provide(Permission.defaultLayer),
Layer.provide(Plugin.defaultLayer),
Layer.provide(SessionSummary.defaultLayer),
Layer.provide(SessionStatus.defaultLayer),
Layer.provide(Bus.layer),
Layer.provide(Config.defaultLayer),

View File

@@ -2,10 +2,10 @@ import { NotFoundError, eq, and, sql } from "../storage/db"
import { SyncEvent } from "@/sync"
import { Session } from "./index"
import { MessageV2 } from "./message-v2"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import { ProjectTable } from "../project/project.sql"
import { SessionTable, MessageTable, PartTable, SessionEntryTable } from "./session.sql"
import { Log } from "../util/log"
import { DateTime } from "effect"
import { SessionEntry } from "@/v2/session-entry"
const log = Log.create({ service: "session.projector" })
@@ -133,33 +133,4 @@ export default [
log.warn("ignored late part update", { partID: id, messageID, sessionID })
}
}),
// Experimental
SyncEvent.project(MessageV2.Event.PartUpdated, (db, data) => {
/*
const id = SessionEntry.ID.make(data.part.id.replace("prt", "ent"))
switch (data.part.type) {
case "text":
db.insert(SessionEntryTable)
.values({
id,
session_id: data.sessionID,
type: "text",
data: new SessionEntry.Text({
id,
text: data.part.text,
type: "text",
time: {
created: DateTime.makeUnsafe(data.part.time?.start ?? Date.now()),
completed: data.part.time?.end ? DateTime.makeUnsafe(data.part.time.end) : undefined,
},
}),
time_created: Date.now(),
time_updated: Date.now(),
})
.onConflictDoUpdate({ target: SessionEntryTable.id, set: { data: sql`excluded.data` } })
.run()
}
*/
}),
]

View File

@@ -102,6 +102,7 @@ export namespace SessionPrompt {
const instruction = yield* Instruction.Service
const state = yield* SessionRunState.Service
const revert = yield* SessionRevert.Service
const summary = yield* SessionSummary.Service
const sys = yield* SystemPrompt.Service
const llm = yield* LLM.Service
@@ -1444,7 +1445,10 @@ NOTE: At any point in time through this workflow you should feel free to ask the
})
}
if (step === 1) SessionSummary.summarize({ sessionID, messageID: lastUser.id })
if (step === 1)
yield* summary
.summarize({ sessionID, messageID: lastUser.id })
.pipe(Effect.ignore, Effect.forkIn(scope))
if (step > 1 && lastFinished) {
for (const m of msgs) {
@@ -1692,6 +1696,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
Layer.provide(Plugin.defaultLayer),
Layer.provide(Session.defaultLayer),
Layer.provide(SessionRevert.defaultLayer),
Layer.provide(SessionSummary.defaultLayer),
Layer.provide(
Layer.mergeAll(
Agent.defaultLayer,

View File

@@ -1,6 +1,5 @@
import z from "zod"
import { Effect, Layer, Context } from "effect"
import { makeRuntime } from "@/effect/run-service"
import { Bus } from "../bus"
import { Snapshot } from "../snapshot"
import { Storage } from "@/storage/storage"
@@ -160,18 +159,4 @@ export namespace SessionRevert {
Layer.provide(SessionSummary.defaultLayer),
),
)
const { runPromise } = makeRuntime(Service, defaultLayer)
export async function revert(input: RevertInput) {
return runPromise((svc) => svc.revert(input))
}
export async function unrevert(input: { sessionID: SessionID }) {
return runPromise((svc) => svc.unrevert(input))
}
export async function cleanup(session: Session.Info) {
return runPromise((svc) => svc.cleanup(session))
}
}

View File

@@ -11,7 +11,6 @@ import { Timestamps } from "../storage/schema.sql"
type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID">
type InfoData = Omit<MessageV2.Info, "id" | "sessionID">
type EntryData = Omit<SessionEntry.Entry, "id" | "type">
export const SessionTable = sqliteTable(
"session",
@@ -96,7 +95,6 @@ export const TodoTable = sqliteTable(
],
)
/*
export const SessionEntryTable = sqliteTable(
"session_entry",
{
@@ -105,9 +103,9 @@ export const SessionEntryTable = sqliteTable(
.$type<SessionID>()
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
type: text().notNull(),
type: text().$type<SessionEntry.Type>().notNull(),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<SessionEntry.Entry>(),
data: text({ mode: "json" }).notNull().$type<Omit<SessionEntry.Entry, "type" | "id">>(),
},
(table) => [
index("session_entry_session_idx").on(table.session_id),
@@ -115,7 +113,6 @@ export const SessionEntryTable = sqliteTable(
index("session_entry_time_created_idx").on(table.time_created),
],
)
*/
export const PermissionTable = sqliteTable("permission", {
project_id: text()

View File

@@ -1,6 +1,5 @@
import z from "zod"
import { Effect, Layer, Context } from "effect"
import { makeRuntime } from "@/effect/run-service"
import { Bus } from "@/bus"
import { Snapshot } from "@/snapshot"
import { Storage } from "@/storage/storage"
@@ -159,17 +158,8 @@ export namespace SessionSummary {
),
)
const { runPromise } = makeRuntime(Service, defaultLayer)
export const summarize = (input: { sessionID: SessionID; messageID: MessageID }) =>
void runPromise((svc) => svc.summarize(input)).catch(() => {})
export const DiffInput = z.object({
sessionID: SessionID.zod,
messageID: MessageID.zod.optional(),
})
export async function diff(input: z.infer<typeof DiffInput>) {
return runPromise((svc) => svc.diff(input))
}
}

View File

@@ -2,9 +2,12 @@ import z from "zod"
import type { ZodObject } from "zod"
import { EventEmitter } from "events"
import { Database, eq } from "@/storage/db"
import { GlobalBus } from "@/bus/global"
import { Bus as ProjectBus } from "@/bus"
import { BusEvent } from "@/bus/bus-event"
import { Instance } from "@/project/instance"
import { EventSequenceTable, EventTable } from "./event.sql"
import { WorkspaceContext } from "@/control-plane/workspace-context"
import { EventID } from "./schema"
import { Flag } from "@/flag/flag"
@@ -37,8 +40,6 @@ export namespace SyncEvent {
let frozen = false
let convertEvent: (type: string, event: Event["data"]) => Promise<Record<string, unknown>> | Record<string, unknown>
const Bus = new EventEmitter<{ event: [{ def: Definition; event: Event }] }>()
export function reset() {
frozen = false
projectors = undefined
@@ -140,11 +141,6 @@ export namespace SyncEvent {
}
Database.effect(() => {
Bus.emit("event", {
def,
event,
})
if (options?.publish) {
const result = convertEvent(def.type, event.data)
if (result instanceof Promise) {
@@ -154,6 +150,17 @@ export namespace SyncEvent {
} else {
ProjectBus.publish({ type: def.type, properties: def.schema }, result)
}
GlobalBus.emit("event", {
directory: Instance.directory,
project: Instance.project.id,
workspace: WorkspaceContext.workspaceID,
payload: {
type: "sync",
name: versionedType(def.type, def.version),
...event,
},
})
}
})
})
@@ -235,31 +242,23 @@ export namespace SyncEvent {
})
}
export function subscribeAll(handler: (event: { def: Definition; event: Event }) => void) {
Bus.on("event", handler)
return () => Bus.off("event", handler)
}
export function payloads() {
return z
.union(
registry
.entries()
.map(([type, def]) => {
return z
.object({
type: z.literal(type),
aggregate: z.literal(def.aggregate),
data: def.schema,
})
.meta({
ref: "SyncEvent" + "." + def.type,
})
return registry
.entries()
.map(([type, def]) => {
return z
.object({
type: z.literal("sync"),
name: z.literal(type),
id: z.string(),
seq: z.number(),
aggregateID: z.literal(def.aggregate),
data: def.schema,
})
.meta({
ref: "SyncEvent" + "." + def.type,
})
.toArray() as any,
)
.meta({
ref: "SyncEvent",
})
.toArray()
}
}

View File

@@ -40,6 +40,10 @@ export const GlobTool = Tool.define(
let search = params.path ?? Instance.directory
search = path.isAbsolute(search) ? search : path.resolve(Instance.directory, search)
const info = yield* fs.stat(search).pipe(Effect.catch(() => Effect.succeed(undefined)))
if (info?.type === "File") {
throw new Error(`glob path must be a directory: ${search}`)
}
yield* assertExternalDirectoryEffect(ctx, search, { kind: "directory" })
const limit = 100

View File

@@ -51,19 +51,25 @@ export const GrepTool = Tool.define(
? (params.path ?? Instance.directory)
: path.join(Instance.directory, params.path ?? "."),
)
yield* assertExternalDirectoryEffect(ctx, searchPath, { kind: "directory" })
const info = yield* fs.stat(searchPath).pipe(Effect.catch(() => Effect.succeed(undefined)))
const cwd = info?.type === "Directory" ? searchPath : path.dirname(searchPath)
const file = info?.type === "Directory" ? undefined : [searchPath]
yield* assertExternalDirectoryEffect(ctx, searchPath, {
kind: info?.type === "Directory" ? "directory" : "file",
})
const result = yield* rg.search({
cwd: searchPath,
cwd,
pattern: params.pattern,
glob: params.include ? [params.include] : undefined,
file,
})
if (result.items.length === 0) return empty
const rows = result.items.map((item) => ({
path: AppFileSystem.resolve(
path.isAbsolute(item.path.text) ? item.path.text : path.join(searchPath, item.path.text),
path.isAbsolute(item.path.text) ? item.path.text : path.join(cwd, item.path.text),
),
line: item.line_number,
text: item.lines.text,

View File

@@ -121,6 +121,7 @@ export namespace ToolRegistry {
const greptool = yield* GrepTool
const patchtool = yield* ApplyPatchTool
const skilltool = yield* SkillTool
const agent = yield* Agent.Service
const state = yield* InstanceState.make<State>(
Effect.fn("ToolRegistry.state")(function* (ctx) {
@@ -140,8 +141,8 @@ export namespace ToolRegistry {
worktree: ctx.worktree,
}
const result = yield* Effect.promise(() => def.execute(args as any, pluginCtx))
const agent = yield* Effect.promise(() => Agent.get(toolCtx.agent))
const out = yield* truncate.output(result, {}, agent)
const info = yield* agent.get(toolCtx.agent)
const out = yield* truncate.output(result, {}, info)
return {
title: "",
output: out.truncated ? out.content : result,

View File

@@ -1,6 +1,10 @@
import { Identifier } from "@/id/id"
import { Database } from "@/node"
import type { SessionID } from "@/session/schema"
import { SessionEntryTable } from "@/session/session.sql"
import { withStatics } from "@/util/schema"
import { DateTime, Effect, Schema } from "effect"
import { Context, DateTime, Effect, Layer, Schema } from "effect"
import { eq } from "../storage/db"
export namespace SessionEntry {
export const ID = Schema.String.pipe(Schema.brand("Session.Entry.ID")).pipe(
@@ -181,6 +185,43 @@ export namespace SessionEntry {
overflow: Schema.Boolean.pipe(Schema.optional),
}) {}
export const Entry = Schema.Union([User, Synthetic, Request, Tool, Text, Reasoning, Complete, Retry, Compaction])
export const Entry = Schema.Union([User, Synthetic, Request, Tool, Text, Reasoning, Complete, Retry, Compaction], {
mode: "oneOf",
})
export type Entry = Schema.Schema.Type<typeof Entry>
export type Type = Entry["type"]
export interface Interface {
readonly decode: (row: typeof SessionEntryTable.$inferSelect) => Entry
readonly fromSession: (sessionID: SessionID) => Effect.Effect<Entry[], never>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/SessionEntry") {}
export const layer: Layer.Layer<Service, never, never> = Layer.effect(
Service,
Effect.gen(function* () {
const decodeEntry = Schema.decodeUnknownSync(Entry)
const decode: (typeof Service.Service)["decode"] = (row) => decodeEntry({ ...row, id: row.id, type: row.type })
const fromSession = Effect.fn("SessionEntry.fromSession")(function* (sessionID: SessionID) {
return Database.use((db) =>
db
.select()
.from(SessionEntryTable)
.where(eq(SessionEntryTable.session_id, sessionID))
.orderBy(SessionEntryTable.id)
.all()
.map((row) => decode(row)),
)
})
return Service.of({
decode,
fromSession,
})
}),
)
}

View File

@@ -1,6 +1,7 @@
import { afterEach, test, expect } from "bun:test"
import { Effect } from "effect"
import path from "path"
import { tmpdir } from "../fixture/fixture"
import { provideInstance, tmpdir } from "../fixture/fixture"
import { Instance } from "../../src/project/instance"
import { Agent } from "../../src/agent/agent"
import { Permission } from "../../src/permission"
@@ -11,6 +12,10 @@ function evalPerm(agent: Agent.Info | undefined, permission: string): Permission
return Permission.evaluate(permission, "*", agent.permission).action
}
function load<A>(dir: string, fn: (svc: Agent.Interface) => Effect.Effect<A>) {
return Effect.runPromise(provideInstance(dir)(Agent.Service.use(fn)).pipe(Effect.provide(Agent.defaultLayer)))
}
afterEach(async () => {
await Instance.disposeAll()
})
@@ -20,7 +25,7 @@ test("returns default native agents when no config", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const agents = await Agent.list()
const agents = await load(tmp.path, (svc) => svc.list())
const names = agents.map((a) => a.name)
expect(names).toContain("build")
expect(names).toContain("plan")
@@ -38,7 +43,7 @@ test("build agent has correct default properties", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(build).toBeDefined()
expect(build?.mode).toBe("primary")
expect(build?.native).toBe(true)
@@ -53,7 +58,7 @@ test("plan agent denies edits except .opencode/plans/*", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const plan = await Agent.get("plan")
const plan = await load(tmp.path, (svc) => svc.get("plan"))
expect(plan).toBeDefined()
// Wildcard is denied
expect(evalPerm(plan, "edit")).toBe("deny")
@@ -68,7 +73,7 @@ test("explore agent denies edit and write", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const explore = await Agent.get("explore")
const explore = await load(tmp.path, (svc) => svc.get("explore"))
expect(explore).toBeDefined()
expect(explore?.mode).toBe("subagent")
expect(evalPerm(explore, "edit")).toBe("deny")
@@ -84,7 +89,7 @@ test("explore agent asks for external directories and allows Truncate.GLOB", asy
await Instance.provide({
directory: tmp.path,
fn: async () => {
const explore = await Agent.get("explore")
const explore = await load(tmp.path, (svc) => svc.get("explore"))
expect(explore).toBeDefined()
expect(Permission.evaluate("external_directory", "/some/other/path", explore!.permission).action).toBe("ask")
expect(Permission.evaluate("external_directory", Truncate.GLOB, explore!.permission).action).toBe("allow")
@@ -97,7 +102,7 @@ test("general agent denies todo tools", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const general = await Agent.get("general")
const general = await load(tmp.path, (svc) => svc.get("general"))
expect(general).toBeDefined()
expect(general?.mode).toBe("subagent")
expect(general?.hidden).toBeUndefined()
@@ -111,7 +116,7 @@ test("compaction agent denies all permissions", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const compaction = await Agent.get("compaction")
const compaction = await load(tmp.path, (svc) => svc.get("compaction"))
expect(compaction).toBeDefined()
expect(compaction?.hidden).toBe(true)
expect(evalPerm(compaction, "bash")).toBe("deny")
@@ -137,7 +142,7 @@ test("custom agent from config creates new agent", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const custom = await Agent.get("my_custom_agent")
const custom = await load(tmp.path, (svc) => svc.get("my_custom_agent"))
expect(custom).toBeDefined()
expect(String(custom?.model?.providerID)).toBe("openai")
expect(String(custom?.model?.modelID)).toBe("gpt-4")
@@ -166,7 +171,7 @@ test("custom agent config overrides native agent properties", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(build).toBeDefined()
expect(String(build?.model?.providerID)).toBe("anthropic")
expect(String(build?.model?.modelID)).toBe("claude-3")
@@ -189,9 +194,9 @@ test("agent disable removes agent from list", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const explore = await Agent.get("explore")
const explore = await load(tmp.path, (svc) => svc.get("explore"))
expect(explore).toBeUndefined()
const agents = await Agent.list()
const agents = await load(tmp.path, (svc) => svc.list())
const names = agents.map((a) => a.name)
expect(names).not.toContain("explore")
},
@@ -215,7 +220,7 @@ test("agent permission config merges with defaults", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(build).toBeDefined()
// Specific pattern is denied
expect(Permission.evaluate("bash", "rm -rf *", build!.permission).action).toBe("deny")
@@ -236,7 +241,7 @@ test("global permission config applies to all agents", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(build).toBeDefined()
expect(evalPerm(build, "bash")).toBe("deny")
},
@@ -255,8 +260,8 @@ test("agent steps/maxSteps config sets steps property", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const plan = await Agent.get("plan")
const build = await load(tmp.path, (svc) => svc.get("build"))
const plan = await load(tmp.path, (svc) => svc.get("plan"))
expect(build?.steps).toBe(50)
expect(plan?.steps).toBe(100)
},
@@ -274,7 +279,7 @@ test("agent mode can be overridden", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const explore = await Agent.get("explore")
const explore = await load(tmp.path, (svc) => svc.get("explore"))
expect(explore?.mode).toBe("primary")
},
})
@@ -291,7 +296,7 @@ test("agent name can be overridden", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(build?.name).toBe("Builder")
},
})
@@ -308,7 +313,7 @@ test("agent prompt can be set from config", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(build?.prompt).toBe("Custom system prompt")
},
})
@@ -328,7 +333,7 @@ test("unknown agent properties are placed into options", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(build?.options.random_property).toBe("hello")
expect(build?.options.another_random).toBe(123)
},
@@ -351,7 +356,7 @@ test("agent options merge correctly", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(build?.options.custom_option).toBe(true)
expect(build?.options.another_option).toBe("value")
},
@@ -376,8 +381,8 @@ test("multiple custom agents can be defined", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const agentA = await Agent.get("agent_a")
const agentB = await Agent.get("agent_b")
const agentA = await load(tmp.path, (svc) => svc.get("agent_a"))
const agentB = await load(tmp.path, (svc) => svc.get("agent_b"))
expect(agentA?.description).toBe("Agent A")
expect(agentA?.mode).toBe("subagent")
expect(agentB?.description).toBe("Agent B")
@@ -405,7 +410,7 @@ test("Agent.list keeps the default agent first and sorts the rest by name", asyn
await Instance.provide({
directory: tmp.path,
fn: async () => {
const names = (await Agent.list()).map((a) => a.name)
const names = (await load(tmp.path, (svc) => svc.list())).map((a) => a.name)
expect(names[0]).toBe("plan")
expect(names.slice(1)).toEqual(names.slice(1).toSorted((a, b) => a.localeCompare(b)))
},
@@ -417,7 +422,7 @@ test("Agent.get returns undefined for non-existent agent", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const nonExistent = await Agent.get("does_not_exist")
const nonExistent = await load(tmp.path, (svc) => svc.get("does_not_exist"))
expect(nonExistent).toBeUndefined()
},
})
@@ -428,7 +433,7 @@ test("default permission includes doom_loop and external_directory as ask", asyn
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(evalPerm(build, "doom_loop")).toBe("ask")
expect(evalPerm(build, "external_directory")).toBe("ask")
},
@@ -440,7 +445,7 @@ test("webfetch is allowed by default", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(evalPerm(build, "webfetch")).toBe("allow")
},
})
@@ -462,7 +467,7 @@ test("legacy tools config converts to permissions", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(evalPerm(build, "bash")).toBe("deny")
expect(evalPerm(build, "read")).toBe("deny")
},
@@ -484,7 +489,7 @@ test("legacy tools config maps write/edit/patch/multiedit to edit permission", a
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(evalPerm(build, "edit")).toBe("deny")
},
})
@@ -502,7 +507,7 @@ test("Truncate.GLOB is allowed even when user denies external_directory globally
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(Permission.evaluate("external_directory", Truncate.GLOB, build!.permission).action).toBe("allow")
expect(Permission.evaluate("external_directory", Truncate.DIR, build!.permission).action).toBe("deny")
expect(Permission.evaluate("external_directory", "/some/other/path", build!.permission).action).toBe("deny")
@@ -526,7 +531,7 @@ test("Truncate.GLOB is allowed even when user denies external_directory per-agen
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(Permission.evaluate("external_directory", Truncate.GLOB, build!.permission).action).toBe("allow")
expect(Permission.evaluate("external_directory", Truncate.DIR, build!.permission).action).toBe("deny")
expect(Permission.evaluate("external_directory", "/some/other/path", build!.permission).action).toBe("deny")
@@ -549,7 +554,7 @@ test("explicit Truncate.GLOB deny is respected", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
expect(Permission.evaluate("external_directory", Truncate.GLOB, build!.permission).action).toBe("deny")
expect(Permission.evaluate("external_directory", Truncate.DIR, build!.permission).action).toBe("deny")
},
@@ -581,7 +586,7 @@ description: Permission skill.
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
const skillDir = path.join(tmp.path, ".opencode", "skill", "perm-skill")
const target = path.join(skillDir, "reference", "notes.md")
expect(Permission.evaluate("external_directory", target, build!.permission).action).toBe("allow")
@@ -597,7 +602,7 @@ test("defaultAgent returns build when no default_agent config", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const agent = await Agent.defaultAgent()
const agent = await load(tmp.path, (svc) => svc.defaultAgent())
expect(agent).toBe("build")
},
})
@@ -612,7 +617,7 @@ test("defaultAgent respects default_agent config set to plan", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const agent = await Agent.defaultAgent()
const agent = await load(tmp.path, (svc) => svc.defaultAgent())
expect(agent).toBe("plan")
},
})
@@ -632,7 +637,7 @@ test("defaultAgent respects default_agent config set to custom agent with mode a
await Instance.provide({
directory: tmp.path,
fn: async () => {
const agent = await Agent.defaultAgent()
const agent = await load(tmp.path, (svc) => svc.defaultAgent())
expect(agent).toBe("my_custom")
},
})
@@ -647,7 +652,7 @@ test("defaultAgent throws when default_agent points to subagent", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
await expect(Agent.defaultAgent()).rejects.toThrow('default agent "explore" is a subagent')
await expect(load(tmp.path, (svc) => svc.defaultAgent())).rejects.toThrow('default agent "explore" is a subagent')
},
})
})
@@ -661,7 +666,7 @@ test("defaultAgent throws when default_agent points to hidden agent", async () =
await Instance.provide({
directory: tmp.path,
fn: async () => {
await expect(Agent.defaultAgent()).rejects.toThrow('default agent "compaction" is hidden')
await expect(load(tmp.path, (svc) => svc.defaultAgent())).rejects.toThrow('default agent "compaction" is hidden')
},
})
})
@@ -675,7 +680,9 @@ test("defaultAgent throws when default_agent points to non-existent agent", asyn
await Instance.provide({
directory: tmp.path,
fn: async () => {
await expect(Agent.defaultAgent()).rejects.toThrow('default agent "does_not_exist" not found')
await expect(load(tmp.path, (svc) => svc.defaultAgent())).rejects.toThrow(
'default agent "does_not_exist" not found',
)
},
})
})
@@ -691,7 +698,7 @@ test("defaultAgent returns plan when build is disabled and default_agent not set
await Instance.provide({
directory: tmp.path,
fn: async () => {
const agent = await Agent.defaultAgent()
const agent = await load(tmp.path, (svc) => svc.defaultAgent())
// build is disabled, so it should return plan (next primary agent)
expect(agent).toBe("plan")
},
@@ -711,7 +718,7 @@ test("defaultAgent throws when all primary agents are disabled", async () => {
directory: tmp.path,
fn: async () => {
// build and plan are disabled, no primary-capable agents remain
await expect(Agent.defaultAgent()).rejects.toThrow("no primary visible agent found")
await expect(load(tmp.path, (svc) => svc.defaultAgent())).rejects.toThrow("no primary visible agent found")
},
})
})

View File

@@ -1,6 +1,7 @@
import { test, expect } from "bun:test"
import { Effect } from "effect"
import path from "path"
import { tmpdir } from "../fixture/fixture"
import { provideInstance, tmpdir } from "../fixture/fixture"
import { Instance } from "../../src/project/instance"
import { Config } from "../../src/config/config"
import { Agent as AgentSvc } from "../../src/agent/agent"
@@ -8,6 +9,8 @@ import { Color } from "../../src/util/color"
import { AppRuntime } from "../../src/effect/app-runtime"
const load = () => AppRuntime.runPromise(Config.Service.use((svc) => svc.get()))
const agent = <A>(dir: string, fn: (svc: AgentSvc.Interface) => Effect.Effect<A>) =>
Effect.runPromise(provideInstance(dir)(AgentSvc.Service.use(fn)).pipe(Effect.provide(AgentSvc.defaultLayer)))
test("agent color parsed from project config", async () => {
await using tmp = await tmpdir({
@@ -52,9 +55,9 @@ test("Agent.get includes color from config", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const plan = await AgentSvc.get("plan")
const plan = await agent(tmp.path, (svc) => svc.get("plan"))
expect(plan?.color).toBe("#A855F7")
const build = await AgentSvc.get("build")
const build = await agent(tmp.path, (svc) => svc.get("build"))
expect(build?.color).toBe("accent")
},
})

View File

@@ -76,6 +76,25 @@ describe("Ripgrep.Service", () => {
expect(result.items[0]?.lines.text).toContain("needle")
})
test("search supports explicit file targets", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(path.join(dir, "match.ts"), "const value = 'needle'\n")
await Bun.write(path.join(dir, "skip.ts"), "const value = 'needle'\n")
},
})
const file = path.join(tmp.path, "match.ts")
const result = await Effect.gen(function* () {
const rg = yield* Ripgrep.Service
return yield* rg.search({ cwd: tmp.path, pattern: "needle", file: [file] })
}).pipe(Effect.provide(Ripgrep.defaultLayer), Effect.runPromise)
expect(result.partial).toBe(false)
expect(result.items).toHaveLength(1)
expect(result.items[0]?.path.text).toBe(file)
})
test("files returns stream of filenames", async () => {
await using tmp = await tmpdir({
init: async (dir) => {

View File

@@ -154,15 +154,22 @@ test("state() generates a new state when none is saved", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const auth = await Effect.runPromise(
Effect.gen(function* () {
return yield* McpAuth.Service
}).pipe(Effect.provide(McpAuth.defaultLayer)),
)
const provider = new McpOAuthProvider(
"test-state-gen",
"https://example.com/mcp",
{},
{ onRedirect: async () => {} },
auth,
)
// Ensure no state exists
const entryBefore = await McpAuth.get("test-state-gen")
const entryBefore = await Effect.runPromise(
McpAuth.Service.use((auth) => auth.get("test-state-gen")).pipe(Effect.provide(McpAuth.defaultLayer)),
)
expect(entryBefore?.oauthState).toBeUndefined()
// state() should generate and return a new state, not throw
@@ -171,7 +178,9 @@ test("state() generates a new state when none is saved", async () => {
expect(state.length).toBe(64) // 32 bytes as hex
// The generated state should be persisted
const entryAfter = await McpAuth.get("test-state-gen")
const entryAfter = await Effect.runPromise(
McpAuth.Service.use((auth) => auth.get("test-state-gen")).pipe(Effect.provide(McpAuth.defaultLayer)),
)
expect(entryAfter?.oauthState).toBe(state)
},
})
@@ -186,16 +195,26 @@ test("state() returns existing state when one is saved", async () => {
await Instance.provide({
directory: tmp.path,
fn: async () => {
const auth = await Effect.runPromise(
Effect.gen(function* () {
return yield* McpAuth.Service
}).pipe(Effect.provide(McpAuth.defaultLayer)),
)
const provider = new McpOAuthProvider(
"test-state-existing",
"https://example.com/mcp",
{},
{ onRedirect: async () => {} },
auth,
)
// Pre-save a state
const existingState = "pre-saved-state-value"
await McpAuth.updateOAuthState("test-state-existing", existingState)
await Effect.runPromise(
McpAuth.Service.use((auth) => auth.updateOAuthState("test-state-existing", existingState)).pipe(
Effect.provide(McpAuth.defaultLayer),
),
)
// state() should return the existing state
const state = await provider.state()

File diff suppressed because it is too large Load Diff

View File

@@ -18,6 +18,7 @@ import { Session } from "../../src/session"
import { MessageV2 } from "../../src/session/message-v2"
import { MessageID, PartID, SessionID } from "../../src/session/schema"
import { SessionStatus } from "../../src/session/status"
import { SessionSummary } from "../../src/session/summary"
import { ModelID, ProviderID } from "../../src/provider/schema"
import type { Provider } from "../../src/provider/provider"
import * as SessionProcessorModule from "../../src/session/processor"
@@ -26,6 +27,15 @@ import { ProviderTest } from "../fake/provider"
Log.init({ print: false })
const summary = Layer.succeed(
SessionSummary.Service,
SessionSummary.Service.of({
summarize: () => Effect.void,
diff: () => Effect.succeed([]),
computeDiff: () => Effect.succeed([]),
}),
)
const ref = {
providerID: ProviderID.make("test"),
modelID: ModelID.make("test-model"),
@@ -194,7 +204,7 @@ function llm() {
function liveRuntime(layer: Layer.Layer<LLM.Service>, provider = ProviderTest.fake()) {
const bus = Bus.layer
const status = SessionStatus.layer.pipe(Layer.provide(bus))
const processor = SessionProcessorModule.SessionProcessor.layer
const processor = SessionProcessorModule.SessionProcessor.layer.pipe(Layer.provide(summary))
return ManagedRuntime.make(
Layer.mergeAll(SessionCompaction.layer.pipe(Layer.provide(processor)), processor, bus, status).pipe(
Layer.provide(provider.layer),
@@ -234,6 +244,20 @@ function plugin(ready: ReturnType<typeof defer>) {
})
}
function autocontinue(enabled: boolean) {
return Layer.mock(Plugin.Service)({
trigger: <Name extends string, Input, Output>(name: Name, _input: Input, output: Output) => {
if (name !== "experimental.compaction.autocontinue") return Effect.succeed(output)
return Effect.sync(() => {
;(output as { enabled: boolean }).enabled = enabled
return output
})
},
list: () => Effect.succeed([]),
init: () => Effect.void,
})
}
describe("session.compaction.isOverflow", () => {
test("returns true when token count exceeds usable context", async () => {
await using tmp = await tmpdir()
@@ -661,6 +685,49 @@ describe("session.compaction.process", () => {
})
})
test("allows plugins to disable synthetic continue prompt", async () => {
await using tmp = await tmpdir()
await Instance.provide({
directory: tmp.path,
fn: async () => {
const session = await Session.create({})
const msg = await user(session.id, "hello")
const rt = runtime("continue", autocontinue(false), wide())
try {
const msgs = await Session.messages({ sessionID: session.id })
const result = await rt.runPromise(
SessionCompaction.Service.use((svc) =>
svc.process({
parentID: msg.id,
messages: msgs,
sessionID: session.id,
auto: true,
}),
),
)
const all = await Session.messages({ sessionID: session.id })
const last = all.at(-1)
expect(result).toBe("continue")
expect(last?.info.role).toBe("assistant")
expect(
all.some(
(msg) =>
msg.info.role === "user" &&
msg.parts.some(
(part) =>
part.type === "text" && part.synthetic && part.text.includes("Continue if you have next steps"),
),
),
).toBe(false)
} finally {
await rt.dispose()
}
},
})
})
test("replays the prior user turn on overflow when earlier context exists", async () => {
await using tmp = await tmpdir()
await Instance.provide({

View File

@@ -26,6 +26,12 @@ async function getModel(providerID: ProviderID, modelID: ModelID) {
)
}
const llm = makeRuntime(LLM.Service, LLM.defaultLayer)
async function drain(input: LLM.StreamInput) {
return llm.runPromise((svc) => svc.stream(input).pipe(Stream.runDrain))
}
describe("session.llm.hasToolCalls", () => {
test("returns false for empty messages array", () => {
expect(LLM.hasToolCalls([])).toBe(false)
@@ -355,20 +361,16 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make(providerID), modelID: resolved.id, variant: "high" },
} satisfies MessageV2.User
const stream = await LLM.stream({
await drain({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
const headers = capture.headers
@@ -393,80 +395,6 @@ describe("session.llm.stream", () => {
})
})
test("raw stream abort signal cancels provider response body promptly", async () => {
const server = state.server
if (!server) throw new Error("Server not initialized")
const providerID = "alibaba"
const modelID = "qwen-plus"
const fixture = await loadFixture(providerID, modelID)
const model = fixture.model
const pending = waitStreamingRequest("/chat/completions")
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
enabled_providers: [providerID],
provider: {
[providerID]: {
options: {
apiKey: "test-key",
baseURL: `${server.url.origin}/v1`,
},
},
},
}),
)
},
})
await Instance.provide({
directory: tmp.path,
fn: async () => {
const resolved = await getModel(ProviderID.make(providerID), ModelID.make(model.id))
const sessionID = SessionID.make("session-test-raw-abort")
const agent = {
name: "test",
mode: "primary",
options: {},
permission: [{ permission: "*", pattern: "*", action: "allow" }],
} satisfies Agent.Info
const user = {
id: MessageID.make("user-raw-abort"),
sessionID,
role: "user",
time: { created: Date.now() },
agent: agent.name,
model: { providerID: ProviderID.make(providerID), modelID: resolved.id },
} satisfies MessageV2.User
const ctrl = new AbortController()
const result = await LLM.stream({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: ctrl.signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
const iter = result.fullStream[Symbol.asyncIterator]()
await pending.request
await iter.next()
ctrl.abort()
await Promise.race([pending.responseCanceled, timeout(500)])
await Promise.race([pending.requestAborted, timeout(500)]).catch(() => undefined)
await iter.return?.()
},
})
})
test("service stream cancellation cancels provider response body promptly", async () => {
const server = state.server
if (!server) throw new Error("Server not initialized")
@@ -518,8 +446,7 @@ describe("session.llm.stream", () => {
} satisfies MessageV2.User
const ctrl = new AbortController()
const { runPromiseExit } = makeRuntime(LLM.Service, LLM.defaultLayer)
const run = runPromiseExit(
const run = llm.runPromiseExit(
(svc) =>
svc
.stream({
@@ -610,14 +537,13 @@ describe("session.llm.stream", () => {
tools: { question: true },
} satisfies MessageV2.User
const stream = await LLM.stream({
await drain({
user,
sessionID,
model: resolved,
agent,
permission: [{ permission: "question", pattern: "*", action: "allow" }],
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {
question: tool({
@@ -628,9 +554,6 @@ describe("session.llm.stream", () => {
},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const tools = capture.body.tools as Array<{ function?: { name?: string } }> | undefined
expect(tools?.some((item) => item.function?.name === "question")).toBe(true)
@@ -728,20 +651,16 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make("openai"), modelID: resolved.id, variant: "high" },
} satisfies MessageV2.User
const stream = await LLM.stream({
await drain({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
@@ -847,13 +766,12 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make("openai"), modelID: resolved.id },
} satisfies MessageV2.User
const stream = await LLM.stream({
await drain({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [
{
role: "user",
@@ -871,9 +789,6 @@ describe("session.llm.stream", () => {
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
expect(capture.url.pathname.endsWith("/responses")).toBe(true)
},
@@ -972,20 +887,16 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make("minimax"), modelID: ModelID.make("MiniMax-M2.5") },
} satisfies MessageV2.User
const stream = await LLM.stream({
await drain({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
@@ -1073,20 +984,16 @@ describe("session.llm.stream", () => {
model: { providerID: ProviderID.make(providerID), modelID: resolved.id },
} satisfies MessageV2.User
const stream = await LLM.stream({
await drain({
user,
sessionID,
model: resolved,
agent,
system: ["You are a helpful assistant."],
abort: new AbortController().signal,
messages: [{ role: "user", content: "Hello" }],
tools: {},
})
for await (const _ of stream.fullStream) {
}
const capture = await request
const body = capture.body
const config = body.generationConfig as

View File

@@ -16,6 +16,7 @@ import { MessageV2 } from "../../src/session/message-v2"
import { SessionProcessor } from "../../src/session/processor"
import { MessageID, PartID, SessionID } from "../../src/session/schema"
import { SessionStatus } from "../../src/session/status"
import { SessionSummary } from "../../src/session/summary"
import { Snapshot } from "../../src/snapshot"
import { Log } from "../../src/util/log"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
@@ -25,6 +26,15 @@ import { raw, reply, TestLLMServer } from "../lib/llm-server"
Log.init({ print: false })
const summary = Layer.succeed(
SessionSummary.Service,
SessionSummary.Service.of({
summarize: () => Effect.void,
diff: () => Effect.succeed([]),
computeDiff: () => Effect.succeed([]),
}),
)
const ref = {
providerID: ProviderID.make("test"),
modelID: ModelID.make("test-model"),
@@ -156,7 +166,10 @@ const deps = Layer.mergeAll(
Provider.defaultLayer,
status,
).pipe(Layer.provideMerge(infra))
const env = Layer.mergeAll(TestLLMServer.layer, SessionProcessor.layer.pipe(Layer.provideMerge(deps)))
const env = Layer.mergeAll(
TestLLMServer.layer,
SessionProcessor.layer.pipe(Layer.provide(summary), Layer.provideMerge(deps)),
)
const it = testEffect(env)

View File

@@ -23,6 +23,7 @@ import { LLM } from "../../src/session/llm"
import { MessageV2 } from "../../src/session/message-v2"
import { AppFileSystem } from "../../src/filesystem"
import { SessionCompaction } from "../../src/session/compaction"
import { SessionSummary } from "../../src/session/summary"
import { Instruction } from "../../src/session/instruction"
import { SessionProcessor } from "../../src/session/processor"
import { SessionPrompt } from "../../src/session/prompt"
@@ -46,6 +47,15 @@ import { reply, TestLLMServer } from "../lib/llm-server"
Log.init({ print: false })
const summary = Layer.succeed(
SessionSummary.Service,
SessionSummary.Service.of({
summarize: () => Effect.void,
diff: () => Effect.succeed([]),
computeDiff: () => Effect.succeed([]),
}),
)
const ref = {
providerID: ProviderID.make("test"),
modelID: ModelID.make("test-model"),
@@ -182,12 +192,13 @@ function makeHttp() {
Layer.provideMerge(deps),
)
const trunc = Truncate.layer.pipe(Layer.provideMerge(deps))
const proc = SessionProcessor.layer.pipe(Layer.provideMerge(deps))
const proc = SessionProcessor.layer.pipe(Layer.provide(summary), Layer.provideMerge(deps))
const compact = SessionCompaction.layer.pipe(Layer.provideMerge(proc), Layer.provideMerge(deps))
return Layer.mergeAll(
TestLLMServer.layer,
SessionPrompt.layer.pipe(
Layer.provide(SessionRevert.defaultLayer),
Layer.provide(summary),
Layer.provideMerge(run),
Layer.provideMerge(compact),
Layer.provideMerge(proc),

File diff suppressed because it is too large Load Diff

View File

@@ -146,12 +146,14 @@ function makeHttp() {
Layer.provideMerge(deps),
)
const trunc = Truncate.layer.pipe(Layer.provideMerge(deps))
const proc = SessionProcessor.layer.pipe(Layer.provideMerge(deps))
const proc = SessionProcessor.layer.pipe(Layer.provide(SessionSummary.defaultLayer), Layer.provideMerge(deps))
const compact = SessionCompaction.layer.pipe(Layer.provideMerge(proc), Layer.provideMerge(deps))
return Layer.mergeAll(
TestLLMServer.layer,
SessionSummary.defaultLayer,
SessionPrompt.layer.pipe(
Layer.provide(SessionRevert.defaultLayer),
Layer.provide(SessionSummary.defaultLayer),
Layer.provideMerge(run),
Layer.provideMerge(compact),
Layer.provideMerge(proc),
@@ -200,6 +202,7 @@ it.live("tool execution produces non-empty session diff (snapshot race)", () =>
Effect.fnUntraced(function* ({ dir, llm }) {
const prompt = yield* SessionPrompt.Service
const sessions = yield* Session.Service
const summary = yield* SessionSummary.Service
const session = yield* sessions.create({
title: "snapshot race test",
@@ -244,9 +247,9 @@ it.live("tool execution produces non-empty session diff (snapshot race)", () =>
expect(tool?.state.status).toBe("completed")
// Poll for diff — summarize() is fire-and-forget
let diff: Awaited<ReturnType<typeof SessionSummary.diff>> = []
let diff: Array<{ file: string }> = []
for (let i = 0; i < 50; i++) {
diff = yield* Effect.promise(() => SessionSummary.diff({ sessionID: session.id }))
diff = yield* summary.diff({ sessionID: session.id })
if (diff.length > 0) break
yield* Effect.sleep("100 millis")
}

View File

@@ -4,7 +4,11 @@ import { Effect } from "effect"
import { Agent } from "../../src/agent/agent"
import { Instance } from "../../src/project/instance"
import { SystemPrompt } from "../../src/session/system"
import { tmpdir } from "../fixture/fixture"
import { provideInstance, tmpdir } from "../fixture/fixture"
function load<A>(dir: string, fn: (svc: Agent.Interface) => Effect.Effect<A>) {
return Effect.runPromise(provideInstance(dir)(Agent.Service.use(fn)).pipe(Effect.provide(Agent.defaultLayer)))
}
describe("session.system", () => {
test("skills output is sorted by name and stable across calls", async () => {
@@ -38,7 +42,7 @@ description: ${description}
await Instance.provide({
directory: tmp.path,
fn: async () => {
const build = await Agent.get("build")
const build = await load(tmp.path, (svc) => svc.get("build"))
const runSkills = Effect.gen(function* () {
const svc = yield* SystemPrompt.Service
return yield* svc.skills(build!)

View File

@@ -0,0 +1,81 @@
import { describe, expect } from "bun:test"
import path from "path"
import { Cause, Effect, Exit, Layer } from "effect"
import { GlobTool } from "../../src/tool/glob"
import { SessionID, MessageID } from "../../src/session/schema"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { Ripgrep } from "../../src/file/ripgrep"
import { AppFileSystem } from "../../src/filesystem"
import { Truncate } from "../../src/tool/truncate"
import { Agent } from "../../src/agent/agent"
import { provideTmpdirInstance } from "../fixture/fixture"
import { testEffect } from "../lib/effect"
const it = testEffect(
Layer.mergeAll(
CrossSpawnSpawner.defaultLayer,
AppFileSystem.defaultLayer,
Ripgrep.defaultLayer,
Truncate.defaultLayer,
Agent.defaultLayer,
),
)
const ctx = {
sessionID: SessionID.make("ses_test"),
messageID: MessageID.make(""),
callID: "",
agent: "build",
abort: AbortSignal.any([]),
messages: [],
metadata: () => Effect.void,
ask: () => Effect.void,
}
describe("tool.glob", () => {
it.live("matches files from a directory path", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
yield* Effect.promise(() => Bun.write(path.join(dir, "a.ts"), "export const a = 1\n"))
yield* Effect.promise(() => Bun.write(path.join(dir, "b.txt"), "hello\n"))
const info = yield* GlobTool
const glob = yield* info.init()
const result = yield* glob.execute(
{
pattern: "*.ts",
path: dir,
},
ctx,
)
expect(result.metadata.count).toBe(1)
expect(result.output).toContain(path.join(dir, "a.ts"))
expect(result.output).not.toContain(path.join(dir, "b.txt"))
}),
),
)
it.live("rejects exact file paths", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
const file = path.join(dir, "a.ts")
yield* Effect.promise(() => Bun.write(file, "export const a = 1\n"))
const info = yield* GlobTool
const glob = yield* info.init()
const exit = yield* glob
.execute(
{
pattern: "*.ts",
path: file,
},
ctx,
)
.pipe(Effect.exit)
expect(Exit.isFailure(exit)).toBe(true)
if (Exit.isFailure(exit)) {
const err = Cause.squash(exit.cause)
expect(err instanceof Error ? err.message : String(err)).toContain("glob path must be a directory")
}
}),
),
)
})

View File

@@ -90,4 +90,25 @@ describe("tool.grep", () => {
}),
),
)
it.live("supports exact file paths", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
const file = path.join(dir, "test.txt")
yield* Effect.promise(() => Bun.write(file, "line1\nline2\nline3"))
const info = yield* GrepTool
const grep = yield* info.init()
const result = yield* grep.execute(
{
pattern: "line2",
path: file,
},
ctx,
)
expect(result.metadata.matches).toBe(1)
expect(result.output).toContain(file)
expect(result.output).toContain("Line 2: line2")
}),
),
)
})

View File

@@ -304,6 +304,24 @@ export interface Hooks {
input: { sessionID: string },
output: { context: string[]; prompt?: string },
) => Promise<void>
/**
* Called after compaction succeeds and before a synthetic user
* auto-continue message is added.
*
* - `enabled`: Defaults to `true`. Set to `false` to skip the synthetic
* user "continue" turn.
*/
"experimental.compaction.autocontinue"?: (
input: {
sessionID: string
agent: string
model: Model
provider: ProviderContext
message: UserMessage
overflow: boolean
},
output: { enabled: boolean },
) => Promise<void>
"experimental.text.complete"?: (
input: { sessionID: string; messageID: string; partID: string },
output: { text: string },

View File

@@ -51,7 +51,6 @@ import type {
GlobalDisposeResponses,
GlobalEventResponses,
GlobalHealthResponses,
GlobalSyncEventSubscribeResponses,
GlobalUpgradeErrors,
GlobalUpgradeResponses,
InstanceDisposeResponses,
@@ -237,20 +236,6 @@ class HeyApiRegistry<T> {
}
}
export class SyncEvent extends HeyApiClient {
/**
* Subscribe to global sync events
*
* Get global sync events
*/
public subscribe<ThrowOnError extends boolean = false>(options?: Options<never, ThrowOnError>) {
return (options?.client ?? this.client).sse.get<GlobalSyncEventSubscribeResponses, unknown, ThrowOnError>({
url: "/global/sync-event",
...options,
})
}
}
export class Config extends HeyApiClient {
/**
* Get global configuration
@@ -350,11 +335,6 @@ export class Global extends HeyApiClient {
})
}
private _syncEvent?: SyncEvent
get syncEvent(): SyncEvent {
return (this._syncEvent ??= new SyncEvent({ client: this.client }))
}
private _config?: Config
get config(): Config {
return (this._config ??= new Config({ client: this.client }))

View File

@@ -971,64 +971,12 @@ export type EventSessionDeleted = {
}
}
export type Event =
| EventProjectUpdated
| EventServerInstanceDisposed
| EventInstallationUpdated
| EventInstallationUpdateAvailable
| EventServerConnected
| EventGlobalDisposed
| EventFileEdited
| EventFileWatcherUpdated
| EventLspClientDiagnostics
| EventLspUpdated
| EventMessagePartDelta
| EventPermissionAsked
| EventPermissionReplied
| EventSessionDiff
| EventSessionError
| EventQuestionAsked
| EventQuestionReplied
| EventQuestionRejected
| EventTodoUpdated
| EventSessionStatus
| EventSessionIdle
| EventSessionCompacted
| EventTuiPromptAppend
| EventTuiCommandExecute
| EventTuiToastShow
| EventTuiSessionSelect
| EventMcpToolsChanged
| EventMcpBrowserOpenFailed
| EventCommandExecuted
| EventVcsBranchUpdated
| EventWorktreeReady
| EventWorktreeFailed
| EventPtyCreated
| EventPtyUpdated
| EventPtyExited
| EventPtyDeleted
| EventWorkspaceReady
| EventWorkspaceFailed
| EventWorkspaceStatus
| EventMessageUpdated
| EventMessageRemoved
| EventMessagePartUpdated
| EventMessagePartRemoved
| EventSessionCreated
| EventSessionUpdated
| EventSessionDeleted
export type GlobalEvent = {
directory: string
project?: string
workspace?: string
payload: Event
}
export type SyncEventMessageUpdated = {
type: "message.updated.1"
aggregate: "sessionID"
type: "sync"
name: "message.updated.1"
id: string
seq: number
aggregateID: "sessionID"
data: {
sessionID: string
info: Message
@@ -1036,8 +984,11 @@ export type SyncEventMessageUpdated = {
}
export type SyncEventMessageRemoved = {
type: "message.removed.1"
aggregate: "sessionID"
type: "sync"
name: "message.removed.1"
id: string
seq: number
aggregateID: "sessionID"
data: {
sessionID: string
messageID: string
@@ -1045,8 +996,11 @@ export type SyncEventMessageRemoved = {
}
export type SyncEventMessagePartUpdated = {
type: "message.part.updated.1"
aggregate: "sessionID"
type: "sync"
name: "message.part.updated.1"
id: string
seq: number
aggregateID: "sessionID"
data: {
sessionID: string
part: Part
@@ -1055,8 +1009,11 @@ export type SyncEventMessagePartUpdated = {
}
export type SyncEventMessagePartRemoved = {
type: "message.part.removed.1"
aggregate: "sessionID"
type: "sync"
name: "message.part.removed.1"
id: string
seq: number
aggregateID: "sessionID"
data: {
sessionID: string
messageID: string
@@ -1065,8 +1022,11 @@ export type SyncEventMessagePartRemoved = {
}
export type SyncEventSessionCreated = {
type: "session.created.1"
aggregate: "sessionID"
type: "sync"
name: "session.created.1"
id: string
seq: number
aggregateID: "sessionID"
data: {
sessionID: string
info: Session
@@ -1074,8 +1034,11 @@ export type SyncEventSessionCreated = {
}
export type SyncEventSessionUpdated = {
type: "session.updated.1"
aggregate: "sessionID"
type: "sync"
name: "session.updated.1"
id: string
seq: number
aggregateID: "sessionID"
data: {
sessionID: string
info: {
@@ -1114,16 +1077,75 @@ export type SyncEventSessionUpdated = {
}
export type SyncEventSessionDeleted = {
type: "session.deleted.1"
aggregate: "sessionID"
type: "sync"
name: "session.deleted.1"
id: string
seq: number
aggregateID: "sessionID"
data: {
sessionID: string
info: Session
}
}
export type SyncEvent = {
payload: SyncEvent
export type GlobalEvent = {
directory: string
project?: string
workspace?: string
payload:
| EventProjectUpdated
| EventServerInstanceDisposed
| EventInstallationUpdated
| EventInstallationUpdateAvailable
| EventServerConnected
| EventGlobalDisposed
| EventFileEdited
| EventFileWatcherUpdated
| EventLspClientDiagnostics
| EventLspUpdated
| EventMessagePartDelta
| EventPermissionAsked
| EventPermissionReplied
| EventSessionDiff
| EventSessionError
| EventQuestionAsked
| EventQuestionReplied
| EventQuestionRejected
| EventTodoUpdated
| EventSessionStatus
| EventSessionIdle
| EventSessionCompacted
| EventTuiPromptAppend
| EventTuiCommandExecute
| EventTuiToastShow
| EventTuiSessionSelect
| EventMcpToolsChanged
| EventMcpBrowserOpenFailed
| EventCommandExecuted
| EventVcsBranchUpdated
| EventWorktreeReady
| EventWorktreeFailed
| EventPtyCreated
| EventPtyUpdated
| EventPtyExited
| EventPtyDeleted
| EventWorkspaceReady
| EventWorkspaceFailed
| EventWorkspaceStatus
| EventMessageUpdated
| EventMessageRemoved
| EventMessagePartUpdated
| EventMessagePartRemoved
| EventSessionCreated
| EventSessionUpdated
| EventSessionDeleted
| SyncEventMessageUpdated
| SyncEventMessageRemoved
| SyncEventMessagePartUpdated
| SyncEventMessagePartRemoved
| SyncEventSessionCreated
| SyncEventSessionUpdated
| SyncEventSessionDeleted
}
/**
@@ -1982,6 +2004,54 @@ export type File = {
status: "added" | "deleted" | "modified"
}
export type Event =
| EventProjectUpdated
| EventServerInstanceDisposed
| EventInstallationUpdated
| EventInstallationUpdateAvailable
| EventServerConnected
| EventGlobalDisposed
| EventFileEdited
| EventFileWatcherUpdated
| EventLspClientDiagnostics
| EventLspUpdated
| EventMessagePartDelta
| EventPermissionAsked
| EventPermissionReplied
| EventSessionDiff
| EventSessionError
| EventQuestionAsked
| EventQuestionReplied
| EventQuestionRejected
| EventTodoUpdated
| EventSessionStatus
| EventSessionIdle
| EventSessionCompacted
| EventTuiPromptAppend
| EventTuiCommandExecute
| EventTuiToastShow
| EventTuiSessionSelect
| EventMcpToolsChanged
| EventMcpBrowserOpenFailed
| EventCommandExecuted
| EventVcsBranchUpdated
| EventWorktreeReady
| EventWorktreeFailed
| EventPtyCreated
| EventPtyUpdated
| EventPtyExited
| EventPtyDeleted
| EventWorkspaceReady
| EventWorkspaceFailed
| EventWorkspaceStatus
| EventMessageUpdated
| EventMessageRemoved
| EventMessagePartUpdated
| EventMessagePartRemoved
| EventSessionCreated
| EventSessionUpdated
| EventSessionDeleted
export type McpStatusConnected = {
status: "connected"
}
@@ -2113,23 +2183,6 @@ export type GlobalEventResponses = {
export type GlobalEventResponse = GlobalEventResponses[keyof GlobalEventResponses]
export type GlobalSyncEventSubscribeData = {
body?: never
path?: never
query?: never
url: "/global/sync-event"
}
export type GlobalSyncEventSubscribeResponses = {
/**
* Event stream
*/
200: SyncEvent
}
export type GlobalSyncEventSubscribeResponse =
GlobalSyncEventSubscribeResponses[keyof GlobalSyncEventSubscribeResponses]
export type GlobalConfigGetData = {
body?: never
path?: never

View File

@@ -66,31 +66,6 @@
]
}
},
"/global/sync-event": {
"get": {
"operationId": "global.sync-event.subscribe",
"summary": "Subscribe to global sync events",
"description": "Get global sync events",
"responses": {
"200": {
"description": "Event stream",
"content": {
"text/event-stream": {
"schema": {
"$ref": "#/components/schemas/SyncEvent"
}
}
}
}
},
"x-codeSamples": [
{
"lang": "js",
"source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = createOpencodeClient()\nawait client.global.sync-event.subscribe({\n ...\n})"
}
]
}
},
"/global/config": {
"get": {
"operationId": "global.config.get",
@@ -9925,174 +9900,24 @@
},
"required": ["type", "properties"]
},
"Event": {
"anyOf": [
{
"$ref": "#/components/schemas/Event.project.updated"
},
{
"$ref": "#/components/schemas/Event.server.instance.disposed"
},
{
"$ref": "#/components/schemas/Event.installation.updated"
},
{
"$ref": "#/components/schemas/Event.installation.update-available"
},
{
"$ref": "#/components/schemas/Event.server.connected"
},
{
"$ref": "#/components/schemas/Event.global.disposed"
},
{
"$ref": "#/components/schemas/Event.file.edited"
},
{
"$ref": "#/components/schemas/Event.file.watcher.updated"
},
{
"$ref": "#/components/schemas/Event.lsp.client.diagnostics"
},
{
"$ref": "#/components/schemas/Event.lsp.updated"
},
{
"$ref": "#/components/schemas/Event.message.part.delta"
},
{
"$ref": "#/components/schemas/Event.permission.asked"
},
{
"$ref": "#/components/schemas/Event.permission.replied"
},
{
"$ref": "#/components/schemas/Event.session.diff"
},
{
"$ref": "#/components/schemas/Event.session.error"
},
{
"$ref": "#/components/schemas/Event.question.asked"
},
{
"$ref": "#/components/schemas/Event.question.replied"
},
{
"$ref": "#/components/schemas/Event.question.rejected"
},
{
"$ref": "#/components/schemas/Event.todo.updated"
},
{
"$ref": "#/components/schemas/Event.session.status"
},
{
"$ref": "#/components/schemas/Event.session.idle"
},
{
"$ref": "#/components/schemas/Event.session.compacted"
},
{
"$ref": "#/components/schemas/Event.tui.prompt.append"
},
{
"$ref": "#/components/schemas/Event.tui.command.execute"
},
{
"$ref": "#/components/schemas/Event.tui.toast.show"
},
{
"$ref": "#/components/schemas/Event.tui.session.select"
},
{
"$ref": "#/components/schemas/Event.mcp.tools.changed"
},
{
"$ref": "#/components/schemas/Event.mcp.browser.open.failed"
},
{
"$ref": "#/components/schemas/Event.command.executed"
},
{
"$ref": "#/components/schemas/Event.vcs.branch.updated"
},
{
"$ref": "#/components/schemas/Event.worktree.ready"
},
{
"$ref": "#/components/schemas/Event.worktree.failed"
},
{
"$ref": "#/components/schemas/Event.pty.created"
},
{
"$ref": "#/components/schemas/Event.pty.updated"
},
{
"$ref": "#/components/schemas/Event.pty.exited"
},
{
"$ref": "#/components/schemas/Event.pty.deleted"
},
{
"$ref": "#/components/schemas/Event.workspace.ready"
},
{
"$ref": "#/components/schemas/Event.workspace.failed"
},
{
"$ref": "#/components/schemas/Event.workspace.status"
},
{
"$ref": "#/components/schemas/Event.message.updated"
},
{
"$ref": "#/components/schemas/Event.message.removed"
},
{
"$ref": "#/components/schemas/Event.message.part.updated"
},
{
"$ref": "#/components/schemas/Event.message.part.removed"
},
{
"$ref": "#/components/schemas/Event.session.created"
},
{
"$ref": "#/components/schemas/Event.session.updated"
},
{
"$ref": "#/components/schemas/Event.session.deleted"
}
]
},
"GlobalEvent": {
"type": "object",
"properties": {
"directory": {
"type": "string"
},
"project": {
"type": "string"
},
"workspace": {
"type": "string"
},
"payload": {
"$ref": "#/components/schemas/Event"
}
},
"required": ["directory", "payload"]
},
"SyncEvent.message.updated": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "sync"
},
"name": {
"type": "string",
"const": "message.updated.1"
},
"aggregate": {
"id": {
"type": "string"
},
"seq": {
"type": "number"
},
"aggregateID": {
"type": "string",
"const": "sessionID"
},
@@ -10110,16 +9935,26 @@
"required": ["sessionID", "info"]
}
},
"required": ["type", "aggregate", "data"]
"required": ["type", "name", "id", "seq", "aggregateID", "data"]
},
"SyncEvent.message.removed": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "sync"
},
"name": {
"type": "string",
"const": "message.removed.1"
},
"aggregate": {
"id": {
"type": "string"
},
"seq": {
"type": "number"
},
"aggregateID": {
"type": "string",
"const": "sessionID"
},
@@ -10138,16 +9973,26 @@
"required": ["sessionID", "messageID"]
}
},
"required": ["type", "aggregate", "data"]
"required": ["type", "name", "id", "seq", "aggregateID", "data"]
},
"SyncEvent.message.part.updated": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "sync"
},
"name": {
"type": "string",
"const": "message.part.updated.1"
},
"aggregate": {
"id": {
"type": "string"
},
"seq": {
"type": "number"
},
"aggregateID": {
"type": "string",
"const": "sessionID"
},
@@ -10168,16 +10013,26 @@
"required": ["sessionID", "part", "time"]
}
},
"required": ["type", "aggregate", "data"]
"required": ["type", "name", "id", "seq", "aggregateID", "data"]
},
"SyncEvent.message.part.removed": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "sync"
},
"name": {
"type": "string",
"const": "message.part.removed.1"
},
"aggregate": {
"id": {
"type": "string"
},
"seq": {
"type": "number"
},
"aggregateID": {
"type": "string",
"const": "sessionID"
},
@@ -10200,16 +10055,26 @@
"required": ["sessionID", "messageID", "partID"]
}
},
"required": ["type", "aggregate", "data"]
"required": ["type", "name", "id", "seq", "aggregateID", "data"]
},
"SyncEvent.session.created": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "sync"
},
"name": {
"type": "string",
"const": "session.created.1"
},
"aggregate": {
"id": {
"type": "string"
},
"seq": {
"type": "number"
},
"aggregateID": {
"type": "string",
"const": "sessionID"
},
@@ -10227,16 +10092,26 @@
"required": ["sessionID", "info"]
}
},
"required": ["type", "aggregate", "data"]
"required": ["type", "name", "id", "seq", "aggregateID", "data"]
},
"SyncEvent.session.updated": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "sync"
},
"name": {
"type": "string",
"const": "session.updated.1"
},
"aggregate": {
"id": {
"type": "string"
},
"seq": {
"type": "number"
},
"aggregateID": {
"type": "string",
"const": "sessionID"
},
@@ -10479,16 +10354,26 @@
"required": ["sessionID", "info"]
}
},
"required": ["type", "aggregate", "data"]
"required": ["type", "name", "id", "seq", "aggregateID", "data"]
},
"SyncEvent.session.deleted": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "sync"
},
"name": {
"type": "string",
"const": "session.deleted.1"
},
"aggregate": {
"id": {
"type": "string"
},
"seq": {
"type": "number"
},
"aggregateID": {
"type": "string",
"const": "sessionID"
},
@@ -10506,16 +10391,185 @@
"required": ["sessionID", "info"]
}
},
"required": ["type", "aggregate", "data"]
"required": ["type", "name", "id", "seq", "aggregateID", "data"]
},
"SyncEvent": {
"GlobalEvent": {
"type": "object",
"properties": {
"directory": {
"type": "string"
},
"project": {
"type": "string"
},
"workspace": {
"type": "string"
},
"payload": {
"$ref": "#/components/schemas/SyncEvent"
"anyOf": [
{
"$ref": "#/components/schemas/Event.project.updated"
},
{
"$ref": "#/components/schemas/Event.server.instance.disposed"
},
{
"$ref": "#/components/schemas/Event.installation.updated"
},
{
"$ref": "#/components/schemas/Event.installation.update-available"
},
{
"$ref": "#/components/schemas/Event.server.connected"
},
{
"$ref": "#/components/schemas/Event.global.disposed"
},
{
"$ref": "#/components/schemas/Event.file.edited"
},
{
"$ref": "#/components/schemas/Event.file.watcher.updated"
},
{
"$ref": "#/components/schemas/Event.lsp.client.diagnostics"
},
{
"$ref": "#/components/schemas/Event.lsp.updated"
},
{
"$ref": "#/components/schemas/Event.message.part.delta"
},
{
"$ref": "#/components/schemas/Event.permission.asked"
},
{
"$ref": "#/components/schemas/Event.permission.replied"
},
{
"$ref": "#/components/schemas/Event.session.diff"
},
{
"$ref": "#/components/schemas/Event.session.error"
},
{
"$ref": "#/components/schemas/Event.question.asked"
},
{
"$ref": "#/components/schemas/Event.question.replied"
},
{
"$ref": "#/components/schemas/Event.question.rejected"
},
{
"$ref": "#/components/schemas/Event.todo.updated"
},
{
"$ref": "#/components/schemas/Event.session.status"
},
{
"$ref": "#/components/schemas/Event.session.idle"
},
{
"$ref": "#/components/schemas/Event.session.compacted"
},
{
"$ref": "#/components/schemas/Event.tui.prompt.append"
},
{
"$ref": "#/components/schemas/Event.tui.command.execute"
},
{
"$ref": "#/components/schemas/Event.tui.toast.show"
},
{
"$ref": "#/components/schemas/Event.tui.session.select"
},
{
"$ref": "#/components/schemas/Event.mcp.tools.changed"
},
{
"$ref": "#/components/schemas/Event.mcp.browser.open.failed"
},
{
"$ref": "#/components/schemas/Event.command.executed"
},
{
"$ref": "#/components/schemas/Event.vcs.branch.updated"
},
{
"$ref": "#/components/schemas/Event.worktree.ready"
},
{
"$ref": "#/components/schemas/Event.worktree.failed"
},
{
"$ref": "#/components/schemas/Event.pty.created"
},
{
"$ref": "#/components/schemas/Event.pty.updated"
},
{
"$ref": "#/components/schemas/Event.pty.exited"
},
{
"$ref": "#/components/schemas/Event.pty.deleted"
},
{
"$ref": "#/components/schemas/Event.workspace.ready"
},
{
"$ref": "#/components/schemas/Event.workspace.failed"
},
{
"$ref": "#/components/schemas/Event.workspace.status"
},
{
"$ref": "#/components/schemas/Event.message.updated"
},
{
"$ref": "#/components/schemas/Event.message.removed"
},
{
"$ref": "#/components/schemas/Event.message.part.updated"
},
{
"$ref": "#/components/schemas/Event.message.part.removed"
},
{
"$ref": "#/components/schemas/Event.session.created"
},
{
"$ref": "#/components/schemas/Event.session.updated"
},
{
"$ref": "#/components/schemas/Event.session.deleted"
},
{
"$ref": "#/components/schemas/SyncEvent.message.updated"
},
{
"$ref": "#/components/schemas/SyncEvent.message.removed"
},
{
"$ref": "#/components/schemas/SyncEvent.message.part.updated"
},
{
"$ref": "#/components/schemas/SyncEvent.message.part.removed"
},
{
"$ref": "#/components/schemas/SyncEvent.session.created"
},
{
"$ref": "#/components/schemas/SyncEvent.session.updated"
},
{
"$ref": "#/components/schemas/SyncEvent.session.deleted"
}
]
}
},
"required": ["payload"]
"required": ["directory", "payload"]
},
"LogLevel": {
"description": "Log level",
@@ -12608,6 +12662,148 @@
},
"required": ["path", "added", "removed", "status"]
},
"Event": {
"anyOf": [
{
"$ref": "#/components/schemas/Event.project.updated"
},
{
"$ref": "#/components/schemas/Event.server.instance.disposed"
},
{
"$ref": "#/components/schemas/Event.installation.updated"
},
{
"$ref": "#/components/schemas/Event.installation.update-available"
},
{
"$ref": "#/components/schemas/Event.server.connected"
},
{
"$ref": "#/components/schemas/Event.global.disposed"
},
{
"$ref": "#/components/schemas/Event.file.edited"
},
{
"$ref": "#/components/schemas/Event.file.watcher.updated"
},
{
"$ref": "#/components/schemas/Event.lsp.client.diagnostics"
},
{
"$ref": "#/components/schemas/Event.lsp.updated"
},
{
"$ref": "#/components/schemas/Event.message.part.delta"
},
{
"$ref": "#/components/schemas/Event.permission.asked"
},
{
"$ref": "#/components/schemas/Event.permission.replied"
},
{
"$ref": "#/components/schemas/Event.session.diff"
},
{
"$ref": "#/components/schemas/Event.session.error"
},
{
"$ref": "#/components/schemas/Event.question.asked"
},
{
"$ref": "#/components/schemas/Event.question.replied"
},
{
"$ref": "#/components/schemas/Event.question.rejected"
},
{
"$ref": "#/components/schemas/Event.todo.updated"
},
{
"$ref": "#/components/schemas/Event.session.status"
},
{
"$ref": "#/components/schemas/Event.session.idle"
},
{
"$ref": "#/components/schemas/Event.session.compacted"
},
{
"$ref": "#/components/schemas/Event.tui.prompt.append"
},
{
"$ref": "#/components/schemas/Event.tui.command.execute"
},
{
"$ref": "#/components/schemas/Event.tui.toast.show"
},
{
"$ref": "#/components/schemas/Event.tui.session.select"
},
{
"$ref": "#/components/schemas/Event.mcp.tools.changed"
},
{
"$ref": "#/components/schemas/Event.mcp.browser.open.failed"
},
{
"$ref": "#/components/schemas/Event.command.executed"
},
{
"$ref": "#/components/schemas/Event.vcs.branch.updated"
},
{
"$ref": "#/components/schemas/Event.worktree.ready"
},
{
"$ref": "#/components/schemas/Event.worktree.failed"
},
{
"$ref": "#/components/schemas/Event.pty.created"
},
{
"$ref": "#/components/schemas/Event.pty.updated"
},
{
"$ref": "#/components/schemas/Event.pty.exited"
},
{
"$ref": "#/components/schemas/Event.pty.deleted"
},
{
"$ref": "#/components/schemas/Event.workspace.ready"
},
{
"$ref": "#/components/schemas/Event.workspace.failed"
},
{
"$ref": "#/components/schemas/Event.workspace.status"
},
{
"$ref": "#/components/schemas/Event.message.updated"
},
{
"$ref": "#/components/schemas/Event.message.removed"
},
{
"$ref": "#/components/schemas/Event.message.part.updated"
},
{
"$ref": "#/components/schemas/Event.message.part.removed"
},
{
"$ref": "#/components/schemas/Event.session.created"
},
{
"$ref": "#/components/schemas/Event.session.updated"
},
{
"$ref": "#/components/schemas/Event.session.deleted"
}
]
},
"MCPStatusConnected": {
"type": "object",
"properties": {