Compare commits

..

164 Commits

Author SHA1 Message Date
opencode-agent[bot]
f897c46d7e Fix beta integration 2026-05-01 13:50:39 +00:00
opencode-agent[bot]
c82a67ff14 Apply PR #25034: feat: default HTTP API backend to on for dev/beta channels 2026-05-01 13:49:13 +00:00
opencode-agent[bot]
0ba6327752 Apply PR #24877: fix(core): run sessions with the same directory they were created with 2026-05-01 13:49:13 +00:00
opencode-agent[bot]
82c62861b7 Apply PR #24512: Refactor v2 session events as schemas 2026-05-01 13:49:12 +00:00
opencode-agent[bot]
f4fbc8a4e8 Apply PR #24229: fix: lazy session error schema 2026-05-01 13:49:12 +00:00
opencode-agent[bot]
182d9ce012 Apply PR #24174: feat(core): add background subagent support 2026-05-01 13:49:11 +00:00
opencode-agent[bot]
d67d4985c7 Apply PR #24149: feat(core): add scout agent for repo research 2026-05-01 13:49:11 +00:00
opencode-agent[bot]
0f04cf865c Apply PR #23557: feat(opencode): add interactive split-footer mode to run 2026-05-01 13:47:44 +00:00
opencode-agent[bot]
57da08d96b Apply PR #22753: core: move plugin intialisation to config layer override 2026-05-01 13:44:18 +00:00
opencode-agent[bot]
317bcc944a Apply PR #21537: fix(app): remove pierre diff virtualization 2026-05-01 13:44:18 +00:00
opencode-agent[bot]
3b81a8ea8b Apply PR #20039: feat: bash->shell tool + pwsh/powershell/cmd/bash specific tool definitions so agents work better 2026-05-01 13:43:23 +00:00
opencode-agent[bot]
88357671ec Apply PR #19067: ci: only build electron desktop 2026-05-01 13:40:46 +00:00
opencode-agent[bot]
5d72c6a475 Apply PR #12633: feat(tui): add auto-accept mode for permission requests 2026-05-01 13:40:46 +00:00
opencode-agent[bot]
27121e7898 Apply PR #11710: feat: Add the ability to include cleared prompts in the history, toggled by a KV-persisted command palette item (resolves #11489) 2026-05-01 13:36:54 +00:00
Shoubhit Dash
a84edc224f fix(tui): show background task progress 2026-05-01 19:05:16 +05:30
Shoubhit Dash
2f919b8bc7 refactor(task): use background jobs 2026-05-01 19:05:06 +05:30
Shoubhit Dash
085fac7c2c feat(background): add job service 2026-05-01 19:04:56 +05:30
Kit Langton
16ddf5f559 fix(session): use finite archived timestamp schema (#25275) 2026-05-01 11:57:03 +00:00
Kit Langton
8c79c58c4d refactor: rename workspace adapters (#25272) 2026-05-01 07:36:52 -04:00
luo jiyin
97ed9ba624 fix: correct documentation typos (#25260) 2026-05-01 12:05:06 +02:00
Shoubhit Dash
b26fe0d357 Merge branch 'dev' into nxl/background-subagents 2026-05-01 15:29:32 +05:30
Simon Klee
a6b6395c8a fix(tui): gate logo subpixel rendering on truecolor support (#25265) 2026-05-01 11:33:44 +02:00
Shoubhit Dash
365386fac0 Merge remote-tracking branch 'origin/dev' into nxl/scout-repo-tools
# Conflicts:
#	packages/opencode/src/agent/agent.ts
#	packages/opencode/src/cli/cmd/github.ts
#	packages/opencode/src/config/config.ts
#	packages/opencode/src/config/permission.ts
#	packages/opencode/src/global/index.ts
#	packages/opencode/src/tool/registry.ts
2026-05-01 14:59:01 +05:30
Dax Raad
0983235f75 core: simplify Session.Info schema to empty struct for flexible event handling
This change removes the predefined fields from Session.Info to allow more
dynamic event-driven session data. Instead of fixed schema fields, session
information will be populated through the event system, enabling better
support for evolving session states without schema migrations.

The empty struct serves as a base that can be extended through the event
model, making it easier to add new session attributes without modifying
core schema definitions.
2026-05-01 00:03:17 -04:00
Dax Raad
6d1fa44d7f core: expose complete session metadata schema for agent session introspection 2026-05-01 00:03:17 -04:00
Dax Raad
51f7e71579 core: add session listing API with filtering and improved pagination
Users can now browse sessions through the new /api/session endpoint with filters for directory, workspace, date range, and title search. Pagination cursors are now labeled 'previous' and 'next' instead of 'before' and 'after' to make navigation direction clearer. Both session lists and message history now support explicit 'asc' or 'desc' ordering so users can choose between newest-first or oldest-first views. The TUI session view now displays messages with the newest at the bottom, matching standard chat interfaces.
2026-05-01 00:03:17 -04:00
Dax Raad
7989a032e7 core: add pagination support for session messages with cursor-based navigation
Enables loading messages in chunks for better performance with long conversations.
Users can now navigate through large session histories without loading all messages at once.
Includes before/after cursors for bi-directional pagination.
2026-05-01 00:03:17 -04:00
Dax Raad
965e74ef89 core: simplify message history pagination with unified cursor API
Replace separate before/after query parameters with a single cursor that
carries direction info. Chat clients can now use 'start' or 'end' keywords
to jump to the beginning or newest messages, and navigate history with a
single cursor parameter instead of managing multiple pagination states.
2026-05-01 00:03:17 -04:00
Dax Raad
f99c69bba8 refactor(session): define v2 session event schemas 2026-05-01 00:03:17 -04:00
Brendan Allan
7423b4872c rename desktop-darwin to desktop-mac 2026-05-01 11:54:56 +08:00
Brendan Allan
9b85d2cbb4 Merge branch 'dev' into brendan/lazy-init-plugins 2026-05-01 11:48:58 +08:00
Brendan Allan
4a86c2b77a Merge branch 'dev' into brendan/lazy-init-plugins 2026-05-01 11:47:50 +08:00
James Long
fdfe599cb5 handle all events 2026-04-30 10:43:06 -04:00
James Long
6df566161e fix(core): run sessions with the same directory they were created with 2026-04-30 10:43:05 -04:00
Kit Langton
1e5cc6da19 feat: default HTTP API backend to on for dev/beta channels
Turn on the experimental effect-httpapi server backend by default for
dev, beta, and local installations so internal users exercise the new
backend. Stable (prod/latest) installs remain on the legacy hono backend.

OPENCODE_EXPERIMENTAL_HTTPAPI=true/1 still force-enables on stable, and
OPENCODE_EXPERIMENTAL_HTTPAPI=false/0 disables it as an escape hatch for
dev/beta users.
2026-04-29 21:35:48 -04:00
Brendan Allan
92d44ce72e create git token after downloading artifacts 2026-04-29 14:25:36 +08:00
LukeParkerDev
529a6ed10f . 2026-04-29 13:00:39 +10:00
Brendan Allan
a3cb00a1ab try improve 2026-04-29 10:17:54 +08:00
LukeParkerDev
20c3461a80 f 2026-04-29 10:10:58 +10:00
LukeParkerDev
f8687190f2 . 2026-04-29 09:47:58 +10:00
LukeParkerDev
d5ebfad838 . 2026-04-29 09:34:28 +10:00
LukeParkerDev
c16a0e08ae Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-29 09:25:48 +10:00
LukeParkerDev
70ca5727a6 noooooooooo brekaing changes 2026-04-29 09:06:05 +10:00
LukeParkerDev
9d9830b7df no breaking changes 2026-04-29 08:51:06 +10:00
Brendan Allan
6a7b415894 read signature from sig file 2026-04-28 19:29:39 +08:00
Brendan Allan
682c4eecd6 don't do tag lookup istg 2026-04-28 15:41:05 +08:00
Brendan Allan
9fbff1bc7d Merge branch 'dev' into brendan/desktop-electron-only 2026-04-28 14:05:53 +08:00
Brendan Allan
dced9c9baa lookup release by id not name 2026-04-28 14:03:15 +08:00
Brendan Allan
55e7bb08d0 remove build-tauri 2026-04-28 12:31:53 +08:00
Brendan Allan
6620054fe1 fix error 2026-04-28 12:28:09 +08:00
Brendan Allan
db830c636b rename opencode-election-* to opencode-desktop-* 2026-04-28 12:28:09 +08:00
Brendan Allan
04c03fa612 ci: only build electron desktop 2026-04-28 12:28:09 +08:00
LukeParkerDev
6ac33ddc4d test: update experimental api shell assertions 2026-04-27 14:30:41 +10:00
LukeParkerDev
ea277baeb7 css 2026-04-27 14:23:37 +10:00
LukeParkerDev
b1d9c57655 Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-27 14:15:07 +10:00
LukeParkerDev
5a7e69b325 Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-27 08:59:39 +10:00
LukeParkerDev
344dab3839 Update next.test.ts 2026-04-26 09:59:46 +10:00
LukeParkerDev
9dde86acbe Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-26 09:56:21 +10:00
Luke Parker
73ee7ae702 Merge branch 'dev' into refactor-shells 2026-04-25 15:23:49 +10:00
LukeParkerDev
2051cadcb8 Update prompt.ts 2026-04-25 15:18:30 +10:00
LukeParkerDev
790d181d8a slight accuracy 2026-04-25 11:37:32 +10:00
LukeParkerDev
ecac4c4e2a split prompt/definition from logic 2026-04-25 11:32:18 +10:00
LukeParkerDev
f89955a4e3 Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-25 11:18:01 +10:00
LukeParkerDev
428b0c46a7 cmd 2026-04-25 11:15:31 +10:00
LukeParkerDev
341b8e78c9 perms 2026-04-25 11:11:42 +10:00
LukeParkerDev
d704110e52 fix: lazy session error schema 2026-04-25 10:04:49 +10:00
Shoubhit Dash
80aeb78b38 Merge branch 'dev' into nxl/background-subagents 2026-04-24 20:32:04 +05:30
Shoubhit Dash
601fe03a3a refactor(task): simplify effect wrappers 2026-04-24 20:30:53 +05:30
Shoubhit Dash
3f4b9d9ef4 test(task): use branded session id in schema test 2026-04-24 20:21:02 +05:30
Shoubhit Dash
1357bb984f style: fix background task formatting 2026-04-24 20:20:04 +05:30
Shoubhit Dash
ecde8ab363 test(task): update parameter schema snapshot 2026-04-24 20:20:04 +05:30
Shoubhit Dash
7970130720 fix(ui): label background task cards 2026-04-24 20:08:32 +05:30
Shoubhit Dash
971c837ad4 feat(task): add background subagent support 2026-04-24 20:08:24 +05:30
Shoubhit Dash
b633a8b1c8 refactor(scout): fold github remote parsing into repository 2026-04-24 19:03:56 +05:30
Shoubhit Dash
c750df3e86 fix(scout): use effect schema tool params 2026-04-24 18:58:28 +05:30
Shoubhit Dash
3bf0c79396 Merge branch 'dev' into nxl/scout-repo-tools 2026-04-24 16:39:56 +05:30
Shoubhit Dash
35a19df57d fix(scout): widen repo tool schema types 2026-04-24 16:38:37 +05:30
Shoubhit Dash
343e68853c fix(scout): type repo tool definitions 2026-04-24 16:34:45 +05:30
Shoubhit Dash
0db04ef69f docs: add scout agent docs 2026-04-24 16:29:31 +05:30
Shoubhit Dash
1e0246cdc8 feat(scout): add repo research tools 2026-04-24 16:29:19 +05:30
Ariane Emory
09e4e5a184 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-23 21:55:13 -04:00
LukeParkerDev
4f8ff6ab53 . 2026-04-24 08:23:18 +10:00
LukeParkerDev
7266b48ca0 Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-24 08:11:09 +10:00
LukeParkerDev
26d77add77 edges 2026-04-24 08:03:16 +10:00
LukeParkerDev
cffb8eb1e3 . 2026-04-24 07:54:08 +10:00
LukeParkerDev
0d500a735f Create todo.spec.ts 2026-04-24 07:44:06 +10:00
LukeParkerDev
6d66973fd5 clean 2026-04-24 07:39:19 +10:00
LukeParkerDev
3e30068907 refactor: make shell the canonical tool internals 2026-04-23 19:46:00 +10:00
LukeParkerDev
b75f831eaa . 2026-04-23 17:34:57 +10:00
LukeParkerDev
f9a633bd0b Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-23 17:31:07 +10:00
Brendan Allan
e041605b40 Merge branch 'dev' into brendan/lazy-init-plugins 2026-04-21 12:33:02 +08:00
LukeParkerDev
f280e7e69c fix: defer MessageV2.Assistant.shape access to break circular dep in compiled binary 2026-04-20 16:08:42 +10:00
Brendan Allan
b265742fd0 Merge branch 'dev' into brendan/lazy-init-plugins 2026-04-19 21:15:45 +08:00
Brendan Allan
b1db69fdf7 fix other commands 2026-04-17 17:03:53 +08:00
Brendan Allan
031766efa0 fix tui 2026-04-17 15:44:01 +08:00
Brendan Allan
dc6d39551c address feedback 2026-04-17 15:44:01 +08:00
Brendan Allan
e287569f82 rename layer 2026-04-17 15:44:01 +08:00
Brendan Allan
14eacb4019 core: move plugin intialisation to config layer override 2026-04-17 15:44:01 +08:00
Ariane Emory
731c1e58f2 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-16 20:22:02 -04:00
Ariane Emory
c411d37484 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-12 04:22:06 -04:00
Adam
cb29742b57 fix(app): remove pierre diff virtualization 2026-04-08 13:16:45 -05:00
LukeParkerDev
ee0884ad31 fix(shell): preserve legacy bash compatibility
Keep mixed shell/bash permission configs ordered correctly and treat --tools bash as the legacy alias during agent creation.
2026-04-08 15:14:45 +10:00
LukeParkerDev
f1547de528 ok 2026-04-08 14:35:16 +10:00
LukeParkerDev
39088e1a1e Merge remote-tracking branch 'upstream/dev' into refactor-shells
# Conflicts:
#	packages/app/e2e/prompt/prompt-shell.spec.ts
#	packages/opencode/src/tool/bash.ts
#	packages/opencode/src/tool/registry.ts
2026-04-08 13:11:43 +10:00
Ariane Emory
97a94571a4 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-04-03 09:19:12 -04:00
LukeParkerDev
25551172c9 fix(shell): avoid abort hangs and utf8 corruption
Attach shell process listeners before handling already-aborted tool signals so canceled runs always settle, and decode shell output as UTF-8 to preserve multibyte characters across chunk boundaries. Also lazy-load shell-specific parsers and hoist command sets so parsing work stays focused on the active shell.
2026-04-03 16:04:41 +10:00
LukeParkerDev
32ec3666b7 fix(shell): keep shell config consistent
Treat shell access as one logical toggle during agent creation and apply bash compatibility rules before explicit per-shell overrides. This avoids disabling the active Windows shell unexpectedly and keeps pwsh and powershell overrides deterministic.
2026-04-03 15:08:30 +10:00
LukeParkerDev
2eb9ae4d34 refactor(shell): centralize shell tool identity
Move shell tool ID checks behind shared helpers so runtime code and tests stop duplicating bash, pwsh, and powershell branches. This keeps shell-specific behavior aligned across consumers and makes follow-on shell changes less error-prone.
2026-04-03 14:56:40 +10:00
LukeParkerDev
baf476f431 test(shell): handle nullable exit metadata
Make the shell exit assertions typecheck cleanly while keeping the PowerShell regression coverage. Remove the accidentally committed .opencode package-lock so generated state does not ship in the branch.
2026-04-03 14:29:04 +10:00
LukeParkerDev
23e77fd9bc fix(shell): preserve powershell exit codes
Use a multiline PowerShell trailer so native Windows commands keep their actual exit status without masking cmdlet failures, and add focused regression coverage. Remove the accidentally committed .opencode package-lock to keep generated state out of the branch.
2026-04-03 14:27:03 +10:00
LukeParkerDev
6ad6358eb1 fix: render pwsh and powershell tools correctly in UI
This fixes regressions from splitting the shell tools where powershell commands were missing their native exit codes and their correct UI rendering.
2026-04-03 14:01:13 +10:00
LukeParkerDev
95577c75a3 fix(config): preserve bash permission compatibility
Keep legacy tools.bash migration mapped to the single bash permission since the permission layer already expands it to pwsh and powershell. This preserves the backward-compatible config shape while retaining shell compatibility.
2026-04-03 13:43:37 +10:00
LukeParkerDev
f21bf4a62a Merge remote-tracking branch 'upstream/dev' into refactor-shells 2026-04-03 13:34:16 +10:00
LukeParkerDev
676519d79d refactor: apply positive guidance and parameterize shell commands in prompt template 2026-03-30 20:42:42 +10:00
LukeParkerDev
48f9082d0a refactor: use positive tone in shell guidance prompts 2026-03-30 20:24:49 +10:00
LukeParkerDev
51ebba2975 refactor: add shell-specific guidance to each tool prompt 2026-03-30 20:18:50 +10:00
LukeParkerDev
3e26c3ae83 refactor: extract shell tool factory to eliminate duplication 2026-03-30 20:15:58 +10:00
LukeParkerDev
67dfbcbcfd fix: use dynamic imports for tree-sitter and shell-aware metadata tags 2026-03-30 20:12:36 +10:00
LukeParkerDev
048ac63abd refactor: split monolithic bash tool into separate bash/pwsh/powershell tools 2026-03-30 20:08:27 +10:00
Ariane Emory
6652585a7f Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-24 11:17:40 -04:00
Ariane Emory
532b64c0d5 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-24 07:43:03 -04:00
Ariane Emory
eec4c775a7 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-23 21:10:21 -04:00
Ariane Emory
01e350449c Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-20 19:12:18 -04:00
Dax
5792a80a8c Merge branch 'dev' into feat/auto-accept-permissions 2026-03-20 10:46:31 -04:00
Dax Raad
db039db7f5 regen js sdk 2026-03-20 10:21:10 -04:00
Dax Raad
c1a3936b61 Merge remote-tracking branch 'origin/dev' into feat/auto-accept-permissions
# Conflicts:
#	packages/sdk/js/src/v2/gen/types.gen.ts
2026-03-20 10:20:26 -04:00
Ariane Emory
a9d9e4d9c4 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-20 03:35:16 -04:00
Ariane Emory
2531b2d3a9 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-13 11:47:39 -04:00
Ariane Emory
a718f86e0f Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-08 19:28:41 -04:00
Ariane Emory
f3efdff861 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-08 08:36:02 -04:00
Ariane Emory
955d8591df Merge branch 'dev' into feat/canceled-prompts-in-history 2026-03-05 18:24:19 -05:00
Ariane Emory
33b3388bf4 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-26 17:50:11 -05:00
Ariane Emory
716f40b128 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-26 01:36:39 -05:00
Ariane Emory
0b06ff1407 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-20 21:24:12 -05:00
Ariane Emory
01ff5b5390 Merge branch 'dev' into feat/canceled-prompts-in-history
# Conflicts:
#	packages/opencode/src/cli/cmd/tui/component/prompt/history.tsx
2026-02-20 02:16:02 -05:00
Ariane Emory
3d1b121e70 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-19 19:18:48 -05:00
Ariane Emory
b70629af27 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-18 19:10:26 -05:00
Ariane Emory
b7b016fa28 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-17 00:09:51 -05:00
Ariane Emory
5ba2d7e5f0 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-15 12:27:51 -05:00
Ariane Emory
459b22b83d Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-14 19:21:47 -05:00
Ariane Emory
377812b98a Merge dev into feat/canceled-prompts-in-history 2026-02-14 06:28:48 -05:00
Ariane Emory
5cc0901e38 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-13 09:37:11 -05:00
Ariane Emory
7fb6b589d1 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-12 18:29:23 -05:00
Ariane Emory
3f37b43e7d Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-11 12:46:47 -05:00
Ariane Emory
8805dfc849 fix: deduplicate prompt history entries
Avoid adding duplicate entries to prompt history when the same input
is appended multiple times (e.g., clearing with ctrl+c then restoring
via history navigation and clearing again).
2026-02-10 22:21:39 -05:00
Ariane Emory
ac5a5d8b16 Merge branch 'feat/canceled-prompts-in-history' of github.com:ariane-emory/opencode into feat/canceled-prompts-in-history 2026-02-10 16:37:55 -05:00
Ariane Emory
eaf94ed047 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-10 16:29:05 -05:00
Ariane Emory
b8031c5ae8 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-10 16:10:35 -05:00
Dax Raad
a531f3f36d core: run command build agent now auto-accepts file edits to reduce workflow interruptions while still requiring confirmation for bash commands 2026-02-07 20:00:09 -05:00
Dax Raad
bb3382311d tui: standardize autoedit indicator text styling to match other status labels 2026-02-07 19:57:45 -05:00
Dax Raad
ad545d0cc9 tui: allow auto-accepting only edit permissions instead of all permissions 2026-02-07 19:52:53 -05:00
Dax Raad
ac244b1458 tui: add searchable 'toggle' keywords to command palette and show current state in toggle titles 2026-02-07 17:03:34 -05:00
Dax Raad
f202536b65 tui: show enable/disable state in permission toggle and make it searchable by 'toggle permissions' 2026-02-07 16:57:48 -05:00
Dax Raad
405cc3f610 tui: streamline permission toggle command naming and add keyboard shortcut support
Rename 'Toggle autoaccept permissions' to 'Toggle permissions' for clarity
and move the command to the Agent category for better discoverability.
Add permission_auto_accept_toggle keybind to enable keyboard shortcut
toggling of auto-accept mode for permission requests.
2026-02-07 16:51:55 -05:00
Dax Raad
878c1b8c2d feat(tui): add auto-accept mode for permission requests
Add a toggleable auto-accept mode that automatically accepts all incoming
permission requests with a 'once' reply. This is useful for users who want
to streamline their workflow when they trust the agent's actions.

Changes:
- Add permission_auto_accept keybind (default: shift+tab) to config
- Remove default for agent_cycle_reverse (was shift+tab)
- Add auto-accept logic in sync.tsx to auto-reply when enabled
- Add command bar action to toggle auto-accept mode (copy: "Toggle autoaccept permissions")
- Add visual indicator showing 'auto-accept' when active
- Store auto-accept state in KV for persistence across sessions
2026-02-07 16:44:39 -05:00
Ariane Emory
d5dcadc000 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-07 13:34:42 -05:00
Ariane Emory
0c154e6a2f Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-06 15:59:50 -05:00
Ariane Emory
4f96975148 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-05 18:17:01 -05:00
Ariane Emory
eaba99711b Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-04 19:33:59 -05:00
Ariane Emory
f762125775 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-03 18:36:44 -05:00
Ariane Emory
ded6bb6513 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-02 21:23:28 -05:00
Ariane Emory
39332f5be6 Merge branch 'dev' into feat/canceled-prompts-in-history 2026-02-01 22:33:29 -05:00
Ariane Emory
2c6ff35400 feat: add toggle to control whether cleared prompts are saved to history
Adds a toggle command in the System category that allows users to enable
or disable saving cleared prompts to history. The feature is disabled by
default to preserve existing behavior.

When enabled via the command palette ("Include cleared prompts in history"),
pressing Ctrl+C will save the current prompt to history before clearing it,
allowing users to navigate back with arrow keys.

The setting persists in kv.json.
2026-02-01 21:12:48 -05:00
Ariane Emory
738d6c8899 feat: save prompt to history when cleared with Ctrl+C
When users press Ctrl+C to clear the input field, the current prompt
is now saved to history before clearing. This allows users to navigate
back to cleared prompts using arrow keys, preventing loss of work.

Addresses #11489
2026-02-01 21:01:15 -05:00
152 changed files with 9997 additions and 3043 deletions

View File

@@ -209,182 +209,6 @@ jobs:
packages/opencode/dist/opencode-windows-x64
packages/opencode/dist/opencode-windows-x64-baseline
build-tauri:
needs:
- build-cli
- version
continue-on-error: false
env:
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
AZURE_TRUSTED_SIGNING_ACCOUNT_NAME: ${{ secrets.AZURE_TRUSTED_SIGNING_ACCOUNT_NAME }}
AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE: ${{ secrets.AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE }}
AZURE_TRUSTED_SIGNING_ENDPOINT: ${{ secrets.AZURE_TRUSTED_SIGNING_ENDPOINT }}
strategy:
fail-fast: false
matrix:
settings:
- host: macos-latest
target: x86_64-apple-darwin
- host: macos-latest
target: aarch64-apple-darwin
# github-hosted: blacksmith lacks ARM64 MSVC cross-compilation toolchain
- host: windows-2025
target: aarch64-pc-windows-msvc
- host: blacksmith-4vcpu-windows-2025
target: x86_64-pc-windows-msvc
- host: blacksmith-4vcpu-ubuntu-2404
target: x86_64-unknown-linux-gnu
- host: blacksmith-8vcpu-ubuntu-2404-arm
target: aarch64-unknown-linux-gnu
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v3
with:
fetch-tags: true
- uses: apple-actions/import-codesign-certs@v2
if: ${{ runner.os == 'macOS' }}
with:
keychain: build
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
- name: Verify Certificate
if: ${{ runner.os == 'macOS' }}
run: |
CERT_INFO=$(security find-identity -v -p codesigning build.keychain | grep "Developer ID Application")
CERT_ID=$(echo "$CERT_INFO" | awk -F'"' '{print $2}')
echo "CERT_ID=$CERT_ID" >> $GITHUB_ENV
echo "Certificate imported."
- name: Setup Apple API Key
if: ${{ runner.os == 'macOS' }}
run: |
echo "${{ secrets.APPLE_API_KEY_PATH }}" > $RUNNER_TEMP/apple-api-key.p8
- uses: ./.github/actions/setup-bun
- name: Azure login
if: runner.os == 'Windows'
uses: azure/login@v2
with:
client-id: ${{ env.AZURE_CLIENT_ID }}
tenant-id: ${{ env.AZURE_TENANT_ID }}
subscription-id: ${{ env.AZURE_SUBSCRIPTION_ID }}
- uses: actions/setup-node@v4
with:
node-version: "24"
- name: Cache apt packages
if: contains(matrix.settings.host, 'ubuntu')
uses: actions/cache@v4
with:
path: ~/apt-cache
key: ${{ runner.os }}-${{ matrix.settings.target }}-apt-${{ hashFiles('.github/workflows/publish.yml') }}
restore-keys: |
${{ runner.os }}-${{ matrix.settings.target }}-apt-
- name: install dependencies (ubuntu only)
if: contains(matrix.settings.host, 'ubuntu')
run: |
mkdir -p ~/apt-cache && chmod -R a+rw ~/apt-cache
sudo apt-get update
sudo apt-get install -y --no-install-recommends -o dir::cache::archives="$HOME/apt-cache" libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
sudo chmod -R a+rw ~/apt-cache
- name: install Rust stable
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.settings.target }}
- uses: Swatinem/rust-cache@v2
with:
workspaces: packages/desktop/src-tauri
shared-key: ${{ matrix.settings.target }}
- name: Prepare
run: |
cd packages/desktop
bun ./scripts/prepare.ts
env:
OPENCODE_VERSION: ${{ needs.version.outputs.version }}
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}
OPENCODE_CLI_ARTIFACT: ${{ (runner.os == 'Windows' && 'opencode-cli-windows') || 'opencode-cli' }}
RUST_TARGET: ${{ matrix.settings.target }}
GH_TOKEN: ${{ github.token }}
GITHUB_RUN_ID: ${{ github.run_id }}
- name: Resolve tauri portable SHA
if: contains(matrix.settings.host, 'ubuntu')
run: echo "TAURI_PORTABLE_SHA=$(git ls-remote https://github.com/tauri-apps/tauri.git refs/heads/feat/truly-portable-appimage | cut -f1)" >> "$GITHUB_ENV"
# Fixes AppImage build issues, can be removed when https://github.com/tauri-apps/tauri/pull/12491 is released
- name: Install tauri-cli from portable appimage branch
uses: taiki-e/cache-cargo-install-action@v3
if: contains(matrix.settings.host, 'ubuntu')
with:
tool: tauri-cli
git: https://github.com/tauri-apps/tauri
# branch: feat/truly-portable-appimage
rev: ${{ env.TAURI_PORTABLE_SHA }}
- name: Show tauri-cli version
if: contains(matrix.settings.host, 'ubuntu')
run: cargo tauri --version
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- name: Build and upload artifacts
uses: tauri-apps/tauri-action@390cbe447412ced1303d35abe75287949e43437a
timeout-minutes: 60
with:
projectPath: packages/desktop
uploadWorkflowArtifacts: true
tauriScript: ${{ (contains(matrix.settings.host, 'ubuntu') && 'cargo tauri') || '' }}
args: --target ${{ matrix.settings.target }} --config ${{ (github.ref_name == 'beta' && './src-tauri/tauri.beta.conf.json') || './src-tauri/tauri.prod.conf.json' }} --verbose
updaterJsonPreferNsis: true
releaseId: ${{ needs.version.outputs.release }}
tagName: ${{ needs.version.outputs.tag }}
releaseDraft: true
releaseAssetNamePattern: opencode-desktop-[platform]-[arch][ext]
repo: ${{ (github.ref_name == 'beta' && 'opencode-beta') || '' }}
releaseCommitish: ${{ github.sha }}
env:
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}
TAURI_BUNDLER_NEW_APPIMAGE_FORMAT: true
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ env.CERT_ID }}
APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }}
APPLE_API_KEY: ${{ secrets.APPLE_API_KEY }}
APPLE_API_KEY_PATH: ${{ runner.temp }}/apple-api-key.p8
- name: Verify signed Windows desktop artifacts
if: runner.os == 'Windows'
shell: pwsh
run: |
$files = @(
"${{ github.workspace }}\packages\desktop\src-tauri\sidecars\opencode-cli-${{ matrix.settings.target }}.exe"
)
$files += Get-ChildItem "${{ github.workspace }}\packages\desktop\src-tauri\target\${{ matrix.settings.target }}\release\bundle\nsis\*.exe" | Select-Object -ExpandProperty FullName
foreach ($file in $files) {
$sig = Get-AuthenticodeSignature $file
if ($sig.Status -ne "Valid") {
throw "Invalid signature for ${file}: $($sig.Status)"
}
}
build-electron:
needs:
- build-cli
@@ -524,6 +348,30 @@ jobs:
env:
OPENCODE_CHANNEL: ${{ (github.ref_name == 'beta' && 'beta') || 'prod' }}
- name: Create and upload macOS .app.tar.gz
if: runner.os == 'macOS' && needs.version.outputs.release
working-directory: packages/desktop-electron/dist
env:
GH_TOKEN: ${{ steps.committer.outputs.token }}
run: |
if [[ "${{ matrix.settings.target }}" == "x86_64-apple-darwin" ]]; then
APP_DIR="mac"
OUT_NAME="opencode-desktop-mac-x64.app.tar.gz"
elif [[ "${{ matrix.settings.target }}" == "aarch64-apple-darwin" ]]; then
APP_DIR="mac-arm64"
OUT_NAME="opencode-desktop-mac-arm64.app.tar.gz"
else
echo "Unknown macOS target: ${{ matrix.settings.target }}"
exit 1
fi
APP_PATH=$(find "$APP_DIR" -maxdepth 1 -name "*.app" -type d | head -1)
if [ -z "$APP_PATH" ]; then
echo "No .app bundle found in $APP_DIR"
exit 1
fi
tar -czf "$OUT_NAME" -C "$(dirname "$APP_PATH")" "$(basename "$APP_PATH")"
gh release upload "v${{ needs.version.outputs.version }}" "$OUT_NAME" --clobber --repo "${{ needs.version.outputs.repo }}"
- name: Verify signed Windows Electron artifacts
if: runner.os == 'Windows'
shell: pwsh
@@ -542,7 +390,7 @@ jobs:
- uses: actions/upload-artifact@v4
with:
name: opencode-electron-${{ matrix.settings.target }}
name: opencode-desktop-${{ matrix.settings.target }}
path: packages/desktop-electron/dist/*
- uses: actions/upload-artifact@v4
@@ -556,7 +404,6 @@ jobs:
- version
- build-cli
- sign-cli-windows
- build-tauri
- build-electron
if: always() && !failure() && !cancelled()
runs-on: blacksmith-4vcpu-ubuntu-2404
@@ -583,13 +430,6 @@ jobs:
node-version: "24"
registry-url: "https://registry.npmjs.org"
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- uses: actions/download-artifact@v4
with:
name: opencode-cli
@@ -611,6 +451,13 @@ jobs:
pattern: latest-yml-*
path: /tmp/latest-yml
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- name: Cache apt packages (AUR)
uses: actions/cache@v4
with:
@@ -639,3 +486,5 @@ jobs:
GH_REPO: ${{ needs.version.outputs.repo }}
NPM_CONFIG_PROVENANCE: false
LATEST_YML_DIR: /tmp/latest-yml
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}

View File

@@ -1,4 +1,5 @@
import { Config } from "effect"
import { InstallationChannel } from "../installation/version"
function truthy(key: string) {
const value = process.env[key]?.toLowerCase()
@@ -10,6 +11,10 @@ function falsy(key: string) {
return value === "false" || value === "0"
}
// Channels that default to the new effect-httpapi server backend. The legacy
// hono backend remains the default for stable (`prod`/`latest`) installs.
const HTTPAPI_DEFAULT_ON_CHANNELS = new Set(["dev", "beta", "local"])
function number(key: string) {
const value = process.env[key]
if (!value) return undefined
@@ -81,7 +86,14 @@ export const Flag = {
OPENCODE_STRICT_CONFIG_DEPS: truthy("OPENCODE_STRICT_CONFIG_DEPS"),
OPENCODE_WORKSPACE_ID: process.env["OPENCODE_WORKSPACE_ID"],
OPENCODE_EXPERIMENTAL_HTTPAPI: truthy("OPENCODE_EXPERIMENTAL_HTTPAPI"),
// Defaults to true on dev/beta/local channels so internal users exercise the
// new effect-httpapi server backend. Stable (`prod`/`latest`) installs stay
// on the legacy hono backend until the rollout is complete. An explicit env
// var ("true"/"1" or "false"/"0") always wins, providing an opt-in for
// stable users and an escape hatch for dev/beta users.
OPENCODE_EXPERIMENTAL_HTTPAPI:
truthy("OPENCODE_EXPERIMENTAL_HTTPAPI") ||
(!falsy("OPENCODE_EXPERIMENTAL_HTTPAPI") && HTTPAPI_DEFAULT_ON_CHANNELS.has(InstallationChannel)),
OPENCODE_EXPERIMENTAL_WORKSPACES: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES"),
// Evaluated at access time (not module load) because tests, the CLI, and

View File

@@ -20,6 +20,7 @@ const paths = {
data,
bin: path.join(cache, "bin"),
log: path.join(data, "log"),
repos: path.join(data, "repos"),
cache,
config,
state,
@@ -37,6 +38,7 @@ await Promise.all([
fs.mkdir(Path.tmp, { recursive: true }),
fs.mkdir(Path.log, { recursive: true }),
fs.mkdir(Path.bin, { recursive: true }),
fs.mkdir(Path.repos, { recursive: true }),
])
export class Service extends Context.Service<Service, Interface>()("@opencode/Global") {}
@@ -50,6 +52,7 @@ export interface Interface {
readonly tmp: string
readonly bin: string
readonly log: string
readonly repos: string
}
export function make(input: Partial<Interface> = {}): Interface {
@@ -62,6 +65,7 @@ export function make(input: Partial<Interface> = {}): Interface {
tmp: Path.tmp,
bin: Path.bin,
log: Path.log,
repos: Path.repos,
...input,
}
}

View File

@@ -1,3 +1,5 @@
export * as Log from "./log"
import path from "path"
import fs from "fs/promises"
import { createWriteStream } from "fs"

View File

@@ -27,7 +27,7 @@ const channel = (() => {
})()
const getBase = (): Configuration => ({
artifactName: "opencode-electron-${os}-${arch}.${ext}",
artifactName: "opencode-desktop-${os}-${arch}.${ext}",
directories: {
output: "dist",
buildResources: "resources",

View File

@@ -1,7 +1,8 @@
#!/usr/bin/env bun
import { Buffer } from "node:buffer"
import { $ } from "bun"
import path from "node:path"
import { parseArgs } from "node:util"
const { values } = parseArgs({
args: Bun.argv.slice(2),
@@ -12,8 +13,6 @@ const { values } = parseArgs({
const dryRun = values["dry-run"]
import { parseArgs } from "node:util"
const repo = process.env.GH_REPO
if (!repo) throw new Error("GH_REPO is required")
@@ -23,20 +22,22 @@ if (!releaseId) throw new Error("OPENCODE_RELEASE is required")
const version = process.env.OPENCODE_VERSION
if (!version) throw new Error("OPENCODE_VERSION is required")
const dir = process.env.LATEST_YML_DIR
if (!dir) throw new Error("LATEST_YML_DIR is required")
const root = dir
const token = process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN
if (!token) throw new Error("GH_TOKEN or GITHUB_TOKEN is required")
const apiHeaders = {
Authorization: `token ${token}`,
Accept: "application/vnd.github+json",
}
const releaseRes = await fetch(`https://api.github.com/repos/${repo}/releases/${releaseId}`, {
headers: apiHeaders,
const rel = await fetch(`https://api.github.com/repos/${repo}/releases/${releaseId}`, {
headers: {
Authorization: `token ${token}`,
Accept: "application/vnd.github+json",
},
})
if (!releaseRes.ok) {
throw new Error(`Failed to fetch release: ${releaseRes.status} ${releaseRes.statusText}`)
if (!rel.ok) {
throw new Error(`Failed to fetch release: ${rel.status} ${rel.statusText}`)
}
type Asset = {
@@ -45,115 +46,169 @@ type Asset = {
}
type Release = {
tag_name?: string
assets?: Asset[]
}
const release = (await releaseRes.json()) as Release
const assets = release.assets ?? []
const assetByName = new Map(assets.map((asset) => [asset.name, asset]))
const assets = ((await rel.json()) as Release).assets ?? []
const amap = new Map(assets.map((item) => [item.name, item]))
const latestAsset = assetByName.get("latest.json")
if (!latestAsset) {
console.log("latest.json not found, skipping tauri finalization")
process.exit(0)
type Item = {
url: string
}
const latestRes = await fetch(latestAsset.url, {
headers: {
Authorization: `token ${token}`,
Accept: "application/octet-stream",
},
})
if (!latestRes.ok) {
throw new Error(`Failed to fetch latest.json: ${latestRes.status} ${latestRes.statusText}`)
type Yml = {
version: string
files: Item[]
}
const latestText = new TextDecoder().decode(await latestRes.arrayBuffer())
const latest = JSON.parse(latestText)
const base = { ...latest }
delete base.platforms
function parse(text: string): Yml {
const lines = text.split("\n")
let version = ""
const files: Item[] = []
let url = ""
const fetchSignature = async (asset: Asset) => {
const res = await fetch(asset.url, {
const flush = () => {
if (!url) return
files.push({ url })
url = ""
}
for (const line of lines) {
const trim = line.trim()
if (line.startsWith("version:")) {
version = line.slice("version:".length).trim()
continue
}
if (trim.startsWith("- url:")) {
flush()
url = trim.slice("- url:".length).trim()
continue
}
const indented = line.startsWith(" ") || line.startsWith("\t")
if (!indented) flush()
}
flush()
return { version, files }
}
async function read(sub: string, file: string) {
const item = Bun.file(path.join(root, sub, file))
if (!(await item.exists())) return undefined
return parse(await item.text())
}
function pick(list: Item[], exts: string[]) {
for (const ext of exts) {
const found = list.find((item) => item.url.split("?")[0]?.toLowerCase().endsWith(ext))
if (found) return found.url
}
}
function link(raw: string) {
if (raw.startsWith("https://") || raw.startsWith("http://")) return raw
return `https://github.com/${repo}/releases/download/v${version}/${raw}`
}
async function sign(url: string, key: string) {
const name = decodeURIComponent(new URL(url).pathname.split("/").pop() ?? key)
const asset = amap.get(name)
const res = await fetch(asset?.url ?? url, {
headers: {
Authorization: `token ${token}`,
Accept: "application/octet-stream",
...(asset ? { Accept: "application/octet-stream" } : {}),
},
})
if (!res.ok) {
throw new Error(`Failed to fetch signature: ${res.status} ${res.statusText}`)
throw new Error(`Failed to fetch file ${name}: ${res.status} ${res.statusText} (${asset?.url ?? url})`)
}
return Buffer.from(await res.arrayBuffer()).toString()
const tmp = process.env.RUNNER_TEMP ?? "/tmp"
const file = path.join(tmp, name)
await Bun.write(file, await res.arrayBuffer())
await $`bunx @tauri-apps/cli signer sign ${file}`
const sigFile = Bun.file(`${file}.sig`)
if (!(await sigFile.exists())) throw new Error(`Signature file not found for ${name}`)
return (await sigFile.text()).trim()
}
const entries: Record<string, { url: string; signature: string }> = {}
const add = (key: string, asset: Asset, signature: string) => {
if (entries[key]) return
entries[key] = {
url: `https://github.com/${repo}/releases/download/v${version}/${asset.name}`,
signature,
}
const add = async (data: Record<string, { url: string; signature: string }>, key: string, raw: string | undefined) => {
if (!raw) return
if (data[key]) return
const url = link(raw)
data[key] = { url, signature: await sign(url, key) }
}
const targets = [
{ key: "linux-x86_64-deb", asset: "opencode-desktop-linux-amd64.deb" },
{ key: "linux-x86_64-rpm", asset: "opencode-desktop-linux-x86_64.rpm" },
{ key: "linux-aarch64-deb", asset: "opencode-desktop-linux-arm64.deb" },
{ key: "linux-aarch64-rpm", asset: "opencode-desktop-linux-aarch64.rpm" },
{ key: "windows-aarch64-nsis", asset: "opencode-desktop-windows-arm64.exe" },
{ key: "windows-x86_64-nsis", asset: "opencode-desktop-windows-x64.exe" },
{ key: "darwin-x86_64-app", asset: "opencode-desktop-darwin-x64.app.tar.gz" },
{
key: "darwin-aarch64-app",
asset: "opencode-desktop-darwin-aarch64.app.tar.gz",
},
]
for (const target of targets) {
const asset = assetByName.get(target.asset)
if (!asset) continue
const sig = assetByName.get(`${target.asset}.sig`)
if (!sig) continue
const signature = await fetchSignature(sig)
add(target.key, asset, signature)
const alias = (data: Record<string, { url: string; signature: string }>, key: string, src: string) => {
if (data[key]) return
if (!data[src]) return
data[key] = data[src]
}
const alias = (key: string, source: string) => {
if (entries[key]) return
const entry = entries[source]
if (!entry) return
entries[key] = entry
}
const winx = await read("latest-yml-x86_64-pc-windows-msvc", "latest.yml")
const wina = await read("latest-yml-aarch64-pc-windows-msvc", "latest.yml")
const macx = await read("latest-yml-x86_64-apple-darwin", "latest-mac.yml")
const maca = await read("latest-yml-aarch64-apple-darwin", "latest-mac.yml")
const linx = await read("latest-yml-x86_64-unknown-linux-gnu", "latest-linux.yml")
const lina = await read("latest-yml-aarch64-unknown-linux-gnu", "latest-linux-arm64.yml")
alias("linux-x86_64", "linux-x86_64-deb")
alias("linux-aarch64", "linux-aarch64-deb")
alias("windows-aarch64", "windows-aarch64-nsis")
alias("windows-x86_64", "windows-x86_64-nsis")
alias("darwin-x86_64", "darwin-x86_64-app")
alias("darwin-aarch64", "darwin-aarch64-app")
const yver = winx?.version ?? wina?.version ?? macx?.version ?? maca?.version ?? linx?.version ?? lina?.version
if (yver && yver !== version) throw new Error(`latest.yml version mismatch: expected ${version}, got ${yver}`)
const out: Record<string, { url: string; signature: string }> = {}
const winxexe = pick(winx?.files ?? [], [".exe"])
const winaexe = pick(wina?.files ?? [], [".exe"])
const macxTarGz = "opencode-desktop-mac-x64.app.tar.gz"
const macaTarGz = "opencode-desktop-mac-arm64.app.tar.gz"
const linxDeb = pick(linx?.files ?? [], [".deb"])
const linxRpm = pick(linx?.files ?? [], [".rpm"])
const linxAppImage = pick(linx?.files ?? [], [".appimage"])
const linaDeb = pick(lina?.files ?? [], [".deb"])
const linaRpm = pick(lina?.files ?? [], [".rpm"])
const linaAppImage = pick(lina?.files ?? [], [".appimage"])
await add(out, "windows-x86_64-nsis", winxexe)
await add(out, "windows-aarch64-nsis", winaexe)
await add(out, "darwin-x86_64-app", macxTarGz)
await add(out, "darwin-aarch64-app", macaTarGz)
await add(out, "linux-x86_64-deb", linxDeb)
await add(out, "linux-x86_64-rpm", linxRpm)
await add(out, "linux-x86_64-appimage", linxAppImage)
await add(out, "linux-aarch64-deb", linaDeb)
await add(out, "linux-aarch64-rpm", linaRpm)
await add(out, "linux-aarch64-appimage", linaAppImage)
alias(out, "windows-x86_64", "windows-x86_64-nsis")
alias(out, "windows-aarch64", "windows-aarch64-nsis")
alias(out, "darwin-x86_64", "darwin-x86_64-app")
alias(out, "darwin-aarch64", "darwin-aarch64-app")
alias(out, "linux-x86_64", "linux-x86_64-deb")
alias(out, "linux-aarch64", "linux-aarch64-deb")
const platforms = Object.fromEntries(
Object.keys(entries)
Object.keys(out)
.sort()
.map((key) => [key, entries[key]]),
.map((key) => [key, out[key]]),
)
const output = {
...base,
if (!Object.keys(platforms).length) throw new Error("No updater files found in latest.yml artifacts")
const data = {
version,
notes: "",
pub_date: new Date().toISOString(),
platforms,
}
const dir = process.env.RUNNER_TEMP ?? "/tmp"
const file = `${dir}/latest.json`
await Bun.write(file, JSON.stringify(output, null, 2))
const tmp = process.env.RUNNER_TEMP ?? "/tmp"
const file = path.join(tmp, "latest.json")
await Bun.write(file, JSON.stringify(data, null, 2))
const tag = release.tag_name
if (!tag) throw new Error("Release tag not found")
const tag = `v${version}`
if (dryRun) {
console.log(`dry-run: wrote latest.json for ${tag} to ${file}`)

View File

@@ -0,0 +1,17 @@
-- Replace the old `session_entry` table with the narrower `session_message` table.
-- NOTE(review): no rows are copied out of `session_entry` before it is dropped at
-- the end of this migration — confirm existing entries are intentionally discarded.
CREATE TABLE `session_message` (
	`id` text PRIMARY KEY,
	`session_id` text NOT NULL,
	`type` text NOT NULL,
	`time_created` integer NOT NULL,
	`time_updated` integer NOT NULL,
	-- Opaque serialized payload; schema is defined by the application layer.
	`data` text NOT NULL,
	-- Cascade delete so messages never outlive their parent session row.
	CONSTRAINT `fk_session_message_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
-- Drop the superseded `session_entry` indexes before recreating the equivalent
-- indexes on `session_message` (session lookup, session+type lookup, time ordering).
DROP INDEX IF EXISTS `session_entry_session_idx`;--> statement-breakpoint
DROP INDEX IF EXISTS `session_entry_session_type_idx`;--> statement-breakpoint
DROP INDEX IF EXISTS `session_entry_time_created_idx`;--> statement-breakpoint
CREATE INDEX `session_message_session_idx` ON `session_message` (`session_id`);--> statement-breakpoint
CREATE INDEX `session_message_session_type_idx` ON `session_message` (`session_id`,`type`);--> statement-breakpoint
CREATE INDEX `session_message_time_created_idx` ON `session_message` (`time_created`);--> statement-breakpoint
DROP TABLE `session_entry`;

File diff suppressed because it is too large Load Diff

View File

@@ -198,7 +198,7 @@ Use raw Effect HTTP routes where `HttpApi` does not fit. The goal is deleting Ho
| `project` | `bridged` | list, current, git init, update |
| `file` | `bridged` partial | find text/file/symbol, list/content/status |
| `mcp` | `bridged` | status, add, OAuth, connect/disconnect |
| `workspace` | `bridged` | adaptor/list/status/create/remove/session-restore |
| `workspace` | `bridged` | adapter/list/status/create/remove/session-restore |
| top-level instance routes | `bridged` | path, vcs, command, agent, skill, lsp, formatter, dispose |
| experimental JSON routes | `bridged` | console, tool, worktree list/mutations, global session list, resource list |
| `session` | `bridged` | read, lifecycle, prompt, message/part mutations, revert, permission reply |
@@ -290,7 +290,7 @@ This checklist tracks bridge parity only. Checked routes are available through t
### Workspace Routes
- [x] `GET /experimental/workspace/adaptor` - list workspace adaptors.
- [x] `GET /experimental/workspace/adapter` - list workspace adapters.
- [x] `POST /experimental/workspace` - create workspace.
- [x] `GET /experimental/workspace` - list workspaces.
- [x] `GET /experimental/workspace/status` - workspace status.

View File

@@ -353,7 +353,7 @@ piecewise.
- [ ] `src/cli/cmd/tui/event.ts`
- [ ] `src/cli/ui.ts`
- [ ] `src/command/index.ts`
- [x] `src/control-plane/adaptors/worktree.ts`
- [x] `src/control-plane/adapters/worktree.ts`
- [x] `src/control-plane/types.ts`
- [x] `src/control-plane/workspace.ts`
- [ ] `src/file/index.ts`

View File

@@ -51,6 +51,7 @@ import { LoadAPIKeyError } from "ai"
import type { AssistantMessage, Event, OpencodeClient, SessionMessageResponse, ToolPart } from "@opencode-ai/sdk/v2"
import { applyPatch } from "diff"
import { InstallationVersion } from "@opencode-ai/core/installation/version"
import { ShellToolID } from "@/tool/shell/id"
type ModeOption = { id: string; name: string; description?: string }
type ModelOption = { modelId: string; name: string }
@@ -144,7 +145,7 @@ export class Agent implements ACPAgent {
private sessionManager: ACPSessionManager
private eventAbort = new AbortController()
private eventStarted = false
private bashSnapshots = new Map<string, string>()
private shellSnapshots = new Map<string, string>()
private toolStarts = new Set<string>()
private permissionQueues = new Map<string, Promise<void>>()
private permissionOptions: PermissionOption[] = [
@@ -283,16 +284,16 @@ export class Agent implements ACPAgent {
switch (part.state.status) {
case "pending":
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
return
case "running":
const output = this.bashOutput(part)
const output = this.shellOutput(part)
const content: ToolCallContent[] = []
if (output) {
const hash = Hash.fast(output)
if (part.tool === "bash") {
if (this.bashSnapshots.get(part.callID) === hash) {
if (part.tool === ShellToolID.id) {
if (this.shellSnapshots.get(part.callID) === hash) {
await this.connection
.sessionUpdate({
sessionId,
@@ -311,7 +312,7 @@ export class Agent implements ACPAgent {
})
return
}
this.bashSnapshots.set(part.callID, hash)
this.shellSnapshots.set(part.callID, hash)
}
content.push({
type: "content",
@@ -342,7 +343,7 @@ export class Agent implements ACPAgent {
case "completed": {
this.toolStarts.delete(part.callID)
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
const kind = toToolKind(part.tool)
const content: ToolCallContent[] = [
{
@@ -423,7 +424,7 @@ export class Agent implements ACPAgent {
}
case "error":
this.toolStarts.delete(part.callID)
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
await this.connection
.sessionUpdate({
sessionId,
@@ -837,10 +838,10 @@ export class Agent implements ACPAgent {
await this.toolStart(sessionId, part)
switch (part.state.status) {
case "pending":
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
break
case "running":
const output = this.bashOutput(part)
const output = this.shellOutput(part)
const runningContent: ToolCallContent[] = []
if (output) {
runningContent.push({
@@ -871,7 +872,7 @@ export class Agent implements ACPAgent {
break
case "completed":
this.toolStarts.delete(part.callID)
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
const kind = toToolKind(part.tool)
const content: ToolCallContent[] = [
{
@@ -951,7 +952,7 @@ export class Agent implements ACPAgent {
break
case "error":
this.toolStarts.delete(part.callID)
this.bashSnapshots.delete(part.callID)
this.shellSnapshots.delete(part.callID)
await this.connection
.sessionUpdate({
sessionId,
@@ -1105,8 +1106,8 @@ export class Agent implements ACPAgent {
}
}
private bashOutput(part: ToolPart) {
if (part.tool !== "bash") return
private shellOutput(part: ToolPart) {
if (part.tool !== ShellToolID.id) return
if (!("metadata" in part.state) || !part.state.metadata || typeof part.state.metadata !== "object") return
const output = part.state.metadata["output"]
if (typeof output !== "string") return
@@ -1549,9 +1550,11 @@ export class Agent implements ACPAgent {
function toToolKind(toolName: string): ToolKind {
const tool = toolName.toLocaleLowerCase()
switch (tool) {
case "bash":
case ShellToolID.id:
return "execute"
case "webfetch":
return "fetch"
@@ -1562,6 +1565,8 @@ function toToolKind(toolName: string): ToolKind {
case "grep":
case "glob":
case "repo_clone":
case "repo_overview":
case "context7_resolve_library_id":
case "context7_get_library_docs":
return "search"
@@ -1576,6 +1581,7 @@ function toToolKind(toolName: string): ToolKind {
function toLocations(toolName: string, input: Record<string, any>): { path: string }[] {
const tool = toolName.toLocaleLowerCase()
switch (tool) {
case "read":
case "edit":
@@ -1584,7 +1590,11 @@ function toLocations(toolName: string, input: Record<string, any>): { path: stri
case "glob":
case "grep":
return input["path"] ? [{ path: input["path"] }] : []
case "bash":
case "repo_clone":
return input["path"] ? [{ path: input["path"] }] : []
case "repo_overview":
return input["path"] ? [{ path: input["path"] }] : []
case ShellToolID.id:
return []
default:
return []

View File

@@ -10,6 +10,7 @@ import { ProviderTransform } from "@/provider/transform"
import PROMPT_GENERATE from "./generate.txt"
import PROMPT_COMPACTION from "./prompt/compaction.txt"
import PROMPT_EXPLORE from "./prompt/explore.txt"
import PROMPT_SCOUT from "./prompt/scout.txt"
import PROMPT_SUMMARY from "./prompt/summary.txt"
import PROMPT_TITLE from "./prompt/title.txt"
import { Permission } from "@/permission"
@@ -81,11 +82,11 @@ export const layer = Layer.effect(
Effect.fn("Agent.state")(function* (ctx) {
const cfg = yield* config.get()
const skillDirs = yield* skill.dirs()
const whitelistedDirs = [
Truncate.GLOB,
path.join(Global.Path.tmp, "*"),
...skillDirs.map((dir) => path.join(dir, "*")),
]
const whitelistedDirs = [Truncate.GLOB, path.join(Global.Path.tmp, "*"), ...skillDirs.map((dir) => path.join(dir, "*"))]
const readonlyExternalDirectory = {
"*": "ask",
...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])),
} satisfies Record<string, "allow" | "ask" | "deny">
const defaults = Permission.fromConfig({
"*": "allow",
@@ -97,6 +98,9 @@ export const layer = Layer.effect(
question: "deny",
plan_enter: "deny",
plan_exit: "deny",
edit: "ask",
repo_clone: "deny",
repo_overview: "deny",
// mirrors github.com/github/gitignore Node.gitignore pattern for .env files
read: {
"*": "allow",
@@ -174,10 +178,7 @@ export const layer = Layer.effect(
webfetch: "allow",
websearch: "allow",
read: "allow",
external_directory: {
"*": "ask",
...Object.fromEntries(whitelistedDirs.map((dir) => [dir, "allow"])),
},
external_directory: readonlyExternalDirectory,
}),
user,
),
@@ -187,6 +188,33 @@ export const layer = Layer.effect(
mode: "subagent",
native: true,
},
scout: {
name: "scout",
permission: Permission.merge(
defaults,
Permission.fromConfig({
"*": "deny",
grep: "allow",
glob: "allow",
webfetch: "allow",
websearch: "allow",
codesearch: "allow",
read: "allow",
repo_clone: "allow",
repo_overview: "allow",
external_directory: {
...readonlyExternalDirectory,
[path.join(Global.Path.repos, "*")]: "allow",
},
}),
user,
),
description: `Docs and dependency-source specialist. Use this when you need to inspect external documentation, clone dependency repositories into the managed cache, and research library implementation details without modifying the user's workspace.`,
prompt: PROMPT_SCOUT,
options: {},
mode: "subagent",
native: true,
},
compaction: {
name: "compaction",
mode: "primary",

View File

@@ -0,0 +1,36 @@
You are `scout`, a read-only research agent for external libraries, dependency source, and documentation.
Your purpose is to investigate code outside the local workspace and return evidence-backed findings without modifying the user's workspace.
Use this agent when asked to:
- inspect dependency repositories or library source
- compare local code against upstream implementations
- research public GitHub repositories the environment can clone
- explain how a library or framework works by reading its source and docs
- investigate third-party APIs, workflows, or behavior outside the current workspace
Working style:
1. When the task involves a GitHub repository or dependency source, use `repo_clone` first.
2. After cloning, use the `glob`, `grep`, and `read` tools to inspect the cloned repository.
3. Use the `webfetch` tool for official documentation pages when source alone is not enough.
4. Prefer direct code and documentation evidence over assumptions.
5. If multiple external repositories are relevant, inspect each one before drawing conclusions.
Research standards:
- cite exact absolute file paths and line references whenever possible
- separate what is verified from what is inferred
- if the answer depends on branch state, note that you are reading the repository's current default clone state unless the caller specifies otherwise
- if a repository cannot be cloned or accessed, say so explicitly and continue with whatever evidence is still available
- call out uncertainty clearly instead of smoothing over gaps
Output expectations:
- start with the direct answer
- then explain the evidence repository by repository or source by source
- include file references when relevant
- keep the explanation organized and easy to scan
Constraints:
- do not modify files or run tools that change the user's workspace
- return absolute file paths for cloned-repo findings in your final response
Complete the user's research request efficiently and report your findings clearly.

View File

@@ -0,0 +1,173 @@
import { InstanceState } from "@/effect/instance-state"
import { Identifier } from "@/id/id"
import { Cause, Deferred, Effect, Fiber, Layer, Scope, Context } from "effect"
/** Lifecycle states of a background job. */
export type Status = "running" | "completed" | "error" | "cancelled"

/** Public, serializable description of a background job. */
export type Info = {
  id: string
  /** Caller-supplied category string (e.g. which feature started the job). */
  type: string
  title?: string
  status: Status
  // Unix epoch milliseconds (Date.now()).
  started_at: number
  // Set by finish() when the job leaves "running".
  completed_at?: number
  /** Success value of the job's `run` effect, captured on completion. */
  output?: string
  /** Human-readable failure text when the job ends in "error"/"cancelled". */
  error?: string
  metadata?: Record<string, unknown>
}

/** Internal bookkeeping for a live job: public info plus completion signal and fiber. */
type Active = {
  info: Info
  // Resolved with the final Info snapshot once the job leaves "running".
  done: Deferred.Deferred<Info>
  // Present only while the job runs; cleared by finish().
  fiber?: Fiber.Fiber<void, unknown>
}

/** Per-instance service state: the job table and the scope jobs are forked into. */
type State = {
  jobs: Map<string, Active>
  scope: Scope.Scope
}

export type StartInput = {
  /** Optional caller-chosen id; if a job with this id is still running it is returned as-is. */
  id?: string
  type: string
  title?: string
  metadata?: Record<string, unknown>
  /** Effect whose success value becomes the job's `output`. */
  run: Effect.Effect<string, unknown>
}

export type WaitInput = {
  id: string
  /** Timeout in milliseconds; omitted (or 0) means wait until the job finishes. */
  timeout?: number
}

export type WaitResult = {
  /** Undefined when no job with the requested id exists. */
  info?: Info
  timedOut: boolean
}

/** Operations exposed by the background-job manager. */
export interface Interface {
  readonly list: () => Effect.Effect<Info[]>
  readonly get: (id: string) => Effect.Effect<Info | undefined>
  readonly start: (input: StartInput) => Effect.Effect<Info>
  readonly wait: (input: WaitInput) => Effect.Effect<WaitResult>
  readonly cancel: (id: string) => Effect.Effect<Info | undefined>
}

/** Effect service tag for the background-job manager. */
export class Service extends Context.Service<Service, Interface>()("@opencode/BackgroundJob") {}
/**
 * Produce a defensive copy of a job's public info. The metadata object (when
 * present) is cloned too, so callers can never mutate internal job state
 * through a returned Info.
 */
function snapshot(job: Active): Info {
  const { info } = job
  if (!info.metadata) return { ...info }
  return { ...info, metadata: { ...info.metadata } }
}
/** Render an arbitrary thrown value as human-readable text (Error message, else String()). */
function errorText(error: unknown): string {
  return error instanceof Error ? error.message : String(error)
}
/**
 * Live implementation of the background-job service. Jobs are tracked in a
 * per-instance Map and their fibers are forked into the instance scope, so an
 * instance shutdown interrupts every still-running job.
 */
export const layer = Layer.effect(
  Service,
  Effect.gen(function* () {
    const state = yield* InstanceState.make<State>(
      Effect.fn("BackgroundJob.state")(function* () {
        return {
          jobs: new Map(),
          scope: yield* Scope.Scope,
        }
      }),
    )
    // Transition a job out of "running" exactly once: records the terminal
    // status, stamps completion time, clears the fiber reference, and resolves
    // the `done` deferred so waiters wake up. Idempotent — if the job already
    // left "running", it just returns a snapshot.
    const finish = Effect.fn("BackgroundJob.finish")(function* (
      job: Active,
      status: Exclude<Status, "running">,
      data?: { output?: string; error?: string },
    ) {
      if (job.info.status !== "running") return snapshot(job)
      job.info.status = status
      job.info.completed_at = Date.now()
      if (data?.output !== undefined) job.info.output = data.output
      if (data?.error !== undefined) job.info.error = data.error
      job.fiber = undefined
      const info = snapshot(job)
      // Deferred.succeed is a no-op if already resolved; ignore either way.
      yield* Deferred.succeed(job.done, info).pipe(Effect.ignore)
      return info
    })
    // Snapshots of every known job (finished ones included), oldest first.
    const list: Interface["list"] = Effect.fn("BackgroundJob.list")(function* () {
      const s = yield* InstanceState.get(state)
      return Array.from(s.jobs.values())
        .map(snapshot)
        .toSorted((a, b) => a.started_at - b.started_at)
    })
    const get: Interface["get"] = Effect.fn("BackgroundJob.get")(function* (id) {
      const s = yield* InstanceState.get(state)
      const job = s.jobs.get(id)
      if (!job) return
      return snapshot(job)
    })
    // Start (or join) a job. A still-running job with the same id is returned
    // as-is; a finished job with the same id is replaced by a fresh one.
    const start: Interface["start"] = Effect.fn("BackgroundJob.start")(function* (input) {
      const s = yield* InstanceState.get(state)
      const id = input.id ?? Identifier.ascending("job")
      const existing = s.jobs.get(id)
      if (existing?.info.status === "running") return snapshot(existing)
      const job: Active = {
        info: {
          id,
          type: input.type,
          title: input.title,
          status: "running",
          started_at: Date.now(),
          metadata: input.metadata,
        },
        done: yield* Deferred.make<Info>(),
      }
      s.jobs.set(id, job)
      // Fork the work into the instance scope. Interrupt-only causes are
      // recorded as "cancelled"; any other failure becomes "error" with a
      // squashed human-readable message.
      job.fiber = yield* input.run.pipe(
        Effect.matchCauseEffect({
          onSuccess: (output) => finish(job, "completed", { output }),
          onFailure: (cause) =>
            finish(job, Cause.hasInterruptsOnly(cause) ? "cancelled" : "error", {
              error: errorText(Cause.squash(cause)),
            }),
        }),
        Effect.asVoid,
        Effect.forkIn(s.scope),
      )
      return snapshot(job)
    })
    // Wait for a job to finish. A falsy timeout (undefined or 0) blocks until
    // completion; otherwise the wait races completion against a sleep of
    // `timeout` milliseconds.
    // NOTE(review): the timed-out `info` snapshot is evaluated when the race is
    // built (at wait() time), not when the timeout actually fires — confirm that
    // a stale-by-`timeout`-ms snapshot is acceptable to callers.
    const wait: Interface["wait"] = Effect.fn("BackgroundJob.wait")(function* (input) {
      const s = yield* InstanceState.get(state)
      const job = s.jobs.get(input.id)
      if (!job) return { timedOut: false }
      if (job.info.status !== "running") return { info: snapshot(job), timedOut: false }
      if (!input.timeout) return { info: yield* Deferred.await(job.done), timedOut: false }
      return yield* Effect.raceAll([
        Deferred.await(job.done).pipe(Effect.map((info) => ({ info, timedOut: false }))),
        Effect.sleep(input.timeout).pipe(Effect.as({ info: snapshot(job), timedOut: true })),
      ])
    })
    // Cancel a running job. finish() runs before the interrupt so the fiber's
    // own onFailure handler (which would also call finish) becomes a no-op and
    // the status is deterministically "cancelled".
    const cancel: Interface["cancel"] = Effect.fn("BackgroundJob.cancel")(function* (id) {
      const s = yield* InstanceState.get(state)
      const job = s.jobs.get(id)
      if (!job) return
      if (job.info.status !== "running") return snapshot(job)
      const fiber = job.fiber
      const info = yield* finish(job, "cancelled")
      if (fiber) yield* Fiber.interrupt(fiber).pipe(Effect.ignore)
      return info
    })
    return Service.of({ list, get, start, wait, cancel })
  }),
)

export const defaultLayer = layer
// Namespace self-export idiom: lets consumers `import { BackgroundJob } from "./job"`.
// NOTE(review): assumes this file is job.ts — confirm the path matches.
export * as BackgroundJob from "./job"

View File

@@ -33,6 +33,7 @@ import { AppRuntime } from "@/effect/app-runtime"
import { Git } from "@/git"
import { setTimeout as sleep } from "node:timers/promises"
import { Process } from "@/util/process"
import { parseGitHubRemote } from "@/util/repository"
import { Effect } from "effect"
type GitHubAuthor = {
@@ -152,18 +153,7 @@ const SUPPORTED_EVENTS = [...USER_EVENTS, ...REPO_EVENTS] as const
type UserEvent = (typeof USER_EVENTS)[number]
type RepoEvent = (typeof REPO_EVENTS)[number]
// Parses GitHub remote URLs in various formats:
// - https://github.com/owner/repo.git
// - https://github.com/owner/repo
// - git@github.com:owner/repo.git
// - git@github.com:owner/repo
// - ssh://git@github.com/owner/repo.git
// - ssh://git@github.com/owner/repo
export function parseGitHubRemote(url: string): { owner: string; repo: string } | null {
const match = url.match(/^(?:(?:https?|ssh):\/\/)?(?:git@)?github\.com[:/]([^/]+)\/([^/]+?)(?:\.git)?$/)
if (!match) return null
return { owner: match[1], repo: match[2] }
}
export { parseGitHubRemote }
/**
* Extracts displayable text from assistant response parts.
@@ -879,7 +869,7 @@ export const GithubRunCommand = cmd({
function subscribeSessionEvents() {
const TOOL: Record<string, [string, string]> = {
todowrite: ["Todo", UI.Style.TEXT_WARNING_BOLD],
bash: ["Bash", UI.Style.TEXT_DANGER_BOLD],
bash: ["Shell", UI.Style.TEXT_DANGER_BOLD],
edit: ["Edit", UI.Style.TEXT_SUCCESS_BOLD],
glob: ["Glob", UI.Style.TEXT_INFO_BOLD],
grep: ["Grep", UI.Style.TEXT_INFO_BOLD],

View File

@@ -78,7 +78,6 @@ async function tool(part: ToolPart) {
block(next, next.body)
return
}
inline(next)
} catch {
inline({

View File

@@ -86,7 +86,9 @@ type SharedSyntaxTheme = TuiThemeCurrent & {
export const transparent = RGBA.fromValues(0, 0, 0, 0)
function alpha(color: RGBA, value: number): RGBA {
return RGBA.fromValues(color.r, color.g, color.b, Math.max(0, Math.min(1, value)), color.tag)
const next = RGBA.clone(color)
next.a = Math.max(0, Math.min(1, value))
return next
}
function rgba(hex: string, value?: number): RGBA {

View File

@@ -20,7 +20,7 @@ import stripAnsi from "strip-ansi"
import type { ToolPart } from "@opencode-ai/sdk/v2"
import type * as Tool from "@/tool/tool"
import type { ApplyPatchTool } from "@/tool/apply_patch"
import type { BashTool } from "@/tool/bash"
import type { ShellTool as BashTool } from "@/tool/shell"
import type { EditTool } from "@/tool/edit"
import type { GlobTool } from "@/tool/glob"
import type { GrepTool } from "@/tool/grep"

View File

@@ -1,6 +1,7 @@
import { Server } from "../../server/server"
import { cmd } from "./cmd"
import { withNetworkOptions, resolveNetworkOptions } from "../network"
import { bootstrap } from "../bootstrap"
import { Flag } from "@opencode-ai/core/flag/flag"
export const ServeCommand = cmd({
@@ -11,7 +12,8 @@ export const ServeCommand = cmd({
if (!Flag.OPENCODE_SERVER_PASSWORD) {
console.log("Warning: OPENCODE_SERVER_PASSWORD is not set; server is unsecured.")
}
const opts = await resolveNetworkOptions(args)
const opts = await bootstrap(process.cwd(), () => resolveNetworkOptions(args))
const server = await Server.listen(opts)
console.log(`opencode server listening on http://${server.hostname}:${server.port}`)

View File

@@ -28,6 +28,7 @@ import { useEvent } from "@tui/context/event"
import { SDKProvider, useSDK } from "@tui/context/sdk"
import { StartupLoading } from "@tui/component/startup-loading"
import { SyncProvider, useSync } from "@tui/context/sync"
import { SyncProviderV2 } from "@tui/context/sync-v2"
import { LocalProvider, useLocal } from "@tui/context/local"
import { DialogModel } from "@tui/component/dialog-model"
import { useConnected } from "@tui/component/use-connected"
@@ -166,27 +167,29 @@ export function tui(input: {
>
<ProjectProvider>
<SyncProvider>
<ThemeProvider mode={mode}>
<LocalProvider>
<KeybindProvider>
<PromptStashProvider>
<DialogProvider>
<CommandProvider>
<FrecencyProvider>
<PromptHistoryProvider>
<PromptRefProvider>
<EditorContextProvider>
<App onSnapshot={input.onSnapshot} />
</EditorContextProvider>
</PromptRefProvider>
</PromptHistoryProvider>
</FrecencyProvider>
</CommandProvider>
</DialogProvider>
</PromptStashProvider>
</KeybindProvider>
</LocalProvider>
</ThemeProvider>
<SyncProviderV2>
<ThemeProvider mode={mode}>
<LocalProvider>
<KeybindProvider>
<PromptStashProvider>
<DialogProvider>
<CommandProvider>
<FrecencyProvider>
<PromptHistoryProvider>
<PromptRefProvider>
<EditorContextProvider>
<App onSnapshot={input.onSnapshot} />
</EditorContextProvider>
</PromptRefProvider>
</PromptHistoryProvider>
</FrecencyProvider>
</CommandProvider>
</DialogProvider>
</PromptStashProvider>
</KeybindProvider>
</LocalProvider>
</ThemeProvider>
</SyncProviderV2>
</SyncProvider>
</ProjectProvider>
</SDKProvider>
@@ -774,6 +777,15 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
dialog.clear()
},
},
{
title: kv.get("clear_prompt_save_history", false) ? "Don't include cleared prompts in history" : "Include cleared prompts in history",
value: "app.toggle.clear_prompt_history",
category: "System",
onSelect: (dialog) => {
kv.set("clear_prompt_save_history", !kv.get("clear_prompt_save_history", false))
dialog.clear()
},
},
])
event.on(TuiEvent.CommandExecute.type, (evt) => {

View File

@@ -10,7 +10,7 @@ import { errorMessage } from "@/util/error"
import { useSDK } from "../context/sdk"
import { useToast } from "../ui/toast"
type Adaptor = {
type Adapter = {
type: string
name: string
description: string
@@ -108,26 +108,26 @@ export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) =
const sdk = useSDK()
const toast = useToast()
const [creating, setCreating] = createSignal<string>()
const [adaptors, setAdaptors] = createSignal<Adaptor[]>()
const [adapters, setAdapters] = createSignal<Adapter[]>()
onMount(() => {
dialog.setSize("medium")
void (async () => {
const dir = sync.path.directory || sdk.directory
const url = new URL("/experimental/workspace/adaptor", sdk.url)
const url = new URL("/experimental/workspace/adapter", sdk.url)
if (dir) url.searchParams.set("directory", dir)
const res = await sdk
.fetch(url)
.then((x) => x.json() as Promise<Adaptor[]>)
.then((x) => x.json() as Promise<Adapter[]>)
.catch(() => undefined)
if (!res) {
toast.show({
message: "Failed to load workspace adaptors",
message: "Failed to load workspace adapters",
variant: "error",
})
return
}
setAdaptors(res)
setAdapters(res)
})()
})
@@ -142,13 +142,13 @@ export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) =
},
]
}
const list = adaptors()
const list = adapters()
if (!list) {
return [
{
title: "Loading workspaces...",
value: "loading" as const,
description: "Fetching available workspace adaptors",
description: "Fetching available workspace adapters",
},
]
}

View File

@@ -1,4 +1,5 @@
import { BoxRenderable, MouseButton, MouseEvent, RGBA, TextAttributes } from "@opentui/core"
import { useRenderer } from "@opentui/solid"
import { For, createMemo, createSignal, onCleanup, onMount, type JSX } from "solid-js"
import { useTheme, tint } from "@tui/context/theme"
import * as Sound from "@tui/util/sound"
@@ -554,6 +555,7 @@ function buildIdleState(t: number, ctx: LogoContext): IdleState {
export function Logo(props: { shape?: LogoShape; ink?: RGBA; idle?: boolean } = {}) {
const ctx = props.shape ? build(props.shape) : DEFAULT
const { theme } = useTheme()
const renderer = useRenderer()
const [rings, setRings] = createSignal<Ring[]>([])
const [hold, setHold] = createSignal<Hold>()
const [release, setRelease] = createSignal<Release>()
@@ -684,6 +686,7 @@ export function Logo(props: { shape?: LogoShape; ink?: RGBA; idle?: boolean } =
})
const idleState = createMemo(() => (props.idle ? buildIdleState(frame().t, ctx) : undefined))
const useSubpixelBlocks = () => renderer.capabilities?.rgb === true
const renderLine = (
line: string,
@@ -789,7 +792,7 @@ export function Logo(props: { shape?: LogoShape; ink?: RGBA; idle?: boolean } =
}
// Solid █: render as ▀ so the top pixel (fg) and bottom pixel (bg) can carry independent shimmer values
if (char === "█") {
if (char === "█" && useSubpixelBlocks()) {
return (
<text
fg={shade(inkTop, theme, n + p + e + b)}

View File

@@ -82,6 +82,7 @@ export const { use: usePromptHistory, provider: PromptHistoryProvider } = create
return store.history.at(store.index)
},
append(item: PromptInfo) {
if (store.history.at(-1)?.input === item.input) return
const entry = structuredClone(unwrap(item))
let trimmed = false
setStore(

View File

@@ -135,6 +135,7 @@ export function Prompt(props: PromptProps) {
const dimensions = useTerminalDimensions()
const { theme, syntax } = useTheme()
const kv = useKV()
const [autoaccept, setAutoaccept] = kv.signal<"none" | "edit">("permission_auto_accept", "edit")
const animationsEnabled = createMemo(() => kv.get("animations_enabled", true))
const list = createMemo(() => props.placeholders?.normal ?? [])
const shell = createMemo(() => props.placeholders?.shell ?? [])
@@ -295,6 +296,17 @@ export function Prompt(props: PromptProps) {
command.register(() => {
return [
{
title: autoaccept() === "none" ? "Enable autoedit" : "Disable autoedit",
value: "permission.auto_accept.toggle",
search: "toggle permissions",
keybind: "permission_auto_accept_toggle",
category: "Agent",
onSelect: (dialog) => {
setAutoaccept(() => (autoaccept() === "none" ? "edit" : "none"))
dialog.clear()
},
},
{
title: "Clear prompt",
value: "prompt.clear",
@@ -1121,6 +1133,12 @@ export function Prompt(props: PromptProps) {
// If no image, let the default paste behavior continue
}
if (keybind.match("input_clear", e) && store.prompt.input !== "") {
if (kv.get("clear_prompt_save_history", false)) {
history.append({
...store.prompt,
mode: store.mode,
})
}
input.clear()
input.extmarks.clear()
setStore("prompt", {
@@ -1313,9 +1331,14 @@ export function Prompt(props: PromptProps) {
)}
</Show>
</box>
<Show when={hasRightContent()}>
<Show when={hasRightContent() || autoaccept() === "edit"}>
<box flexDirection="row" gap={1} alignItems="center">
{props.right}
<Show when={autoaccept() === "edit"}>
<text>
<span style={{ fg: theme.warning }}>autoedit</span>
</text>
</Show>
</box>
</Show>
</box>

View File

@@ -12,22 +12,7 @@ export function useEvent() {
return
}
// Special hack for truly global events
if (event.directory === "global") {
handler(event.payload)
}
if (project.workspace.current()) {
if (event.workspace === project.workspace.current()) {
handler(event.payload)
}
return
}
if (event.directory === project.instance.directory()) {
handler(event.payload)
}
handler(event.payload)
})
}

View File

@@ -0,0 +1,271 @@
import { useEvent } from "@tui/context/event"
import type {
SessionMessage,
SessionMessageAssistant,
SessionMessageAssistantReasoning,
SessionMessageAssistantText,
SessionMessageAssistantTool,
} from "@opencode-ai/sdk/v2"
import { createStore, produce, reconcile } from "solid-js/store"
import { createSimpleContext } from "./helper"
import { useSDK } from "./sdk"
/** Returns the most recent assistant message that has not completed yet, or undefined. */
function activeAssistant(messages: SessionMessage[]) {
  for (let i = messages.length - 1; i >= 0; i--) {
    const candidate = messages[i]
    if (candidate?.type === "assistant" && !candidate.time.completed) return candidate
  }
  return undefined
}
function activeCompaction(messages: SessionMessage[]) {
const index = messages.findLastIndex((message) => message.type === "compaction")
if (index < 0) return
const compaction = messages[index]
return compaction?.type === "compaction" ? compaction : undefined
}
/**
 * Finds the newest tool item on an assistant message.
 * When `callID` is given, only a tool item with that id matches.
 */
function latestTool(assistant: SessionMessageAssistant | undefined, callID?: string) {
  const content = assistant?.content ?? []
  for (let i = content.length - 1; i >= 0; i--) {
    const item = content[i]
    if (item?.type !== "tool") continue
    if (callID !== undefined && item.id !== callID) continue
    return item
  }
  return undefined
}
/** Finds the newest plain-text content item on an assistant message, or undefined. */
function latestText(assistant: SessionMessageAssistant | undefined) {
  if (assistant === undefined) return undefined
  const isText = (item: (typeof assistant.content)[number]): item is SessionMessageAssistantText =>
    item.type === "text"
  return assistant.content.findLast(isText)
}
/** Finds the newest reasoning content item with the given id on an assistant message, or undefined. */
function latestReasoning(assistant: SessionMessageAssistant | undefined, reasoningID: string) {
  const content = assistant?.content ?? []
  for (let i = content.length - 1; i >= 0; i--) {
    const item = content[i]
    if (item?.type === "reasoning" && item.id === reasoningID) return item
  }
  return undefined
}
/**
 * SyncV2 context: folds the "session.next.*" event stream into a per-session
 * transcript store, and exposes a small API to fetch/read a session's v2
 * messages. Event handling is order-sensitive: deltas are applied to the
 * "active" (latest, not-yet-completed) assistant message found by the helpers
 * above.
 */
export const { use: useSyncV2, provider: SyncProviderV2 } = createSimpleContext({
  name: "SyncV2",
  init: () => {
    // Transcripts keyed by session ID; each list is chronological (append-only
    // from events, wholesale-replaced by sync()).
    const [store, setStore] = createStore<{
      messages: {
        [sessionID: string]: SessionMessage[]
      }
    }>({
      messages: {},
    })
    const event = useEvent()
    const sdk = useSDK()
    // Mutates one session's message array in place via produce(); lazily
    // creates the array on first touch.
    function update(sessionID: string, fn: (messages: SessionMessage[]) => void) {
      setStore(
        "messages",
        produce((draft) => {
          fn((draft[sessionID] ??= []))
        }),
      )
    }
    // NOTE: the callback parameter shadows the outer `event` context object.
    event.subscribe((event) => {
      switch (event.type) {
        // User submitted a prompt: append a user message.
        case "session.next.prompted": {
          update(event.properties.sessionID, (draft) => {
            draft.push({
              id: event.properties.id,
              type: "user",
              text: event.properties.prompt.text,
              files: event.properties.prompt.files,
              agents: event.properties.prompt.agents,
              time: { created: event.properties.timestamp },
            })
          })
          break
        }
        // System-injected message: append as-is.
        case "session.next.synthetic":
          update(event.properties.sessionID, (draft) => {
            draft.push({
              id: event.properties.id,
              type: "synthetic",
              sessionID: event.properties.sessionID,
              text: event.properties.text,
              time: { created: event.properties.timestamp },
            })
          })
          break
        // New assistant step: close out any still-open assistant message,
        // then append a fresh one with empty content.
        case "session.next.step.started":
          update(event.properties.sessionID, (draft) => {
            const currentAssistant = activeAssistant(draft)
            if (currentAssistant) currentAssistant.time.completed = event.properties.timestamp
            draft.push({
              id: event.properties.id,
              type: "assistant",
              agent: event.properties.agent,
              model: event.properties.model,
              content: [],
              snapshot: event.properties.snapshot ? { start: event.properties.snapshot } : undefined,
              time: { created: event.properties.timestamp },
            })
          })
          break
        // Step finished: stamp completion time, finish reason, cost, tokens,
        // and (when present) the end snapshot.
        case "session.next.step.ended":
          update(event.properties.sessionID, (draft) => {
            const currentAssistant = activeAssistant(draft)
            if (!currentAssistant) return
            currentAssistant.time.completed = event.properties.timestamp
            currentAssistant.finish = event.properties.finish
            currentAssistant.cost = event.properties.cost
            currentAssistant.tokens = event.properties.tokens
            if (event.properties.snapshot)
              currentAssistant.snapshot = { ...currentAssistant.snapshot, end: event.properties.snapshot }
          })
          break
        // Text streaming: started appends an empty text item, delta appends
        // chunks, ended overwrites with the final authoritative text.
        case "session.next.text.started":
          update(event.properties.sessionID, (draft) => {
            activeAssistant(draft)?.content.push({ type: "text", text: "" })
          })
          break
        case "session.next.text.delta":
          update(event.properties.sessionID, (draft) => {
            const match = latestText(activeAssistant(draft))
            if (match) match.text += event.properties.delta
          })
          break
        case "session.next.text.ended":
          update(event.properties.sessionID, (draft) => {
            const match = latestText(activeAssistant(draft))
            if (match) match.text = event.properties.text
          })
          break
        // Tool lifecycle: pending (input streaming) -> running -> completed/error.
        case "session.next.tool.input.started":
          update(event.properties.sessionID, (draft) => {
            activeAssistant(draft)?.content.push({
              type: "tool",
              id: event.properties.callID,
              name: event.properties.name,
              time: { created: event.properties.timestamp },
              state: { status: "pending", input: "" },
            })
          })
          break
        case "session.next.tool.input.delta":
          update(event.properties.sessionID, (draft) => {
            const match = latestTool(activeAssistant(draft), event.properties.callID)
            // Only accumulate raw input text while still pending.
            if (match?.state.status === "pending") match.state.input += event.properties.delta
          })
          break
        // Deliberate no-op: the parsed input arrives with "tool.called" below.
        case "session.next.tool.input.ended":
          break
        case "session.next.tool.called":
          update(event.properties.sessionID, (draft) => {
            const match = latestTool(activeAssistant(draft), event.properties.callID)
            if (!match) return
            match.time.ran = event.properties.timestamp
            match.provider = event.properties.provider
            match.state = { status: "running", input: event.properties.input, structured: {}, content: [] }
          })
          break
        case "session.next.tool.progress":
          update(event.properties.sessionID, (draft) => {
            const match = latestTool(activeAssistant(draft), event.properties.callID)
            if (match?.state.status !== "running") return
            match.state.structured = event.properties.structured
            match.state.content = [...event.properties.content]
          })
          break
        case "session.next.tool.success":
          update(event.properties.sessionID, (draft) => {
            const match = latestTool(activeAssistant(draft), event.properties.callID)
            if (match?.state.status !== "running") return
            match.state = {
              status: "completed",
              input: match.state.input,
              structured: event.properties.structured,
              content: [...event.properties.content],
            }
            match.provider = event.properties.provider
            match.time.completed = event.properties.timestamp
          })
          break
        case "session.next.tool.error":
          update(event.properties.sessionID, (draft) => {
            const match = latestTool(activeAssistant(draft), event.properties.callID)
            if (match?.state.status !== "running") return
            // Preserve whatever input/structured/content we had when the error hit.
            match.state = {
              status: "error",
              error: event.properties.error,
              input: match.state.input,
              structured: match.state.structured,
              content: match.state.content,
            }
            match.provider = event.properties.provider
            match.time.completed = event.properties.timestamp
          })
          break
        // Reasoning streaming mirrors text streaming, keyed by reasoningID.
        case "session.next.reasoning.started":
          update(event.properties.sessionID, (draft) => {
            activeAssistant(draft)?.content.push({
              type: "reasoning",
              id: event.properties.reasoningID,
              text: "",
            })
          })
          break
        case "session.next.reasoning.delta":
          update(event.properties.sessionID, (draft) => {
            const match = latestReasoning(activeAssistant(draft), event.properties.reasoningID)
            if (match) match.text += event.properties.delta
          })
          break
        case "session.next.reasoning.ended":
          update(event.properties.sessionID, (draft) => {
            const match = latestReasoning(activeAssistant(draft), event.properties.reasoningID)
            if (match) match.text = event.properties.text
          })
          break
        // Deliberate no-op: nothing in the transcript changes on a retry event.
        case "session.next.retried":
          break
        // Compaction lifecycle: appended as its own message type; summary
        // streams via delta and is finalized (with `include`) on ended.
        case "session.next.compaction.started":
          update(event.properties.sessionID, (draft) => {
            draft.push({
              id: event.properties.id,
              type: "compaction",
              reason: event.properties.reason,
              summary: "",
              time: { created: event.properties.timestamp },
            })
          })
          break
        case "session.next.compaction.delta":
          update(event.properties.sessionID, (draft) => {
            const match = activeCompaction(draft)
            if (match) match.summary += event.properties.text
          })
          break
        case "session.next.compaction.ended":
          update(event.properties.sessionID, (draft) => {
            const match = activeCompaction(draft)
            if (!match) return
            match.summary = event.properties.text
            match.include = event.properties.include
          })
          break
      }
    })
    // Public surface: raw store plus message fetch/read helpers.
    const result = {
      data: store,
      session: {
        message: {
          // Replaces the cached transcript with the server's authoritative
          // copy; reconcile() keeps referential stability for unchanged items.
          async sync(sessionID: string) {
            const response = await sdk.client.v2.session.messages({ sessionID })
            setStore("messages", sessionID, reconcile(response.data?.items ?? []))
          },
          // Returns the cached transcript, or an empty list when nothing is
          // cached yet (never undefined).
          fromSession(sessionID: string) {
            const messages = store.messages[sessionID]
            if (!messages) return []
            return messages
          },
        },
      },
    }
    return result
  },
})

View File

@@ -110,6 +110,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
const project = useProject()
const sdk = useSDK()
const kv = useKV()
const [autoaccept] = kv.signal<"none" | "edit">("permission_auto_accept", "edit")
const fullSyncedSessions = new Set<string>()
let syncedWorkspace = project.workspace.current()
@@ -152,6 +153,13 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
case "permission.asked": {
const request = event.properties
if (autoaccept() === "edit" && request.permission === "edit") {
sdk.client.permission.reply({
reply: "once",
requestID: request.id,
})
break
}
const requests = store.permission[request.sessionID]
if (!requests) {
setStore("permission", request.sessionID, [request])

View File

@@ -0,0 +1,971 @@
import type { TuiPlugin, TuiPluginApi, TuiPluginModule } from "@opencode-ai/plugin/tui"
import { useSyncV2 } from "@tui/context/sync-v2"
import { SplitBorder } from "@tui/component/border"
import { Spinner } from "@tui/component/spinner"
import { useTheme } from "@tui/context/theme"
import { useLocal } from "@tui/context/local"
import { useKeyboard, useTerminalDimensions, type JSX } from "@opentui/solid"
import type { SyntaxStyle } from "@opentui/core"
import { Locale } from "@/util/locale"
import { LANGUAGE_EXTENSIONS } from "@/lsp/language"
import path from "path"
import stripAnsi from "strip-ansi"
import type {
SessionMessage,
SessionMessageAssistant,
SessionMessageAssistantReasoning,
SessionMessageAssistantText,
SessionMessageAssistantTool,
SessionMessageCompaction,
SessionMessageSynthetic,
SessionMessageUser,
ToolFileContent,
ToolTextContent,
} from "@opencode-ai/sdk/v2"
import { createEffect, createMemo, For, Match, Show, Switch } from "solid-js"
// Plugin identifier and route name constants for the session v2 debug view
// (presumably consumed by the plugin registration further down this file — not visible here).
const id = "internal:session-v2-debug"
const route = "session.v2.messages"
/** Extracts the session ID from the current route, but only when the "session" route is active. */
function currentSessionID(api: TuiPluginApi) {
  const { name, params } = api.route.current
  if (name !== "session") return undefined
  const sessionID = params?.sessionID
  if (typeof sessionID !== "string") return undefined
  return sessionID
}
/**
 * Debug view for one session, rendered entirely from the v2 message store.
 * Newest messages are shown first; Esc navigates back to the live session.
 */
function View(props: { api: TuiPluginApi; sessionID: string }) {
  const sync = useSyncV2()
  const dimensions = useTerminalDimensions()
  const { theme, syntax, subtleSyntax } = useTheme()
  const messages = createMemo(() => sync.data.messages[props.sessionID] ?? [])
  // Display order: newest first.
  const renderedMessages = createMemo(() => messages().toReversed())
  // Fix: search the CHRONOLOGICAL list for the latest assistant message.
  // findLast on the reversed list picked the oldest assistant, so the
  // agent/model footer attached to the wrong message.
  const lastAssistant = createMemo(() => messages().findLast((message) => message.type === "assistant"))
  createEffect(() => {
    // Pull the authoritative transcript whenever the session changes.
    void sync.session.message.sync(props.sessionID)
  })
  useKeyboard((event) => {
    if (event.name !== "escape") return
    event.preventDefault()
    event.stopPropagation()
    props.api.route.navigate("session", { sessionID: props.sessionID })
  })
  return (
    <box width={dimensions().width} height={dimensions().height} backgroundColor={theme.background}>
      <box flexDirection="row">
        <box flexGrow={1} paddingBottom={1} paddingLeft={2} paddingRight={2} gap={1}>
          <scrollbox
            viewportOptions={{ paddingRight: 0 }}
            verticalScrollbarOptions={{ visible: false }}
            stickyScroll={true}
            stickyStart="bottom"
            flexGrow={1}
          >
            <box height={1} />
            <Show when={messages().length === 0}>
              <MissingData label="Messages" detail="No v2 messages loaded from useSyncV2 yet." />
            </Show>
            <For each={renderedMessages()}>
              {(message, index) => (
                <Switch>
                  <Match when={message.type === "user"}>
                    <UserMessage message={message as SessionMessageUser} index={index()} />
                  </Match>
                  <Match when={message.type === "assistant"}>
                    <AssistantMessage
                      message={message as SessionMessageAssistant}
                      last={lastAssistant()?.id === message.id}
                      syntax={syntax()}
                      subtleSyntax={subtleSyntax()}
                    />
                  </Match>
                  <Match when={message.type === "synthetic"}>
                    <SyntheticMessage message={message as SessionMessageSynthetic} index={index()} />
                  </Match>
                  <Match when={message.type === "compaction"}>
                    <CompactionMessage message={message as SessionMessageCompaction} />
                  </Match>
                  <Match when={true}>
                    <UnknownMessage message={message} />
                  </Match>
                </Switch>
              )}
            </For>
          </scrollbox>
          <MissingData
            label="Session prompt, permission prompt, question prompt, sidebar"
            detail="The v2 message endpoint only exposes messages, so these session UI regions cannot be rendered here. Press Esc to return to the live session."
          />
        </box>
      </box>
    </box>
  )
}
// Warning banner used throughout this debug view for data the v2 endpoint
// cannot supply (or a message that is missing expected fields).
function MissingData(props: { label: string; detail: string }) {
  const { theme } = useTheme()
  return (
    <box
      border={["left"]}
      customBorderChars={SplitBorder.customBorderChars}
      borderColor={theme.warning}
      backgroundColor={theme.backgroundPanel}
      paddingLeft={2}
      paddingTop={1}
      paddingBottom={1}
      marginTop={1}
      flexShrink={0}
    >
      <text fg={theme.text}>
        <span style={{ bg: theme.warning, fg: theme.background, bold: true }}> MISSING DATA </span> {props.label}
      </text>
      <text fg={theme.textMuted}>{props.detail}</text>
    </box>
  )
}
// Renders a user prompt message: text (or a MissingData banner when empty),
// file/agent attachment chips, and a created timestamp.
function UserMessage(props: { message: SessionMessageUser; index: number }) {
  const { theme } = useTheme()
  // Combined list only decides whether the attachment row renders at all;
  // files and agents are iterated separately below.
  const attachments = createMemo(() => [...(props.message.files ?? []), ...(props.message.agents ?? [])])
  return (
    <box
      id={props.message.id}
      border={["left"]}
      borderColor={theme.primary}
      customBorderChars={SplitBorder.customBorderChars}
      marginTop={props.index === 0 ? 0 : 1}
      flexShrink={0}
    >
      <box paddingTop={1} paddingBottom={1} paddingLeft={2} backgroundColor={theme.backgroundPanel}>
        <Show
          when={props.message.text.trim()}
          fallback={
            <MissingData label="User message text" detail={`Message ${props.message.id} has no text field content.`} />
          }
        >
          <text fg={theme.text}>{props.message.text}</text>
        </Show>
        <Show when={attachments().length}>
          <box flexDirection="row" paddingTop={1} gap={1} flexWrap="wrap">
            <For each={props.message.files ?? []}>
              {(file) => (
                <text fg={theme.text}>
                  <span style={{ bg: theme.secondary, fg: theme.background }}> {file.mime} </span>
                  <span style={{ bg: theme.backgroundElement, fg: theme.textMuted }}> {file.name ?? file.uri} </span>
                </text>
              )}
            </For>
            <For each={props.message.agents ?? []}>
              {(agent) => (
                <text fg={theme.text}>
                  <span style={{ bg: theme.accent, fg: theme.background }}> agent </span>
                  <span style={{ bg: theme.backgroundElement, fg: theme.textMuted }}> {agent.name} </span>
                </text>
              )}
            </For>
          </box>
        </Show>
        <text fg={theme.textMuted}>{Locale.todayTimeOrDateTime(props.message.time.created)}</text>
      </box>
    </box>
  )
}
// Renders a system-injected ("synthetic") message as a muted panel.
function SyntheticMessage(props: { message: SessionMessageSynthetic; index: number }) {
  const { theme } = useTheme()
  return (
    <box
      id={props.message.id}
      border={["left"]}
      borderColor={theme.backgroundElement}
      customBorderChars={SplitBorder.customBorderChars}
      marginTop={props.index === 0 ? 0 : 1}
      paddingLeft={2}
      paddingTop={1}
      paddingBottom={1}
      backgroundColor={theme.backgroundPanel}
      flexShrink={0}
    >
      <text fg={theme.textMuted}>Synthetic</text>
      <text fg={theme.text}>{props.message.text}</text>
    </box>
  )
}
// Renders a compaction boundary as a titled horizontal rule, with the
// compaction summary underneath once it exists.
function CompactionMessage(props: { message: SessionMessageCompaction }) {
  const { theme } = useTheme()
  return (
    <box
      marginTop={1}
      border={["top"]}
      title={props.message.reason === "auto" ? " Auto Compaction " : " Compaction "}
      titleAlignment="center"
      borderColor={theme.borderActive}
      flexShrink={0}
    >
      <Show when={props.message.summary}>
        <text fg={theme.textMuted}>{props.message.summary}</text>
      </Show>
    </box>
  )
}
// Fallback renderer for message types this view does not recognize;
// dumps the raw message JSON into a MissingData banner.
function UnknownMessage(props: { message: SessionMessage }) {
  return <MissingData label="Unknown message type" detail={JSON.stringify(props.message)} />
}
// Renders one assistant message: its content items (text / reasoning / tool),
// any error panel, and — for the last or finalized message — an agent/model
// footer with duration.
function AssistantMessage(props: {
  message: SessionMessageAssistant
  last: boolean
  syntax: SyntaxStyle
  subtleSyntax: SyntaxStyle
}) {
  const { theme } = useTheme()
  const local = useLocal()
  // 0 while the message is still streaming (no completed timestamp yet).
  const duration = createMemo(() => {
    if (!props.message.time.completed) return 0
    return props.message.time.completed - props.message.time.created
  })
  // e.g. "provider/model" or "provider/model/variant".
  const model = createMemo(() => {
    const variant = props.message.model.variant ? `/${props.message.model.variant}` : ""
    return `${props.message.model.providerID}/${props.message.model.id}${variant}`
  })
  // "Final" means a finish reason other than tool-calls/unknown was reported.
  const final = createMemo(() => props.message.finish && !["tool-calls", "unknown"].includes(props.message.finish))
  return (
    <>
      <For each={props.message.content}>
        {(part) => (
          <Switch>
            <Match when={part.type === "text"}>
              <AssistantText part={part as SessionMessageAssistantText} syntax={props.syntax} />
            </Match>
            <Match when={part.type === "reasoning"}>
              <AssistantReasoning part={part as SessionMessageAssistantReasoning} subtleSyntax={props.subtleSyntax} />
            </Match>
            <Match when={part.type === "tool"}>
              <AssistantTool part={part as SessionMessageAssistantTool} />
            </Match>
          </Switch>
        )}
      </For>
      <Show when={props.message.content.length === 0}>
        <MissingData label="Assistant content" detail={`Assistant message ${props.message.id} has no content items.`} />
      </Show>
      <Show when={props.message.error}>
        <box
          border={["left"]}
          paddingTop={1}
          paddingBottom={1}
          paddingLeft={2}
          marginTop={1}
          backgroundColor={theme.backgroundPanel}
          customBorderChars={SplitBorder.customBorderChars}
          borderColor={theme.error}
          flexShrink={0}
        >
          <text fg={theme.textMuted}>{props.message.error}</text>
        </box>
      </Show>
      <Show when={props.last || final() || props.message.error}>
        <box paddingLeft={3} flexShrink={0}>
          <text marginTop={1}>
            <span style={{ fg: local.agent.color(props.message.agent) }}> </span>
            <span style={{ fg: theme.text }}>{Locale.titlecase(props.message.agent)}</span>
            <span style={{ fg: theme.textMuted }}> · {model()}</span>
            <Show when={duration()}>
              <span style={{ fg: theme.textMuted }}> · {Locale.duration(duration())}</span>
            </Show>
          </text>
        </box>
      </Show>
    </>
  )
}
// Renders a text content item as streaming markdown; hidden when the text is
// blank/whitespace-only.
function AssistantText(props: { part: SessionMessageAssistantText; syntax: SyntaxStyle }) {
  const { theme } = useTheme()
  return (
    <Show when={props.part.text.trim()}>
      <box paddingLeft={3} marginTop={1} flexShrink={0}>
        <code
          filetype="markdown"
          drawUnstyledText={false}
          streaming={true}
          syntaxStyle={props.syntax}
          content={props.part.text.trim()}
          conceal={true}
          fg={theme.text}
        />
      </box>
    </Show>
  )
}
// Renders a reasoning content item as subtle markdown. Strips the first
// "[REDACTED]" marker; hidden when nothing remains after trimming.
function AssistantReasoning(props: { part: SessionMessageAssistantReasoning; subtleSyntax: SyntaxStyle }) {
  const { theme } = useTheme()
  const content = createMemo(() => props.part.text.replace("[REDACTED]", "").trim())
  return (
    <Show when={content()}>
      <box
        paddingLeft={2}
        marginTop={1}
        flexDirection="column"
        border={["left"]}
        customBorderChars={SplitBorder.customBorderChars}
        borderColor={theme.backgroundElement}
        flexShrink={0}
      >
        <code
          filetype="markdown"
          drawUnstyledText={false}
          streaming={true}
          syntaxStyle={props.subtleSyntax}
          content={"_Thinking:_ " + content()}
          conceal={true}
          fg={theme.textMuted}
        />
      </box>
    </Show>
  )
}
// Dispatches a tool content item to the matching tool renderer by tool name,
// after normalizing the item into the shared ToolProps shape.
function AssistantTool(props: { part: SessionMessageAssistantTool }) {
  const input = createMemo(() => toolInputRecord(props.part.state.input))
  // Getters keep each field reactive when spread into the tool components.
  const toolprops = {
    get input() {
      return input()
    },
    get metadata() {
      return props.part.provider?.metadata ?? {}
    },
    get output() {
      // No output exists while the call is still pending (input streaming).
      return props.part.state.status === "pending" ? undefined : toolOutput(props.part.state.content)
    },
    part: props.part,
  }
  return (
    <Switch>
      <Match when={props.part.name === "bash"}>
        <Bash {...toolprops} />
      </Match>
      <Match when={props.part.name === "glob"}>
        <Glob {...toolprops} />
      </Match>
      <Match when={props.part.name === "read"}>
        <Read {...toolprops} />
      </Match>
      <Match when={props.part.name === "grep"}>
        <Grep {...toolprops} />
      </Match>
      <Match when={props.part.name === "webfetch"}>
        <WebFetch {...toolprops} />
      </Match>
      <Match when={props.part.name === "codesearch"}>
        <CodeSearch {...toolprops} />
      </Match>
      <Match when={props.part.name === "websearch"}>
        <WebSearch {...toolprops} />
      </Match>
      <Match when={props.part.name === "write"}>
        <Write {...toolprops} />
      </Match>
      <Match when={props.part.name === "edit"}>
        <Edit {...toolprops} />
      </Match>
      <Match when={props.part.name === "apply_patch"}>
        <ApplyPatch {...toolprops} />
      </Match>
      <Match when={props.part.name === "todowrite"}>
        <TodoWrite {...toolprops} />
      </Match>
      <Match when={props.part.name === "question"}>
        <Question {...toolprops} />
      </Match>
      <Match when={props.part.name === "skill"}>
        <Skill {...toolprops} />
      </Match>
      <Match when={props.part.name === "task"}>
        <Task {...toolprops} />
      </Match>
      <Match when={true}>
        <GenericTool {...toolprops} />
      </Match>
    </Switch>
  )
}
/**
 * Normalized props handed to every tool renderer (built in AssistantTool).
 * - input: parsed tool-call input record
 * - metadata: provider-reported metadata (empty object when absent)
 * - output: rendered tool output text; undefined while the call is pending
 * - part: the raw tool content item from the assistant message
 */
type ToolProps = {
  input: Record<string, unknown>
  metadata: Record<string, unknown>
  output?: string
  part: SessionMessageAssistantTool
}
// Fallback renderer for tools without a dedicated component: inline while
// there is no output, block with raw output text once there is.
function GenericTool(props: ToolProps) {
  return (
    <Show
      when={props.output?.trim()}
      fallback={
        <InlineTool icon="⚙" pending="Writing command..." complete={toolComplete(props.part)} part={props.part}>
          {props.part.name} {input(props.input)}
        </InlineTool>
      }
    >
      <BlockTool title={`# ${props.part.name} ${input(props.input)}`} part={props.part}>
        <text fg={useTheme().theme.text}>{props.output}</text>
      </BlockTool>
    </Show>
  )
}
// Single-line tool rendering: spinner while running (when requested),
// "~ pending" text until input is complete, then "icon children". Errors are
// shown below unless they look like a user denial (those are suppressed).
function InlineTool(props: {
  icon: string
  complete: unknown
  pending: string
  spinner?: boolean
  children: JSX.Element
  part: SessionMessageAssistantTool
}) {
  const { theme } = useTheme()
  const error = createMemo(() => (props.part.state.status === "error" ? props.part.state.error.message : undefined))
  // Heuristic string match on the error message to detect user-initiated
  // rejections, which are not worth surfacing as errors.
  const denied = createMemo(() => {
    const message = error()
    if (!message) return false
    return (
      message.includes("QuestionRejectedError") ||
      message.includes("rejected permission") ||
      message.includes("user dismissed")
    )
  })
  return (
    <box marginTop={1} paddingLeft={3} flexShrink={0}>
      <Switch>
        <Match when={props.spinner}>
          <Spinner color={theme.text}>{props.children}</Spinner>
        </Match>
        <Match when={true}>
          <text paddingLeft={3} fg={props.complete ? theme.textMuted : theme.text}>
            <Show fallback={<>~ {props.pending}</>} when={props.complete}>
              {props.icon} {props.children}
            </Show>
          </text>
        </Match>
      </Switch>
      <Show when={error() && !denied()}>
        <text fg={theme.error}>{error()}</text>
      </Show>
    </box>
  )
}
// Panel-style tool rendering: a titled bordered box with the tool's body
// content, an optional spinner in place of the title, and any error below.
function BlockTool(props: {
  title: string
  children: JSX.Element
  part: SessionMessageAssistantTool
  spinner?: boolean
}) {
  const { theme } = useTheme()
  const error = createMemo(() => (props.part.state.status === "error" ? props.part.state.error.message : undefined))
  return (
    <box
      border={["left"]}
      paddingTop={1}
      paddingBottom={1}
      paddingLeft={2}
      marginTop={1}
      gap={1}
      backgroundColor={theme.backgroundPanel}
      customBorderChars={SplitBorder.customBorderChars}
      borderColor={theme.background}
      flexShrink={0}
    >
      <Show
        when={props.spinner}
        fallback={
          <text paddingLeft={3} fg={theme.textMuted}>
            {props.title}
          </text>
        }
      >
        {/* Titles carry a leading "# "; drop it for the spinner label. */}
        <Spinner color={theme.textMuted}>{props.title.replace(/^# /, "")}</Spinner>
      </Show>
      {props.children}
      <Show when={error()}>
        <text fg={theme.error}>{error()}</text>
      </Show>
    </box>
  )
}
// Shell tool renderer: block with "$ command" + ANSI-stripped output once any
// output exists; otherwise an inline "$ command" line.
function Bash(props: ToolProps) {
  const { theme } = useTheme()
  // Prefer metadata.output over the generic tool output; strip ANSI escapes.
  const output = createMemo(() => stripAnsi((stringValue(props.metadata.output) ?? props.output ?? "").trim()))
  const command = createMemo(() => stringValue(props.input.command) ?? pendingInput(props.part))
  const title = createMemo(() => `# ${stringValue(props.input.description) ?? "Shell"}`)
  return (
    <Switch>
      <Match when={output()}>
        <BlockTool title={title()} part={props.part} spinner={props.part.state.status === "running"}>
          <box gap={1}>
            <text fg={theme.text}>$ {command()}</text>
            <text fg={theme.text}>{output()}</text>
          </box>
        </BlockTool>
      </Match>
      <Match when={true}>
        <InlineTool icon="$" pending="Writing command..." complete={command()} part={props.part}>
          {command()}
        </InlineTool>
      </Match>
    </Switch>
  )
}
// Glob tool renderer: inline summary of pattern, optional search path, and
// match count from metadata.
function Glob(props: ToolProps) {
  return (
    <InlineTool icon="✱" pending="Finding files..." complete={toolComplete(props.part)} part={props.part}>
      Glob "{stringValue(props.input.pattern) ?? pendingInput(props.part)}"{" "}
      <Show when={stringValue(props.input.path)}>in {normalizePath(stringValue(props.input.path))} </Show>
      <Show when={numberValue(props.metadata.count)}>
        {(count) => (
          <>
            ({count()} {count() === 1 ? "match" : "matches"})
          </>
        )}
      </Show>
    </InlineTool>
  )
}
// Read tool renderer: inline "Read <path>" line plus one "Loaded <path>" line
// for each extra file the provider reports in metadata.loaded.
function Read(props: ToolProps) {
  const { theme } = useTheme()
  // Keep only string entries from metadata.loaded.
  const loaded = createMemo(() =>
    arrayValue(props.metadata.loaded).filter((item): item is string => typeof item === "string"),
  )
  return (
    <>
      <InlineTool
        icon="→"
        pending="Reading file..."
        complete={stringValue(props.input.filePath) ?? pendingInput(props.part)}
        spinner={props.part.state.status === "running"}
        part={props.part}
      >
        Read {normalizePath(stringValue(props.input.filePath) ?? pendingInput(props.part))}{" "}
        {input(props.input, ["filePath"])}
      </InlineTool>
      <For each={loaded()}>
        {(filepath) => (
          <box paddingLeft={3} flexShrink={0}>
            <text paddingLeft={3} fg={theme.textMuted}>
              Loaded {normalizePath(filepath)}
            </text>
          </box>
        )}
      </For>
    </>
  )
}
// Grep tool renderer: inline summary of pattern, optional search path, and
// match count from metadata.
function Grep(props: ToolProps) {
  return (
    <InlineTool icon="✱" pending="Searching content..." complete={toolComplete(props.part)} part={props.part}>
      Grep "{stringValue(props.input.pattern) ?? pendingInput(props.part)}"{" "}
      <Show when={stringValue(props.input.path)}>in {normalizePath(stringValue(props.input.path))} </Show>
      <Show when={numberValue(props.metadata.matches)}>
        {(matches) => (
          <>
            ({matches()} {matches() === 1 ? "match" : "matches"})
          </>
        )}
      </Show>
    </InlineTool>
  )
}
// WebFetch tool renderer: inline line showing the fetched URL.
function WebFetch(props: ToolProps) {
  return (
    <InlineTool icon="%" pending="Fetching from the web..." complete={toolComplete(props.part)} part={props.part}>
      WebFetch {stringValue(props.input.url) ?? pendingInput(props.part)}
    </InlineTool>
  )
}
// Inline status line for the Exa code-search tool: the query plus the result
// count once metadata arrives (a 0 count is falsy and stays hidden).
function CodeSearch(props: ToolProps) {
  return (
    <InlineTool icon="◇" pending="Searching code..." complete={toolComplete(props.part)} part={props.part}>
      Exa Code Search "{stringValue(props.input.query) ?? pendingInput(props.part)}"{" "}
      <Show when={numberValue(props.metadata.results)}>{(results) => <>({results()} results)</>}</Show>
    </InlineTool>
  )
}
// Inline status line for the Exa web-search tool: the query plus the result
// count once metadata arrives (a 0 count is falsy and stays hidden).
function WebSearch(props: ToolProps) {
  return (
    <InlineTool icon="◈" pending="Searching web..." complete={toolComplete(props.part)} part={props.part}>
      Exa Web Search "{stringValue(props.input.query) ?? pendingInput(props.part)}"{" "}
      <Show when={numberValue(props.metadata.numResults)}>{(results) => <>({results()} results)</>}</Show>
    </InlineTool>
  )
}
// Renders a Write tool call. Once the write completes and the content is
// known, it shows the full written file with line numbers and syntax
// highlighting plus any diagnostics; otherwise an inline "Write <path>" line.
function Write(props: ToolProps) {
  const { theme, syntax } = useTheme()
  const filePath = createMemo(() => stringValue(props.input.filePath) ?? "")
  const content = createMemo(() => stringValue(props.input.content) ?? "")
  return (
    <Switch>
      <Match when={content() && props.part.state.status === "completed"}>
        <BlockTool title={"# Wrote " + normalizePath(filePath())} part={props.part}>
          <line_number fg={theme.textMuted} minWidth={3} paddingRight={1}>
            <code
              conceal={false}
              fg={theme.text}
              filetype={filetype(filePath())}
              syntaxStyle={syntax()}
              content={content()}
            />
          </line_number>
          <Diagnostics diagnostics={props.metadata.diagnostics} filePath={filePath()} />
        </BlockTool>
      </Match>
      <Match when={true}>
        <InlineTool icon="←" pending="Preparing write..." complete={filePath()} part={props.part}>
          Write {normalizePath(filePath())}
        </InlineTool>
      </Match>
    </Switch>
  )
}
// Renders an Edit tool call. When the tool metadata carries a diff it is shown
// split on wide terminals (>120 cols) or unified otherwise, followed by any
// diagnostics; until then an inline "Edit <path>" status line.
function Edit(props: ToolProps) {
  const { theme, syntax } = useTheme()
  const dimensions = useTerminalDimensions()
  const filePath = createMemo(() => stringValue(props.input.filePath) ?? "")
  const diff = createMemo(() => stringValue(props.metadata.diff))
  return (
    <Switch>
      <Match when={diff()}>
        {(diff) => (
          <BlockTool title={"← Edit " + normalizePath(filePath())} part={props.part}>
            <box paddingLeft={1}>
              <diff
                diff={diff()}
                view={dimensions().width > 120 ? "split" : "unified"}
                filetype={filetype(filePath())}
                syntaxStyle={syntax()}
                showLineNumbers={true}
                width="100%"
                wrapMode="word"
                fg={theme.text}
                addedBg={theme.diffAddedBg}
                removedBg={theme.diffRemovedBg}
                contextBg={theme.diffContextBg}
                addedSignColor={theme.diffHighlightAdded}
                removedSignColor={theme.diffHighlightRemoved}
                lineNumberFg={theme.diffLineNumber}
                lineNumberBg={theme.diffContextBg}
                addedLineNumberBg={theme.diffAddedLineNumberBg}
                removedLineNumberBg={theme.diffRemovedLineNumberBg}
              />
            </box>
            <Diagnostics diagnostics={props.metadata.diagnostics} filePath={filePath()} />
          </BlockTool>
        )}
      </Match>
      <Match when={true}>
        <InlineTool icon="←" pending="Preparing edit..." complete={filePath()} part={props.part}>
          Edit {normalizePath(filePath())} {input({ replaceAll: props.input.replaceAll })}
        </InlineTool>
      </Match>
    </Switch>
  )
}
// Renders an ApplyPatch tool call: one block per file the patch touched once
// the tool reports its file list, otherwise an inline "Patch" placeholder.
function ApplyPatch(props: ToolProps) {
  const { theme, syntax } = useTheme()
  const dimensions = useTerminalDimensions()
  // Files touched by the patch, per tool metadata; non-object entries dropped.
  const files = createMemo(() => arrayValue(props.metadata.files).flatMap((item) => (isRecord(item) ? [item] : [])))
  // Header label for a file block, keyed off the patch entry's change type.
  const fileTitle = (file: Record<string, unknown>) => {
    const type = stringValue(file.type)
    const relativePath = stringValue(file.relativePath) ?? stringValue(file.filePath) ?? "patch"
    if (type === "delete") return "# Deleted " + relativePath
    if (type === "add") return "# Created " + relativePath
    if (type === "move") return "# Moved " + normalizePath(stringValue(file.filePath)) + " → " + relativePath
    return "← Patched " + relativePath
  }
  return (
    <Switch>
      <Match when={files().length > 0}>
        <For each={files()}>
          {(file) => (
            <BlockTool title={fileTitle(file)} part={props.part}>
              {/* Entries without patch text fall back to a deleted-line count. */}
              <Show
                when={stringValue(file.patch)}
                fallback={
                  <text fg={theme.diffRemoved}>
                    -{numberValue(file.deletions) ?? 0} line{numberValue(file.deletions) === 1 ? "" : "s"}
                  </text>
                }
              >
                {(patch) => (
                  <box paddingLeft={1}>
                    <diff
                      diff={patch()}
                      view={dimensions().width > 120 ? "split" : "unified"}
                      filetype={filetype(stringValue(file.filePath) ?? stringValue(file.relativePath))}
                      syntaxStyle={syntax()}
                      showLineNumbers={true}
                      width="100%"
                      wrapMode="word"
                      fg={theme.text}
                      addedBg={theme.diffAddedBg}
                      removedBg={theme.diffRemovedBg}
                      contextBg={theme.diffContextBg}
                      addedSignColor={theme.diffHighlightAdded}
                      removedSignColor={theme.diffHighlightRemoved}
                      lineNumberFg={theme.diffLineNumber}
                      lineNumberBg={theme.diffContextBg}
                      addedLineNumberBg={theme.diffAddedLineNumberBg}
                      removedLineNumberBg={theme.diffRemovedLineNumberBg}
                    />
                  </box>
                )}
              </Show>
            </BlockTool>
          )}
        </For>
      </Match>
      <Match when={true}>
        <InlineTool icon="%" pending="Preparing patch..." complete={false} part={props.part}>
          Patch
        </InlineTool>
      </Match>
    </Switch>
  )
}
// Renders a TodoWrite tool call: once completed, a "# Todos" block with one
// icon-prefixed line per todo; otherwise an inline "Updating todos..." line.
function TodoWrite(props: ToolProps) {
  const { theme } = useTheme()
  // Todo entries from the tool input; non-object entries dropped.
  const todos = createMemo(() => arrayValue(props.input.todos).flatMap((item) => (isRecord(item) ? [item] : [])))
  return (
    <Switch>
      <Match when={todos().length > 0 && props.part.state.status === "completed"}>
        <BlockTool title="# Todos" part={props.part}>
          <box>
            <For each={todos()}>
              {(todo) => (
                <text fg={theme.text}>
                  {todoIcon(stringValue(todo.status))} {stringValue(todo.content)}
                </text>
              )}
            </For>
          </box>
        </BlockTool>
      </Match>
      <Match when={true}>
        <InlineTool icon="⚙" pending="Updating todos..." complete={false} part={props.part}>
          Updating todos...
        </InlineTool>
      </Match>
    </Switch>
  )
}
// Renders a Question tool call. Once any answers exist in metadata, shows each
// question paired with its answer (matched by array index); otherwise an
// inline summary of how many questions were asked.
function Question(props: ToolProps) {
  const { theme } = useTheme()
  const questions = createMemo(() =>
    arrayValue(props.input.questions).flatMap((item) => (isRecord(item) ? [item] : [])),
  )
  const answers = createMemo(() => arrayValue(props.metadata.answers))
  return (
    <Switch>
      <Match when={answers().length > 0}>
        <BlockTool title="# Questions" part={props.part}>
          <box gap={1}>
            <For each={questions()}>
              {(question, index) => (
                <box>
                  <text fg={theme.textMuted}>{stringValue(question.question)}</text>
                  <text fg={theme.text}>{formatAnswer(answers()[index()])}</text>
                </box>
              )}
            </For>
          </box>
        </BlockTool>
      </Match>
      <Match when={true}>
        {/* NOTE(review): complete={0} is falsy when no questions parsed yet — shows pending text. */}
        <InlineTool icon="→" pending="Asking questions..." complete={questions().length} part={props.part}>
          Asked {questions().length} question{questions().length === 1 ? "" : "s"}
        </InlineTool>
      </Match>
    </Switch>
  )
}
// Inline status line for the Skill tool showing the skill name being loaded.
function Skill(props: ToolProps) {
  return (
    <InlineTool icon="→" pending="Loading skill..." complete={toolComplete(props.part)} part={props.part}>
      Skill "{stringValue(props.input.name) ?? pendingInput(props.part)}"
    </InlineTool>
  )
}
// Inline status line for a delegated subagent Task:
// "<Agent> Task — <description>", with a spinner while the tool is running.
function Task(props: ToolProps) {
  const content = createMemo(() => {
    const description = stringValue(props.input.description)
    // Before the input has streamed in, fall back to the raw pending text.
    if (!description) return pendingInput(props.part)
    return `${Locale.titlecase(stringValue(props.input.subagent_type) ?? "General")} Task — ${description}`
  })
  return (
    <InlineTool
      icon="│"
      spinner={props.part.state.status === "running"}
      complete={toolComplete(props.part)}
      pending="Delegating..."
      part={props.part}
    >
      {content()}
    </InlineTool>
  )
}
// Shows up to three error-severity LSP diagnostics (severity === 1) for the
// given file, looking the file up under both the normalized and raw path keys.
function Diagnostics(props: { diagnostics: unknown; filePath: string }) {
  const { theme } = useTheme()
  const errors = createMemo(() => {
    if (!isRecord(props.diagnostics)) return []
    // Diagnostics may be keyed by either path form depending on the producer.
    const value = props.diagnostics[normalizePath(props.filePath)] ?? props.diagnostics[props.filePath]
    return arrayValue(value)
      .flatMap((item) => (isRecord(item) ? [item] : []))
      .filter((diagnostic) => diagnostic.severity === 1)
      .slice(0, 3)
  })
  return (
    <Show when={errors().length}>
      <box>
        <For each={errors()}>
          {(diagnostic) => <text fg={theme.error}>Error {stringValue(diagnostic.message)}</text>}
        </For>
      </box>
    </Show>
  )
}
// Flattens tool output items into a single newline-joined string: text items
// are trimmed, file items become "[file <name-or-uri>]", empties are dropped.
function toolOutput(content?: Array<ToolTextContent | ToolFileContent>) {
  const lines: string[] = []
  for (const item of content ?? []) {
    const rendered = item.type === "text" ? item.text.trim() : `[file ${item.name ?? item.uri}]`
    if (rendered) lines.push(rendered)
  }
  return lines.join("\n")
}
// Normalizes a tool input to a record: raw string inputs (not yet parsed)
// become an empty record, objects pass through untouched.
function toolInputRecord(input: string | Record<string, unknown>) {
  return typeof input === "string" ? {} : input
}
// Returns the trimmed, still-streaming input text of a pending tool part,
// or "" for any other state.
function pendingInput(part: SessionMessageAssistantTool) {
  const state = part.state
  return state.status === "pending" ? state.input.trim() : ""
}
// Completion value for InlineTool: while pending, the trimmed streaming input
// text; otherwise true once the part is completed, errored, or running.
function toolComplete(part: SessionMessageAssistantTool) {
  const state = part.state
  if (state.status === "pending") return state.input.trim()
  switch (state.status) {
    case "completed":
    case "error":
    case "running":
      return true
    default:
      return false
  }
}
// Narrows an unknown to a string, yielding undefined for anything else.
function stringValue(value: unknown) {
  if (typeof value !== "string") return undefined
  return value
}
// Narrows an unknown to a number, yielding undefined for anything else.
function numberValue(value: unknown) {
  if (typeof value !== "number") return undefined
  return value
}
// Narrows an unknown to an array, yielding an empty array for anything else.
function arrayValue(value: unknown): unknown[] {
  if (Array.isArray(value)) return value
  return []
}
// Type guard for a plain object (non-null, non-array) usable as a record.
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || value === undefined) return false
  if (Array.isArray(value)) return false
  return typeof value === "object"
}
// Formats the primitive (string/number/boolean) entries of a tool input as a
// compact "[key=value, ...]" suffix, skipping omitted keys; "" when none.
function input(input: Record<string, unknown>, omit?: string[]) {
  const pairs: string[] = []
  for (const [key, value] of Object.entries(input)) {
    if (omit?.includes(key)) continue
    const type = typeof value
    if (type === "string" || type === "number" || type === "boolean") pairs.push(`${key}=${value}`)
  }
  if (!pairs.length) return ""
  return `[${pairs.join(", ")}]`
}
// Renders a path relative to the current working directory when it lives
// inside it ("." for cwd itself); paths outside cwd stay absolute, and a
// missing input yields "".
function normalizePath(input?: string) {
  if (!input) return ""
  const cwd = process.cwd()
  const absolute = path.isAbsolute(input) ? input : path.resolve(cwd, input)
  const relative = path.relative(cwd, absolute)
  if (relative === "") return "."
  return relative.startsWith("..") ? absolute : relative
}
// Maps a file path to the syntax-highlighting filetype used by <code>/<diff>.
// Returns "none" when there is no path OR the extension is not in
// LANGUAGE_EXTENSIONS — previously an unknown extension leaked `undefined`,
// inconsistent with the "none" sentinel used for a missing input.
function filetype(input?: string) {
  if (!input) return "none"
  const language = LANGUAGE_EXTENSIONS[path.extname(input)]
  if (!language) return "none"
  // Only a TypeScript grammar is shipped, so JS/JSX variants reuse it.
  if (["typescriptreact", "javascriptreact", "javascript"].includes(language)) return "typescript"
  return language
}
// Glyph for a todo's status; unknown/missing statuses render an empty box.
function todoIcon(status?: string) {
  switch (status) {
    case "completed":
      return "✓"
    case "in_progress":
      return "~"
    case "cancelled":
      return "✕"
    default:
      return "☐"
  }
}
// Joins the string entries of an answer array with ", "; non-arrays and empty
// arrays render as "(no answer)".
function formatAnswer(answer: unknown) {
  if (Array.isArray(answer) && answer.length > 0) {
    return answer.filter((item): item is string => typeof item === "string").join(", ")
  }
  return "(no answer)"
}
const tui: TuiPlugin = async (api) => {
api.route.register([
{
name: route,
render(input) {
const sessionID = input.params?.sessionID
if (typeof sessionID !== "string") {
return <text fg={api.theme.current.error}>Missing sessionID</text>
}
return <View api={api} sessionID={sessionID} />
},
},
])
api.command.register(() => [
{
title: "View v2 session messages",
value: route,
category: "Debug",
suggested: api.route.current.name === "session",
enabled: api.route.current.name === "session",
onSelect() {
const sessionID = currentSessionID(api)
if (!sessionID) return
api.route.navigate(route, { sessionID })
},
},
])
}
// Plugin module entry: pairs the plugin id with its TUI registration hook.
const plugin: TuiPluginModule & { id: string } = {
  id,
  tui,
}
export default plugin

View File

@@ -7,6 +7,7 @@ import SidebarTodo from "../feature-plugins/sidebar/todo"
import SidebarFiles from "../feature-plugins/sidebar/files"
import SidebarFooter from "../feature-plugins/sidebar/footer"
import PluginManager from "../feature-plugins/system/plugins"
import SessionV2Debug from "../feature-plugins/system/session-v2"
import type { TuiPlugin, TuiPluginModule } from "@opencode-ai/plugin/tui"
export type InternalTuiPlugin = TuiPluginModule & {
@@ -24,4 +25,5 @@ export const INTERNAL_TUI_PLUGINS: InternalTuiPlugin[] = [
SidebarFiles,
SidebarFooter,
PluginManager,
SessionV2Debug,
]

View File

@@ -37,7 +37,8 @@ import { Locale } from "@/util/locale"
import type { Tool } from "@/tool/tool"
import type { ReadTool } from "@/tool/read"
import type { WriteTool } from "@/tool/write"
import { BashTool } from "@/tool/bash"
import { ShellTool } from "@/tool/shell"
import { ShellToolID } from "@/tool/shell/id"
import type { GlobTool } from "@/tool/glob"
import { TodoWriteTool } from "@/tool/todo"
import type { GrepTool } from "@/tool/grep"
@@ -1552,8 +1553,8 @@ function ToolPart(props: { last: boolean; part: ToolPart; message: AssistantMess
return (
<Show when={!shouldHide()}>
<Switch>
<Match when={props.part.tool === "bash"}>
<Bash {...toolprops} />
<Match when={props.part.tool === ShellToolID.id}>
<Shell {...toolprops} />
</Match>
<Match when={props.part.tool === "glob"}>
<Glob {...toolprops} />
@@ -1784,7 +1785,7 @@ function BlockTool(props: {
)
}
function Bash(props: ToolProps<typeof BashTool>) {
function Shell(props: ToolProps<typeof ShellTool>) {
const { theme } = useTheme()
const sync = useSync()
const isRunning = createMemo(() => props.part.state.status === "running")
@@ -1960,12 +1961,15 @@ function Task(props: ToolProps<typeof TaskTool>) {
const { navigate } = useRoute()
const sync = useSync()
onMount(() => {
if (props.metadata.sessionId && !sync.data.message[props.metadata.sessionId]?.length)
void sync.session.sync(props.metadata.sessionId)
createEffect(() => {
const sessionID = props.metadata.sessionId
if (!sessionID) return
if (sync.data.message[sessionID]?.length) return
void sync.session.sync(sessionID)
})
const messages = createMemo(() => sync.data.message[props.metadata.sessionId ?? ""] ?? [])
const childSessionID = createMemo(() => props.metadata.sessionId)
const messages = createMemo(() => sync.data.message[childSessionID() ?? ""] ?? [])
const tools = createMemo(() => {
return messages().flatMap((msg) =>
@@ -1979,7 +1983,16 @@ function Task(props: ToolProps<typeof TaskTool>) {
tools().findLast((x) => (x.state.status === "running" || x.state.status === "completed") && x.state.title),
)
const isRunning = createMemo(() => props.part.state.status === "running")
const isBackground = createMemo(() => props.metadata.background === true)
const isBackgroundRunning = createMemo(() => {
const sessionID = childSessionID()
if (!isBackground() || !sessionID) return false
const status = sync.data.session_status[sessionID]?.type
if (status === "busy" || status === "retry") return true
if (status === "idle") return false
return !messages().some((x) => x.role === "assistant" && x.time.completed)
})
const isRunning = createMemo(() => props.part.state.status === "running" || isBackgroundRunning())
const duration = createMemo(() => {
const first = messages().find((x) => x.role === "user")?.time.created
@@ -1990,7 +2003,8 @@ function Task(props: ToolProps<typeof TaskTool>) {
const content = createMemo(() => {
if (!props.input.description) return ""
let content = [`${Locale.titlecase(props.input.subagent_type ?? "General")} Task — ${props.input.description}`]
const description = isBackground() ? `${props.input.description} (background)` : props.input.description
let content = [`${Locale.titlecase(props.input.subagent_type ?? "General")} Task — ${description}`]
if (isRunning() && tools().length > 0) {
// content[0] += ` · ${tools().length} toolcalls`
@@ -2001,7 +2015,7 @@ function Task(props: ToolProps<typeof TaskTool>) {
} else content.push(`${tools().length} toolcalls`)
}
if (props.part.state.status === "completed") {
if (!isRunning() && props.part.state.status === "completed") {
content.push(`${tools().length} toolcalls · ${Locale.duration(duration())}`)
}
@@ -2016,8 +2030,9 @@ function Task(props: ToolProps<typeof TaskTool>) {
pending="Delegating..."
part={props.part}
onClick={() => {
if (props.metadata.sessionId) {
navigate({ type: "session", sessionID: props.metadata.sessionId })
const sessionID = childSessionID()
if (sessionID) {
navigate({ type: "session", sessionID })
}
}}
>

View File

@@ -15,6 +15,7 @@ import { LANGUAGE_EXTENSIONS } from "@/lsp/language"
import { Keybind } from "@/util/keybind"
import { Locale } from "@/util/locale"
import { Global } from "@opencode-ai/core/global"
import { ShellToolID } from "@/tool/shell/id"
import { useDialog } from "../../ui/dialog"
import { getScrollAcceleration } from "../../util/scroll"
import { useTuiConfig } from "../../context/tui-config"
@@ -287,7 +288,7 @@ export function PermissionPrompt(props: { request: PermissionRequest }) {
}
}
if (permission === "bash") {
if (permission === ShellToolID.id) {
const title =
typeof data.description === "string" && data.description ? data.description : "Shell command"
const command = typeof data.command === "string" ? data.command : ""

View File

@@ -7,6 +7,7 @@ import { UI } from "@/cli/ui"
import * as Log from "@opencode-ai/core/util/log"
import { errorMessage } from "@/util/error"
import { withTimeout } from "@/util/timeout"
import { Instance } from "@/project/instance"
import { withNetworkOptions, resolveNetworkOptionsNoConfig } from "@/cli/network"
import { Filesystem } from "@/util/filesystem"
import type { GlobalEvent } from "@opencode-ai/sdk/v2"
@@ -189,7 +190,11 @@ export const TuiThreadCommand = cmd({
const prompt = await input(args.prompt)
const config = await TuiConfig.get()
const network = resolveNetworkOptionsNoConfig(args)
const network = await Instance.provide({
directory: cwd,
fn: () => resolveNetworkOptionsNoConfig(args),
})
const external =
process.argv.includes("--port") ||
process.argv.includes("--hostname") ||

View File

@@ -37,6 +37,7 @@ export interface DialogSelectOption<T = any> {
title: string
value: T
description?: string
search?: string
footer?: JSX.Element | string
category?: string
categoryView?: JSX.Element
@@ -93,8 +94,8 @@ export function DialogSelect<T>(props: DialogSelectProps<T>) {
// users typically search by the item name, and not its category.
const result = fuzzysort
.go(needle, options, {
keys: ["title", "category"],
scoreFn: (r) => r[0].score * 2 + r[1].score,
keys: ["title", "category", "search"],
scoreFn: (r) => r[0].score * 2 + r[1].score + r[2].score,
})
.map((x) => x.obj)

View File

@@ -5,6 +5,7 @@ import { withNetworkOptions, resolveNetworkOptions } from "../network"
import { Flag } from "@opencode-ai/core/flag/flag"
import open from "open"
import { networkInterfaces } from "os"
import { bootstrap } from "../bootstrap"
function getNetworkIPs() {
const nets = networkInterfaces()
@@ -36,7 +37,7 @@ export const WebCommand = cmd({
if (!Flag.OPENCODE_SERVER_PASSWORD) {
UI.println(UI.Style.TEXT_WARNING_BOLD + "! OPENCODE_SERVER_PASSWORD is not set; server is unsecured.")
}
const opts = await resolveNetworkOptions(args)
const opts = await bootstrap(process.cwd(), () => resolveNetworkOptions(args))
const server = await Server.listen(opts)
UI.empty()
UI.println(UI.logo(" "))

View File

@@ -173,6 +173,7 @@ export const Info = Schema.Struct({
// subagent
general: Schema.optional(ConfigAgent.Info),
explore: Schema.optional(ConfigAgent.Info),
scout: Schema.optional(ConfigAgent.Info),
// specialized
title: Schema.optional(ConfigAgent.Info),
summary: Schema.optional(ConfigAgent.Info),

View File

@@ -112,6 +112,7 @@ const KeybindsSchema = Schema.Struct({
// `Keybinds.parse(...)`, so the schema-level transform was redundant.
terminal_suspend: keybind("ctrl+z", "Suspend terminal"),
terminal_title_toggle: keybind("none", "Toggle terminal title"),
permission_auto_accept_toggle: keybind("none", "Toggle auto-accept for edit permissions"),
tips_toggle: keybind("<leader>h", "Toggle tips on home screen"),
plugin_manager: keybind("none", "Open plugin manager dialog"),
display_thinking: keybind("none", "Toggle thinking blocks visibility"),

View File

@@ -35,6 +35,9 @@ const InputObject = Schema.StructWithRest(
question: Schema.optional(Action),
webfetch: Schema.optional(Action),
websearch: Schema.optional(Action),
codesearch: Schema.optional(Action),
repo_clone: Schema.optional(Rule),
repo_overview: Schema.optional(Rule),
lsp: Schema.optional(Rule),
doom_loop: Schema.optional(Action),
skill: Schema.optional(Rule),

View File

@@ -0,0 +1,45 @@
import type { ProjectID } from "@/project/schema"
import type { WorkspaceAdapter, WorkspaceAdapterEntry } from "../types"
import { WorktreeAdapter } from "./worktree"
const BUILTIN: Record<string, WorkspaceAdapter> = {
worktree: WorktreeAdapter,
}
const state = new Map<ProjectID, Map<string, WorkspaceAdapter>>()
export function getAdapter(projectID: ProjectID, type: string): WorkspaceAdapter {
const custom = state.get(projectID)?.get(type)
if (custom) return custom
const builtin = BUILTIN[type]
if (builtin) return builtin
throw new Error(`Unknown workspace adapter: ${type}`)
}
export async function listAdapters(projectID: ProjectID): Promise<WorkspaceAdapterEntry[]> {
const builtin = await Promise.all(
Object.entries(BUILTIN).map(async ([type, adapter]) => {
return {
type,
name: adapter.name,
description: adapter.description,
}
}),
)
const custom = [...(state.get(projectID)?.entries() ?? [])].map(([type, adapter]) => ({
type,
name: adapter.name,
description: adapter.description,
}))
return [...builtin, ...custom]
}
// Plugins can be loaded per-project so we need to scope them. If you
// want to install a global one pass `ProjectID.global`
export function registerAdapter(projectID: ProjectID, type: string, adapter: WorkspaceAdapter) {
const adapters = state.get(projectID) ?? new Map<string, WorkspaceAdapter>()
adapters.set(type, adapter)
state.set(projectID, adapters)
}

View File

@@ -1,5 +1,5 @@
import { Schema } from "effect"
import { type WorkspaceAdaptor, WorkspaceInfo } from "../types"
import { type WorkspaceAdapter, WorkspaceInfo } from "../types"
const WorktreeConfig = Schema.Struct({
name: WorkspaceInfo.fields.name,
@@ -13,7 +13,7 @@ async function loadWorktree() {
return { AppRuntime, Worktree }
}
export const WorktreeAdaptor: WorkspaceAdaptor = {
export const WorktreeAdapter: WorkspaceAdapter = {
name: "Worktree",
description: "Create a git worktree",
async configure(info) {

View File

@@ -1,45 +0,0 @@
import type { ProjectID } from "@/project/schema"
import type { WorkspaceAdaptor, WorkspaceAdaptorEntry } from "../types"
import { WorktreeAdaptor } from "./worktree"
const BUILTIN: Record<string, WorkspaceAdaptor> = {
worktree: WorktreeAdaptor,
}
const state = new Map<ProjectID, Map<string, WorkspaceAdaptor>>()
export function getAdaptor(projectID: ProjectID, type: string): WorkspaceAdaptor {
const custom = state.get(projectID)?.get(type)
if (custom) return custom
const builtin = BUILTIN[type]
if (builtin) return builtin
throw new Error(`Unknown workspace adaptor: ${type}`)
}
export async function listAdaptors(projectID: ProjectID): Promise<WorkspaceAdaptorEntry[]> {
const builtin = await Promise.all(
Object.entries(BUILTIN).map(async ([type, adaptor]) => {
return {
type,
name: adaptor.name,
description: adaptor.description,
}
}),
)
const custom = [...(state.get(projectID)?.entries() ?? [])].map(([type, adaptor]) => ({
type,
name: adaptor.name,
description: adaptor.description,
}))
return [...builtin, ...custom]
}
// Plugins can be loaded per-project so we need to scope them. If you
// want to install a global one pass `ProjectID.global`
export function registerAdaptor(projectID: ProjectID, type: string, adaptor: WorkspaceAdaptor) {
const adaptors = state.get(projectID) ?? new Map<string, WorkspaceAdaptor>()
adaptors.set(type, adaptor)
state.set(projectID, adaptors)
}

View File

@@ -17,12 +17,12 @@ export const WorkspaceInfo = Schema.Struct({
.pipe(withStatics((s) => ({ zod: zod(s) })))
export type WorkspaceInfo = DeepMutable<Schema.Schema.Type<typeof WorkspaceInfo>>
export const WorkspaceAdaptorEntry = Schema.Struct({
export const WorkspaceAdapterEntry = Schema.Struct({
type: Schema.String,
name: Schema.String,
description: Schema.String,
}).pipe(withStatics((s) => ({ zod: zod(s) })))
export type WorkspaceAdaptorEntry = Schema.Schema.Type<typeof WorkspaceAdaptorEntry>
export type WorkspaceAdapterEntry = Schema.Schema.Type<typeof WorkspaceAdapterEntry>
export type Target =
| {
@@ -35,7 +35,7 @@ export type Target =
headers?: HeadersInit
}
export type WorkspaceAdaptor = {
export type WorkspaceAdapter = {
name: string
description: string
configure(info: WorkspaceInfo): WorkspaceInfo | Promise<WorkspaceInfo>

View File

@@ -16,7 +16,7 @@ import { Filesystem } from "@/util/filesystem"
import { ProjectID } from "@/project/schema"
import { Slug } from "@opencode-ai/core/util/slug"
import { WorkspaceTable } from "./workspace.sql"
import { getAdaptor } from "./adaptors"
import { getAdapter } from "./adapters"
import { type WorkspaceInfo, WorkspaceInfo as WorkspaceInfoSchema } from "./types"
import { WorkspaceID } from "./schema"
import { Session } from "@/session/session"
@@ -335,8 +335,8 @@ export const layer = Layer.effect(
})
const syncWorkspaceLoop = Effect.fn("Workspace.syncWorkspaceLoop")(function* (space: Info) {
const adaptor = getAdaptor(space.projectID, space.type)
const target = yield* Effect.promise(() => Promise.resolve(adaptor.target(space)))
const adapter = getAdapter(space.projectID, space.type)
const target = yield* Effect.promise(() => Promise.resolve(adapter.target(space)))
if (target.type === "local") return
@@ -419,8 +419,8 @@ export const layer = Layer.effect(
const startSync = Effect.fn("Workspace.startSync")(function* (space: Info) {
if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) return
const adaptor = getAdaptor(space.projectID, space.type)
const target = yield* Effect.promise(() => Promise.resolve(adaptor.target(space)))
const adapter = getAdapter(space.projectID, space.type)
const target = yield* Effect.promise(() => Promise.resolve(adapter.target(space)))
if (target.type === "local") {
setStatus(space.id, (yield* Effect.promise(() => Filesystem.exists(target.directory))) ? "connected" : "error")
@@ -458,9 +458,9 @@ export const layer = Layer.effect(
const create = Effect.fn("Workspace.create")(function* (input: CreateInput) {
const id = WorkspaceID.ascending(input.id)
const adaptor = getAdaptor(input.projectID, input.type)
const adapter = getAdapter(input.projectID, input.type)
const config = yield* Effect.promise(() =>
Promise.resolve(adaptor.configure({ ...input, id, name: Slug.create(), directory: null })),
Promise.resolve(adapter.configure({ ...input, id, name: Slug.create(), directory: null })),
)
const info: Info = {
@@ -496,7 +496,7 @@ export const layer = Layer.effect(
OTEL_RESOURCE_ATTRIBUTES: process.env.OTEL_RESOURCE_ATTRIBUTES,
}
yield* Effect.promise(() => adaptor.create(config, env))
yield* Effect.promise(() => adapter.create(config, env))
yield* Effect.all(
[
waitEvent({
@@ -531,8 +531,8 @@ export const layer = Layer.effect(
workspaceID: input.workspaceID,
})
const adaptor = getAdaptor(space.projectID, space.type)
const target = yield* Effect.promise(() => Promise.resolve(adaptor.target(space)))
const adapter = getAdapter(space.projectID, space.type)
const target = yield* Effect.promise(() => Promise.resolve(adapter.target(space)))
yield* sync.run(Session.Event.Updated, {
sessionID: input.sessionID,
@@ -726,12 +726,12 @@ export const layer = Layer.effect(
const info = fromRow(row)
yield* Effect.catch(
Effect.gen(function* () {
const adaptor = getAdaptor(info.projectID, row.type)
yield* Effect.tryPromise(() => Promise.resolve(adaptor.remove(info)))
const adapter = getAdapter(info.projectID, row.type)
yield* Effect.tryPromise(() => Promise.resolve(adapter.remove(info)))
}),
() =>
Effect.sync(() => {
log.error("adaptor not available when removing workspace", { type: row.type })
log.error("adapter not available when removing workspace", { type: row.type })
}),
)

View File

@@ -45,11 +45,30 @@ import { Workspace } from "@/control-plane/workspace"
import { Worktree } from "@/worktree"
import { Pty } from "@/pty"
import { Installation } from "@/installation"
import * as Effect from "effect/Effect"
import { ShareNext } from "@/share/share-next"
import { SessionShare } from "@/share/session"
import { SyncEvent } from "@/sync"
import { Npm } from "@opencode-ai/core/npm"
import { memoMap } from "@opencode-ai/core/effect/memo-map"
import { BackgroundJob } from "@/background/job"
// Adjusts the default Config layer to ensure that plugins are always initialised before
// any other layers read the current config
const ConfigWithPluginPriority = Layer.effect(
Config.Service,
Effect.gen(function* () {
const config = yield* Config.Service
const plugin = yield* Plugin.Service
return {
...config,
get: () => Effect.andThen(plugin.init(), config.get),
getGlobal: () => Effect.andThen(plugin.init(), config.getGlobal),
getConsoleState: () => Effect.andThen(plugin.init(), config.getConsoleState),
}
}),
).pipe(Layer.provide(Layer.merge(Plugin.defaultLayer, Config.defaultLayer)))
export const AppLayer = Layer.mergeAll(
Npm.defaultLayer,
@@ -57,7 +76,7 @@ export const AppLayer = Layer.mergeAll(
Bus.defaultLayer,
Auth.defaultLayer,
Account.defaultLayer,
Config.defaultLayer,
ConfigWithPluginPriority,
Git.defaultLayer,
Ripgrep.defaultLayer,
File.defaultLayer,
@@ -75,6 +94,7 @@ export const AppLayer = Layer.mergeAll(
Todo.defaultLayer,
Session.defaultLayer,
SessionStatus.defaultLayer,
BackgroundJob.defaultLayer,
SessionRunState.defaultLayer,
SessionProcessor.defaultLayer,
SessionCompaction.defaultLayer,

View File

@@ -2,6 +2,7 @@ import z from "zod"
import { randomBytes } from "crypto"
const prefixes = {
job: "job",
event: "evt",
session: "ses",
message: "msg",

View File

@@ -3,7 +3,7 @@ import type {
PluginInput,
Plugin as PluginInstance,
PluginModule,
WorkspaceAdaptor as PluginWorkspaceAdaptor,
WorkspaceAdapter as PluginWorkspaceAdapter,
} from "@opencode-ai/plugin"
import { Config } from "@/config/config"
import { Bus } from "../bus"
@@ -24,8 +24,8 @@ import { InstanceState } from "@/effect/instance-state"
import { errorMessage } from "@/util/error"
import { PluginLoader } from "./loader"
import { parsePluginSpecifier, readPluginId, readV1Plugin, resolvePluginId } from "./shared"
import { registerAdaptor } from "@/control-plane/adaptors"
import type { WorkspaceAdaptor } from "@/control-plane/types"
import { registerAdapter } from "@/control-plane/adapters"
import type { WorkspaceAdapter } from "@/control-plane/types"
const log = Log.create({ service: "plugin" })
@@ -138,8 +138,8 @@ export const layer = Layer.effect(
worktree: ctx.worktree,
directory: ctx.directory,
experimental_workspace: {
register(type: string, adaptor: PluginWorkspaceAdaptor) {
registerAdaptor(ctx.project.id, type, adaptor as WorkspaceAdaptor)
register(type: string, adapter: PluginWorkspaceAdapter) {
registerAdapter(ctx.project.id, type, adapter as WorkspaceAdapter)
},
},
get serverUrl(): URL {

View File

@@ -1,4 +1,3 @@
import { Plugin } from "../plugin"
import { Format } from "../format"
import { LSP } from "@/lsp/lsp"
import { File } from "../file"
@@ -7,6 +6,7 @@ import * as Project from "./project"
import * as Vcs from "./vcs"
import { Bus } from "../bus"
import { Command } from "../command"
import { Plugin } from "../plugin"
import { InstanceState } from "@/effect/instance-state"
import * as Log from "@opencode-ai/core/util/log"
import { FileWatcher } from "@/file/watcher"
@@ -17,20 +17,20 @@ import { Config } from "@/config/config"
export const InstanceBootstrap = Effect.gen(function* () {
const ctx = yield* InstanceState.context
Log.Default.info("bootstrapping", { directory: ctx.directory })
// everything depends on config so eager load it for nice traces
yield* Config.Service.use((svc) => svc.get())
// Plugin can mutate config so it has to be initialized before anything else.
yield* Plugin.Service.use((svc) => svc.init())
yield* Effect.all(
[
LSP.Service,
ShareNext.Service,
Format.Service,
File.Service,
FileWatcher.Service,
Vcs.Service,
Snapshot.Service,
].map((s) => Effect.forkDetach(s.use((i) => i.init()))),
Config.Service.use((i) => i.get()),
...[
Plugin.Service,
LSP.Service,
ShareNext.Service,
Format.Service,
File.Service,
FileWatcher.Service,
Vcs.Service,
Snapshot.Service,
].map((s) => s.use((i) => i.init())),
].map((e) => Effect.forkDetach(e)),
).pipe(Effect.withSpan("InstanceBootstrap.init"))
const projectID = ctx.project.id

View File

@@ -2,10 +2,10 @@ import { Hono } from "hono"
import { describeRoute, resolver, validator } from "hono-openapi"
import z from "zod"
import { Effect } from "effect"
import { listAdaptors } from "@/control-plane/adaptors"
import { listAdapters } from "@/control-plane/adapters"
import { Workspace } from "@/control-plane/workspace"
import { AppRuntime } from "@/effect/app-runtime"
import { WorkspaceAdaptorEntry } from "@/control-plane/types"
import { WorkspaceAdapterEntry } from "@/control-plane/types"
import { zodObject } from "@/util/effect-zod"
import { Instance } from "@/project/instance"
import { errors } from "../../error"
@@ -18,24 +18,24 @@ const log = Log.create({ service: "server.workspace" })
export const WorkspaceRoutes = lazy(() =>
new Hono()
.get(
"/adaptor",
"/adapter",
describeRoute({
summary: "List workspace adaptors",
description: "List all available workspace adaptors for the current project.",
operationId: "experimental.workspace.adaptor.list",
summary: "List workspace adapters",
description: "List all available workspace adapters for the current project.",
operationId: "experimental.workspace.adapter.list",
responses: {
200: {
description: "Workspace adaptors",
description: "Workspace adapters",
content: {
"application/json": {
schema: resolver(z.array(zodObject(WorkspaceAdaptorEntry))),
schema: resolver(z.array(zodObject(WorkspaceAdapterEntry))),
},
},
},
},
}),
async (c) => {
return c.json(await listAdaptors(Instance.project.id))
return c.json(await listAdapters(Instance.project.id))
},
)
.post(

View File

@@ -19,6 +19,7 @@ import { SessionApi } from "./groups/session"
import { SyncApi } from "./groups/sync"
import { TuiApi } from "./groups/tui"
import { WorkspaceApi } from "./groups/workspace"
import { V2Api } from "./groups/v2"
// SSE event schemas built from the same BusEvent/SyncEvent registries that
// the Hono spec uses, so both specs emit identical Event/SyncEvent components.
@@ -40,6 +41,7 @@ export const InstanceHttpApi = HttpApi.make("opencode-instance")
.addHttpApi(ProviderApi)
.addHttpApi(SessionApi)
.addHttpApi(SyncApi)
.addHttpApi(V2Api)
.addHttpApi(TuiApi)
.addHttpApi(WorkspaceApi)

View File

@@ -0,0 +1,14 @@
import { HttpApi, OpenApi } from "effect/unstable/httpapi"
import { MessageGroup } from "./v2/message"
import { SessionGroup } from "./v2/session"
// Aggregated HttpApi for the experimental v2 surface: the session route
// group plus the message route group, stamped with shared OpenAPI metadata.
const groups = HttpApi.make("v2").add(SessionGroup).add(MessageGroup)

export const V2Api = groups.annotateMerge(
  OpenApi.annotations({
    title: "opencode experimental HttpApi",
    version: "0.0.1",
    description: "Experimental HttpApi surface for selected instance routes.",
  }),
)

View File

@@ -0,0 +1,69 @@
import { SessionID } from "@/session/schema"
import { SessionMessage } from "@/v2/session-message"
import { Schema } from "effect"
import { HttpApiEndpoint, HttpApiError, HttpApiGroup, OpenApi } from "effect/unstable/httpapi"
import { Authorization } from "../../middleware/authorization"
// HttpApi group for the experimental v2 message routes.
//
// GET /api/session/:sessionID/message serves one page of projected v2
// messages. The query schema is a union of two mutually exclusive shapes:
//   - first page: optional `limit` / `order`, `cursor` forbidden
//   - follow-up page: required `cursor` plus optional `limit`, with `order`
//     forbidden (the cursor already carries the order it was minted with)
// `Schema.optional(Schema.Never)` is what rejects the excluded key in each
// variant while keeping the two structs union-compatible.
export const MessageGroup = HttpApiGroup.make("v2.message")
  .add(
    HttpApiEndpoint.get("messages", "/api/session/:sessionID/message", {
      params: { sessionID: SessionID },
      query: Schema.Union([
        // First-page variant: no cursor allowed.
        Schema.Struct({
          // Query-string integer, constrained to 1..200.
          limit: Schema.optional(
            Schema.NumberFromString.check(
              Schema.isInt(),
              Schema.isGreaterThanOrEqualTo(1),
              Schema.isLessThanOrEqualTo(200),
            ),
          ).annotate({
            description:
              "Maximum number of messages to return. When omitted, the endpoint returns its default page size.",
          }),
          order: Schema.optional(Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")])).annotate({
            description: "Message order for the first page. Use desc for newest first or asc for oldest first.",
          }),
          cursor: Schema.optional(Schema.Never),
        }),
        // Follow-up-page variant: cursor required, order forbidden.
        Schema.Struct({
          limit: Schema.optional(
            Schema.NumberFromString.check(
              Schema.isInt(),
              Schema.isGreaterThanOrEqualTo(1),
              Schema.isLessThanOrEqualTo(200),
            ),
          ).annotate({
            description:
              "Maximum number of messages to return. When omitted, the endpoint returns its default page size.",
          }),
          cursor: Schema.String.annotate({
            description:
              "Opaque pagination cursor returned as cursor.previous or cursor.next in the previous response. Do not combine with order.",
          }),
          order: Schema.optional(Schema.Never),
        }),
      ]).annotate({ identifier: "V2SessionMessagesQuery" }),
      // Response: one page of messages plus opaque cursors for both
      // directions (absent when the page is empty).
      success: Schema.Struct({
        items: Schema.Array(SessionMessage.Message),
        cursor: Schema.Struct({
          previous: Schema.String.pipe(Schema.optional),
          next: Schema.String.pipe(Schema.optional),
        }),
      }).annotate({ identifier: "V2SessionMessagesResponse" }),
      // A malformed cursor surfaces as 400.
      error: HttpApiError.BadRequest,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.messages",
        summary: "Get v2 session messages",
        description:
          "Retrieve projected v2 messages for a session. Items keep the requested order across pages; use cursor.next or cursor.previous to move through the ordered timeline.",
      }),
    ),
  )
  .annotateMerge(
    OpenApi.annotations({
      title: "v2 messages",
      description: "Experimental v2 message routes.",
    }),
  )
  // All v2 message routes require an authorized caller.
  .middleware(Authorization)

View File

@@ -0,0 +1,129 @@
import { WorkspaceID } from "@/control-plane/schema"
import { SessionID } from "@/session/schema"
import { Session } from "@/session/session"
import { SessionMessage } from "@/v2/session-message"
import { Prompt } from "@/v2/session-prompt"
import { SessionV2 } from "@/v2/session"
import { Schema, SchemaGetter } from "effect"
import { HttpApiEndpoint, HttpApiError, HttpApiGroup, HttpApiSchema, OpenApi } from "effect/unstable/httpapi"
import { Authorization } from "../../middleware/authorization"
// HttpApi group for the experimental v2 session routes: paginated listing,
// prompt submission, compaction, and wait-for-idle.
export const SessionGroup = HttpApiGroup.make("v2.session")
  .add(
    // GET /api/session — cursor-paginated session listing. The query is a
    // union of two mutually exclusive shapes: a first-page shape (filters
    // allowed, cursor forbidden) and a follow-up shape (cursor required,
    // filters/order forbidden because the cursor carries them). The
    // `Schema.optional(Schema.Never)` entries reject the excluded keys.
    HttpApiEndpoint.get("sessions", "/api/session", {
      query: Schema.Union([
        // First-page variant.
        Schema.Struct({
          // Query-string integer, constrained to 1..200.
          limit: Schema.optional(
            Schema.NumberFromString.check(
              Schema.isInt(),
              Schema.isGreaterThanOrEqualTo(1),
              Schema.isLessThanOrEqualTo(200),
            ),
          ).annotate({
            description: "Maximum number of sessions to return. Defaults to the newest 50 sessions.",
          }),
          order: Schema.optional(Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")])).annotate({
            description: "Session order for the first page. Use desc for newest first or asc for oldest first.",
          }),
          directory: Schema.String.pipe(Schema.optional),
          path: Schema.String.pipe(Schema.optional),
          workspace: WorkspaceID.pipe(Schema.optional),
          // Query strings carry "true"/"false"; decode to a boolean and
          // encode back to the literal for round-tripping.
          roots: Schema.Literals(["true", "false"])
            .pipe(
              Schema.decodeTo(Schema.Boolean, {
                decode: SchemaGetter.transform((value) => value === "true"),
                encode: SchemaGetter.transform((value) => (value ? "true" : "false")),
              }),
            )
            .pipe(Schema.optional),
          start: Schema.NumberFromString.pipe(Schema.optional),
          search: Schema.String.pipe(Schema.optional),
          cursor: Schema.optional(Schema.Never),
        }),
        // Follow-up-page variant: only limit + cursor are accepted.
        Schema.Struct({
          limit: Schema.optional(
            Schema.NumberFromString.check(
              Schema.isInt(),
              Schema.isGreaterThanOrEqualTo(1),
              Schema.isLessThanOrEqualTo(200),
            ),
          ).annotate({
            description: "Maximum number of sessions to return. Defaults to the newest 50 sessions.",
          }),
          cursor: Schema.String.annotate({
            description:
              "Opaque pagination cursor returned as cursor.previous or cursor.next in the previous response. Do not combine with order.",
          }),
          order: Schema.optional(Schema.Never),
          directory: Schema.optional(Schema.Never),
          path: Schema.optional(Schema.Never),
          workspace: Schema.optional(Schema.Never),
          roots: Schema.optional(Schema.Never),
          start: Schema.optional(Schema.Never),
          search: Schema.optional(Schema.Never),
        }),
      ]).annotate({ identifier: "V2SessionsQuery" }),
      // Response: one page of sessions plus opaque cursors for both
      // directions (absent when the page is empty).
      success: Schema.Struct({
        items: Schema.Array(Session.Info),
        cursor: Schema.Struct({
          previous: Schema.String.pipe(Schema.optional),
          next: Schema.String.pipe(Schema.optional),
        }),
      }).annotate({ identifier: "V2SessionsResponse" }),
      // A malformed cursor surfaces as 400.
      error: HttpApiError.BadRequest,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.list",
        summary: "List v2 sessions",
        description:
          "Retrieve sessions in the requested order. Items keep that order across pages; use cursor.next or cursor.previous to move through the ordered list.",
      }),
    ),
  )
  .add(
    // POST /api/session/:sessionID/prompt — create a message and queue it
    // for the agent loop; returns the created message.
    HttpApiEndpoint.post("prompt", "/api/session/:sessionID/prompt", {
      params: { sessionID: SessionID },
      payload: Schema.Struct({
        prompt: Prompt,
        delivery: SessionV2.Delivery.pipe(Schema.optional),
      }),
      success: SessionMessage.Message,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.prompt",
        summary: "Send v2 message",
        description: "Create a v2 session message and queue it for the agent loop.",
      }),
    ),
  )
  .add(
    // POST /api/session/:sessionID/compact — fire-and-return, 204 body.
    HttpApiEndpoint.post("compact", "/api/session/:sessionID/compact", {
      params: { sessionID: SessionID },
      success: HttpApiSchema.NoContent,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.compact",
        summary: "Compact v2 session",
        description: "Compact a v2 session conversation.",
      }),
    ),
  )
  .add(
    // POST /api/session/:sessionID/wait — blocks until the agent loop is
    // idle, then returns 204.
    HttpApiEndpoint.post("wait", "/api/session/:sessionID/wait", {
      params: { sessionID: SessionID },
      success: HttpApiSchema.NoContent,
    }).annotateMerge(
      OpenApi.annotations({
        identifier: "v2.session.wait",
        summary: "Wait for v2 session",
        description: "Wait for a v2 session agent loop to become idle.",
      }),
    ),
  )
  .annotateMerge(
    OpenApi.annotations({
      title: "v2",
      description: "Experimental v2 routes.",
    }),
  )
  // All v2 session routes require an authorized caller.
  .middleware(Authorization)

View File

@@ -1,5 +1,5 @@
import { Workspace } from "@/control-plane/workspace"
import { WorkspaceAdaptorEntry } from "@/control-plane/types"
import { WorkspaceAdapterEntry } from "@/control-plane/types"
import { NonNegativeInt } from "@/util/schema"
import { Schema, Struct } from "effect"
import { HttpApi, HttpApiEndpoint, HttpApiError, HttpApiGroup, OpenApi } from "effect/unstable/httpapi"
@@ -16,7 +16,7 @@ export const SessionRestoreResponse = Schema.Struct({
})
export const WorkspacePaths = {
adaptors: `${root}/adaptor`,
adapters: `${root}/adapter`,
list: root,
status: `${root}/status`,
remove: `${root}/:id`,
@@ -27,13 +27,13 @@ export const WorkspaceApi = HttpApi.make("workspace")
.add(
HttpApiGroup.make("workspace")
.add(
HttpApiEndpoint.get("adaptors", WorkspacePaths.adaptors, {
success: described(Schema.Array(WorkspaceAdaptorEntry), "Workspace adaptors"),
HttpApiEndpoint.get("adapters", WorkspacePaths.adapters, {
success: described(Schema.Array(WorkspaceAdapterEntry), "Workspace adapters"),
}).annotateMerge(
OpenApi.annotations({
identifier: "experimental.workspace.adaptor.list",
summary: "List workspace adaptors",
description: "List all available workspace adaptors for the current project.",
identifier: "experimental.workspace.adapter.list",
summary: "List workspace adapters",
description: "List all available workspace adapters for the current project.",
}),
),
HttpApiEndpoint.get("list", WorkspacePaths.list, {

View File

@@ -0,0 +1,6 @@
import { SessionV2 } from "@/v2/session"
import { Layer } from "effect"
import { messageHandlers } from "./v2/message"
import { sessionHandlers } from "./v2/session"
// Combined handler layer for both v2 route groups, backed by the default
// SessionV2 service implementation.
const merged = Layer.mergeAll(sessionHandlers, messageHandlers)
export const v2Handlers = merged.pipe(Layer.provide(SessionV2.defaultLayer))

View File

@@ -0,0 +1,60 @@
import { SessionMessage } from "@/v2/session-message"
import { SessionV2 } from "@/v2/session"
import { Effect, Schema } from "effect"
import * as DateTime from "effect/DateTime"
import { HttpApiBuilder, HttpApiError } from "effect/unstable/httpapi"
import { InstanceHttpApi } from "../../api"
// Page size used when the request does not specify `limit`.
const DefaultMessagesLimit = 50
// Payload carried inside the opaque message-pagination cursor: the boundary
// message's id and creation time (epoch millis), plus the order and the
// direction the cursor was minted for.
const Cursor = Schema.Struct({
  id: SessionMessage.ID,
  time: Schema.Number,
  order: Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")]),
  direction: Schema.Union([Schema.Literal("previous"), Schema.Literal("next")]),
})
// Throws on malformed input; the handler maps failures to 400 BadRequest.
const decodeCursor = Schema.decodeUnknownSync(Cursor)
// Opaque cursor codec for message pagination: a small JSON payload wrapped
// in base64url so clients can round-trip it without inspecting it.
const cursor = {
  encode(message: SessionMessage.Message, order: "asc" | "desc", direction: "previous" | "next") {
    const payload = {
      id: message.id,
      time: DateTime.toEpochMillis(message.time.created),
      order,
      direction,
    }
    return Buffer.from(JSON.stringify(payload)).toString("base64url")
  },
  decode(input: string) {
    const raw = Buffer.from(input, "base64url").toString("utf8")
    return decodeCursor(JSON.parse(raw))
  },
}
// Handlers for the v2.message group. The SessionV2 service is resolved once
// when the layer is built; each request then serves one cursor-paginated
// page of projected messages.
export const messageHandlers = HttpApiBuilder.group(InstanceHttpApi, "v2.message", (handlers) =>
  Effect.gen(function* () {
    const session = yield* SessionV2.Service
    return handlers.handle(
      "messages",
      Effect.fn(function* (ctx) {
        // A cursor that fails to decode is a client error, not a server fault.
        const parsed = yield* Effect.try({
          try: () => (ctx.query.cursor ? cursor.decode(ctx.query.cursor) : undefined),
          catch: () => new HttpApiError.BadRequest({}),
        })
        // A cursor pins the order for follow-up pages; otherwise fall back to
        // the query parameter, defaulting to newest-first.
        const order = parsed?.order ?? ctx.query.order ?? "desc"
        const page = yield* session.messages({
          sessionID: ctx.params.sessionID,
          limit: ctx.query.limit ?? DefaultMessagesLimit,
          order,
          cursor: parsed ? { id: parsed.id, time: parsed.time, direction: parsed.direction } : undefined,
        })
        // Mint a cursor off each boundary element; an empty page yields none.
        const head = page[0]
        const tail = page.at(-1)
        return {
          items: page,
          cursor: {
            previous: head ? cursor.encode(head, order, "previous") : undefined,
            next: tail ? cursor.encode(tail, order, "next") : undefined,
          },
        }
      }),
    )
  }),
)

View File

@@ -0,0 +1,110 @@
import { WorkspaceID } from "@/control-plane/schema"
import { Session } from "@/session/session"
import { SessionV2 } from "@/v2/session"
import { Effect, Schema } from "effect"
import { HttpApiBuilder, HttpApiError, HttpApiSchema } from "effect/unstable/httpapi"
import { InstanceHttpApi } from "../../api"
// Page size used when the request does not specify `limit`.
const DefaultSessionsLimit = 50
// Payload carried inside the opaque session-pagination cursor: the boundary
// session's id and creation time, the order/direction the cursor was minted
// for, and the filters of the original query so follow-up pages repeat them.
const SessionCursor = Schema.Struct({
  id: Session.Info.fields.id,
  time: Schema.Number,
  order: Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")]),
  direction: Schema.Union([Schema.Literal("previous"), Schema.Literal("next")]),
  directory: Schema.String.pipe(Schema.optional),
  path: Schema.String.pipe(Schema.optional),
  workspaceID: WorkspaceID.pipe(Schema.optional),
  roots: Schema.Boolean.pipe(Schema.optional),
  start: Schema.Number.pipe(Schema.optional),
  search: Schema.String.pipe(Schema.optional),
})
type SessionCursor = typeof SessionCursor.Type
// Throws on malformed input; the handler maps failures to 400 BadRequest.
const decodeCursor = Schema.decodeUnknownSync(SessionCursor)
// Opaque cursor codec for session pagination. Besides the boundary session's
// id/creation time and the requested order/direction, the cursor carries the
// active filters so follow-up pages reproduce the original query.
const sessionCursor = {
  encode(
    session: Session.Info,
    order: "asc" | "desc",
    direction: "previous" | "next",
    filters: Pick<SessionCursor, "directory" | "path" | "workspaceID" | "roots" | "start" | "search">,
  ) {
    // Spread `filters` FIRST: callers may pass a previously decoded cursor,
    // which at runtime also carries id/time/order/direction (Pick only
    // narrows the type, not the object). Spreading it last would clobber the
    // fresh boundary values and freeze pagination on the same page.
    return Buffer.from(
      JSON.stringify({ ...filters, id: session.id, time: session.time.created, order, direction }),
    ).toString("base64url")
  },
  decode(input: string) {
    return decodeCursor(JSON.parse(Buffer.from(input, "base64url").toString("utf8")))
  },
}
// Handlers for the v2.session group: paginated listing, prompt submission,
// compaction, and wait-for-idle. The SessionV2 service is resolved once when
// the layer is built.
export const sessionHandlers = HttpApiBuilder.group(InstanceHttpApi, "v2.session", (handlers) =>
  Effect.gen(function* () {
    const session = yield* SessionV2.Service
    return handlers
      .handle(
        "sessions",
        Effect.fn(function* (ctx) {
          // A cursor that fails to decode is a client error, not a server fault.
          const decoded = yield* Effect.try({
            try: () => (ctx.query.cursor ? sessionCursor.decode(ctx.query.cursor) : undefined),
            catch: () => new HttpApiError.BadRequest({}),
          })
          // A cursor pins the order for follow-up pages; otherwise fall back
          // to the query parameter, defaulting to newest-first.
          const order = decoded?.order ?? ctx.query.order ?? "desc"
          // Extract ONLY the filter fields from a decoded cursor. Passing the
          // decoded cursor object itself into sessionCursor.encode would also
          // spread its id/time/order/direction and clobber the fresh boundary
          // values, so page N+1 would mint cursors pointing back at page N.
          const filters = decoded
            ? {
                directory: decoded.directory,
                path: decoded.path,
                workspaceID: decoded.workspaceID,
                roots: decoded.roots,
                start: decoded.start,
                search: decoded.search,
              }
            : {
                directory: ctx.query.directory,
                path: ctx.query.path,
                workspaceID: ctx.query.workspace ? WorkspaceID.make(ctx.query.workspace) : undefined,
                roots: ctx.query.roots,
                start: ctx.query.start,
                search: ctx.query.search,
              }
          const sessions = yield* session.list({
            limit: ctx.query.limit ?? DefaultSessionsLimit,
            order,
            directory: filters.directory,
            path: filters.path,
            workspaceID: filters.workspaceID,
            roots: filters.roots,
            start: filters.start,
            search: filters.search,
            cursor: decoded ? { id: decoded.id, time: decoded.time, direction: decoded.direction } : undefined,
          })
          // Mint a cursor off each boundary element; an empty page yields none.
          const first = sessions[0]
          const last = sessions.at(-1)
          return {
            items: sessions,
            cursor: {
              previous: first ? sessionCursor.encode(first, order, "previous", filters) : undefined,
              next: last ? sessionCursor.encode(last, order, "next", filters) : undefined,
            },
          }
        }),
      )
      .handle(
        "prompt",
        Effect.fn(function* (ctx) {
          // Queue the message for the agent loop and echo it back.
          return yield* session.prompt({
            sessionID: ctx.params.sessionID,
            prompt: ctx.payload.prompt,
            delivery: ctx.payload.delivery ?? SessionV2.DefaultDelivery,
          })
        }),
      )
      .handle(
        "compact",
        Effect.fn(function* (ctx) {
          yield* session.compact(ctx.params.sessionID)
          return HttpApiSchema.NoContent.make()
        }),
      )
      .handle(
        "wait",
        Effect.fn(function* (ctx) {
          // Blocks until the session's agent loop is idle.
          yield* session.wait(ctx.params.sessionID)
          return HttpApiSchema.NoContent.make()
        }),
      )
  }),
)

View File

@@ -1,4 +1,4 @@
import { listAdaptors } from "@/control-plane/adaptors"
import { listAdapters } from "@/control-plane/adapters"
import { Workspace } from "@/control-plane/workspace"
import * as InstanceState from "@/effect/instance-state"
import { Effect } from "effect"
@@ -10,9 +10,9 @@ export const workspaceHandlers = HttpApiBuilder.group(InstanceHttpApi, "workspac
Effect.gen(function* () {
const workspace = yield* Workspace.Service
const adaptors = Effect.fn("WorkspaceHttpApi.adaptors")(function* () {
const adapters = Effect.fn("WorkspaceHttpApi.adapters")(function* () {
const instance = yield* InstanceState.context
return yield* Effect.promise(() => listAdaptors(instance.project.id))
return yield* Effect.promise(() => listAdapters(instance.project.id))
})
const list = Effect.fn("WorkspaceHttpApi.list")(function* () {
@@ -51,7 +51,7 @@ export const workspaceHandlers = HttpApiBuilder.group(InstanceHttpApi, "workspac
})
return handlers
.handle("adaptors", adaptors)
.handle("adapters", adapters)
.handle("list", list)
.handle("create", create)
.handle("status", status)

View File

@@ -1,4 +1,4 @@
import { getAdaptor } from "@/control-plane/adaptors"
import { getAdapter } from "@/control-plane/adapters"
import { WorkspaceID } from "@/control-plane/schema"
import type { Target } from "@/control-plane/types"
import { Workspace } from "@/control-plane/workspace"
@@ -89,8 +89,8 @@ function missingWorkspaceResponse(id: WorkspaceID): HttpServerResponse.HttpServe
function resolveTarget(workspace: Workspace.Info): Effect.Effect<Target> {
return Effect.gen(function* () {
const adaptor = yield* Effect.sync(() => getAdaptor(workspace.projectID, workspace.type))
return yield* Effect.promise(() => Promise.resolve(adaptor.target(workspace)))
const adapter = yield* Effect.sync(() => getAdapter(workspace.projectID, workspace.type))
return yield* Effect.promise(() => Promise.resolve(adapter.target(workspace)))
})
}

View File

@@ -58,6 +58,7 @@ import { questionHandlers } from "./handlers/question"
import { sessionHandlers } from "./handlers/session"
import { syncHandlers } from "./handlers/sync"
import { tuiHandlers } from "./handlers/tui"
import { v2Handlers } from "./handlers/v2"
import { workspaceHandlers } from "./handlers/workspace"
import { instanceContextLayer, instanceRouterMiddleware } from "./middleware/instance-context"
import { workspaceRouterMiddleware, workspaceRoutingLayer } from "./middleware/workspace-routing"
@@ -109,6 +110,7 @@ const instanceApiRoutes = HttpApiBuilder.layer(InstanceHttpApi).pipe(
providerHandlers,
sessionHandlers,
syncHandlers,
v2Handlers,
tuiHandlers,
workspaceHandlers,
]),

View File

@@ -1,7 +1,8 @@
import { describeRoute, resolver, validator } from "hono-openapi"
import { Hono } from "hono"
import type { UpgradeWebSocket } from "hono/ws"
import { Effect } from "effect"
import { Context, Effect } from "effect"
import { Flag } from "@opencode-ai/core/flag/flag"
import z from "zod"
import { Format } from "@/format"
import { TuiRoutes } from "./tui"
@@ -24,12 +25,136 @@ import { ExperimentalRoutes } from "./experimental"
import { ProviderRoutes } from "./provider"
import { EventRoutes } from "./event"
import { SyncRoutes } from "./sync"
import { V2Routes } from "./v2"
import { InstanceMiddleware } from "./middleware"
import { jsonRequest } from "./trace"
import { ExperimentalHttpApiServer } from "./httpapi/server"
import { EventPaths } from "./httpapi/event"
import { ExperimentalPaths } from "./httpapi/groups/experimental"
import { FilePaths } from "./httpapi/groups/file"
import { InstancePaths } from "./httpapi/groups/instance"
import { McpPaths } from "./httpapi/groups/mcp"
import { PtyPaths } from "./httpapi/groups/pty"
import { SessionPaths } from "./httpapi/groups/session"
import { SyncPaths } from "./httpapi/groups/sync"
import { TuiPaths } from "./httpapi/groups/tui"
import { WorkspacePaths } from "./httpapi/groups/workspace"
export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => {
const app = new Hono()
if (Flag.OPENCODE_EXPERIMENTAL_HTTPAPI) {
const handler = ExperimentalHttpApiServer.webHandler().handler
const context = Context.empty() as Context.Context<unknown>
app.all("/api/*", (c) => handler(c.req.raw, context))
app.get(EventPaths.event, (c) => handler(c.req.raw, context))
app.get("/question", (c) => handler(c.req.raw, context))
app.post("/question/:requestID/reply", (c) => handler(c.req.raw, context))
app.post("/question/:requestID/reject", (c) => handler(c.req.raw, context))
app.get("/permission", (c) => handler(c.req.raw, context))
app.post("/permission/:requestID/reply", (c) => handler(c.req.raw, context))
app.get("/config", (c) => handler(c.req.raw, context))
app.patch("/config", (c) => handler(c.req.raw, context))
app.get("/config/providers", (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.console, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.consoleOrgs, (c) => handler(c.req.raw, context))
app.post(ExperimentalPaths.consoleSwitch, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.tool, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.toolIDs, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.worktree, (c) => handler(c.req.raw, context))
app.post(ExperimentalPaths.worktree, (c) => handler(c.req.raw, context))
app.delete(ExperimentalPaths.worktree, (c) => handler(c.req.raw, context))
app.post(ExperimentalPaths.worktreeReset, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.session, (c) => handler(c.req.raw, context))
app.get(ExperimentalPaths.resource, (c) => handler(c.req.raw, context))
app.get("/provider", (c) => handler(c.req.raw, context))
app.get("/provider/auth", (c) => handler(c.req.raw, context))
app.post("/provider/:providerID/oauth/authorize", (c) => handler(c.req.raw, context))
app.post("/provider/:providerID/oauth/callback", (c) => handler(c.req.raw, context))
app.get("/project", (c) => handler(c.req.raw, context))
app.get("/project/current", (c) => handler(c.req.raw, context))
app.post("/project/git/init", (c) => handler(c.req.raw, context))
app.patch("/project/:projectID", (c) => handler(c.req.raw, context))
app.get(FilePaths.findText, (c) => handler(c.req.raw, context))
app.get(FilePaths.findFile, (c) => handler(c.req.raw, context))
app.get(FilePaths.findSymbol, (c) => handler(c.req.raw, context))
app.get(FilePaths.list, (c) => handler(c.req.raw, context))
app.get(FilePaths.content, (c) => handler(c.req.raw, context))
app.get(FilePaths.status, (c) => handler(c.req.raw, context))
app.get(InstancePaths.path, (c) => handler(c.req.raw, context))
app.post(InstancePaths.dispose, (c) => handler(c.req.raw, context))
app.get(InstancePaths.vcs, (c) => handler(c.req.raw, context))
app.get(InstancePaths.vcsDiff, (c) => handler(c.req.raw, context))
app.get(InstancePaths.command, (c) => handler(c.req.raw, context))
app.get(InstancePaths.agent, (c) => handler(c.req.raw, context))
app.get(InstancePaths.skill, (c) => handler(c.req.raw, context))
app.get(InstancePaths.lsp, (c) => handler(c.req.raw, context))
app.get(InstancePaths.formatter, (c) => handler(c.req.raw, context))
app.get(McpPaths.status, (c) => handler(c.req.raw, context))
app.post(McpPaths.status, (c) => handler(c.req.raw, context))
app.post(McpPaths.auth, (c) => handler(c.req.raw, context))
app.post(McpPaths.authCallback, (c) => handler(c.req.raw, context))
app.post(McpPaths.authAuthenticate, (c) => handler(c.req.raw, context))
app.delete(McpPaths.auth, (c) => handler(c.req.raw, context))
app.post(McpPaths.connect, (c) => handler(c.req.raw, context))
app.post(McpPaths.disconnect, (c) => handler(c.req.raw, context))
app.post(SyncPaths.start, (c) => handler(c.req.raw, context))
app.post(SyncPaths.replay, (c) => handler(c.req.raw, context))
app.post(SyncPaths.history, (c) => handler(c.req.raw, context))
app.get(PtyPaths.list, (c) => handler(c.req.raw, context))
app.post(PtyPaths.create, (c) => handler(c.req.raw, context))
app.get(PtyPaths.get, (c) => handler(c.req.raw, context))
app.put(PtyPaths.update, (c) => handler(c.req.raw, context))
app.delete(PtyPaths.remove, (c) => handler(c.req.raw, context))
app.get(PtyPaths.connect, (c) => handler(c.req.raw, context))
app.get(SessionPaths.list, (c) => handler(c.req.raw, context))
app.get(SessionPaths.status, (c) => handler(c.req.raw, context))
app.get(SessionPaths.get, (c) => handler(c.req.raw, context))
app.get(SessionPaths.children, (c) => handler(c.req.raw, context))
app.get(SessionPaths.todo, (c) => handler(c.req.raw, context))
app.get(SessionPaths.diff, (c) => handler(c.req.raw, context))
app.get(SessionPaths.messages, (c) => handler(c.req.raw, context))
app.get(SessionPaths.message, (c) => handler(c.req.raw, context))
app.post(SessionPaths.create, (c) => handler(c.req.raw, context))
app.delete(SessionPaths.remove, (c) => handler(c.req.raw, context))
app.patch(SessionPaths.update, (c) => handler(c.req.raw, context))
app.post(SessionPaths.init, (c) => handler(c.req.raw, context))
app.post(SessionPaths.fork, (c) => handler(c.req.raw, context))
app.post(SessionPaths.abort, (c) => handler(c.req.raw, context))
app.post(SessionPaths.share, (c) => handler(c.req.raw, context))
app.delete(SessionPaths.share, (c) => handler(c.req.raw, context))
app.post(SessionPaths.summarize, (c) => handler(c.req.raw, context))
app.post(SessionPaths.prompt, (c) => handler(c.req.raw, context))
app.post(SessionPaths.promptAsync, (c) => handler(c.req.raw, context))
app.post(SessionPaths.command, (c) => handler(c.req.raw, context))
app.post(SessionPaths.shell, (c) => handler(c.req.raw, context))
app.post(SessionPaths.revert, (c) => handler(c.req.raw, context))
app.post(SessionPaths.unrevert, (c) => handler(c.req.raw, context))
app.post(SessionPaths.permissions, (c) => handler(c.req.raw, context))
app.delete(SessionPaths.deleteMessage, (c) => handler(c.req.raw, context))
app.delete(SessionPaths.deletePart, (c) => handler(c.req.raw, context))
app.patch(SessionPaths.updatePart, (c) => handler(c.req.raw, context))
app.post(TuiPaths.appendPrompt, (c) => handler(c.req.raw, context))
app.post(TuiPaths.openHelp, (c) => handler(c.req.raw, context))
app.post(TuiPaths.openSessions, (c) => handler(c.req.raw, context))
app.post(TuiPaths.openThemes, (c) => handler(c.req.raw, context))
app.post(TuiPaths.openModels, (c) => handler(c.req.raw, context))
app.post(TuiPaths.submitPrompt, (c) => handler(c.req.raw, context))
app.post(TuiPaths.clearPrompt, (c) => handler(c.req.raw, context))
app.post(TuiPaths.executeCommand, (c) => handler(c.req.raw, context))
app.post(TuiPaths.showToast, (c) => handler(c.req.raw, context))
app.post(TuiPaths.publish, (c) => handler(c.req.raw, context))
app.post(TuiPaths.selectSession, (c) => handler(c.req.raw, context))
app.get(TuiPaths.controlNext, (c) => handler(c.req.raw, context))
app.post(TuiPaths.controlResponse, (c) => handler(c.req.raw, context))
app.get(WorkspacePaths.adapters, (c) => handler(c.req.raw, context))
app.post(WorkspacePaths.list, (c) => handler(c.req.raw, context))
app.get(WorkspacePaths.list, (c) => handler(c.req.raw, context))
app.get(WorkspacePaths.status, (c) => handler(c.req.raw, context))
app.delete(WorkspacePaths.remove, (c) => handler(c.req.raw, context))
app.post(WorkspacePaths.sessionRestore, (c) => handler(c.req.raw, context))
}
return app
.route("/project", ProjectRoutes())
.route("/pty", PtyRoutes(upgrade))
@@ -40,6 +165,7 @@ export const InstanceRoutes = (upgrade: UpgradeWebSocket): Hono => {
.route("/question", QuestionRoutes())
.route("/provider", ProviderRoutes())
.route("/sync", SyncRoutes())
.route("/api", V2Routes())
.route("/", FileRoutes())
.route("/", EventRoutes())
.route("/mcp", McpRoutes())

View File

@@ -0,0 +1,230 @@
import { WorkspaceID } from "@/control-plane/schema"
import { SessionID } from "@/session/schema"
import { Session } from "@/session/session"
import { SessionMessage } from "@/v2/session-message"
import { SessionV2 } from "@/v2/session"
import { zod } from "@/util/effect-zod"
import { lazy } from "@/util/lazy"
import { Effect, Schema } from "effect"
import * as DateTime from "effect/DateTime"
import { Hono } from "hono"
import { describeRoute, resolver, validator } from "hono-openapi"
import { HTTPException } from "hono/http-exception"
import z from "zod"
import { errors } from "../../error"
import { jsonRequest } from "./trace"
// Default page sizes used when the request does not specify `limit`.
const DefaultMessagesLimit = 50
const DefaultSessionsLimit = 50
// Payload carried inside the opaque session-pagination cursor: the boundary
// session's id and creation time, the order/direction the cursor was minted
// for, and the filters of the original query so follow-up pages repeat them.
const SessionCursor = Schema.Struct({
  id: SessionID,
  time: Schema.Number,
  order: Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")]),
  direction: Schema.Union([Schema.Literal("previous"), Schema.Literal("next")]),
  directory: Schema.String.pipe(Schema.optional),
  path: Schema.String.pipe(Schema.optional),
  workspaceID: WorkspaceID.pipe(Schema.optional),
  roots: Schema.Boolean.pipe(Schema.optional),
  start: Schema.Number.pipe(Schema.optional),
  search: Schema.String.pipe(Schema.optional),
})
type SessionCursor = typeof SessionCursor.Type
// Response shape for GET /api/session: one page of sessions plus opaque
// cursors for both directions (absent when the page is empty).
const SessionsResponse = Schema.Struct({
  items: Schema.Array(Session.Info),
  cursor: Schema.Struct({
    previous: Schema.String.pipe(Schema.optional),
    next: Schema.String.pipe(Schema.optional),
  }),
}).annotate({ identifier: "V2SessionsResponse" })
// Payload carried inside the opaque message-pagination cursor: the boundary
// message's id and creation time (epoch millis), plus the order and the
// direction the cursor was minted for.
const Cursor = Schema.Struct({
  id: SessionMessage.ID,
  time: Schema.Number,
  order: Schema.Union([Schema.Literal("asc"), Schema.Literal("desc")]),
  direction: Schema.Union([Schema.Literal("previous"), Schema.Literal("next")]),
})
// Response shape for GET /api/session/:sessionID/message.
const MessagesResponse = Schema.Struct({
  items: Schema.Array(SessionMessage.Message),
  cursor: Schema.Struct({
    previous: Schema.String.pipe(Schema.optional),
    next: Schema.String.pipe(Schema.optional),
  }),
}).annotate({ identifier: "V2SessionMessagesResponse" })
// Both decoders throw on malformed input; handlers map failures to 400.
const decodeCursor = Schema.decodeUnknownSync(Cursor)
const decodeSessionCursor = Schema.decodeUnknownSync(SessionCursor)
// Opaque cursor codec for session pagination. Besides the boundary session's
// id/creation time and the requested order/direction, the cursor carries the
// active filters so follow-up pages reproduce the original query.
const sessionCursor = {
  encode(
    session: Session.Info,
    order: "asc" | "desc",
    direction: "previous" | "next",
    filters: Pick<SessionCursor, "directory" | "path" | "workspaceID" | "roots" | "start" | "search">,
  ) {
    // Spread `filters` FIRST: callers may pass a previously decoded cursor,
    // which at runtime also carries id/time/order/direction (Pick only
    // narrows the type, not the object). Spreading it last would clobber the
    // fresh boundary values and freeze pagination on the same page.
    return Buffer.from(
      JSON.stringify({ ...filters, id: session.id, time: session.time.created, order, direction }),
    ).toString("base64url")
  },
  decode(input: string) {
    return decodeSessionCursor(JSON.parse(Buffer.from(input, "base64url").toString("utf8")))
  },
}
// Opaque cursor codec for message pagination: a small JSON payload wrapped
// in base64url so clients can round-trip it without inspecting it.
const cursor = {
  encode(message: SessionMessage.Message, order: "asc" | "desc", direction: "previous" | "next") {
    const payload = {
      id: message.id,
      time: DateTime.toEpochMillis(message.time.created),
      order,
      direction,
    }
    return Buffer.from(JSON.stringify(payload)).toString("base64url")
  },
  decode(input: string) {
    const raw = Buffer.from(input, "base64url").toString("utf8")
    return decodeCursor(JSON.parse(raw))
  },
}
// Hono-based mirror of the experimental v2 routes (session listing and
// message listing), used when the HttpApi backend is not active. Built
// lazily so route construction runs once on first use.
export const V2Routes = lazy(() =>
  new Hono()
    .get(
      "/session",
      describeRoute({
        summary: "List v2 sessions",
        description:
          "Retrieve sessions in the requested order. Items keep that order across pages; use cursor.next or cursor.previous to move through the ordered list.",
        operationId: "v2.session.list",
        responses: {
          200: {
            description: "List of v2 sessions",
            content: {
              "application/json": {
                schema: resolver(zod(SessionsResponse)),
              },
            },
          },
          ...errors(400),
        },
      }),
      validator(
        "query",
        z.object({
          limit: z.coerce.number().int().min(1).max(200).optional(),
          cursor: z.string().optional(),
          order: z.enum(["asc", "desc"]).optional(),
          directory: z.string().optional(),
          path: z.string().optional(),
          workspace: WorkspaceID.zod.optional(),
          roots: z
            .enum(["true", "false"])
            .transform((value) => value === "true")
            .optional(),
          start: z.coerce.number().optional(),
          search: z.string().optional(),
        }),
      ),
      async (c) => {
        const query = c.req.valid("query")
        // A cursor that fails to decode is a client error, not a server fault.
        const decoded = (() => {
          try {
            return query.cursor ? sessionCursor.decode(query.cursor) : undefined
          } catch {
            throw new HTTPException(400)
          }
        })()
        // A cursor pins the order for follow-up pages; otherwise fall back to
        // the query parameter, defaulting to newest-first.
        const order = decoded?.order ?? query.order ?? "desc"
        // Extract ONLY the filter fields from a decoded cursor. Passing the
        // decoded cursor object itself into sessionCursor.encode would also
        // spread its id/time/order/direction and clobber the fresh boundary
        // values, so page N+1 would mint cursors pointing back at page N.
        const filters = decoded
          ? {
              directory: decoded.directory,
              path: decoded.path,
              workspaceID: decoded.workspaceID,
              roots: decoded.roots,
              start: decoded.start,
              search: decoded.search,
            }
          : {
              directory: query.directory,
              path: query.path,
              workspaceID: query.workspace,
              roots: query.roots,
              start: query.start,
              search: query.search,
            }
        return jsonRequest("V2Routes.sessions", c, function* () {
          return yield* Effect.gen(function* () {
            const session = yield* SessionV2.Service
            const sessions = yield* session.list({
              limit: query.limit ?? DefaultSessionsLimit,
              order,
              directory: filters.directory,
              path: filters.path,
              workspaceID: filters.workspaceID,
              roots: filters.roots,
              start: filters.start,
              search: filters.search,
              cursor: decoded ? { id: decoded.id, time: decoded.time, direction: decoded.direction } : undefined,
            })
            // Mint a cursor off each boundary element; empty pages yield none.
            const first = sessions[0]
            const last = sessions.at(-1)
            return {
              items: sessions,
              cursor: {
                previous: first ? sessionCursor.encode(first, order, "previous", filters) : undefined,
                next: last ? sessionCursor.encode(last, order, "next", filters) : undefined,
              },
            }
          }).pipe(Effect.provide(SessionV2.defaultLayer))
        })
      },
    )
    .get(
      "/session/:sessionID/message",
      describeRoute({
        summary: "Get v2 session messages",
        description: "Retrieve projected v2 messages for a session directly from the message database.",
        operationId: "v2.session.messages",
        responses: {
          200: {
            description: "List of v2 session messages",
            content: {
              "application/json": {
                schema: resolver(zod(MessagesResponse)),
              },
            },
          },
          ...errors(400, 404),
        },
      }),
      validator("param", z.object({ sessionID: SessionID.zod })),
      validator(
        "query",
        z.object({
          limit: z.coerce.number().int().min(1).max(200).optional(),
          cursor: z.string().optional(),
          order: z.enum(["asc", "desc"]).optional(),
        }),
      ),
      async (c) => {
        const sessionID = c.req.valid("param").sessionID
        const query = c.req.valid("query")
        // Same cursor/order handling as the session listing above.
        const decoded = (() => {
          try {
            return query.cursor ? cursor.decode(query.cursor) : undefined
          } catch {
            throw new HTTPException(400)
          }
        })()
        const order = decoded?.order ?? query.order ?? "desc"
        return jsonRequest("V2Routes.messages", c, function* () {
          return yield* Effect.gen(function* () {
            const session = yield* SessionV2.Service
            const messages = yield* session.messages({
              sessionID,
              limit: query.limit ?? DefaultMessagesLimit,
              order,
              cursor: decoded ? { id: decoded.id, time: decoded.time, direction: decoded.direction } : undefined,
            })
            const first = messages[0]
            const last = messages.at(-1)
            return {
              items: messages,
              cursor: {
                previous: first ? cursor.encode(first, order, "previous") : undefined,
                next: last ? cursor.encode(last, order, "next") : undefined,
              },
            }
          }).pipe(Effect.provide(SessionV2.defaultLayer))
        })
      },
    ),
)

View File

@@ -1,6 +1,6 @@
import type { MiddlewareHandler } from "hono"
import type { UpgradeWebSocket } from "hono/ws"
import { getAdaptor } from "@/control-plane/adaptors"
import { getAdapter } from "@/control-plane/adapters"
import { WorkspaceID } from "@/control-plane/schema"
import { WorkspaceContext } from "@/control-plane/workspace-context"
import { Workspace } from "@/control-plane/workspace"
@@ -49,14 +49,14 @@ export function workspaceProxyURL(target: string | URL, requestURL: URL) {
return proxyURL
}
async function getSessionWorkspace(url: URL) {
async function getSession(url: URL) {
const id = getWorkspaceRouteSessionID(url)
if (!id) return null
const session = await AppRuntime.runPromise(
Session.Service.use((svc) => svc.get(id)).pipe(Effect.withSpan("WorkspaceRouter.lookup")),
).catch(() => undefined)
return session?.workspaceID
return session
}
export function WorkspaceRouterMiddleware(upgrade: UpgradeWebSocket): MiddlewareHandler {
@@ -65,10 +65,20 @@ export function WorkspaceRouterMiddleware(upgrade: UpgradeWebSocket): Middleware
return async (c, next) => {
const url = new URL(c.req.url)
const sessionWorkspaceID = await getSessionWorkspace(url)
const workspaceID = sessionWorkspaceID || url.searchParams.get("workspace")
const session = await getSession(url)
const workspaceID = session?.workspaceID || url.searchParams.get("workspace")
if (!workspaceID || url.pathname.startsWith("/console") || Flag.OPENCODE_WORKSPACE_ID) {
if (session) {
return Instance.provide({
directory: session.directory,
init: () => AppRuntime.runPromise(InstanceBootstrap),
async fn() {
return next()
},
})
}
return next()
}
@@ -91,8 +101,8 @@ export function WorkspaceRouterMiddleware(upgrade: UpgradeWebSocket): Middleware
return next()
}
const adaptor = getAdaptor(workspace.projectID, workspace.type)
const target = await adaptor.target(workspace)
const adapter = getAdapter(workspace.projectID, workspace.type)
const target = await adapter.target(workspace)
if (target.type === "local") {
return WorkspaceContext.provide({

View File

@@ -14,10 +14,13 @@ import { Config } from "@/config/config"
import { NotFoundError } from "@/storage/storage"
import { ModelID, ProviderID } from "@/provider/schema"
import { Effect, Layer, Context, Schema } from "effect"
import * as DateTime from "effect/DateTime"
import { InstanceState } from "@/effect/instance-state"
import { isOverflow as overflow, usable } from "./overflow"
import { makeRuntime } from "@/effect/run-service"
import { fn } from "@/util/fn"
import { SyncEvent } from "@/sync"
import { SessionEvent } from "@/v2/session-event"
const log = Log.create({ service: "session.compaction" })
@@ -556,7 +559,21 @@ export const layer: Layer.Layer<
}
if (processor.message.error) return "stop"
if (result === "continue") yield* bus.publish(Event.Compacted, { sessionID: input.sessionID })
if (result === "continue") {
const summary = summaryText(
(yield* session.messages({ sessionID: input.sessionID })).find((item) => item.info.id === msg.id) ?? {
info: msg,
parts: [],
},
)
SyncEvent.run(SessionEvent.Compaction.Ended.Sync, {
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(Date.now()),
text: summary ?? "",
include: selected.tail_start_id,
})
yield* bus.publish(Event.Compacted, { sessionID: input.sessionID })
}
return result
})
@@ -583,6 +600,12 @@ export const layer: Layer.Layer<
auto: input.auto,
overflow: input.overflow,
})
SyncEvent.run(SessionEvent.Compaction.Started.Sync, {
id: SessionEvent.ID.create(),
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(Date.now()),
reason: input.auto ? "auto" : "manual",
})
})
return Service.of({

View File

@@ -20,6 +20,9 @@ import { Question } from "@/question"
import { errorMessage } from "@/util/error"
import * as Log from "@opencode-ai/core/util/log"
import { isRecord } from "@/util/record"
import { SyncEvent } from "@/sync"
import { SessionEvent } from "@/v2/session-event"
import * as DateTime from "effect/DateTime"
const DOOM_LOOP_THRESHOLD = 3
const log = Log.create({ service: "session.processor" })
@@ -221,6 +224,12 @@ export const layer: Layer.Layer<
case "reasoning-start":
if (value.id in ctx.reasoningMap) return
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Reasoning.Started.Sync, {
sessionID: ctx.sessionID,
reasoningID: value.id,
timestamp: DateTime.makeUnsafe(Date.now()),
})
ctx.reasoningMap[value.id] = {
id: PartID.ascending(),
messageID: ctx.assistantMessage.id,
@@ -235,6 +244,13 @@ export const layer: Layer.Layer<
case "reasoning-delta":
if (!(value.id in ctx.reasoningMap)) return
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Reasoning.Delta.Sync, {
sessionID: ctx.sessionID,
reasoningID: value.id,
delta: value.text,
timestamp: DateTime.makeUnsafe(Date.now()),
})
ctx.reasoningMap[value.id].text += value.text
if (value.providerMetadata) ctx.reasoningMap[value.id].metadata = value.providerMetadata
yield* session.updatePartDelta({
@@ -248,6 +264,13 @@ export const layer: Layer.Layer<
case "reasoning-end":
if (!(value.id in ctx.reasoningMap)) return
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Reasoning.Ended.Sync, {
sessionID: ctx.sessionID,
reasoningID: value.id,
text: ctx.reasoningMap[value.id].text,
timestamp: DateTime.makeUnsafe(Date.now()),
})
// oxlint-disable-next-line no-self-assign -- reactivity trigger
ctx.reasoningMap[value.id].text = ctx.reasoningMap[value.id].text
ctx.reasoningMap[value.id].time = { ...ctx.reasoningMap[value.id].time, end: Date.now() }
@@ -260,6 +283,13 @@ export const layer: Layer.Layer<
if (ctx.assistantMessage.summary) {
throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`)
}
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Tool.Input.Started.Sync, {
sessionID: ctx.sessionID,
callID: value.id,
name: value.toolName,
timestamp: DateTime.makeUnsafe(Date.now()),
})
const part = yield* session.updatePart({
id: ctx.toolcalls[value.id]?.partID ?? PartID.ascending(),
messageID: ctx.assistantMessage.id,
@@ -281,13 +311,34 @@ export const layer: Layer.Layer<
case "tool-input-delta":
return
case "tool-input-end":
case "tool-input-end": {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Tool.Input.Ended.Sync, {
sessionID: ctx.sessionID,
callID: value.id,
text: "",
timestamp: DateTime.makeUnsafe(Date.now()),
})
return
}
case "tool-call": {
if (ctx.assistantMessage.summary) {
throw new Error(`Tool call not allowed while generating summary: ${value.toolName}`)
}
const toolCall = yield* readToolCall(value.toolCallId)
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Tool.Called.Sync, {
sessionID: ctx.sessionID,
callID: value.toolCallId,
tool: value.toolName,
input: value.input,
provider: {
executed: toolCall?.part.metadata?.providerExecuted === true,
...(value.providerMetadata ? { metadata: value.providerMetadata } : {}),
},
timestamp: DateTime.makeUnsafe(Date.now()),
})
yield* updateToolCall(value.toolCallId, (match) => ({
...match,
tool: value.toolName,
@@ -331,11 +382,48 @@ export const layer: Layer.Layer<
}
case "tool-result": {
const toolCall = yield* readToolCall(value.toolCallId)
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Tool.Success.Sync, {
sessionID: ctx.sessionID,
callID: value.toolCallId,
structured: value.output.metadata,
content: [
{
type: "text",
text: value.output.output,
},
...(value.output.attachments?.map((item: MessageV2.FilePart) => ({
type: "file",
uri: item.url,
mime: item.mime,
name: item.filename,
})) ?? []),
],
provider: {
executed: toolCall?.part.metadata?.providerExecuted === true,
},
timestamp: DateTime.makeUnsafe(Date.now()),
})
yield* completeToolCall(value.toolCallId, value.output)
return
}
case "tool-error": {
const toolCall = yield* readToolCall(value.toolCallId)
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Tool.Error.Sync, {
sessionID: ctx.sessionID,
callID: value.toolCallId,
error: {
type: "unknown",
message: errorMessage(value.error),
},
provider: {
executed: toolCall?.part.metadata?.providerExecuted === true,
},
timestamp: DateTime.makeUnsafe(Date.now()),
})
yield* failToolCall(value.toolCallId, value.error)
return
}
@@ -345,6 +433,21 @@ export const layer: Layer.Layer<
case "start-step":
if (!ctx.snapshot) ctx.snapshot = yield* snapshot.track()
if (!ctx.assistantMessage.summary) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Step.Started.Sync, {
id: SessionEvent.ID.create(),
sessionID: ctx.sessionID,
agent: input.assistantMessage.agent,
model: {
id: ctx.model.id,
providerID: ctx.model.providerID,
variant: input.assistantMessage.variant,
},
snapshot: ctx.snapshot,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
yield* session.updatePart({
id: PartID.ascending(),
messageID: ctx.assistantMessage.id,
@@ -355,18 +458,30 @@ export const layer: Layer.Layer<
return
case "finish-step": {
const completedSnapshot = yield* snapshot.track()
const usage = Session.getUsage({
model: ctx.model,
usage: value.usage,
metadata: value.providerMetadata,
})
if (!ctx.assistantMessage.summary) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Step.Ended.Sync, {
sessionID: ctx.sessionID,
finish: value.finishReason,
cost: usage.cost,
tokens: usage.tokens,
snapshot: completedSnapshot,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
ctx.assistantMessage.finish = value.finishReason
ctx.assistantMessage.cost += usage.cost
ctx.assistantMessage.tokens = usage.tokens
yield* session.updatePart({
id: PartID.ascending(),
reason: value.finishReason,
snapshot: yield* snapshot.track(),
snapshot: completedSnapshot,
messageID: ctx.assistantMessage.id,
sessionID: ctx.assistantMessage.sessionID,
type: "step-finish",
@@ -404,6 +519,13 @@ export const layer: Layer.Layer<
}
case "text-start":
if (!ctx.assistantMessage.summary) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Text.Started.Sync, {
sessionID: ctx.sessionID,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
ctx.currentText = {
id: PartID.ascending(),
messageID: ctx.assistantMessage.id,
@@ -418,6 +540,13 @@ export const layer: Layer.Layer<
case "text-delta":
if (!ctx.currentText) return
if (ctx.assistantMessage.summary) {
SyncEvent.run(SessionEvent.Compaction.Delta.Sync, {
sessionID: ctx.sessionID,
text: value.text,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
ctx.currentText.text += value.text
if (value.providerMetadata) ctx.currentText.metadata = value.providerMetadata
yield* session.updatePartDelta({
@@ -442,6 +571,14 @@ export const layer: Layer.Layer<
},
{ text: ctx.currentText.text },
)).text
if (!ctx.assistantMessage.summary) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Text.Ended.Sync, {
sessionID: ctx.sessionID,
text: ctx.currentText.text,
timestamp: DateTime.makeUnsafe(Date.now()),
})
}
{
const end = Date.now()
ctx.currentText.time = { start: ctx.currentText.time?.start ?? end, end }
@@ -568,13 +705,24 @@ export const layer: Layer.Layer<
Effect.retry(
SessionRetry.policy({
parse,
set: (info) =>
status.set(ctx.sessionID, {
set: (info) => {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Retried.Sync, {
sessionID: ctx.sessionID,
attempt: info.attempt,
error: {
message: info.message,
isRetryable: true,
},
timestamp: DateTime.makeUnsafe(Date.now()),
})
return status.set(ctx.sessionID, {
type: "retry",
attempt: info.attempt,
message: info.message,
next: info.next,
}),
})
},
}),
),
Effect.catch(halt),

View File

@@ -0,0 +1,153 @@
import { and, desc, eq } from "@/storage/db"
import type { Database } from "@/storage/db"
import { SessionMessage } from "@/v2/session-message"
import { SessionMessageUpdater } from "@/v2/session-message-updater"
import { SessionEvent } from "@/v2/session-event"
import * as DateTime from "effect/DateTime"
import { SyncEvent } from "@/sync"
import { SessionMessageTable } from "./session.sql"
import type { SessionID } from "./schema"
import { Schema } from "effect"
// Decoder for rows read back out of the message table (throws on invalid data).
const decodeMessage = Schema.decodeUnknownSync(SessionMessage.Message)
// Shape of the JSON `data` column as drizzle infers it for inserts.
type SessionMessageData = NonNullable<typeof SessionMessageTable.$inferInsert["data"]>
/**
 * Recursively replaces effect DateTime values with their epoch-millisecond
 * numbers so the structure can round-trip through the JSON column.
 * Arrays and plain objects are walked; every other value passes through.
 */
function encodeDateTimes(value: unknown): unknown {
  if (DateTime.isDateTime(value)) return DateTime.toEpochMillis(value)
  if (Array.isArray(value)) {
    const converted: unknown[] = []
    for (const entry of value) converted.push(encodeDateTimes(entry))
    return converted
  }
  if (value === null || typeof value !== "object") return value
  const result: Record<string, unknown> = {}
  for (const [key, entry] of Object.entries(value)) result[key] = encodeDateTimes(entry)
  return result
}
/** Narrows an encoded message payload to the table's JSON column type. */
function encodeMessageData(value: unknown): SessionMessageData {
  const encoded = encodeDateTimes(value)
  return encoded as SessionMessageData
}
// Builds a SessionMessageUpdater adapter backed by the session_message table,
// scoped to a single session. All reads/writes are synchronous drizzle calls
// against the given transaction or connection.
function sqlite(db: Database.TxOrDb, sessionID: SessionID): SessionMessageUpdater.Adapter<void> {
  return {
    // Latest assistant message for the session that has not completed yet,
    // or undefined when none is in flight.
    getCurrentAssistant() {
      return db
        .select()
        .from(SessionMessageTable)
        .where(and(eq(SessionMessageTable.session_id, sessionID), eq(SessionMessageTable.type, "assistant")))
        .orderBy(desc(SessionMessageTable.id))
        .all()
        .map((row) => decodeMessage({ ...row.data, id: row.id, type: row.type }))
        .find((message): message is SessionMessage.Assistant => message.type === "assistant" && !message.time.completed)
    },
    // Most recent compaction message for the session, if any.
    getCurrentCompaction() {
      return db
        .select()
        .from(SessionMessageTable)
        .where(and(eq(SessionMessageTable.session_id, sessionID), eq(SessionMessageTable.type, "compaction")))
        .orderBy(desc(SessionMessageTable.id))
        .all()
        .map((row) => decodeMessage({ ...row.data, id: row.id, type: row.type }))
        .find((message): message is SessionMessage.Compaction => message.type === "compaction")
    },
    // Persists a changed assistant message. id/type live in their own columns;
    // the remaining fields are serialized into the JSON data column.
    updateAssistant(assistant) {
      const { id, type, ...data } = assistant
      db.update(SessionMessageTable)
        .set({ data: encodeMessageData(data) })
        .where(
          and(
            eq(SessionMessageTable.id, id),
            eq(SessionMessageTable.session_id, sessionID),
            eq(SessionMessageTable.type, type),
          ),
        )
        .run()
    },
    // Same column split as updateAssistant, for compaction messages.
    updateCompaction(compaction) {
      const { id, type, ...data } = compaction
      db.update(SessionMessageTable)
        .set({ data: encodeMessageData(data) })
        .where(
          and(
            eq(SessionMessageTable.id, id),
            eq(SessionMessageTable.session_id, sessionID),
            eq(SessionMessageTable.type, type),
          ),
        )
        .run()
    },
    // Inserts a brand-new message row; the created time is denormalized into
    // its own column so it can be range-queried via the time_created index.
    appendMessage(message) {
      const { id, type, ...data } = message
      db.insert(SessionMessageTable)
        .values([
          {
            id,
            session_id: sessionID,
            type,
            time_created: DateTime.toEpochMillis(message.time.created),
            data: encodeMessageData(data),
          },
        ])
        .run()
    },
    // No post-update work is needed for the sqlite adapter.
    finish() {},
  }
}
function update(db: Database.TxOrDb, event: SessionEvent.Event) {
SessionMessageUpdater.update(sqlite(db, event.data.sessionID), event)
}
// Projectors that fold v2 sync events into the session_message table.
// Delta events (text / tool-input / reasoning deltas) are intentionally
// registered as no-ops: the projection only materializes on started/ended
// and terminal events.
export default [
  SyncEvent.project(SessionEvent.Prompted.Sync, (db, data) => {
    update(db, { type: "session.next.prompted", data })
  }),
  SyncEvent.project(SessionEvent.Synthetic.Sync, (db, data) => {
    update(db, { type: "session.next.synthetic", data })
  }),
  SyncEvent.project(SessionEvent.Step.Started.Sync, (db, data) => {
    update(db, { type: "session.next.step.started", data })
  }),
  SyncEvent.project(SessionEvent.Step.Ended.Sync, (db, data) => {
    update(db, { type: "session.next.step.ended", data })
  }),
  SyncEvent.project(SessionEvent.Text.Started.Sync, (db, data) => {
    update(db, { type: "session.next.text.started", data })
  }),
  // No-op: text deltas are not persisted to the projection.
  SyncEvent.project(SessionEvent.Text.Delta.Sync, () => {}),
  SyncEvent.project(SessionEvent.Text.Ended.Sync, (db, data) => {
    update(db, { type: "session.next.text.ended", data })
  }),
  SyncEvent.project(SessionEvent.Tool.Input.Started.Sync, (db, data) => {
    update(db, { type: "session.next.tool.input.started", data })
  }),
  // No-op: tool-input deltas are not persisted to the projection.
  SyncEvent.project(SessionEvent.Tool.Input.Delta.Sync, () => {}),
  SyncEvent.project(SessionEvent.Tool.Input.Ended.Sync, (db, data) => {
    update(db, { type: "session.next.tool.input.ended", data })
  }),
  SyncEvent.project(SessionEvent.Tool.Called.Sync, (db, data) => {
    update(db, { type: "session.next.tool.called", data })
  }),
  SyncEvent.project(SessionEvent.Tool.Success.Sync, (db, data) => {
    update(db, { type: "session.next.tool.success", data })
  }),
  SyncEvent.project(SessionEvent.Tool.Error.Sync, (db, data) => {
    update(db, { type: "session.next.tool.error", data })
  }),
  SyncEvent.project(SessionEvent.Reasoning.Started.Sync, (db, data) => {
    update(db, { type: "session.next.reasoning.started", data })
  }),
  // No-op: reasoning deltas are not persisted to the projection.
  SyncEvent.project(SessionEvent.Reasoning.Delta.Sync, () => {}),
  SyncEvent.project(SessionEvent.Reasoning.Ended.Sync, (db, data) => {
    update(db, { type: "session.next.reasoning.ended", data })
  }),
  SyncEvent.project(SessionEvent.Retried.Sync, (db, data) => {
    update(db, { type: "session.next.retried", data })
  }),
  SyncEvent.project(SessionEvent.Compaction.Started.Sync, (db, data) => {
    update(db, { type: "session.next.compaction.started", data })
  }),
  SyncEvent.project(SessionEvent.Compaction.Delta.Sync, (db, data) => {
    update(db, { type: "session.next.compaction.delta", data })
  }),
  SyncEvent.project(SessionEvent.Compaction.Ended.Sync, (db, data) => {
    update(db, { type: "session.next.compaction.ended", data })
  }),
]

View File

@@ -5,7 +5,8 @@ import { SyncEvent } from "@/sync"
import * as Session from "./session"
import { MessageV2 } from "./message-v2"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import * as Log from "@opencode-ai/core/util/log"
import { Log } from "@opencode-ai/core/util/log"
import nextProjectors from "./projectors-next"
const log = Log.create({ service: "session.projector" })
@@ -136,4 +137,6 @@ export default [
log.warn("ignored late part update", { partID: id, messageID, sessionID })
}
}),
...nextProjectors,
]

View File

@@ -41,6 +41,7 @@ import { Permission } from "@/permission"
import { SessionStatus } from "./status"
import { LLM } from "./llm"
import { Shell } from "@/shell/shell"
import { ShellToolID } from "@/tool/shell/id"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Truncate } from "@/tool/truncate"
import { decodeDataUrl } from "@/util/data-url"
@@ -53,6 +54,10 @@ import { InstanceState } from "@/effect/instance-state"
import { TaskTool, type TaskPromptOps } from "@/tool/task"
import { SessionRunState } from "./run-state"
import { EffectBridge } from "@/effect/bridge"
import { SessionEvent } from "@/v2/session-event"
import { AgentAttachment, FileAttachment, Source } from "@/v2/session-prompt"
import { SyncEvent } from "@/sync"
import * as DateTime from "effect/DateTime"
// @ts-ignore
globalThis.AI_SDK_LOG_WARNINGS = false
@@ -117,6 +122,7 @@ export const layer = Layer.effect(
cancel: (sessionID: SessionID) => run.fork(cancel(sessionID)),
resolvePromptParts: (template: string) => resolvePromptParts(template),
prompt: (input: PromptInput) => prompt(input),
loop: (input: LoopInput) => loop(input),
} satisfies TaskPromptOps
})
@@ -780,7 +786,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
id: PartID.ascending(),
messageID: msg.id,
sessionID: input.sessionID,
tool: "bash",
tool: ShellToolID.id,
callID: ulid(),
state: {
status: "running",
@@ -1240,6 +1246,71 @@ NOTE: At any point in time through this workflow you should feel free to ask the
yield* sessions.updateMessage(info)
for (const part of parts) yield* sessions.updatePart(part)
const nextPrompt = parts.reduce(
(result, part) => {
if (part.type === "text") {
if (part.synthetic) result.synthetic.push(part.text)
else result.text.push(part.text)
}
if (part.type === "file") {
result.files.push(
new FileAttachment({
uri: part.url,
mime: part.mime,
name: part.filename,
source: part.source
? new Source({
start: part.source.text.start,
end: part.source.text.end,
text: part.source.text.value,
})
: undefined,
}),
)
}
if (part.type === "agent") {
result.agents.push(
new AgentAttachment({
name: part.name,
source: part.source
? new Source({
start: part.source.start,
end: part.source.end,
text: part.source.value,
})
: undefined,
}),
)
}
return result
},
{
text: [] as string[],
files: [] as FileAttachment[],
agents: [] as AgentAttachment[],
synthetic: [] as string[],
},
)
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Prompted.Sync, {
id: SessionEvent.ID.create(),
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(info.time.created),
prompt: {
text: nextPrompt.text.join("\n"),
files: nextPrompt.files,
agents: nextPrompt.agents,
},
})
for (const text of nextPrompt.synthetic) {
// TODO(v2): Temporary dual-write while migrating session messages to v2 events.
SyncEvent.run(SessionEvent.Synthetic.Sync, {
id: SessionEvent.ID.create(),
sessionID: input.sessionID,
timestamp: DateTime.makeUnsafe(info.time.created),
text,
})
}
return { info, parts }
}, Effect.scoped)

View File

@@ -1,7 +1,7 @@
import { sqliteTable, text, integer, index, primaryKey } from "drizzle-orm/sqlite-core"
import { ProjectTable } from "../project/project.sql"
import type { MessageV2 } from "./message-v2"
import type { SessionEntry } from "../v2/session-entry"
import type { SessionMessage } from "../v2/session-message"
import type { Snapshot } from "../snapshot"
import type { Permission } from "../permission"
import type { ProjectID } from "../project/schema"
@@ -11,6 +11,7 @@ import { Timestamps } from "../storage/schema.sql"
type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID">
type InfoData = Omit<MessageV2.Info, "id" | "sessionID">
type SessionMessageData = Omit<(typeof SessionMessage.Message)["Encoded"], "type" | "id">
export const SessionTable = sqliteTable(
"session",
@@ -96,22 +97,22 @@ export const TodoTable = sqliteTable(
],
)
export const SessionEntryTable = sqliteTable(
"session_entry",
export const SessionMessageTable = sqliteTable(
"session_message",
{
id: text().$type<SessionEntry.ID>().primaryKey(),
id: text().$type<SessionMessage.ID>().primaryKey(),
session_id: text()
.$type<SessionID>()
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
type: text().$type<SessionEntry.Type>().notNull(),
type: text().$type<SessionMessage.Type>().notNull(),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<Omit<SessionEntry.Entry, "type" | "id">>(),
data: text({ mode: "json" }).notNull().$type<SessionMessageData>(),
},
(table) => [
index("session_entry_session_idx").on(table.session_id),
index("session_entry_session_type_idx").on(table.session_id, table.type),
index("session_entry_time_created_idx").on(table.time_created),
index("session_message_session_idx").on(table.session_id),
index("session_message_session_type_idx").on(table.session_id, table.type),
index("session_message_time_created_idx").on(table.time_created),
],
)

View File

@@ -142,9 +142,9 @@ const Share = Schema.Struct({
url: Schema.String,
})
// Legacy HTTP accepted any number here, and persisted data may already contain
// negative values. Keep archive timestamps permissive while other clocks stay non-negative.
export const ArchivedTimestamp = Schema.Number
// Legacy HTTP accepted negative values here. Keep archive timestamps permissive
// while excluding non-finite values that cannot round-trip through JSON.
export const ArchivedTimestamp = Schema.Finite
const Time = Schema.Struct({
created: NonNegativeInt,
@@ -316,7 +316,8 @@ export const Event = {
sessionID: Schema.optional(SessionID),
// Reuses MessageV2.Assistant.fields.error (already Schema.optional) so
// the derived zod keeps the same discriminated-union shape on the bus.
error: MessageV2.Assistant.fields.error,
// Schema.suspend defers access to break circular init in compiled binaries.
error: Schema.suspend(() => MessageV2.Assistant.fields.error),
}),
),
}

View File

@@ -94,7 +94,7 @@ Importantly, **sync events automatically re-publish as bus events**. This makes
### Event shape
- The shape of the events are slightly different. A sync event has the `type`, `id`, `seq`, `aggregateID`, and `data` fields. A bus event has the `type` and `properties` fields. `data` and `properties` are largely the same thing. This conversion is automatically handled when the sync system re-published the event throught the bus.
- The shape of the events are slightly different. A sync event has the `type`, `id`, `seq`, `aggregateID`, and `data` fields. A bus event has the `type` and `properties` fields. `data` and `properties` are largely the same thing. This conversion is automatically handled when the sync system re-published the event through the bus.
The reason for this is because sync events need to track more information. I chose not to copy the `properties` naming to more clearly disambiguate the event types.
@@ -112,9 +112,9 @@ The system install projectors in `server/projectors.js`. It calls `SyncEvent.ini
This allows you to "reshape" an event from the sync system before it's published to the bus. This should be avoided, but might be necessary for temporary backwards compat.
The only time we use this is the `session.updated` event. Previously this event contained the entire session object. The sync even only contains the fields updated. We convert the event to contain to full object for backwards compatibility (but ideally we'd remove this).
The only time we use this is the `session.updated` event. Previously this event contained the entire session object. The sync event only contains the fields updated. We convert the event to contain the full object for backwards compatibility (but ideally we'd remove this).
It's very important that types are correct when working with events. Event definitions have a `schema` which carries the defintiion of the event shape (provided by a zod schema, inferred into a TypeScript type). Examples:
It's very important that types are correct when working with events. Event definitions have a `schema` which carries the definition of the event shape (provided by a zod schema, inferred into a TypeScript type). Examples:
```ts
// The schema from `Updated` typechecks the object correctly

View File

@@ -0,0 +1,62 @@
import { Effect, Schema } from "effect"
import { HttpClient } from "effect/unstable/http"
import * as Tool from "./tool"
import * as McpExa from "./mcp-exa"
import DESCRIPTION from "./codesearch.txt"
// Input schema for the codesearch tool.
export const Parameters = Schema.Struct({
  query: Schema.String.annotate({
    description:
      "Search query to find relevant context for APIs, Libraries, and SDKs. For example, 'React useState hook examples', 'Python pandas dataframe filtering', 'Express.js middleware', 'Next js partial prerendering configuration'",
  }),
  // Bounded token budget (1000-50000); decodes to 5000 when omitted.
  tokensNum: Schema.Number.check(Schema.isGreaterThanOrEqualTo(1000))
    .check(Schema.isLessThanOrEqualTo(50000))
    .pipe(Schema.optional, Schema.withDecodingDefault(Effect.succeed(5000)))
    .annotate({
      description:
        "Number of tokens to return (1000-50000). Default is 5000 tokens. Adjust this value based on how much context you need - use lower values for focused queries and higher values for comprehensive documentation.",
    }),
})
// Tool that queries the Exa "code context" MCP endpoint for library/API
// documentation and code examples. Each call is permission-gated.
export const CodeSearchTool = Tool.define(
  "codesearch",
  Effect.gen(function* () {
    const http = yield* HttpClient.HttpClient
    return {
      description: DESCRIPTION,
      parameters: Parameters,
      // Failures inside execute are treated as defects (orDie) rather than
      // recoverable tool errors.
      execute: (params: { query: string; tokensNum: number }, ctx: Tool.Context) =>
        Effect.gen(function* () {
          // Permission gate: surface the query so the user can approve it.
          yield* ctx.ask({
            permission: "codesearch",
            patterns: [params.query],
            always: ["*"],
            metadata: {
              query: params.query,
              tokensNum: params.tokensNum,
            },
          })
          // Remote MCP call with a 30-second timeout.
          const result = yield* McpExa.call(
            http,
            "get_code_context_exa",
            McpExa.CodeArgs,
            {
              query: params.query,
              tokensNum: params.tokensNum,
            },
            "30 seconds",
          )
          return {
            output:
              result ??
              "No code snippets or documentation found. Please try a different query, be more specific about the library or programming concept, or check the spelling of framework names.",
            title: `Code search: ${params.query}`,
            metadata: {},
          }
        }).pipe(Effect.orDie),
    }
  }),
)

View File

@@ -0,0 +1,12 @@
- Search and get relevant context for any programming task using Exa Code API
- Provides the highest quality and freshest context for libraries, SDKs, and APIs
- Use this tool for ANY question or task related to programming
- Returns comprehensive code examples, documentation, and API references
- Optimized for finding specific programming patterns and solutions
Usage notes:
- Adjustable token count (1000-50000) for focused or comprehensive results
- Default 5000 tokens provides balanced context for most queries
- Use lower values for specific questions, higher values for comprehensive documentation
- Supports queries about frameworks, libraries, APIs, and programming concepts
- Examples: 'React useState hook examples', 'Python pandas dataframe filtering', 'Express.js middleware'

View File

@@ -35,6 +35,11 @@ export const SearchArgs = Schema.Struct({
contextMaxCharacters: Schema.optional(Schema.Number),
})
// Argument schema for the Exa "get_code_context_exa" MCP method.
export const CodeArgs = Schema.Struct({
  query: Schema.String,
  tokensNum: Schema.Number,
})
const McpRequest = <F extends Schema.Struct.Fields>(args: Schema.Struct<F>) =>
Schema.Struct({
jsonrpc: Schema.Literal("2.0"),

View File

@@ -1,12 +1,13 @@
import { PlanExitTool } from "./plan"
import { Session } from "@/session/session"
import { QuestionTool } from "./question"
import { BashTool } from "./bash"
import { ShellTool } from "./shell"
import { EditTool } from "./edit"
import { GlobTool } from "./glob"
import { GrepTool } from "./grep"
import { ReadTool } from "./read"
import { TaskTool } from "./task"
import { TaskStatusTool } from "./task_status"
import { TodoWriteTool } from "./todo"
import { WebFetchTool } from "./webfetch"
import { WriteTool } from "./write"
@@ -22,6 +23,9 @@ import { Plugin } from "../plugin"
import { Provider } from "@/provider/provider"
import { ProviderID, type ModelID } from "../provider/schema"
import { WebSearchTool } from "./websearch"
import { CodeSearchTool } from "./codesearch"
import { RepoCloneTool } from "./repo_clone"
import { RepoOverviewTool } from "./repo_overview"
import { Flag } from "@opencode-ai/core/flag/flag"
import * as Log from "@opencode-ai/core/util/log"
import { LspTool } from "./lsp"
@@ -44,8 +48,11 @@ import { Instruction } from "../session/instruction"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Bus } from "../bus"
import { Agent } from "../agent/agent"
import { Git } from "@/git"
import { Skill } from "../skill"
import { Permission } from "@/permission"
import { SessionStatus } from "@/session/status"
import { BackgroundJob } from "@/background/job"
const log = Log.create({ service: "tool.registry" })
@@ -78,11 +85,14 @@ export const layer: Layer.Layer<
| Agent.Service
| Skill.Service
| Session.Service
| SessionStatus.Service
| Provider.Service
| Git.Service
| LSP.Service
| Instruction.Service
| AppFileSystem.Service
| Bus.Service
| BackgroundJob.Service
| HttpClient.HttpClient
| ChildProcessSpawner
| Ripgrep.Service
@@ -106,13 +116,17 @@ export const layer: Layer.Layer<
const plan = yield* PlanExitTool
const webfetch = yield* WebFetchTool
const websearch = yield* WebSearchTool
const bash = yield* BashTool
const shell = yield* ShellTool
const codesearch = yield* CodeSearchTool
const repoClone = yield* RepoCloneTool
const repoOverview = yield* RepoOverviewTool
const globtool = yield* GlobTool
const writetool = yield* WriteTool
const edit = yield* EditTool
const greptool = yield* GrepTool
const patchtool = yield* ApplyPatchTool
const skilltool = yield* SkillTool
const taskstatus = yield* TaskStatusTool
const agent = yield* Agent.Service
const state = yield* InstanceState.make<State>(
@@ -186,16 +200,20 @@ export const layer: Layer.Layer<
const tool = yield* Effect.all({
invalid: Tool.init(invalid),
bash: Tool.init(bash),
shell: Tool.init(shell),
read: Tool.init(read),
glob: Tool.init(globtool),
grep: Tool.init(greptool),
edit: Tool.init(edit),
write: Tool.init(writetool),
task: Tool.init(task),
taskstatus: Tool.init(taskstatus),
fetch: Tool.init(webfetch),
todo: Tool.init(todo),
search: Tool.init(websearch),
code: Tool.init(codesearch),
repo_clone: Tool.init(repoClone),
repo_overview: Tool.init(repoOverview),
skill: Tool.init(skilltool),
patch: Tool.init(patchtool),
question: Tool.init(question),
@@ -208,16 +226,20 @@ export const layer: Layer.Layer<
builtin: [
tool.invalid,
...(questionEnabled ? [tool.question] : []),
tool.bash,
tool.shell,
tool.read,
tool.glob,
tool.grep,
tool.edit,
tool.write,
tool.task,
tool.taskstatus,
tool.fetch,
tool.todo,
tool.search,
tool.code,
tool.repo_clone,
tool.repo_overview,
tool.skill,
tool.patch,
...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [tool.lsp] : []),
@@ -331,11 +353,14 @@ export const defaultLayer = Layer.suspend(() =>
Layer.provide(Skill.defaultLayer),
Layer.provide(Agent.defaultLayer),
Layer.provide(Session.defaultLayer),
Layer.provide(SessionStatus.defaultLayer),
Layer.provide(Provider.defaultLayer),
Layer.provide(Git.defaultLayer),
Layer.provide(LSP.defaultLayer),
Layer.provide(Instruction.defaultLayer),
Layer.provide(AppFileSystem.defaultLayer),
Layer.provide(Bus.layer),
Layer.provide(BackgroundJob.defaultLayer),
Layer.provide(FetchHttpClient.layer),
Layer.provide(Format.defaultLayer),
Layer.provide(CrossSpawnSpawner.defaultLayer),

View File

@@ -0,0 +1,153 @@
import path from "path"
import { Effect, Schema } from "effect"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Flock } from "@opencode-ai/core/util/flock"
import { Git } from "@/git"
import DESCRIPTION from "./repo_clone.txt"
import * as Tool from "./tool"
import { parseRepositoryReference, repositoryCachePath, sameRepositoryReference } from "@/util/repository"
// Input schema for the repo_clone tool.
export const Parameters = Schema.Struct({
  repository: Schema.String.annotate({
    description: "Repository to clone, as a git URL, host/path reference, or GitHub owner/repo shorthand",
  }),
  refresh: Schema.optional(Schema.Boolean).annotate({
    description: "When true, fetches the latest remote state into the managed cache",
  }),
})

// Metadata attached to the tool result.
type Metadata = {
  repository: string // normalized label of the parsed repository reference
  host: string // forge host parsed from the reference
  remote: string // remote URL used for clone/fetch
  localPath: string // absolute path of the managed cache checkout
  status: "cached" | "cloned" | "refreshed" // how the cache was prepared this call
  head?: string // HEAD commit sha, when `git rev-parse HEAD` succeeds
  branch?: string // current branch, when resolvable
}
/** Classify how the cached checkout was prepared: fresh clone, refreshed fetch, or cache reuse. */
function statusForRepository(input: { reuse: boolean; refresh?: boolean }): "cached" | "cloned" | "refreshed" {
  if (input.reuse) {
    // Existing checkout with a matching origin: refresh only when requested.
    return input.refresh ? "refreshed" : "cached"
  }
  return "cloned"
}
/**
 * Pick the ref to hard-reset to after a refresh: the remote HEAD when known,
 * else origin/<current branch>, else plain HEAD as a last resort.
 */
function resetTarget(input: {
  remoteHead: { code: number; stdout: string }
  branch: { code: number; stdout: string }
}): string {
  const { remoteHead, branch } = input
  const remoteHeadKnown = remoteHead.code === 0 && remoteHead.stdout !== ""
  if (remoteHeadKnown) {
    // symbolic-ref prints e.g. "refs/remotes/origin/main"; strip the prefix.
    return remoteHead.stdout.replace(/^refs\/remotes\//, "")
  }
  const branchKnown = branch.code === 0 && branch.stdout !== ""
  return branchKnown ? `origin/${branch.stdout}` : "HEAD"
}
// Tool: clone (or refresh) a repository into OpenCode's managed cache and
// return the local checkout path so other tools can explore the source.
export const RepoCloneTool = Tool.define<typeof Parameters, Metadata, AppFileSystem.Service | Git.Service>(
  "repo_clone",
  Effect.gen(function* () {
    const fs = yield* AppFileSystem.Service
    const git = yield* Git.Service
    return {
      description: DESCRIPTION,
      parameters: Parameters,
      execute: (params: Schema.Schema.Type<typeof Parameters>, ctx: Tool.Context<Metadata>) =>
        Effect.gen(function* () {
          // Normalize the user-supplied reference (URL / host-path / owner-repo shorthand).
          const reference = parseRepositoryReference(params.repository)
          if (!reference) throw new Error("Repository must be a git URL, host/path reference, or GitHub owner/repo shorthand")
          const repository = reference.label
          const remote = reference.remote
          const localPath = repositoryCachePath(reference)
          // Re-parse the remote URL so cache-reuse comparison happens on the same
          // normalized form; falls back to the original reference when unparsable.
          const cloneTarget = parseRepositoryReference(remote) ?? reference
          // Ask for permission before touching the network or the cache.
          yield* ctx.ask({
            permission: "repo_clone",
            patterns: [repository],
            always: [repository],
            metadata: {
              repository,
              remote,
              path: localPath,
              refresh: Boolean(params.refresh),
            },
          })
          // Serialize concurrent operations on the same cache path via a Flock lock,
          // released in the release step below even on failure.
          return yield* Effect.acquireUseRelease(
            Effect.promise((signal) => Flock.acquire(`repo-clone:${localPath}`, { signal })),
            () =>
              Effect.gen(function* () {
                yield* fs.ensureDir(path.dirname(localPath)).pipe(Effect.orDie)
                const exists = yield* fs.existsSafe(localPath)
                const hasGitDir = yield* fs.existsSafe(path.join(localPath, ".git"))
                // Only reuse the cached checkout when its origin matches the requested remote.
                const origin = hasGitDir
                  ? yield* git.run(["config", "--get", "remote.origin.url"], { cwd: localPath })
                  : undefined
                const originReference = origin?.exitCode === 0 ? parseRepositoryReference(origin.text().trim()) : undefined
                const reuse = hasGitDir && Boolean(originReference && sameRepositoryReference(originReference, cloneTarget))
                if (exists && !reuse) {
                  // Stale or foreign content at the cache path: wipe before cloning.
                  yield* fs.remove(localPath, { recursive: true }).pipe(Effect.orDie)
                }
                const status = statusForRepository({ reuse, refresh: params.refresh })
                if (status === "cloned") {
                  // Shallow clone (depth 100) keeps the cache small while retaining recent history.
                  const clone = yield* git.run(["clone", "--depth", "100", remote, localPath], { cwd: path.dirname(localPath) })
                  if (clone.exitCode !== 0) {
                    throw new Error(clone.stderr.toString().trim() || clone.text().trim() || `Failed to clone ${repository}`)
                  }
                }
                if (status === "refreshed") {
                  // Fetch everything, then hard-reset to the freshest sensible target.
                  const fetch = yield* git.run(["fetch", "--all", "--prune"], { cwd: localPath })
                  if (fetch.exitCode !== 0) {
                    throw new Error(fetch.stderr.toString().trim() || fetch.text().trim() || `Failed to refresh ${repository}`)
                  }
                  const remoteHead = yield* git.run(["symbolic-ref", "refs/remotes/origin/HEAD"], { cwd: localPath })
                  const branch = yield* git.run(["symbolic-ref", "--quiet", "--short", "HEAD"], { cwd: localPath })
                  const target = resetTarget({
                    remoteHead: { code: remoteHead.exitCode, stdout: remoteHead.text().trim() },
                    branch: { code: branch.exitCode, stdout: branch.text().trim() },
                  })
                  const reset = yield* git.run(["reset", "--hard", target], { cwd: localPath })
                  if (reset.exitCode !== 0) {
                    throw new Error(reset.stderr.toString().trim() || reset.text().trim() || `Failed to reset ${repository}`)
                  }
                }
                // Result metadata; HEAD and branch are best-effort.
                const head = yield* git.run(["rev-parse", "HEAD"], { cwd: localPath })
                const branch = yield* git.branch(localPath)
                const headText = head.exitCode === 0 ? head.text().trim() : undefined
                return {
                  title: repository,
                  metadata: {
                    repository,
                    host: reference.host,
                    remote,
                    localPath,
                    status,
                    head: headText,
                    branch,
                  },
                  output: [
                    `Repository ready: ${repository}`,
                    `Status: ${status}`,
                    `Local path: ${localPath}`,
                    ...(branch ? [`Branch: ${branch}`] : []),
                    ...(headText ? [`HEAD: ${headText}`] : []),
                  ].join("\n"),
                }
              }),
            // Always release the lock; release failures are ignored.
            (lock) => Effect.promise(() => lock.release()).pipe(Effect.ignore),
          )
        }).pipe(Effect.orDie),
    } satisfies Tool.DefWithoutID<typeof Parameters, Metadata>
  }),
)

View File

@@ -0,0 +1,5 @@
- Clone or refresh a repository into OpenCode's managed cache under the data directory
- Accepts git URLs, forge host/path references, or GitHub owner/repo shorthand
- Returns the cached absolute local path so other tools can explore the cloned source
- Use this before Read, Glob, or Grep when the code you need lives outside the current workspace
- This tool is intended for dependency and documentation research workflows, not for modifying the user's workspace

View File

@@ -0,0 +1,238 @@
import path from "path"
import { Effect, Schema } from "effect"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Git } from "@/git"
import { assertExternalDirectoryEffect } from "./external-directory"
import DESCRIPTION from "./repo_overview.txt"
import * as Tool from "./tool"
import { parseRepositoryReference, repositoryCachePath } from "@/util/repository"
import { Instance } from "@/project/instance"
// Input schema for the repo_overview tool: either a cached repository
// reference or an explicit directory path (plus an optional tree depth).
export const Parameters = Schema.Struct({
  repository: Schema.optional(Schema.String).annotate({
    description: "Cached repository to inspect, as a git URL, host/path reference, or GitHub owner/repo shorthand",
  }),
  path: Schema.optional(Schema.String).annotate({
    description: "Directory path to inspect instead of a cached repository",
  }),
  depth: Schema.optional(Schema.Number).annotate({
    description: "Maximum structure depth to include. Defaults to 3.",
  })
})

// Metadata attached to the tool result.
type Metadata = {
  path: string // absolute directory that was inspected
  repository?: string // normalized repository label, when inspecting a cached repo
  branch?: string // current branch, when resolvable
  head?: string // HEAD commit sha, when `git rev-parse HEAD` succeeds
  package_manager?: string // detected from lockfiles; undefined when none match
  ecosystems: string[] // detected from top-level manifest files
  dependency_files: string[] // DEPENDENCY_FILES present at the top level
  entrypoints: string[] // package.json entries plus conventional entry files
  depth: number // effective structure depth used (clamped to [1, 6], default 3)
  truncated: boolean // true when the structure tree hit STRUCTURE_LIMIT
}
// Directories skipped when building the structure tree (VCS, dependency, and build output dirs).
const IGNORED_DIRS = new Set([".git", "node_modules", "__pycache__", ".venv", "dist", "build", ".next", "target", "vendor"])
// Hard cap on the number of structure-tree lines, to keep tool output compact.
const STRUCTURE_LIMIT = 200
// Top-level manifest/lockfile names used to detect ecosystems and the package manager.
const DEPENDENCY_FILES = [
  "package.json",
  "package-lock.json",
  "bun.lock",
  "bun.lockb",
  "pnpm-lock.yaml",
  "yarn.lock",
  "requirements.txt",
  "pyproject.toml",
  "go.mod",
  "Cargo.toml",
  "Gemfile",
  "build.gradle",
  "build.gradle.kts",
  "pom.xml",
  "composer.json",
]
/** Infer the JS package manager from lockfiles present at the repo root; undefined when none match. */
function packageManager(files: Set<string>): "bun" | "pnpm" | "yarn" | "npm" | undefined {
  // Ordered by priority: the first lockfile found wins.
  const lockfiles: Array<[string, "bun" | "pnpm" | "yarn" | "npm"]> = [
    ["bun.lock", "bun"],
    ["bun.lockb", "bun"],
    ["pnpm-lock.yaml", "pnpm"],
    ["yarn.lock", "yarn"],
    ["package-lock.json", "npm"],
  ]
  for (const [file, manager] of lockfiles) {
    if (files.has(file)) return manager
  }
  return undefined
}
/** List the language ecosystems suggested by top-level manifest files, in a fixed display order. */
function ecosystems(files: Set<string>): string[] {
  const detected: string[] = []
  if (files.has("package.json")) detected.push("Node.js")
  if (files.has("pyproject.toml") || files.has("requirements.txt")) detected.push("Python")
  if (files.has("go.mod")) detected.push("Go")
  if (files.has("Cargo.toml")) detected.push("Rust")
  if (files.has("Gemfile")) detected.push("Ruby")
  if (files.has("build.gradle") || files.has("build.gradle.kts") || files.has("pom.xml")) detected.push("Java/Kotlin")
  if (files.has("composer.json")) detected.push("PHP")
  return detected
}
/** Return the conventional entrypoint file names (in canonical order) that appear in `files`. */
function commonEntrypoints(files: Set<string>): string[] {
  const candidates = [
    "index.ts",
    "index.tsx",
    "index.js",
    "index.mjs",
    "main.ts",
    "main.js",
    "src/index.ts",
    "src/index.tsx",
    "src/index.js",
    "src/main.ts",
    "src/main.js",
  ]
  const found: string[] = []
  for (const candidate of candidates) {
    if (files.has(candidate)) found.push(candidate)
  }
  return found
}
// Tool: summarize a cached repository or local directory — detected ecosystems,
// dependency files, package manager, likely entrypoints, and a compact structure tree.
export const RepoOverviewTool = Tool.define<typeof Parameters, Metadata, AppFileSystem.Service | Git.Service>(
  "repo_overview",
  Effect.gen(function* () {
    const fs = yield* AppFileSystem.Service
    const git = yield* Git.Service
    // Resolve params to a concrete directory: an explicit path wins; otherwise
    // map the repository reference to its managed cache location.
    const resolveTarget = Effect.fn("RepoOverviewTool.resolveTarget")(function* (params: Schema.Schema.Type<typeof Parameters>) {
      if (params.path) {
        const full = path.isAbsolute(params.path) ? params.path : path.resolve(Instance.directory, params.path)
        return { path: full, repository: params.repository }
      }
      if (!params.repository) throw new Error("Either repository or path is required")
      const parsed = parseRepositoryReference(params.repository)
      if (!parsed) throw new Error("Repository must be a git URL, host/path reference, or GitHub owner/repo shorthand")
      const repository = parsed.label
      return {
        repository,
        path: repositoryCachePath(parsed),
      }
    })
    // Build an indented directory tree up to `depth` levels, capped at
    // STRUCTURE_LIMIT lines; directories sort before files, then by name.
    const structure = Effect.fn("RepoOverviewTool.structure")(function* (root: string, depth: number) {
      let truncated = false
      const lines: string[] = []
      const visit: (dir: string, level: number) => Effect.Effect<void> = Effect.fnUntraced(function* (dir: string, level: number) {
        if (level >= depth || lines.length >= STRUCTURE_LIMIT) {
          truncated = truncated || lines.length >= STRUCTURE_LIMIT
          return
        }
        const entries = yield* fs.readDirectoryEntries(dir).pipe(Effect.orElseSucceed(() => []))
        // Stat entries concurrently; ignored and unreadable entries are dropped.
        const sorted = yield* Effect.forEach(
          entries,
          Effect.fnUntraced(function* (entry) {
            if (IGNORED_DIRS.has(entry.name)) return undefined
            const full = path.join(dir, entry.name)
            const info = yield* fs.stat(full).pipe(Effect.catch(() => Effect.succeed(undefined)))
            if (!info) return undefined
            return { name: entry.name, full, directory: info.type === "Directory" }
          }),
          { concurrency: 16 },
        ).pipe(
          Effect.map((items) =>
            items
              .filter((item): item is { name: string; full: string; directory: boolean } => Boolean(item))
              .sort((a, b) => Number(b.directory) - Number(a.directory) || a.name.localeCompare(b.name)),
          ),
        )
        for (const entry of sorted) {
          if (lines.length >= STRUCTURE_LIMIT) {
            truncated = true
            return
          }
          lines.push(`${" ".repeat(level)}${entry.name}${entry.directory ? "/" : ""}`)
          if (entry.directory) yield* visit(entry.full, level + 1)
        }
      })
      yield* visit(root, 0)
      return { lines, truncated }
    })
    return {
      description: DESCRIPTION,
      parameters: Parameters,
      execute: (params: Schema.Schema.Type<typeof Parameters>, ctx: Tool.Context<Metadata>) =>
        Effect.gen(function* () {
          const target = yield* resolveTarget(params)
          // Non-integer or out-of-range depth falls back to the default of 3.
          const depth = !params.depth || !Number.isInteger(params.depth) || params.depth < 1 || params.depth > 6 ? 3 : params.depth
          yield* assertExternalDirectoryEffect(ctx, target.path, { kind: "directory" })
          yield* ctx.ask({
            permission: "repo_overview",
            patterns: [target.repository ?? target.path],
            always: [target.repository ?? target.path],
            metadata: {
              repository: target.repository,
              path: target.path,
              depth,
            },
          })
          const info = yield* fs.stat(target.path).pipe(Effect.catch(() => Effect.succeed(undefined)))
          if (!info) {
            if (target.repository) throw new Error(`Repository is not cloned: ${target.repository}. Use repo_clone first.`)
            throw new Error(`Directory not found: ${target.path}`)
          }
          if (info.type !== "Directory") throw new Error(`Path is not a directory: ${target.path}`)
          const entries = yield* fs.readDirectoryEntries(target.path).pipe(Effect.orElseSucceed(() => []))
          const topLevel = new Set(entries.map((entry) => entry.name))
          const dependencyFiles = DEPENDENCY_FILES.filter((file) => topLevel.has(file))
          // package.json is parsed best-effort; read/parse failures degrade to {}.
          const packageJson = topLevel.has("package.json")
            ? (yield* fs.readJson(path.join(target.path, "package.json")).pipe(Effect.orElseSucceed(() => ({})))) as Record<string, unknown>
            : {}
          // Entrypoints declared in package.json (main/module/types/bin/exports, exports capped at 10).
          const entrypoints = [
            ...(typeof packageJson.main === "string" ? [`main: ${packageJson.main}`] : []),
            ...(typeof packageJson.module === "string" ? [`module: ${packageJson.module}`] : []),
            ...(typeof packageJson.types === "string" ? [`types: ${packageJson.types}`] : []),
            ...(typeof packageJson.bin === "string" ? [`bin: ${packageJson.bin}`] : []),
            ...(packageJson.bin && typeof packageJson.bin === "object" && !Array.isArray(packageJson.bin)
              ? Object.keys(packageJson.bin as Record<string, unknown>).map((name) => `bin: ${name}`)
              : []),
            ...(packageJson.exports && typeof packageJson.exports === "object" && !Array.isArray(packageJson.exports)
              ? Object.keys(packageJson.exports as Record<string, unknown>).slice(0, 10).map((name) => `exports: ${name}`)
              : []),
          ]
          // NOTE(review): when a `src` directory exists, ALL src/* candidate names are
          // added to the probe set without checking the files themselves exist, so
          // non-existent src entrypoints can be reported — confirm this is intended.
          const common = commonEntrypoints(new Set([
            ...topLevel,
            ...entries
              .filter((entry) => entry.name === "src")
              .flatMap(() => ["src/index.ts", "src/index.tsx", "src/index.js", "src/main.ts", "src/main.js"]),
          ]))
          const structureResult = yield* structure(target.path, depth)
          // Git info is best-effort; branch/head stay undefined for plain directories.
          const branch = yield* git.branch(target.path)
          const head = yield* git.run(["rev-parse", "HEAD"], { cwd: target.path })
          const headText = head.exitCode === 0 ? head.text().trim() : undefined
          const metadata: Metadata = {
            path: target.path,
            repository: target.repository,
            branch,
            head: headText,
            package_manager: packageManager(topLevel),
            ecosystems: ecosystems(topLevel),
            dependency_files: dependencyFiles,
            entrypoints: [...entrypoints, ...common.map((file) => `file: ${file}`)],
            depth,
            truncated: structureResult.truncated,
          }
          return {
            title: target.repository ?? path.basename(target.path),
            metadata,
            output: [
              `Path: ${target.path}`,
              ...(target.repository ? [`Repository: ${target.repository}`] : []),
              ...(branch ? [`Branch: ${branch}`] : []),
              ...(headText ? [`HEAD: ${headText}`] : []),
              ...(metadata.ecosystems.length ? [`Ecosystems: ${metadata.ecosystems.join(", ")}`] : []),
              ...(metadata.package_manager ? [`Package manager: ${metadata.package_manager}`] : []),
              ...(metadata.dependency_files.length ? [`Dependency files: ${metadata.dependency_files.join(", ")}`] : []),
              ...(metadata.entrypoints.length ? ["Likely entrypoints:", ...metadata.entrypoints.map((entry) => `- ${entry}`)] : []),
              "Top-level structure:",
              ...structureResult.lines,
              ...(structureResult.truncated ? ["(Structure truncated)"] : []),
            ].join("\n"),
          }
        }).pipe(Effect.orDie),
    } satisfies Tool.DefWithoutID<typeof Parameters, Metadata>
  }),
)

View File

@@ -0,0 +1,4 @@
- Summarize the structure and likely entrypoints of a cloned repository or local directory
- Accepts either a cached repository reference or a directory path
- Reports detected ecosystems, dependency files, package manager, likely entrypoints, and a compact structure tree
- Use this after repo_clone to orient quickly before deeper Read, Glob, or Grep investigation

View File

@@ -1,10 +1,8 @@
import { Schema } from "effect"
import { PositiveInt } from "@/util/schema"
import { Effect, Stream } from "effect"
import os from "os"
import { createWriteStream } from "node:fs"
import * as Tool from "./tool"
import path from "path"
import DESCRIPTION from "./bash.txt"
import * as Log from "@opencode-ai/core/util/log"
import { Instance, type InstanceContext } from "../project/instance"
import { lazy } from "@/util/lazy"
@@ -14,20 +12,22 @@ import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { fileURLToPath } from "url"
import { Config } from "@/config/config"
import { Flag } from "@opencode-ai/core/flag/flag"
import { Global } from "@opencode-ai/core/global"
import { Shell } from "@/shell/shell"
import { ShellKind, ShellToolID } from "./shell/id"
import { BashArity } from "@/permission/arity"
import * as Truncate from "./truncate"
import { Plugin } from "@/plugin"
import { Effect, Stream } from "effect"
import { ChildProcess } from "effect/unstable/process"
import { ChildProcessSpawner } from "effect/unstable/process/ChildProcessSpawner"
import { ShellPrompt, type Parameters } from "./shell/prompt"
import { BashArity } from "@/permission/arity"
import { InstanceState } from "@/effect/instance-state"
export { Parameters } from "./shell/prompt"
const MAX_METADATA_LENGTH = 30_000
const DEFAULT_TIMEOUT = Flag.OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS || 2 * 60 * 1000
const CWD = new Set(["cd", "push-location", "set-location"])
const CWD = new Set(["cd", "chdir", "popd", "pushd", "push-location", "set-location"])
const FILES = new Set([
...CWD,
"rm",
@@ -50,21 +50,9 @@ const FILES = new Set([
"new-item",
"rename-item",
])
const CMD_FILES = new Set(["copy", "del", "dir", "erase", "md", "mkdir", "move", "rd", "ren", "rename", "rmdir", "type"])
const FLAGS = new Set(["-destination", "-literalpath", "-path"])
const SWITCHES = new Set(["-confirm", "-debug", "-force", "-nonewline", "-recurse", "-verbose", "-whatif"])
export const Parameters = Schema.Struct({
command: Schema.String.annotate({ description: "The command to execute" }),
timeout: Schema.optional(PositiveInt).annotate({ description: "Optional timeout in milliseconds" }),
workdir: Schema.optional(Schema.String).annotate({
description: `The working directory to run the command in. Defaults to the current directory. Use this instead of 'cd' commands.`,
}),
description: Schema.String.annotate({
description:
"Clear, concise description of what this command does in 5-10 words. Examples:\nInput: ls\nOutput: Lists files in current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: mkdir foo\nOutput: Creates directory 'foo'",
}),
})
type Part = {
type: string
text: string
@@ -81,7 +69,7 @@ type Chunk = {
size: number
}
export const log = Log.create({ service: "bash-tool" })
export const log = Log.create({ service: "shell-tool" })
const resolveWasm = (asset: string) => {
if (asset.startsWith("file://")) return fileURLToPath(asset)
@@ -187,11 +175,16 @@ function prefix(text: string) {
return text.slice(0, match.index)
}
function pathArgs(list: Part[], ps: boolean) {
function pathArgs(list: Part[], ps: boolean, cmd = false) {
if (!ps) {
return list
.slice(1)
.filter((item) => !item.text.startsWith("-") && !(list[0]?.text === "chmod" && item.text.startsWith("+")))
.filter(
(item) =>
!item.text.startsWith("-") &&
!(cmd && item.text.startsWith("/")) &&
!(list[0]?.text === "chmod" && item.text.startsWith("+")),
)
.map((item) => item.text)
}
@@ -251,13 +244,13 @@ function tail(text: string, maxLines: number, maxBytes: number) {
}
}
const parse = Effect.fn("BashTool.parse")(function* (command: string, ps: boolean) {
const parse = Effect.fn("ShellTool.parse")(function* (command: string, ps: boolean) {
const tree = yield* Effect.promise(() => parser().then((p) => (ps ? p.ps : p.bash).parse(command)))
if (!tree) throw new Error("Failed to parse command")
if (!tree) return yield* Effect.die(new Error("Failed to parse command"))
return tree
})
const ask = Effect.fn("BashTool.ask")(function* (ctx: Tool.Context, scan: Scan) {
const ask = Effect.fn("ShellTool.ask")(function* (ctx: Tool.Context, scan: Scan) {
if (scan.dirs.size > 0) {
const globs = Array.from(scan.dirs).map((dir) => {
if (process.platform === "win32") return AppFileSystem.normalizePathPattern(path.join(dir, "*"))
@@ -273,7 +266,7 @@ const ask = Effect.fn("BashTool.ask")(function* (ctx: Tool.Context, scan: Scan)
if (scan.patterns.size === 0) return
yield* ctx.ask({
permission: "bash",
permission: ShellToolID.id,
patterns: Array.from(scan.patterns),
always: Array.from(scan.always),
metadata: {},
@@ -325,9 +318,8 @@ const parser = lazy(async () => {
return { bash, ps }
})
// TODO: we may wanna rename this tool so it works better on other shells
export const BashTool = Tool.define(
"bash",
export const ShellTool = Tool.define(
ShellToolID.id,
Effect.gen(function* () {
const config = yield* Config.Service
const spawner = yield* ChildProcessSpawner
@@ -335,7 +327,7 @@ export const BashTool = Tool.define(
const trunc = yield* Truncate.Service
const plugin = yield* Plugin.Service
const cygpath = Effect.fn("BashTool.cygpath")(function* (shell: string, text: string) {
const cygpath = Effect.fn("ShellTool.cygpath")(function* (shell: string, text: string) {
const lines = yield* spawner
.lines(ChildProcess.make(shell, ["-lc", 'cygpath -w -- "$1"', "_", text]))
.pipe(Effect.catch(() => Effect.succeed([] as string[])))
@@ -344,7 +336,7 @@ export const BashTool = Tool.define(
return AppFileSystem.normalizePath(file)
})
const resolvePath = Effect.fn("BashTool.resolvePath")(function* (text: string, root: string, shell: string) {
const resolvePath = Effect.fn("ShellTool.resolvePath")(function* (text: string, root: string, shell: string) {
if (process.platform === "win32") {
if (Shell.posix(shell) && text.startsWith("/") && AppFileSystem.windowsPath(text) === text) {
const file = yield* cygpath(shell, text)
@@ -355,7 +347,7 @@ export const BashTool = Tool.define(
return path.resolve(root, text)
})
const argPath = Effect.fn("BashTool.argPath")(function* (arg: string, cwd: string, ps: boolean, shell: string) {
const argPath = Effect.fn("ShellTool.argPath")(function* (arg: string, cwd: string, ps: boolean, shell: string) {
const text = ps ? expand(arg, cwd, shell) : home(unquote(arg))
const file = text && prefix(text)
if (!file || dynamic(file, ps)) return
@@ -364,7 +356,7 @@ export const BashTool = Tool.define(
return yield* resolvePath(next, cwd, shell)
})
const collect = Effect.fn("BashTool.collect")(function* (
const collect = Effect.fn("ShellTool.collect")(function* (
root: Node,
cwd: string,
ps: boolean,
@@ -376,14 +368,15 @@ export const BashTool = Tool.define(
patterns: new Set<string>(),
always: new Set<string>(),
}
const shellKind = ShellKind.from(Shell.name(shell))
for (const node of commands(root)) {
const command = parts(node)
const tokens = command.map((item) => item.text)
const cmd = ps ? tokens[0]?.toLowerCase() : tokens[0]
if (cmd && FILES.has(cmd)) {
for (const arg of pathArgs(command, ps)) {
if (cmd && (FILES.has(cmd) || (shellKind === "cmd" && CMD_FILES.has(cmd)))) {
for (const arg of pathArgs(command, ps, shellKind === "cmd")) {
const resolved = yield* argPath(arg, cwd, ps, shell)
log.info("resolved path", { arg, resolved })
if (!resolved || Instance.containsPath(resolved, instance)) continue
@@ -401,7 +394,7 @@ export const BashTool = Tool.define(
return scan
})
const shellEnv = Effect.fn("BashTool.shellEnv")(function* (ctx: Tool.Context, cwd: string) {
const shellEnv = Effect.fn("ShellTool.shellEnv")(function* (ctx: Tool.Context, cwd: string) {
const extra = yield* plugin.trigger(
"shell.env",
{ cwd, sessionID: ctx.sessionID, callID: ctx.callID },
@@ -413,7 +406,7 @@ export const BashTool = Tool.define(
}
})
const run = Effect.fn("BashTool.run")(function* (
const run = Effect.fn("ShellTool.run")(function* (
input: {
shell: string
command: string
@@ -527,7 +520,7 @@ export const BashTool = Tool.define(
const meta: string[] = []
if (expired) {
meta.push(
`bash tool terminated command after exceeding timeout ${input.timeout} ms. If this command is expected to take longer and is not waiting for interactive input, retry with a larger timeout value in milliseconds.`,
`shell tool terminated command after exceeding timeout ${input.timeout} ms. If this command is expected to take longer and is not waiting for interactive input, retry with a larger timeout value in milliseconds.`,
)
}
if (aborted) meta.push("User aborted the command")
@@ -546,7 +539,7 @@ export const BashTool = Tool.define(
}
if (meta.length > 0) {
output += "\n\n<bash_metadata>\n" + meta.join("\n") + "\n</bash_metadata>"
output += "\n\n<shell_metadata>\n" + meta.join("\n") + "\n</shell_metadata>"
}
if (sink) {
const stream = sink
@@ -577,32 +570,23 @@ export const BashTool = Tool.define(
const cfg = yield* config.get()
const shell = Shell.acceptable(cfg.shell)
const name = Shell.name(shell)
const chain =
name === "powershell"
? "If the commands depend on each other and must run sequentially, avoid '&&' in this shell because Windows PowerShell 5.1 does not support it. Use PowerShell conditionals such as `cmd1; if ($?) { cmd2 }` when later commands must depend on earlier success."
: "If the commands depend on each other and must run sequentially, use a single Bash call with '&&' to chain them together (e.g., `git add . && git commit -m \"message\" && git push`). For instance, if one operation must complete before another starts (like mkdir before cp, Write before Bash for git operations, or git add before git commit), run these operations sequentially instead."
log.info("bash tool using shell", { shell })
const limits = yield* trunc.limits()
const instance = yield* InstanceState.context
const prompt = ShellPrompt.render(name, process.platform, limits)
log.info("shell tool using shell", { shell })
return {
description: DESCRIPTION.replaceAll("${directory}", instance.directory)
.replaceAll("${tmp}", Global.Path.tmp)
.replaceAll("${os}", process.platform)
.replaceAll("${shell}", name)
.replaceAll("${chaining}", chain)
.replaceAll("${maxLines}", String(limits.maxLines))
.replaceAll("${maxBytes}", String(limits.maxBytes)),
parameters: Parameters,
execute: (params: Schema.Schema.Type<typeof Parameters>, ctx: Tool.Context) =>
description: prompt.description,
parameters: prompt.parameters,
execute: (params: Parameters, ctx: Tool.Context) =>
Effect.gen(function* () {
const executeInstance = yield* InstanceState.context
const cwd = params.workdir
? yield* resolvePath(params.workdir, executeInstance.directory, shell)
: executeInstance.directory
if (params.timeout !== undefined && params.timeout < 0) {
throw new Error(`Invalid timeout value: ${params.timeout}. Timeout must be a positive number.`)
return yield* Effect.die(
new Error(`Invalid timeout value: ${params.timeout}. Timeout must be a positive number.`),
)
}
const timeout = params.timeout ?? DEFAULT_TIMEOUT
const ps = Shell.ps(shell)

View File

@@ -0,0 +1,28 @@
/** Shell families the shell tool can target, with helpers to classify arbitrary shell names. */
export namespace ShellKind {
  /** Supported shell identifiers, in canonical order. */
  export const ids = ["bash", "pwsh", "powershell", "cmd"] as const
  export type ID = (typeof ids)[number]

  /** Type guard: true when `value` is one of the supported shell ids. */
  export function has(value: string): value is ID {
    return (ids as readonly string[]).includes(value)
  }

  /** Coerce an arbitrary shell name to a supported id, defaulting to "bash". */
  export function from(value: string): ID {
    return has(value) ? value : "bash"
  }

  /** True for either PowerShell flavor (pwsh or Windows PowerShell). */
  export function powershell(value: string): boolean {
    return value === "pwsh" || value === "powershell"
  }
}
// Canonical id for the shell tool.
// NOTE(review): the id remains "bash" even though the tool was renamed to
// "shell" (ShellTool is defined with this id and the former "bash" permission
// now uses it) — presumably kept for backward compatibility; confirm before renaming.
export namespace ShellToolID {
  export const id = "bash"
  export type ID = typeof id
  // Type guard matching the shell tool id exactly.
  export function has(value: string): value is ID {
    return value === id
  }
}

View File

@@ -0,0 +1,296 @@
import { Schema } from "effect"
import DESCRIPTION from "./shell.txt"
// Shell-name groupings used to pick shell-specific prompt text.
const PS = new Set(["powershell", "pwsh"])
const CMD = new Set(["cmd"])
// Per-shell example text for the `description` parameter the model must fill in.
const descriptions = {
  bash:
    "Clear, concise description of what this command does in 5-10 words. Examples:\nInput: ls\nOutput: Lists files in current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: mkdir foo\nOutput: Creates directory 'foo'",
  powershell:
    'Clear, concise description of what this command does in 5-10 words. Examples:\nInput: Get-ChildItem -LiteralPath "."\nOutput: Lists current directory\n\nInput: git status\nOutput: Shows working tree status\n\nInput: npm install\nOutput: Installs package dependencies\n\nInput: New-Item -ItemType Directory -Path "tmp"\nOutput: Creates directory tmp',
  cmd:
    'Clear, concise description of what this command does in 5-10 words. Examples:\nInput: dir\nOutput: Lists current directory\n\nInput: if exist "package.json" type "package.json"\nOutput: Prints package.json when it exists\n\nInput: mkdir tmp\nOutput: Creates directory tmp',
}
// Output truncation limits substituted into the rendered prompt
// (presumably sourced from the truncation service — see trunc.limits() in the shell tool).
export type Limits = {
  maxLines: number
  maxBytes: number
}
/**
 * Build the shell tool's parameter schema. Only the `description` field's
 * prompt text varies per shell; the other fields are identical for all shells.
 */
export function parameterSchema(description: string) {
  const fields = {
    command: Schema.String.annotate({ description: "The command to execute" }),
    timeout: Schema.optional(Schema.Number).annotate({ description: "Optional timeout in milliseconds" }),
    workdir: Schema.optional(Schema.String).annotate({
      description: `The working directory to run the command in. Defaults to the current directory. Use this instead of 'cd' commands.`,
    }),
    description: Schema.String.annotate({ description }),
  }
  return Schema.Struct(fields)
}
// Default (bash-flavored) parameter schema; re-exported by the shell tool for existing callers.
export const Parameters = parameterSchema(descriptions.bash)
export type Parameters = Schema.Schema.Type<typeof Parameters>
/**
 * Substitute `${key}` placeholders in a prompt template from `values`.
 * Throws when the template references a key that has no value, so missing
 * substitutions fail loudly instead of leaking placeholders into the prompt.
 */
function renderPrompt(template: string, values: Record<string, string>): string {
  return template.replace(/\$\{(\w+)\}/g, (_match, key: string) => {
    const replacement = values[key]
    if (replacement !== undefined) return replacement
    throw new Error(`Missing shell prompt value: ${key}`)
  })
}
/** Human-readable display name for a shell id; unknown names pass through unchanged. */
function shellDisplayName(name: string): string {
  switch (name) {
    case "pwsh":
      return "PowerShell (7+)"
    case "powershell":
      return "Windows PowerShell (5.1)"
    case "cmd":
      return "cmd.exe"
    default:
      return name
  }
}
/**
 * Returns the shell-notes preamble prepended to the PowerShell command
 * section; empty string for any non-PowerShell shell. The literal text is
 * part of the tool's prompt and must not be reflowed.
 */
function powershellNotes(name: string) {
  switch (name) {
    case "pwsh":
      return `# PowerShell (7+) shell notes
- This cross-platform shell supports pipeline chain operators (\`&&\` and \`||\`).
- Use double quotes for interpolated strings (\`"Hello $name"\`), single quotes for verbatim strings.
- Prefer full cmdlet names like \`Get-ChildItem\`, \`Set-Content\`, \`Remove-Item\`, and \`New-Item\` over aliases.
- Use \`$(...)\` for subexpressions. Use \`@(...)\` for array expressions.
- To call a native executable whose path contains spaces, use the call operator: \`& "path/to/exe" args\`.
- Escape special characters with the PowerShell backtick character.`
    case "powershell":
      return `# Windows PowerShell (5.1) shell notes
- Use \`cmd1; if ($?) { cmd2 }\` to chain dependent commands.
- Use double quotes for interpolated strings (\`"Hello $name"\`), single quotes for verbatim strings.
- Prefer full cmdlet names like \`Get-ChildItem\`, \`Set-Content\`, \`Remove-Item\`, and \`New-Item\` over aliases.
- Use \`$(...)\` for subexpressions. Use \`@(...)\` for array expressions.
- To call a native executable whose path contains spaces, use the call operator: \`& "path/to/exe" args\`.
- Escape special characters with the PowerShell backtick character.`
    default:
      return ""
  }
}
/**
 * Picks the command-chaining guidance for the given shell. Windows
 * PowerShell 5.1 is checked before the PS-family set because it presumably
 * belongs to `PS` yet does not support `&&` — the check order is load-bearing.
 * The string values are runtime prompt text and are reproduced verbatim.
 */
function chainGuidance(name: string) {
  const legacyPowershell =
    "If the commands depend on each other and must run sequentially, avoid '&&' in this shell because Windows PowerShell (5.1) does not support it. Use PowerShell conditionals such as `cmd1; if ($?) { cmd2 }` when later commands must depend on earlier success."
  const modernPowershell =
    "If the commands depend on each other and must run sequentially, use a single Shell call with '&&' to chain them together (e.g., `git add . && git commit -m \"message\" && git push`). For instance, if one operation must complete before another starts (like New-Item before Copy-Item, Write before Shell for git operations, or git add before git commit), run these operations sequentially instead."
  const cmdShell =
    "If the commands depend on each other and must run sequentially, use a single Shell call with `&&` to chain them together (e.g., `mkdir out && dir out`). For instance, if one operation must complete before another starts, run these operations sequentially instead."
  const bash =
    "If the commands depend on each other and must run sequentially, use a single Bash call with '&&' to chain them together (e.g., `git add . && git commit -m \"message\" && git push`). For instance, if one operation must complete before another starts (like mkdir before cp, Write before Bash for git operations, or git add before git commit), run these operations sequentially instead."
  if (name === "powershell") return legacyPowershell
  if (PS.has(name)) return modernPowershell
  if (CMD.has(name)) return cmdShell
  return bash
}
/**
 * Builds the bash-specific command-execution section of the tool prompt.
 *
 * @param chain - Pre-rendered chaining guidance (from `chainGuidance`).
 * @param limits - Truncation thresholds interpolated into the usage notes.
 * @returns One template string; its exact bytes are the tool's behavior, so
 *   the literal below must not be reflowed or re-indented.
 */
function bashCommandSection(chain: string, limits: Limits) {
  return `Before executing the command, please follow these steps:
1. Directory Verification:
- If the command will create new directories or files, first use \`ls\` to verify the parent directory exists and is the correct location
- For example, before running "mkdir foo/bar", first use \`ls foo\` to check that "foo" exists and is the intended parent directory
2. Command Execution:
- Always quote file paths that contain spaces with double quotes (e.g., rm "path with spaces/file.txt")
- Examples of proper quoting:
- mkdir "/Users/name/My Documents" (correct)
- mkdir /Users/name/My Documents (incorrect - will fail)
- python "/path/with spaces/script.py" (correct)
- python /path/with spaces/script.py (incorrect - will fail)
- After ensuring proper quoting, execute the command.
- Capture the output of the command.
Usage notes:
- The command argument is required.
- You can specify an optional timeout in milliseconds. If not specified, commands will time out after 120000ms (2 minutes).
- It is very helpful if you write a clear, concise description of what this command does in 5-10 words.
- If the output exceeds ${limits.maxLines} lines or ${limits.maxBytes} bytes, it will be truncated and the full output will be written to a file. You can use Read with offset/limit to read specific sections or Grep to search the full content. Do NOT use \`head\`, \`tail\`, or other truncation commands to limit output; the full output will already be captured to a file for more precise searching.
- Avoid using Bash with the \`find\`, \`grep\`, \`cat\`, \`head\`, \`tail\`, \`sed\`, \`awk\`, or \`echo\` commands, unless explicitly instructed or when these commands are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:
- File search: Use Glob (NOT find or ls)
- Content search: Use Grep (NOT grep or rg)
- Read files: Use Read (NOT cat/head/tail)
- Edit files: Use Edit (NOT sed/awk)
- Write files: Use Write (NOT echo >/cat <<EOF)
- Communication: Output text directly (NOT echo/printf)
- When issuing multiple commands:
- If the commands are independent and can run in parallel, make multiple Shell tool calls in a single message. For example, if you need to run "git status" and "git diff", send a single message with two Shell tool calls in parallel.
- ${chain}
- Use ';' only when you need to run commands sequentially but don't care if earlier commands fail
- DO NOT use newlines to separate commands (newlines are ok in quoted strings)
- AVOID using \`cd <directory> && <command>\`. Use the \`workdir\` parameter to change directories instead.
<good-example>
Use workdir="/foo/bar" with command: pytest tests
</good-example>
<bad-example>
cd /foo/bar && pytest tests
</bad-example>`
}
/**
 * Builds the PowerShell-specific command-execution section of the tool prompt.
 *
 * @param name - "pwsh" or "powershell"; selects the notes preamble and the
 *   bad-example chaining syntax (5.1 has no \`&&\`).
 * @param chain - Pre-rendered chaining guidance (from `chainGuidance`).
 * @param pathSep - Platform path separator used in the example paths.
 * @param limits - Truncation thresholds interpolated into the usage notes.
 * @returns One template string; its exact bytes are the tool's behavior, so
 *   the literal below must not be reflowed or re-indented.
 */
function powershellCommandSection(name: string, chain: string, pathSep: string, limits: Limits) {
  return `${powershellNotes(name)}
Before executing the command, please follow these steps:
1. Directory Verification:
- If the command will create new directories or files, first use \`Test-Path -LiteralPath <parent>\` to verify the parent directory exists and is the correct location
- For example, before creating \`foo${pathSep}bar\`, first use \`Test-Path -LiteralPath "foo"\` to check that \`foo\` exists and is the intended parent directory
2. Command Execution:
- Always quote file paths that contain spaces with double quotes (e.g., Remove-Item -LiteralPath "path with spaces${pathSep}file.txt")
- Examples of proper quoting:
- New-Item -ItemType Directory -Path "My Documents" (correct)
- New-Item -ItemType Directory -Path My Documents (incorrect - path is split)
- & "path with spaces${pathSep}script.ps1" (correct)
- path with spaces${pathSep}script.ps1 (incorrect - path is split and not invoked)
- After ensuring proper quoting, execute the command.
- Capture the output of the command.
Usage notes:
- The command argument is required.
- You can specify an optional timeout in milliseconds. If not specified, commands will time out after 120000ms (2 minutes).
- It is very helpful if you write a clear, concise description of what this command does in 5-10 words.
- If the output exceeds ${limits.maxLines} lines or ${limits.maxBytes} bytes, it will be truncated and the full output will be written to a file. You can use Read with offset/limit to read specific sections or Grep to search the full content. Do NOT use \`Select-Object -First\`, \`Select-Object -Last\`, or other truncation commands to limit output; the full output will already be captured to a file for more precise searching.
- Avoid using Shell with PowerShell file/content cmdlets unless explicitly instructed or when these cmdlets are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:
- File search: Use Glob (NOT Get-ChildItem)
- Content search: Use Grep (NOT Select-String)
- Read files: Use Read (NOT Get-Content)
- Edit files: Use Edit (NOT Set-Content)
- Write files: Use Write (NOT Set-Content/Out-File or here-strings)
- Communication: Output text directly (NOT Write-Output/Write-Host)
- When issuing multiple commands:
- If the commands are independent and can run in parallel, make multiple Shell tool calls in a single message. For example, if you need to run "git status" and "git diff", send a single message with two Shell tool calls in parallel.
- ${chain}
- Use \`;\` only when you need to run commands sequentially but don't care if earlier commands fail
- DO NOT use newlines to separate commands (newlines are ok in quoted strings)
- AVOID changing directories inside the command. Use the \`workdir\` parameter to change directories instead.
<good-example>
Use workdir="project${pathSep}subdir" with command: pytest tests
</good-example>
<bad-example>
${name === "powershell" ? `Set-Location -LiteralPath "project${pathSep}subdir"; if ($?) { pytest tests }` : `Set-Location -LiteralPath "project${pathSep}subdir" && pytest tests`}
</bad-example>`
}
/**
 * Builds the cmd.exe-specific command-execution section of the tool prompt.
 *
 * @param chain - Pre-rendered chaining guidance (from `chainGuidance`).
 * @param limits - Truncation thresholds interpolated into the usage notes.
 * @returns One template string; its exact bytes are the tool's behavior, so
 *   the literal below must not be reflowed or re-indented.
 */
function cmdCommandSection(chain: string, limits: Limits) {
  return `# cmd.exe shell notes
- Use double quotes for paths with spaces.
- Use %VAR% for environment variables.
- Use \`if exist\` for existence checks.
- Use \`call\` when invoking batch files from another batch-style command.
Before executing the command, please follow these steps:
1. Directory Verification:
- If the command will create new directories or files, first use \`if exist\` to verify the parent directory exists and is the correct location
- For example, before creating \`foo\\bar\`, first use \`if exist "foo\\" dir "foo"\` to check that \`foo\` exists and is the intended parent directory
2. Command Execution:
- Always quote file paths that contain spaces with double quotes (e.g., del "path with spaces\\file.txt")
- Examples of proper quoting:
- mkdir "My Documents" (correct)
- mkdir My Documents (incorrect - path is split)
- call "path with spaces\\script.bat" (correct)
- path with spaces\\script.bat (incorrect - path is split and not invoked correctly)
- After ensuring proper quoting, execute the command.
- Capture the output of the command.
Usage notes:
- The command argument is required.
- You can specify an optional timeout in milliseconds. If not specified, commands will time out after 120000ms (2 minutes).
- It is very helpful if you write a clear, concise description of what this command does in 5-10 words.
- If the output exceeds ${limits.maxLines} lines or ${limits.maxBytes} bytes, it will be truncated and the full output will be written to a file. You can use Read with offset/limit to read specific sections or Grep to search the full content. Do NOT use \`more\` or other pagination commands to limit output; the full output will already be captured to a file for more precise searching.
- Avoid using Shell with cmd.exe file/content commands unless explicitly instructed or when these commands are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:
- File search: Use Glob (NOT dir /s)
- Content search: Use Grep (NOT findstr)
- Read files: Use Read (NOT type)
- Edit files: Use Edit (NOT copy)
- Write files: Use Write (NOT echo > file)
- Communication: Output text directly (NOT echo)
- When issuing multiple commands:
- If the commands are independent and can run in parallel, make multiple Shell tool calls in a single message. For example, if you need to run "dir" and "where cmd", send a single message with two Shell tool calls in parallel.
- ${chain}
- Use \`&\` only when you need to run commands sequentially but don't care if earlier commands fail
- DO NOT use newlines to separate commands (newlines are ok in quoted strings)
- AVOID changing directories inside the command. Use the \`workdir\` parameter to change directories instead.
<good-example>
Use workdir="project\\subdir" with command: dir
</good-example>
<bad-example>
cd /d "project\\subdir" && dir
</bad-example>`
}
/**
 * Selects the per-shell prompt profile: intro line, workdir guidance, the
 * shell-specific command section, git/PR wording, the PR-creation example,
 * and the parameter description for this shell.
 *
 * Dispatch order: cmd-family first, then PowerShell-family (membership via
 * the module-level `CMD`/`PS` sets), with bash as the fallback profile.
 */
function profile(name: string, platform: NodeJS.Platform, limits: Limits) {
  const isPowerShell = PS.has(name)
  const chain = chainGuidance(name)
  // cmd.exe profile: PR body goes through a temp file to avoid quoting pitfalls.
  if (CMD.has(name)) {
    return {
      intro: `Executes a given ${shellDisplayName(name)} command with optional timeout, ensuring proper handling and security measures.`,
      workdirSection:
        "All commands run in the current working directory by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID changing directories inside the command - use `workdir` instead.",
      commandSection: cmdCommandSection(chain, limits),
      gitCommands: "git commands",
      toolName: "Shell",
      gitCommandRestriction: "git commands",
      createPrInstruction: "Create PR using a temporary body file so cmd.exe quoting stays simple.",
      createPrExample: `(\n echo ## Summary\n echo - ^<1-3 bullet points^>\n) > pr-body.txt\ngh pr create --title "the pr title" --body-file pr-body.txt`,
      parameterDescription: descriptions.cmd,
    }
  }
  // PowerShell profile (both pwsh and 5.1): PR body passed via a here-string.
  if (isPowerShell) {
    return {
      intro: `Executes a given ${shellDisplayName(name)} command with optional timeout, ensuring proper handling and security measures.`,
      workdirSection:
        "All commands run in the current working directory by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID changing directories inside the command - use `workdir` instead.",
      // Path separator in the examples follows the host OS, not the shell.
      commandSection: powershellCommandSection(name, chain, platform === "win32" ? "\\" : "/", limits),
      gitCommands: "git commands",
      toolName: "Shell",
      gitCommandRestriction: "git commands",
      createPrInstruction: "Create PR using gh pr create with a PowerShell here-string to pass the body correctly.",
      createPrExample: `gh pr create --title "the pr title" --body @'
## Summary
- <1-3 bullet points>
'@`,
      parameterDescription: descriptions.powershell,
    }
  }
  // Fallback: POSIX bash profile.
  return {
    intro:
      "Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.",
    workdirSection:
      "All commands run in the current working directory by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID using `cd <directory> && <command>` patterns - use `workdir` instead.",
    commandSection: bashCommandSection(chain, limits),
    gitCommands: "bash commands",
    toolName: "Shell",
    gitCommandRestriction: "git bash commands",
    createPrInstruction:
      "Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.",
    // NOTE(review): this example's HEREDOC appears unterminated (no closing
    // EOF / ")" lines) — presumably the surrounding prompt template supplies
    // the closing text; confirm against the rendered output.
    createPrExample: `gh pr create --title "the pr title" --body "$(cat <<'EOF'
## Summary
<1-3 bullet points>`,
    parameterDescription: descriptions.bash,
  }
}
export function render(name: string, platform: NodeJS.Platform, limits: Limits) {
const selected = profile(name, platform, limits)
return {
description: renderPrompt(DESCRIPTION, {
intro: selected.intro,
os: platform,
shell: name,
workdirSection: selected.workdirSection,
commandSection: selected.commandSection,
gitCommands: selected.gitCommands,
toolName: selected.toolName,
gitCommandRestriction: selected.gitCommandRestriction,
createPrInstruction: selected.createPrInstruction,
createPrExample: selected.createPrExample,
}),
parameters: parameterSchema(selected.parameterDescription),
}
}
export * as ShellPrompt from "./prompt"

View File

@@ -1,54 +1,14 @@
Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
${intro}
Be aware: OS: ${os}, Shell: ${shell}
All commands run in the current working directory by default. Use the `workdir` parameter if you need to run a command in a different directory. AVOID using `cd <directory> && <command>` patterns - use `workdir` instead.
${workdirSection}
Use `${tmp}` for temporary work outside the workspace. This directory has already been created, already exists, and is pre-approved for external directory access.
IMPORTANT: This tool is for terminal operations like git, npm, docker, etc. DO NOT use it for file operations (reading, writing, editing, searching, finding files) - use the specialized tools for this instead.
Before executing the command, please follow these steps:
1. Directory Verification:
- If the command will create new directories or files, first use `ls` to verify the parent directory exists and is the correct location
- For example, before running "mkdir foo/bar", first use `ls foo` to check that "foo" exists and is the intended parent directory
2. Command Execution:
- Always quote file paths that contain spaces with double quotes (e.g., rm "path with spaces/file.txt")
- Examples of proper quoting:
- mkdir "/Users/name/My Documents" (correct)
- mkdir /Users/name/My Documents (incorrect - will fail)
- python "/path/with spaces/script.py" (correct)
- python /path/with spaces/script.py (incorrect - will fail)
- After ensuring proper quoting, execute the command.
- Capture the output of the command.
Usage notes:
- The command argument is required.
- You can specify an optional timeout in milliseconds. If not specified, commands will time out after 120000ms (2 minutes).
- It is very helpful if you write a clear, concise description of what this command does in 5-10 words.
- If the output exceeds ${maxLines} lines or ${maxBytes} bytes, it will be truncated and the full output will be written to a file. You can use Read with offset/limit to read specific sections or Grep to search the full content. Do NOT use `head`, `tail`, or other truncation commands to limit output; the full output will already be captured to a file for more precise searching.
- Avoid using Bash with the `find`, `grep`, `cat`, `head`, `tail`, `sed`, `awk`, or `echo` commands, unless explicitly instructed or when these commands are truly necessary for the task. Instead, always prefer using the dedicated tools for these commands:
- File search: Use Glob (NOT find or ls)
- Content search: Use Grep (NOT grep or rg)
- Read files: Use Read (NOT cat/head/tail)
- Edit files: Use Edit (NOT sed/awk)
- Write files: Use Write (NOT echo >/cat <<EOF)
- Communication: Output text directly (NOT echo/printf)
- When issuing multiple commands:
- If the commands are independent and can run in parallel, make multiple Bash tool calls in a single message. For example, if you need to run "git status" and "git diff", send a single message with two Bash tool calls in parallel.
- ${chaining}
- Use ';' only when you need to run commands sequentially but don't care if earlier commands fail
- DO NOT use newlines to separate commands (newlines are ok in quoted strings)
- AVOID using `cd <directory> && <command>`. Use the `workdir` parameter to change directories instead.
<good-example>
Use workdir="/foo/bar" with command: pytest tests
</good-example>
<bad-example>
cd /foo/bar && pytest tests
</bad-example>
${commandSection}
# Committing changes with git
@@ -67,7 +27,7 @@ Git Safety Protocol:
- CRITICAL: If you already pushed to remote, NEVER amend unless user explicitly requests it (requires force push)
- NEVER commit changes unless the user explicitly asks you to. It is VERY IMPORTANT to only commit when explicitly asked, otherwise the user will feel that you are being too proactive.
1. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following bash commands in parallel, each using the Bash tool:
1. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following ${gitCommands} in parallel, each using the ${toolName} tool:
- Run a git status command to see all untracked files.
- Run a git diff command to see both staged and unstaged changes that will be committed.
- Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.
@@ -84,18 +44,18 @@ Git Safety Protocol:
4. If the commit fails due to pre-commit hook, fix the issue and create a NEW commit (see amend rules above)
Important notes:
- NEVER run additional commands to read or explore code, besides git bash commands
- NEVER run additional commands to read or explore code, besides ${gitCommandRestriction}
- NEVER use the TodoWrite or Task tools
- DO NOT push to the remote repository unless the user explicitly asks you to do so
- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit
# Creating pull requests
Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a GitHub URL use the gh command to get the information needed.
Use the gh command via the ${toolName} tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a GitHub URL use the gh command to get the information needed.
IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:
1. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following bash commands in parallel using the Bash tool, in order to understand the current state of the branch since it diverged from the main branch:
1. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following ${gitCommands} in parallel using the ${toolName} tool, in order to understand the current state of the branch since it diverged from the main branch:
- Run a git status command to see all untracked files
- Run a git diff command to see both staged and unstaged changes that will be committed
- Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote
@@ -104,11 +64,9 @@ IMPORTANT: When the user asks you to create a pull request, follow these steps c
3. You can call multiple tools in a single response. When multiple independent pieces of information are requested and all commands are likely to succeed, run multiple tool calls in parallel for optimal performance. run the following commands in parallel:
- Create new branch if needed
- Push to remote with -u flag if needed
- Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.
- ${createPrInstruction}
<example>
gh pr create --title "the pr title" --body "$(cat <<'EOF'
## Summary
<1-3 bullet points>
${createPrExample}
</example>
Important:

View File

@@ -1,17 +1,22 @@
import * as Tool from "./tool"
import DESCRIPTION from "./task.txt"
import { Bus } from "@/bus"
import { Session } from "@/session/session"
import { SessionID, MessageID } from "../session/schema"
import { MessageV2 } from "../session/message-v2"
import { Agent } from "../agent/agent"
import type { SessionPrompt } from "../session/prompt"
import { SessionStatus } from "@/session/status"
import { TuiEvent } from "@/cli/cmd/tui/event"
import { Cause, Effect, Option, Schema } from "effect"
import { Config } from "@/config/config"
import { Effect, Schema } from "effect"
import { BackgroundJob } from "@/background/job"
export interface TaskPromptOps {
cancel(sessionID: SessionID): void
resolvePromptParts(template: string): Effect.Effect<SessionPrompt.PromptInput["parts"]>
prompt(input: SessionPrompt.PromptInput): Effect.Effect<MessageV2.WithParts>
loop(input: SessionPrompt.LoopInput): Effect.Effect<MessageV2.WithParts>
}
const id = "task"
@@ -20,24 +25,66 @@ export const Parameters = Schema.Struct({
description: Schema.String.annotate({ description: "A short (3-5 words) description of the task" }),
prompt: Schema.String.annotate({ description: "The task for the agent to perform" }),
subagent_type: Schema.String.annotate({ description: "The type of specialized agent to use for this task" }),
task_id: Schema.optional(Schema.String).annotate({
task_id: Schema.optional(SessionID).annotate({
description:
"This should only be set if you mean to resume a previous task (you can pass a prior task_id and the task will continue the same subagent session as before instead of creating a fresh one)",
}),
command: Schema.optional(Schema.String).annotate({ description: "The command that triggered this task" }),
background: Schema.optional(Schema.Boolean).annotate({
description: "When true, launch the subagent in the background and return immediately",
}),
})
function output(sessionID: SessionID, text: string) {
return [
`task_id: ${sessionID} (for resuming to continue this task if needed)`,
"",
"<task_result>",
text,
"</task_result>",
].join("\n")
}
function backgroundOutput(sessionID: SessionID) {
return [
`task_id: ${sessionID} (for polling this task with task_status)`,
"state: running",
"",
"<task_result>",
"Background task started. Continue your current work and call task_status when you need the result.",
"</task_result>",
].join("\n")
}
function backgroundMessage(input: { sessionID: SessionID; description: string; state: "completed" | "error"; text: string }) {
const tag = input.state === "completed" ? "task_result" : "task_error"
const title =
input.state === "completed"
? `Background task completed: ${input.description}`
: `Background task failed: ${input.description}`
return [title, `task_id: ${input.sessionID}`, `state: ${input.state}`, `<${tag}>`, input.text, `</${tag}>`].join(
"\n",
)
}
function errorText(error: unknown) {
if (error instanceof Error) return error.message
return String(error)
}
export const TaskTool = Tool.define(
id,
Effect.gen(function* () {
const agent = yield* Agent.Service
const bus = yield* Bus.Service
const config = yield* Config.Service
const sessions = yield* Session.Service
const status = yield* SessionStatus.Service
const jobs = yield* BackgroundJob.Service
const run = Effect.fn("TaskTool.execute")(function* (
params: Schema.Schema.Type<typeof Parameters>,
ctx: Tool.Context,
) {
const run = Effect.fn(
"TaskTool.execute",
)(function* (params: Schema.Schema.Type<typeof Parameters>, ctx: Tool.Context) {
const cfg = yield* config.get()
if (!ctx.extra?.bypassAgentCheck) {
@@ -62,7 +109,7 @@ export const TaskTool = Tool.define(
const taskID = params.task_id
const session = taskID
? yield* sessions.get(SessionID.make(taskID)).pipe(Effect.catchCause(() => Effect.succeed(undefined)))
? yield* sessions.get(taskID).pipe(Effect.catchCause(() => Effect.succeed(undefined)))
: undefined
const parent = yield* sessions.get(ctx.sessionID)
const nextSession =
@@ -107,19 +154,118 @@ export const TaskTool = Tool.define(
modelID: msg.info.modelID,
providerID: msg.info.providerID,
}
const parentModel = {
modelID: msg.info.modelID,
providerID: msg.info.providerID,
}
const background = params.background === true
const metadata = {
sessionId: nextSession.id,
model,
...(background ? { background: true } : {}),
}
yield* ctx.metadata({
title: params.description,
metadata: {
sessionId: nextSession.id,
model,
},
metadata,
})
const ops = ctx.extra?.promptOps as TaskPromptOps
if (!ops) return yield* Effect.fail(new Error("TaskTool requires promptOps in ctx.extra"))
const messageID = MessageID.ascending()
const runTask = Effect.fn("TaskTool.runTask")(function* () {
const parts = yield* ops.resolvePromptParts(params.prompt)
const result = yield* ops.prompt({
messageID: MessageID.ascending(),
sessionID: nextSession.id,
model: {
modelID: model.modelID,
providerID: model.providerID,
},
agent: next.name,
tools: {
...(canTodo ? {} : { todowrite: false }),
...(canTask ? {} : { task: false }),
...Object.fromEntries((cfg.experimental?.primary_tools ?? []).map((item) => [item, false])),
},
parts,
})
return result.parts.findLast((item) => item.type === "text")?.text ?? ""
})
const continueIfIdle = Effect.fn("TaskTool.continueIfIdle")(function* (input: {
userID: MessageID
state: "completed" | "error"
}) {
if ((yield* status.get(ctx.sessionID)).type !== "idle") return
const latest = yield* sessions.findMessage(ctx.sessionID, (item) => item.info.role === "user")
if (Option.isNone(latest)) return
if (latest.value.info.id !== input.userID) return
yield* bus.publish(TuiEvent.ToastShow, {
title: input.state === "completed" ? "Background task complete" : "Background task failed",
message:
input.state === "completed"
? `Background task \"${params.description}\" finished. Resuming the main thread.`
: `Background task \"${params.description}\" failed. Resuming the main thread.`,
variant: input.state === "completed" ? "success" : "error",
duration: 5000,
})
yield* ops.loop({ sessionID: ctx.sessionID }).pipe(Effect.ignore)
})
if (background) {
const inject = Effect.fn("TaskTool.injectBackgroundResult")(function* (state: "completed" | "error", text: string) {
const message = yield* ops.prompt({
sessionID: ctx.sessionID,
noReply: true,
model: parentModel,
agent: ctx.agent,
parts: [
{
type: "text",
synthetic: true,
text: backgroundMessage({
sessionID: nextSession.id,
description: params.description,
state,
text,
}),
},
],
})
yield* continueIfIdle({ userID: message.info.id, state })
})
yield* jobs.start({
id: nextSession.id,
type: id,
title: params.description,
metadata: {
parentSessionID: ctx.sessionID,
sessionID: nextSession.id,
subagent: next.name,
},
run: runTask().pipe(
Effect.matchCauseEffect({
onSuccess: (text) => inject("completed", text).pipe(Effect.as(text)),
onFailure: (cause) => {
const text = errorText(Cause.squash(cause))
return inject("error", text).pipe(
Effect.catchCause(() => Effect.void),
Effect.andThen(Effect.failCause(cause)),
)
},
}),
),
})
return {
title: params.description,
metadata,
output: backgroundOutput(nextSession.id),
}
}
function cancel() {
ops.cancel(nextSession.id)
@@ -131,36 +277,11 @@ export const TaskTool = Tool.define(
}),
() =>
Effect.gen(function* () {
const parts = yield* ops.resolvePromptParts(params.prompt)
const result = yield* ops.prompt({
messageID,
sessionID: nextSession.id,
model: {
modelID: model.modelID,
providerID: model.providerID,
},
agent: next.name,
tools: {
...(canTodo ? {} : { todowrite: false }),
...(canTask ? {} : { task: false }),
...Object.fromEntries((cfg.experimental?.primary_tools ?? []).map((item) => [item, false])),
},
parts,
})
const text = yield* runTask()
return {
title: params.description,
metadata: {
sessionId: nextSession.id,
model,
},
output: [
`task_id: ${nextSession.id} (for resuming to continue this task if needed)`,
"",
"<task_result>",
result.parts.findLast((item) => item.type === "text")?.text ?? "",
"</task_result>",
].join("\n"),
metadata,
output: output(nextSession.id, text),
}
}),
() =>
@@ -168,13 +289,12 @@ export const TaskTool = Tool.define(
ctx.abort.removeEventListener("abort", cancel)
}),
)
})
}, Effect.orDie)
return {
description: DESCRIPTION,
parameters: Parameters,
execute: (params: Schema.Schema.Type<typeof Parameters>, ctx: Tool.Context) =>
run(params, ctx).pipe(Effect.orDie),
execute: run,
}
}),
)

View File

@@ -14,11 +14,13 @@ When NOT to use the Task tool:
Usage notes:
1. Launch multiple agents concurrently whenever possible, to maximize performance; to do that, use a single message with multiple tool uses
2. When the agent is done, it will return a single message back to you. The result returned by the agent is not visible to the user. To show the user the result, you should send a text message back to the user with a concise summary of the result. The output includes a task_id you can reuse later to continue the same subagent session.
3. Each agent invocation starts with a fresh context unless you provide task_id to resume the same subagent session (which continues with its previous messages and tool outputs). When starting fresh, your prompt should contain a highly detailed task description for the agent to perform autonomously and you should specify exactly what information the agent should return back to you in its final and only message to you.
4. The agent's outputs should generally be trusted
5. Clearly tell the agent whether you expect it to write code or just to do research (search, file reads, web fetches, etc.), since it is not aware of the user's intent. Tell it how to verify its work if possible (e.g., relevant test commands).
6. If the agent description mentions that it should be used proactively, then you should try your best to use it without the user having to ask for it first. Use your judgement.
2. By default, task waits for completion and returns the result immediately, along with a task_id you can reuse later to continue the same subagent session.
3. Set background=true to launch asynchronously. In background mode, continue your current work without waiting.
4. For background runs, use task_status(task_id=..., wait=false) to poll, or wait=true to block until done (optionally with timeout_ms).
5. Each agent invocation starts with a fresh context unless you provide task_id to resume the same subagent session (which continues with its previous messages and tool outputs). When starting fresh, your prompt should contain a highly detailed task description for the agent to perform autonomously and you should specify exactly what information the agent should return back to you in its final and only message to you.
6. The agent's outputs should generally be trusted
7. Clearly tell the agent whether you expect it to write code or just to do research (search, file reads, web fetches, etc.), since it is not aware of the user's intent. Tell it how to verify its work if possible (e.g., relevant test commands).
8. If the agent description mentions that it should be used proactively, then you should try your best to use it without the user having to ask for it first. Use your judgement.
Example usage (NOTE: The agents below are fictional examples for illustration only - use the actual agents listed above):

View File

@@ -0,0 +1,197 @@
import * as Tool from "./tool"
import DESCRIPTION from "./task_status.txt"
import { Session } from "@/session/session"
import { SessionID } from "@/session/schema"
import { MessageV2 } from "@/session/message-v2"
import { SessionStatus } from "@/session/status"
import { PositiveInt } from "@/util/schema"
import { Effect, Option, Schema } from "effect"
import { BackgroundJob } from "@/background/job"
// Default wait budget (ms) when wait=true and no timeout_ms is supplied.
const DEFAULT_TIMEOUT = 60_000
// Interval (ms) between inspect() polls while waiting for a terminal state.
const POLL_MS = 300
// Tool input schema: the task session id, plus optional blocking wait.
const Parameters = Schema.Struct({
  task_id: SessionID.annotate({ description: "The task_id returned by the task tool" }),
  wait: Schema.optional(Schema.Boolean).annotate({ description: "When true, wait until the task reaches a terminal state or timeout" }),
  timeout_ms: Schema.optional(PositiveInt).annotate({
    description: "Maximum milliseconds to wait when wait=true (default: 60000)",
  }),
})
// States surfaced to callers; "running" is the only non-terminal one.
type State = "running" | "completed" | "error"
// Snapshot of a task: its state plus progress/result/error text.
type InspectResult = { state: State; text: string }
// Renders a task status report in the compact, parseable layout the task
// tools promise: id + state header, blank line, then the result body
// wrapped in <task_result> tags.
function format(input: { taskID: SessionID; state: State; text: string }) {
  const header = [`task_id: ${input.taskID}`, `state: ${input.state}`]
  const body = ["<task_result>", input.text, "</task_result>"]
  return header.concat("", body).join("\n")
}
// Extracts a human-readable message from an assistant error payload:
// prefers a non-empty string `data.message`, otherwise falls back to the
// error's name. Reflect.get avoids committing to a static shape for the
// opaque error data.
function errorText(error: NonNullable<MessageV2.Assistant["error"]>) {
  const data = Reflect.get(error, "data")
  if (data && typeof data === "object") {
    const message = Reflect.get(data, "message")
    if (typeof message === "string" && message.length > 0) return message
  }
  return error.name
}
// Maps a background-job record onto the tool's InspectResult shape.
// Any status other than running/completed is reported as an error,
// using the job's error text or a generic "Task <status>." message.
function jobResult(job: BackgroundJob.Info): InspectResult {
  switch (job.status) {
    case "running":
      return { state: "running", text: "Task is still running." }
    case "completed":
      return { state: "completed", text: job.output ?? "" }
    default:
      return { state: "error", text: job.error ?? `Task ${job.status}.` }
  }
}
// Tool that reports the status of a subagent task session. It prefers
// the background-job record when one exists; otherwise it reconstructs
// the state heuristically from the session's status and message history.
export const TaskStatusTool = Tool.define(
  "task_status",
  Effect.gen(function* () {
    const sessions = yield* Session.Service
    const status = yield* SessionStatus.Service
    const jobs = yield* BackgroundJob.Service
    // Snapshot of a task session that has no job record: derives
    // running/completed/error from live session status plus the latest
    // assistant/user messages.
    const inspect: (taskID: SessionID) => Effect.Effect<InspectResult> = Effect.fn("TaskStatusTool.inspect")(function* (
      taskID: SessionID,
    ) {
      const current = yield* status.get(taskID)
      // Live session status wins: busy/retry means the task is active.
      if (current.type === "busy" || current.type === "retry") {
        return {
          state: "running" as const,
          text: current.type === "retry" ? `Task is retrying: ${current.message}` : "Task is still running.",
        }
      }
      const latestAssistant = yield* sessions.findMessage(taskID, (item) => item.info.role === "assistant")
      if (Option.isNone(latestAssistant)) {
        return {
          state: "running" as const,
          text: "Task has started but has not produced output yet.",
        }
      }
      // Redundant with the predicate above at runtime, but narrows the
      // type for the property accesses below.
      if (latestAssistant.value.info.role !== "assistant") {
        return {
          state: "running" as const,
          text: "Task has started but has not produced output yet.",
        }
      }
      const latestUser = yield* sessions.findMessage(taskID, (item) => item.info.role === "user")
      // A user message newer than the latest assistant reply means a new
      // turn was submitted with no response yet. NOTE(review): this
      // compares IDs lexicographically and assumes message IDs sort in
      // creation order — confirm the ascending-ID scheme holds here.
      if (
        Option.isSome(latestUser) &&
        latestUser.value.info.role === "user" &&
        latestUser.value.info.id > latestAssistant.value.info.id
      ) {
        return {
          state: "running" as const,
          text: "Task is starting.",
        }
      }
      const text = latestAssistant.value.parts.findLast((part) => part.type === "text")?.text ?? ""
      if (latestAssistant.value.info.error) {
        return {
          state: "error" as const,
          text: text || errorText(latestAssistant.value.info.error),
        }
      }
      // Finish reasons other than tool-calls/unknown are treated as terminal.
      const done =
        !!latestAssistant.value.info.finish && !["tool-calls", "unknown"].includes(latestAssistant.value.info.finish)
      if (done) {
        return {
          state: "completed" as const,
          text,
        }
      }
      return {
        state: "running" as const,
        text: text || "Task is still running.",
      }
    })
    // Polls inspect() every POLL_MS until the task leaves "running" or
    // the timeout budget runs out (reported via timedOut=true).
    const waitForTerminal: (
      taskID: SessionID,
      timeout: number,
    ) => Effect.Effect<{ result: InspectResult; timedOut: boolean }> = Effect.fn(
      "TaskStatusTool.waitForTerminal",
    )(function* (taskID: SessionID, timeout: number) {
      const result = yield* inspect(taskID)
      if (result.state !== "running") return { result, timedOut: false }
      if (timeout <= 0) return { result, timedOut: true }
      // Never sleep past the remaining budget.
      const sleep = Math.min(POLL_MS, timeout)
      yield* Effect.sleep(sleep)
      return yield* waitForTerminal(taskID, timeout - sleep)
    })
    const run = Effect.fn(
      "TaskStatusTool.execute",
    )(function* (params: Schema.Schema.Type<typeof Parameters>, _ctx: Tool.Context) {
      // Fails fast if the referenced session does not exist.
      yield* sessions.get(params.task_id)
      const job = yield* jobs.get(params.task_id)
      // Job-backed path: delegate waiting to the job service.
      const waitedJob =
        job && params.wait === true
          ? yield* jobs.wait({ id: params.task_id, timeout: params.timeout_ms ?? DEFAULT_TIMEOUT })
          : { info: job, timedOut: false }
      if (waitedJob.info) {
        const result = jobResult(waitedJob.info)
        return {
          title: "Task status",
          metadata: {
            task_id: params.task_id,
            state: result.state,
            timed_out: waitedJob.timedOut,
          },
          output: format({
            taskID: params.task_id,
            state: result.state,
            text: waitedJob.timedOut
              ? `Timed out after ${params.timeout_ms ?? DEFAULT_TIMEOUT}ms while waiting for task completion.`
              : result.text,
          }),
        }
      }
      // Fallback path for sessions without a job record: inspect the
      // session directly, optionally polling until terminal.
      const waited =
        params.wait === true
          ? yield* waitForTerminal(params.task_id, params.timeout_ms ?? DEFAULT_TIMEOUT)
          : { result: yield* inspect(params.task_id), timedOut: false }
      const outputText = waited.timedOut
        ? `Timed out after ${params.timeout_ms ?? DEFAULT_TIMEOUT}ms while waiting for task completion.`
        : waited.result.text
      return {
        title: "Task status",
        metadata: {
          task_id: params.task_id,
          state: waited.result.state,
          timed_out: waited.timedOut,
        },
        output: format({
          taskID: params.task_id,
          state: waited.result.state,
          text: outputText,
        }),
      }
    }, Effect.orDie)
    return {
      description: DESCRIPTION,
      parameters: Parameters,
      execute: run,
    }
  }),
)

View File

@@ -0,0 +1,13 @@
Poll the status of a subagent task launched with the task tool.
Use this to check background tasks started with `task(background=true)`.
Parameters:
- `task_id` (required): the task session id returned by the task tool
- `wait` (optional): when true, wait for completion
- `timeout_ms` (optional): max wait duration in milliseconds when `wait=true`
Returns compact, parseable output:
- `task_id`
- `state` (`running`, `completed`, or `error`)
- `<task_result>...</task_result>` containing final output, error summary, or current progress text

View File

@@ -90,7 +90,7 @@ function bodyWithChecks(ast: SchemaAST.AST): z.ZodTypeAny {
// Schema.withDecodingDefault also attaches encoding, but we want `.default(v)`
// on the inner Zod rather than a transform wrapper — so optional ASTs whose
// encoding resolves a default from Option.none() route through body()/opt().
const hasEncoding = ast.encoding?.length && ast._tag !== "Declaration"
const hasEncoding = ast.encoding?.length && (ast._tag !== "Declaration" || ast.typeParameters.length === 0)
const hasTransform = hasEncoding && !(SchemaAST.isOptional(ast) && extractDefault(ast) !== undefined)
const base = hasTransform ? encoded(ast) : body(ast)
return ast.checks?.length ? applyChecks(base, ast.checks, ast) : base
@@ -256,6 +256,8 @@ function body(ast: SchemaAST.AST): z.ZodTypeAny {
return array(ast)
case "Declaration":
return decl(ast)
case "Suspend":
return z.lazy(() => walk(ast.thunk()))
default:
return fail(ast)
}

View File

@@ -0,0 +1,106 @@
import path from "path"
import { Global } from "@opencode-ai/core/global"
// Normalized description of a remote repository reference.
export type Reference = {
  host: string // lowercased host, e.g. "github.com" ("file" for file:// URLs)
  path: string // slash-joined segments with .git suffixes stripped
  segments: string[] // individual cleaned path segments
  owner?: string // first segment when exactly two segments (owner/repo form)
  repo: string // final path segment
  remote: string // clone URL: the explicit input, or derived from host + path
  label: string // display label: "owner/repo" for GitHub pairs, else "host/path"
}
// Canonicalizes raw user input: trims whitespace, strips a leading
// "git+" scheme prefix, any "#fragment" (ref/branch), and trailing
// slashes.
function normalize(input: string) {
  let out = input.trim()
  out = out.replace(/^git\+/, "")
  out = out.replace(/#.*$/, "")
  return out.replace(/\/+$/, "")
}
// Drops a single trailing ".git" from a path segment or name.
function trimGitSuffix(input: string) {
  return input.endsWith(".git") ? input.slice(0, -4) : input
}
// Splits a path into cleaned segments: each piece is trimmed, loses any
// trailing ".git", and empty pieces (doubled slashes, etc.) are dropped.
function parts(input: string) {
  const segments: string[] = []
  for (const raw of input.split("/")) {
    const cleaned = raw.trim().replace(/\.git$/, "")
    if (cleaned.length > 0) segments.push(cleaned)
  }
  return segments
}
// Heuristic: a segment "looks like" a host when it contains a dot or a
// colon (port), or is exactly "localhost".
function hostLike(input: string) {
  if (input === "localhost") return true
  return input.includes(".") || input.includes(":")
}
// Ensures a URL/path string ends with exactly one appended "/" when it
// does not already end with one (used so URL resolution keeps the base
// path intact).
function withSlash(input: string) {
  if (input.endsWith("/")) return input
  return `${input}/`
}
// Builds the clone URL for a GitHub path. Honors the
// OPENCODE_REPO_CLONE_GITHUB_BASE_URL override (e.g. a mirror),
// resolving the path against it; otherwise uses github.com directly.
function githubRemote(pathname: string) {
  const base = process.env.OPENCODE_REPO_CLONE_GITHUB_BASE_URL
  if (!base) return `https://github.com/${pathname}.git`
  const root = base.endsWith("/") ? base : `${base}/`
  return new URL(`${pathname}.git`, root).href
}
// Assembles a Reference from a host plus path segments. Returns null
// when no usable segments remain after cleanup. When no explicit remote
// is given, one is derived (via the GitHub helper for github.com).
function build(input: { host: string; segments: string[]; remote?: string }) {
  const segments = input.segments.map(trimGitSuffix).filter(Boolean)
  if (segments.length === 0) return null
  const host = input.host.toLowerCase()
  const pathname = segments.join("/")
  const isGitHub = host === "github.com"
  const remote = input.remote ?? (isGitHub ? githubRemote(pathname) : `https://${host}/${pathname}.git`)
  return {
    host,
    path: pathname,
    segments,
    owner: segments.length === 2 ? segments[0] : undefined,
    repo: segments[segments.length - 1],
    remote,
    label: isGitHub && segments.length === 2 ? pathname : `${host}/${pathname}`,
  } satisfies Reference
}
// Parses any user-supplied repository reference — "github:owner/repo",
// scp-style "git@host:path", "host.tld/owner/repo", bare "owner/repo",
// or a full URL — into a normalized Reference. Returns null when the
// input cannot be interpreted as a repository.
export function parseRepositoryReference(input: string) {
  const cleaned = normalize(input)
  if (!cleaned) return null
  // "github:owner/repo" shorthand.
  const githubPrefixed = cleaned.match(/^github:([^/\s]+)\/([^/\s]+)$/)
  if (githubPrefixed) return build({ host: "github.com", segments: [githubPrefixed[1], githubPrefixed[2]] })
  if (!cleaned.includes("://")) {
    // scp-style syntax (e.g. "git@host:owner/repo.git"); the original
    // string is kept as the remote so the transport is preserved.
    const scp = cleaned.match(/^(?:[^@/\s]+@)?([^:/\s]+):(.+)$/)
    if (scp) return build({ host: scp[1], segments: parts(scp[2]), remote: cleaned })
    const direct = parts(cleaned)
    // "host.tld/owner/repo" shorthand — first segment looks like a host.
    if (direct.length >= 2 && hostLike(direct[0])) {
      return build({ host: direct[0], segments: direct.slice(1) })
    }
    // Bare "owner/repo" defaults to GitHub.
    if (direct.length === 2) {
      return build({ host: "github.com", segments: direct })
    }
  }
  // Full URL; file:// URLs get the pseudo-host "file". For GitHub URLs
  // the remote is re-derived so mirror overrides apply.
  try {
    const url = new URL(cleaned)
    const pathname = parts(url.pathname)
    const host = url.protocol === "file:" ? "file" : url.host
    return build({ host, segments: pathname, remote: host === "github.com" ? githubRemote(pathname.join("/")) : cleaned })
  } catch {
    return null
  }
}
// Narrow parser: accepts only full GitHub remotes (a URL, or scp-style
// "git@github.com:owner/repo") and returns the owner/repo pair, or null
// for anything else.
export function parseGitHubRemote(input: string) {
  const cleaned = normalize(input)
  const scpStyle = /^(?:[^@/\s]+@)?github\.com:/.test(cleaned)
  if (!cleaned.includes("://") && !scpStyle) return null
  const parsed = parseRepositoryReference(cleaned)
  if (!parsed) return null
  if (parsed.host !== "github.com" || !parsed.owner || parsed.segments.length !== 2) return null
  return { owner: parsed.owner, repo: parsed.repo }
}
// Local cache directory for a repository: global repos root, then the
// host (a ":" in the host, e.g. a port, becomes a path separator), then
// each path segment.
export function repositoryCachePath(input: Reference) {
  const hostParts = input.host.split(":")
  return path.join(Global.Path.repos, ...hostParts, ...input.segments)
}
// Two references point at the same repository when both host and
// normalized path match.
export function sameRepositoryReference(left: Reference, right: Reference) {
  if (left.host !== right.host) return false
  return left.path === right.path
}

View File

@@ -0,0 +1,42 @@
import { Identifier } from "@/id/id"
import { SyncEvent } from "@/sync"
import { withStatics } from "@/util/schema"
import * as Schema from "effect/Schema"
// Branded event identifier; create() mints an ascending "evt"-prefixed id.
export const ID = Schema.String.pipe(
  Schema.brand("Event.ID"),
  withStatics((s) => ({
    create: () => s.make(Identifier.create("evt", "ascending")),
  })),
)
export type ID = Schema.Schema.Type<typeof ID>
// Declares a typed event: builds the payload schema ({ type, data,
// metadata? }), a matching sync-event definition over the data fields,
// and attaches Sync/version/aggregate as statics on the payload schema.
export function define<const Type extends string, Fields extends Schema.Struct.Fields>(input: {
  type: Type
  schema: Fields
  aggregate: string
  version?: number // sync definition defaults this to 1
}) {
  const Payload = Schema.Struct({
    metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
    type: Schema.Literal(input.type),
    data: Schema.Struct(input.schema),
  }).annotate({
    identifier: input.type,
  })
  const Sync = SyncEvent.define({
    type: input.type,
    version: input.version ?? 1,
    aggregate: input.aggregate,
    schema: Payload.fields.data,
  })
  // NOTE(review): the `version` static is the raw input (may be
  // undefined) while Sync defaults to 1 — confirm that asymmetry is
  // intended.
  return Object.assign(Payload, {
    Sync,
    version: input.version,
    aggregate: input.aggregate,
  })
}
export * as Event from "./event"

View File

@@ -1,261 +0,0 @@
import { produce, type WritableDraft } from "immer"
import { SessionEvent } from "./session-event"
import { SessionEntry } from "./session-entry"
// Reducer state: finalized entries plus prompts buffered while an
// assistant turn is still open.
export type MemoryState = {
  entries: SessionEntry.Entry[]
  pending: SessionEntry.Entry[]
}
// Storage abstraction the stepper writes through; `Result` is whatever
// finish() yields (the updated state, for the in-memory adapter).
export interface Adapter<Result> {
  // Last assistant entry that has not completed, if any.
  readonly getCurrentAssistant: () => SessionEntry.Assistant | undefined
  // Replaces the current (open) assistant entry with an updated copy.
  readonly updateAssistant: (assistant: SessionEntry.Assistant) => void
  readonly appendEntry: (entry: SessionEntry.Entry) => void
  // Buffers an entry while an assistant turn is open — presumably
  // replayed once the turn completes; confirm with callers.
  readonly appendPending: (entry: SessionEntry.Entry) => void
  readonly finish: () => Result
}
// In-memory Adapter over a MemoryState. The "current" assistant is the
// last assistant entry without a completion timestamp; reads and writes
// mutate `state` directly, and finish() returns it.
export function memory(state: MemoryState): Adapter<MemoryState> {
  const findActive = () => {
    for (let index = state.entries.length - 1; index >= 0; index--) {
      const entry = state.entries[index]
      if (entry.type === "assistant" && !entry.time.completed) return index
    }
    return -1
  }
  return {
    getCurrentAssistant() {
      const index = findActive()
      if (index < 0) return undefined
      const candidate = state.entries[index]
      if (candidate?.type !== "assistant") return undefined
      return candidate
    },
    updateAssistant(assistant) {
      const index = findActive()
      if (index < 0) return
      if (state.entries[index]?.type !== "assistant") return
      state.entries[index] = assistant
    },
    appendEntry(entry) {
      state.entries.push(entry)
    },
    appendPending(entry) {
      state.pending.push(entry)
    },
    finish() {
      return state
    },
  }
}
// Applies one session event to the adapter's state. Assistant content
// mutations go through immer `produce` so the adapter always receives a
// fresh copy rather than an in-place mutation. Returns whatever the
// adapter's finish() yields.
export function stepWith<Result>(adapter: Adapter<Result>, event: SessionEvent.Event): Result {
  const currentAssistant = adapter.getCurrentAssistant()
  type DraftAssistant = WritableDraft<SessionEntry.Assistant>
  type DraftTool = WritableDraft<SessionEntry.AssistantTool>
  type DraftText = WritableDraft<SessionEntry.AssistantText>
  type DraftReasoning = WritableDraft<SessionEntry.AssistantReasoning>
  // Locate the most recent content item of each kind on the draft; tool
  // lookups may be narrowed to a specific callID.
  const latestTool = (assistant: DraftAssistant | undefined, callID?: string) =>
    assistant?.content.findLast(
      (item): item is DraftTool => item.type === "tool" && (callID === undefined || item.callID === callID),
    )
  const latestText = (assistant: DraftAssistant | undefined) =>
    assistant?.content.findLast((item): item is DraftText => item.type === "text")
  const latestReasoning = (assistant: DraftAssistant | undefined) =>
    assistant?.content.findLast((item): item is DraftReasoning => item.type === "reasoning")
  SessionEvent.Event.match(event, {
    // User prompt: buffered while an assistant turn is open, otherwise
    // appended directly.
    prompt: (event) => {
      const entry = SessionEntry.User.fromEvent(event)
      if (currentAssistant) {
        adapter.appendPending(entry)
        return
      }
      adapter.appendEntry(entry)
    },
    synthetic: (event) => {
      adapter.appendEntry(SessionEntry.Synthetic.fromEvent(event))
    },
    // New step: close any still-open assistant entry, then open a new one.
    "step.started": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.time.completed = event.timestamp
          }),
        )
      }
      adapter.appendEntry(SessionEntry.Assistant.fromEvent(event))
    },
    // Step end: stamp completion time and record cost/token usage.
    "step.ended": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.time.completed = event.timestamp
            draft.cost = event.cost
            draft.tokens = event.tokens
          }),
        )
      }
    },
    // Text streaming: open an empty text item, then append deltas to the
    // most recent one.
    "text.started": () => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.content.push({
              type: "text",
              text: "",
            })
          }),
        )
      }
    },
    "text.delta": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestText(draft)
            if (match) match.text += event.delta
          }),
        )
      }
    },
    "text.ended": () => {},
    // Tool call lifecycle: pending (input streaming in as a raw string)
    // -> running (parsed input) -> completed/error.
    "tool.input.started": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.content.push({
              type: "tool",
              callID: event.callID,
              name: event.name,
              time: {
                created: event.timestamp,
              },
              state: {
                status: "pending",
                input: "",
              },
            })
          }),
        )
      }
    },
    "tool.input.delta": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.callID)
            // oxlint-disable-next-line no-base-to-string -- event.delta is a Schema.String (runtime string)
            if (match && match.state.status === "pending") match.state.input += event.delta
          }),
        )
      }
    },
    "tool.input.ended": () => {},
    "tool.called": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.callID)
            if (match) {
              match.time.ran = event.timestamp
              match.state = {
                status: "running",
                input: event.input,
              }
            }
          }),
        )
      }
    },
    // Success/error only apply when the call is currently running.
    "tool.success": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.callID)
            if (match && match.state.status === "running") {
              match.state = {
                status: "completed",
                input: match.state.input,
                output: event.output ?? "",
                title: event.title,
                metadata: event.metadata ?? {},
                attachments: [...(event.attachments ?? [])],
              }
            }
          }),
        )
      }
    },
    "tool.error": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.callID)
            if (match && match.state.status === "running") {
              match.state = {
                status: "error",
                error: event.error,
                input: match.state.input,
                metadata: event.metadata ?? {},
              }
            }
          }),
        )
      }
    },
    // Reasoning streaming mirrors text streaming; the ended event
    // replaces the accumulated text with the authoritative full text.
    "reasoning.started": () => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.content.push({
              type: "reasoning",
              text: "",
            })
          }),
        )
      }
    },
    "reasoning.delta": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestReasoning(draft)
            if (match) match.text += event.delta
          }),
        )
      }
    },
    "reasoning.ended": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestReasoning(draft)
            if (match) match.text = event.text
          }),
        )
      }
    },
    retried: (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.retries = [...(draft.retries ?? []), SessionEntry.AssistantRetry.fromEvent(event)]
          }),
        )
      }
    },
    compacted: (event) => {
      adapter.appendEntry(SessionEntry.Compaction.fromEvent(event))
    },
  })
  return adapter.finish()
}
// Pure variant of stepWith: applies the event via immer and returns a
// new MemoryState (structural sharing) instead of mutating `old`.
export function step(old: MemoryState, event: SessionEvent.Event): MemoryState {
  return produce(old, (draft) => stepWith(memory(draft as MemoryState), event))
}
export * as SessionEntryStepper from "./session-entry-stepper"

View File

@@ -1,220 +0,0 @@
import { Schema } from "effect"
import { NonNegativeInt } from "@/util/schema"
import { SessionEvent } from "./session-event"
// Entry IDs reuse the session-event ID brand, so entries share an
// identifier space with the events they are derived from.
export const ID = SessionEvent.ID
export type ID = Schema.Schema.Type<typeof ID>
// Fields shared by every entry variant: id, optional free-form
// metadata, and the creation timestamp.
const Base = {
  id: SessionEvent.ID,
  metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
  }),
}
// A user prompt entry; mirrors the prompt event's text/files/agents
// fields. The `time` field re-declares the same struct as Base.
export class User extends Schema.Class<User>("Session.Entry.User")({
  ...Base,
  text: SessionEvent.Prompt.fields.text,
  files: SessionEvent.Prompt.fields.files,
  agents: SessionEvent.Prompt.fields.agents,
  type: Schema.Literal("user"),
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
  }),
}) {
  // Builds the entry from a prompt event, lifting the event timestamp
  // into time.created.
  static fromEvent(event: SessionEvent.Prompt) {
    return new User({
      id: event.id,
      type: "user",
      metadata: event.metadata,
      text: event.text,
      files: event.files,
      agents: event.agents,
      time: { created: event.timestamp },
    })
  }
}
// A synthetic entry; reuses the synthetic event's fields plus Base, and
// copies all event fields over while lifting the timestamp.
export class Synthetic extends Schema.Class<Synthetic>("Session.Entry.Synthetic")({
  ...SessionEvent.Synthetic.fields,
  ...Base,
  type: Schema.Literal("synthetic"),
}) {
  static fromEvent(event: SessionEvent.Synthetic) {
    return new Synthetic({
      ...event,
      time: { created: event.timestamp },
    })
  }
}
// Tool-call lifecycle states. While pending, `input` is the raw
// streamed argument string; once running it is the parsed record.
export class ToolStatePending extends Schema.Class<ToolStatePending>("Session.Entry.ToolState.Pending")({
  status: Schema.Literal("pending"),
  input: Schema.String,
}) {}
export class ToolStateRunning extends Schema.Class<ToolStateRunning>("Session.Entry.ToolState.Running")({
  status: Schema.Literal("running"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  title: Schema.String.pipe(Schema.optional),
  metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}) {}
export class ToolStateCompleted extends Schema.Class<ToolStateCompleted>("Session.Entry.ToolState.Completed")({
  status: Schema.Literal("completed"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  output: Schema.String,
  title: Schema.String,
  metadata: Schema.Record(Schema.String, Schema.Unknown),
  attachments: SessionEvent.FileAttachment.pipe(Schema.Array, Schema.optional),
}) {}
export class ToolStateError extends Schema.Class<ToolStateError>("Session.Entry.ToolState.Error")({
  status: Schema.Literal("error"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  error: Schema.String,
  metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}) {}
// Discriminated union over the four states, keyed on `status`.
export const ToolState = Schema.Union([ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]).pipe(
  Schema.toTaggedUnion("status"),
)
export type ToolState = Schema.Schema.Type<typeof ToolState>
// A tool invocation recorded on an assistant entry, with lifecycle
// timestamps (created/ran/completed and optional pruning).
export class AssistantTool extends Schema.Class<AssistantTool>("Session.Entry.Assistant.Tool")({
  type: Schema.Literal("tool"),
  callID: Schema.String,
  name: Schema.String,
  state: ToolState,
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
    ran: Schema.DateTimeUtc.pipe(Schema.optional),
    completed: Schema.DateTimeUtc.pipe(Schema.optional),
    pruned: Schema.DateTimeUtc.pipe(Schema.optional),
  }),
}) {}
// Plain text content produced by the assistant.
export class AssistantText extends Schema.Class<AssistantText>("Session.Entry.Assistant.Text")({
  type: Schema.Literal("text"),
  text: Schema.String,
}) {}
// Reasoning content produced by the assistant.
export class AssistantReasoning extends Schema.Class<AssistantReasoning>("Session.Entry.Assistant.Reasoning")({
  type: Schema.Literal("reasoning"),
  text: Schema.String,
}) {}
// Record of one retried attempt, built from a `retried` event.
export class AssistantRetry extends Schema.Class<AssistantRetry>("Session.Entry.Assistant.Retry")({
  attempt: NonNegativeInt,
  error: SessionEvent.RetryError,
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
  }),
}) {
  static fromEvent(event: SessionEvent.Retried) {
    return new AssistantRetry({
      attempt: event.attempt,
      error: event.error,
      time: {
        created: event.timestamp,
      },
    })
  }
}
// Union of assistant content items, tagged by `type`.
export const AssistantContent = Schema.Union([AssistantText, AssistantReasoning, AssistantTool]).pipe(
  Schema.toTaggedUnion("type"),
)
export type AssistantContent = Schema.Schema.Type<typeof AssistantContent>
// One assistant turn: ordered content items plus retries, cost/token
// usage, and lifecycle timestamps. An unset time.completed marks the
// turn as still open (see the stepper's active-assistant lookup).
export class Assistant extends Schema.Class<Assistant>("Session.Entry.Assistant")({
  ...Base,
  type: Schema.Literal("assistant"),
  content: AssistantContent.pipe(Schema.Array),
  retries: AssistantRetry.pipe(Schema.Array, Schema.optional),
  cost: Schema.Finite.pipe(Schema.optional),
  tokens: Schema.Struct({
    input: NonNegativeInt,
    output: NonNegativeInt,
    reasoning: NonNegativeInt,
    cache: Schema.Struct({
      read: NonNegativeInt,
      write: NonNegativeInt,
    }),
  }).pipe(Schema.optional),
  error: Schema.String.pipe(Schema.optional),
  time: Schema.Struct({
    created: Schema.DateTimeUtc,
    completed: Schema.DateTimeUtc.pipe(Schema.optional),
  }),
}) {
  // Opens a fresh (incomplete) assistant entry from a step.started event.
  static fromEvent(event: SessionEvent.Step.Started) {
    return new Assistant({
      id: event.id,
      type: "assistant",
      time: {
        created: event.timestamp,
      },
      content: [],
      retries: [],
    })
  }
}
// Marker entry recording a history compaction; copies the compacted
// event's fields and lifts its timestamp into time.created.
export class Compaction extends Schema.Class<Compaction>("Session.Entry.Compaction")({
  ...SessionEvent.Compacted.fields,
  type: Schema.Literal("compaction"),
  ...Base,
}) {
  static fromEvent(event: SessionEvent.Compacted) {
    return new Compaction({
      ...event,
      type: "compaction",
      time: { created: event.timestamp },
    })
  }
}
// Tagged union over every entry variant, discriminated by `type`.
export const Entry = Schema.Union([User, Synthetic, Assistant, Compaction]).pipe(Schema.toTaggedUnion("type"))
export type Entry = Schema.Schema.Type<typeof Entry>
export type Type = Entry["type"]
/*
export interface Interface {
readonly decode: (row: typeof SessionEntryTable.$inferSelect) => Entry
readonly fromSession: (sessionID: SessionID) => Effect.Effect<Entry[], never>
}
export class Service extends Context.Service<Service, Interface>()("@opencode/SessionEntry") {}
export const layer: Layer.Layer<Service, never, never> = Layer.effect(
Service,
Effect.gen(function* () {
const decodeEntry = Schema.decodeUnknownSync(Entry)
const decode: (typeof Service.Service)["decode"] = (row) => decodeEntry({ ...row, id: row.id, type: row.type })
const fromSession = Effect.fn("SessionEntry.fromSession")(function* (sessionID: SessionID) {
return Database.use((db) =>
db
.select()
.from(SessionEntryTable)
.where(eq(SessionEntryTable.session_id, sessionID))
.orderBy(SessionEntryTable.id)
.all()
.map((row) => decode(row)),
)
})
return Service.of({
decode,
fromSession,
})
}),
)
*/
export * as SessionEntry from "./session-entry"

View File

@@ -1,128 +1,75 @@
import { Identifier } from "@/id/id"
import { NonNegativeInt, withStatics } from "@/util/schema"
import * as DateTime from "effect/DateTime"
import { SessionID } from "@/session/schema"
import { NonNegativeInt } from "@/util/schema"
import { Event } from "./event"
import { FileAttachment, Prompt } from "./session-prompt"
import { Schema } from "effect"
export { FileAttachment }
import { ToolOutput } from "./tool-output"
export namespace SessionEvent {
export const ID = Schema.String.pipe(
Schema.brand("Session.Event.ID"),
withStatics((s) => ({
create: () => s.make(Identifier.create("evt", "ascending")),
})),
)
export type ID = Schema.Schema.Type<typeof ID>
type Stamp = Schema.Schema.Type<typeof Schema.DateTimeUtc>
type BaseInput = {
id?: ID
metadata?: Record<string, unknown>
timestamp?: Stamp
}
export const ID = Event.ID
export type ID = Schema.Schema.Type<typeof ID>
const Base = {
export const Source = Schema.Struct({
start: NonNegativeInt,
end: NonNegativeInt,
text: Schema.String,
}).annotate({
identifier: "session.next.event.source",
})
export type Source = Schema.Schema.Type<typeof Source>
const Base = {
timestamp: Schema.DateTimeUtcFromMillis,
sessionID: SessionID,
}
export const Prompted = Event.define({
type: "session.next.prompted",
aggregate: "sessionID",
version: 1,
schema: {
...Base,
id: ID,
metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
timestamp: Schema.DateTimeUtc,
}
prompt: Prompt,
},
})
export type Prompted = Schema.Schema.Type<typeof Prompted>
export class Source extends Schema.Class<Source>("Session.Event.Source")({
start: NonNegativeInt,
end: NonNegativeInt,
text: Schema.String,
}) {}
export class FileAttachment extends Schema.Class<FileAttachment>("Session.Event.FileAttachment")({
uri: Schema.String,
mime: Schema.String,
name: Schema.String.pipe(Schema.optional),
description: Schema.String.pipe(Schema.optional),
source: Source.pipe(Schema.optional),
}) {
static create(input: FileAttachment) {
return new FileAttachment({
uri: input.uri,
mime: input.mime,
name: input.name,
description: input.description,
source: input.source,
})
}
}
export class AgentAttachment extends Schema.Class<AgentAttachment>("Session.Event.AgentAttachment")({
name: Schema.String,
source: Source.pipe(Schema.optional),
}) {}
export class RetryError extends Schema.Class<RetryError>("Session.Event.Retry.Error")({
message: Schema.String,
statusCode: NonNegativeInt.pipe(Schema.optional),
isRetryable: Schema.Boolean,
responseHeaders: Schema.Record(Schema.String, Schema.String).pipe(Schema.optional),
responseBody: Schema.String.pipe(Schema.optional),
metadata: Schema.Record(Schema.String, Schema.String).pipe(Schema.optional),
}) {}
export class Prompt extends Schema.Class<Prompt>("Session.Event.Prompt")({
export const Synthetic = Event.define({
type: "session.next.synthetic",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("prompt"),
id: ID,
text: Schema.String,
files: Schema.Array(FileAttachment).pipe(Schema.optional),
agents: Schema.Array(AgentAttachment).pipe(Schema.optional),
}) {
static create(input: BaseInput & { text: string; files?: FileAttachment[]; agents?: AgentAttachment[] }) {
return new Prompt({
id: input.id ?? ID.create(),
type: "prompt",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
text: input.text,
files: input.files,
agents: input.agents,
})
}
}
},
})
export type Synthetic = Schema.Schema.Type<typeof Synthetic>
export class Synthetic extends Schema.Class<Synthetic>("Session.Event.Synthetic")({
...Base,
type: Schema.Literal("synthetic"),
text: Schema.String,
}) {
static create(input: BaseInput & { text: string }) {
return new Synthetic({
id: input.id ?? ID.create(),
type: "synthetic",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
text: input.text,
})
}
}
export namespace Step {
export class Started extends Schema.Class<Started>("Session.Event.Step.Started")({
export namespace Step {
export const Started = Event.define({
type: "session.next.step.started",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("step.started"),
id: ID,
agent: Schema.String,
model: Schema.Struct({
id: Schema.String,
providerID: Schema.String,
variant: Schema.String.pipe(Schema.optional),
}),
}) {
static create(input: BaseInput & { model: { id: string; providerID: string; variant?: string } }) {
return new Started({
id: input.id ?? ID.create(),
type: "step.started",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
model: input.model,
})
}
}
snapshot: Schema.String.pipe(Schema.optional),
},
})
export type Started = Schema.Schema.Type<typeof Started>
export class Ended extends Schema.Class<Ended>("Session.Event.Step.Ended")({
export const Ended = Event.define({
type: "session.next.step.ended",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("step.ended"),
reason: Schema.String,
finish: Schema.String,
cost: Schema.Finite,
tokens: Schema.Struct({
input: NonNegativeInt,
@@ -133,177 +80,118 @@ export namespace SessionEvent {
write: NonNegativeInt,
}),
}),
}) {
static create(input: BaseInput & { reason: string; cost: number; tokens: Ended["tokens"] }) {
return new Ended({
id: input.id ?? ID.create(),
type: "step.ended",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
reason: input.reason,
cost: input.cost,
tokens: input.tokens,
})
}
}
}
snapshot: Schema.String.pipe(Schema.optional),
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export namespace Text {
export class Started extends Schema.Class<Started>("Session.Event.Text.Started")({
export namespace Text {
export const Started = Event.define({
type: "session.next.text.started",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("text.started"),
}) {
static create(input: BaseInput = {}) {
return new Started({
id: input.id ?? ID.create(),
type: "text.started",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
})
}
}
},
})
export type Started = Schema.Schema.Type<typeof Started>
export class Delta extends Schema.Class<Delta>("Session.Event.Text.Delta")({
export const Delta = Event.define({
type: "session.next.text.delta",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("text.delta"),
delta: Schema.String,
}) {
static create(input: BaseInput & { delta: string }) {
return new Delta({
id: input.id ?? ID.create(),
type: "text.delta",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
delta: input.delta,
})
}
}
},
})
export type Delta = Schema.Schema.Type<typeof Delta>
export class Ended extends Schema.Class<Ended>("Session.Event.Text.Ended")({
export const Ended = Event.define({
type: "session.next.text.ended",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("text.ended"),
text: Schema.String,
}) {
static create(input: BaseInput & { text: string }) {
return new Ended({
id: input.id ?? ID.create(),
type: "text.ended",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
text: input.text,
})
}
}
}
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export namespace Reasoning {
export class Started extends Schema.Class<Started>("Session.Event.Reasoning.Started")({
export namespace Reasoning {
export const Started = Event.define({
type: "session.next.reasoning.started",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("reasoning.started"),
}) {
static create(input: BaseInput = {}) {
return new Started({
id: input.id ?? ID.create(),
type: "reasoning.started",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
})
}
}
reasoningID: Schema.String,
},
})
export type Started = Schema.Schema.Type<typeof Started>
export class Delta extends Schema.Class<Delta>("Session.Event.Reasoning.Delta")({
export const Delta = Event.define({
type: "session.next.reasoning.delta",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("reasoning.delta"),
reasoningID: Schema.String,
delta: Schema.String,
}) {
static create(input: BaseInput & { delta: string }) {
return new Delta({
id: input.id ?? ID.create(),
type: "reasoning.delta",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
delta: input.delta,
})
}
}
},
})
export type Delta = Schema.Schema.Type<typeof Delta>
export class Ended extends Schema.Class<Ended>("Session.Event.Reasoning.Ended")({
export const Ended = Event.define({
type: "session.next.reasoning.ended",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("reasoning.ended"),
reasoningID: Schema.String,
text: Schema.String,
}) {
static create(input: BaseInput & { text: string }) {
return new Ended({
id: input.id ?? ID.create(),
type: "reasoning.ended",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
text: input.text,
})
}
}
}
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export namespace Tool {
export namespace Input {
export class Started extends Schema.Class<Started>("Session.Event.Tool.Input.Started")({
export namespace Tool {
export namespace Input {
export const Started = Event.define({
type: "session.next.tool.input.started",
aggregate: "sessionID",
schema: {
...Base,
callID: Schema.String,
name: Schema.String,
type: Schema.Literal("tool.input.started"),
}) {
static create(input: BaseInput & { callID: string; name: string }) {
return new Started({
id: input.id ?? ID.create(),
type: "tool.input.started",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
name: input.name,
})
}
}
},
})
export type Started = Schema.Schema.Type<typeof Started>
export class Delta extends Schema.Class<Delta>("Session.Event.Tool.Input.Delta")({
export const Delta = Event.define({
type: "session.next.tool.input.delta",
aggregate: "sessionID",
schema: {
...Base,
callID: Schema.String,
type: Schema.Literal("tool.input.delta"),
delta: Schema.String,
}) {
static create(input: BaseInput & { callID: string; delta: string }) {
return new Delta({
id: input.id ?? ID.create(),
type: "tool.input.delta",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
delta: input.delta,
})
}
}
},
})
export type Delta = Schema.Schema.Type<typeof Delta>
export class Ended extends Schema.Class<Ended>("Session.Event.Tool.Input.Ended")({
export const Ended = Event.define({
type: "session.next.tool.input.ended",
aggregate: "sessionID",
schema: {
...Base,
callID: Schema.String,
type: Schema.Literal("tool.input.ended"),
text: Schema.String,
}) {
static create(input: BaseInput & { callID: string; text: string }) {
return new Ended({
id: input.id ?? ID.create(),
type: "tool.input.ended",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
text: input.text,
})
}
}
}
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export class Called extends Schema.Class<Called>("Session.Event.Tool.Called")({
export const Called = Event.define({
type: "session.next.tool.called",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("tool.called"),
callID: Schema.String,
tool: Schema.String,
input: Schema.Record(Schema.String, Schema.Unknown),
@@ -311,148 +199,152 @@ export namespace SessionEvent {
executed: Schema.Boolean,
metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}),
}) {
static create(
input: BaseInput & {
callID: string
tool: string
input: Record<string, unknown>
provider: Called["provider"]
},
) {
return new Called({
id: input.id ?? ID.create(),
type: "tool.called",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
tool: input.tool,
input: input.input,
provider: input.provider,
})
}
}
},
})
export type Called = Schema.Schema.Type<typeof Called>
export class Success extends Schema.Class<Success>("Session.Event.Tool.Success")({
export const Progress = Event.define({
type: "session.next.tool.progress",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("tool.success"),
callID: Schema.String,
title: Schema.String,
output: Schema.String.pipe(Schema.optional),
attachments: Schema.Array(FileAttachment).pipe(Schema.optional),
structured: ToolOutput.Structured,
content: Schema.Array(ToolOutput.Content),
},
})
export type Progress = Schema.Schema.Type<typeof Progress>
export const Success = Event.define({
type: "session.next.tool.success",
aggregate: "sessionID",
schema: {
...Base,
callID: Schema.String,
structured: ToolOutput.Structured,
content: Schema.Array(ToolOutput.Content),
provider: Schema.Struct({
executed: Schema.Boolean,
metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}),
}) {
static create(
input: BaseInput & {
callID: string
title: string
output?: string
attachments?: FileAttachment[]
provider: Success["provider"]
},
) {
return new Success({
id: input.id ?? ID.create(),
type: "tool.success",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
title: input.title,
output: input.output,
attachments: input.attachments,
provider: input.provider,
})
}
}
},
})
export type Success = Schema.Schema.Type<typeof Success>
export class Error extends Schema.Class<Error>("Session.Event.Tool.Error")({
export const Error = Event.define({
type: "session.next.tool.error",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("tool.error"),
callID: Schema.String,
error: Schema.String,
error: Schema.Struct({
type: Schema.String,
message: Schema.String,
}),
provider: Schema.Struct({
executed: Schema.Boolean,
metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
}),
}) {
static create(input: BaseInput & { callID: string; error: string; provider: Error["provider"] }) {
return new Error({
id: input.id ?? ID.create(),
type: "tool.error",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
callID: input.callID,
error: input.error,
provider: input.provider,
})
}
}
}
},
})
export type Error = Schema.Schema.Type<typeof Error>
}
export class Retried extends Schema.Class<Retried>("Session.Event.Retried")({
export const RetryError = Schema.Struct({
message: Schema.String,
statusCode: NonNegativeInt.pipe(Schema.optional),
isRetryable: Schema.Boolean,
responseHeaders: Schema.Record(Schema.String, Schema.String).pipe(Schema.optional),
responseBody: Schema.String.pipe(Schema.optional),
metadata: Schema.Record(Schema.String, Schema.String).pipe(Schema.optional),
}).annotate({
identifier: "session.next.retry_error",
})
export type RetryError = Schema.Schema.Type<typeof RetryError>
export const Retried = Event.define({
type: "session.next.retried",
aggregate: "sessionID",
schema: {
...Base,
type: Schema.Literal("retried"),
attempt: NonNegativeInt,
error: RetryError,
}) {
static create(input: BaseInput & { attempt: number; error: RetryError }) {
return new Retried({
id: input.id ?? ID.create(),
type: "retried",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
attempt: input.attempt,
error: input.error,
})
}
}
},
})
export type Retried = Schema.Schema.Type<typeof Retried>
export class Compacted extends Schema.Class<Compacted>("Session.Event.Compated")({
...Base,
type: Schema.Literal("compacted"),
auto: Schema.Boolean,
overflow: Schema.Boolean.pipe(Schema.optional),
}) {
static create(input: BaseInput & { auto: boolean; overflow?: boolean }) {
return new Compacted({
id: input.id ?? ID.create(),
type: "compacted",
timestamp: input.timestamp ?? DateTime.makeUnsafe(Date.now()),
metadata: input.metadata,
auto: input.auto,
overflow: input.overflow,
})
}
}
export const Event = Schema.Union(
[
Prompt,
Synthetic,
Step.Started,
Step.Ended,
Text.Started,
Text.Delta,
Text.Ended,
Tool.Input.Started,
Tool.Input.Delta,
Tool.Input.Ended,
Tool.Called,
Tool.Success,
Tool.Error,
Reasoning.Started,
Reasoning.Delta,
Reasoning.Ended,
Retried,
Compacted,
],
{
mode: "oneOf",
export namespace Compaction {
export const Started = Event.define({
type: "session.next.compaction.started",
aggregate: "sessionID",
schema: {
...Base,
id: ID,
reason: Schema.Union([Schema.Literal("auto"), Schema.Literal("manual")]),
},
).pipe(Schema.toTaggedUnion("type"))
export type Event = Schema.Schema.Type<typeof Event>
export type Type = Event["type"]
})
export type Started = Schema.Schema.Type<typeof Started>
export const Delta = Event.define({
type: "session.next.compaction.delta",
aggregate: "sessionID",
schema: {
...Base,
text: Schema.String,
},
})
export const Ended = Event.define({
type: "session.next.compaction.ended",
aggregate: "sessionID",
schema: {
...Base,
text: Schema.String,
include: Schema.String.pipe(Schema.optional),
},
})
export type Ended = Schema.Schema.Type<typeof Ended>
}
export const All = Schema.Union(
[
Prompted,
Synthetic,
Step.Started,
Step.Ended,
Text.Started,
Text.Delta,
Text.Ended,
Tool.Input.Started,
Tool.Input.Delta,
Tool.Input.Ended,
Tool.Called,
Tool.Progress,
Tool.Success,
Tool.Error,
Reasoning.Started,
Reasoning.Delta,
Reasoning.Ended,
Retried,
Compaction.Started,
Compaction.Delta,
Compaction.Ended,
],
{
mode: "oneOf",
},
).pipe(Schema.toTaggedUnion("type"))
// user
// assistant
// assistant
// assistant
// user
// compaction marker
// -> text
// assistant
export type Event = Schema.Schema.Type<typeof All>
export type Type = Event["type"]
export * as SessionEvent from "./session-event"

View File

@@ -0,0 +1,307 @@
import { produce, type WritableDraft } from "immer"
import { SessionEvent } from "./session-event"
import { SessionMessage } from "./session-message"
/** Mutable snapshot of a session's message list, used by the in-memory adapter. */
export type MemoryState = {
  messages: SessionMessage.Message[]
}
/**
 * Storage abstraction consumed by `update`: reads the currently-open assistant
 * or compaction message, writes replacements, appends new messages, and yields
 * a final `Result` from `finish()`.
 */
export interface Adapter<Result> {
  readonly getCurrentAssistant: () => SessionMessage.Assistant | undefined
  readonly getCurrentCompaction: () => SessionMessage.Compaction | undefined
  readonly updateAssistant: (assistant: SessionMessage.Assistant) => void
  readonly updateCompaction: (compaction: SessionMessage.Compaction) => void
  readonly appendMessage: (message: SessionMessage.Message) => void
  readonly finish: () => Result
}
export function memory(state: MemoryState): Adapter<MemoryState> {
const activeAssistantIndex = () =>
state.messages.findLastIndex((message) => message.type === "assistant" && !message.time.completed)
const activeCompactionIndex = () => state.messages.findLastIndex((message) => message.type === "compaction")
return {
getCurrentAssistant() {
const index = activeAssistantIndex()
if (index < 0) return
const assistant = state.messages[index]
return assistant?.type === "assistant" ? assistant : undefined
},
getCurrentCompaction() {
const index = activeCompactionIndex()
if (index < 0) return
const compaction = state.messages[index]
return compaction?.type === "compaction" ? compaction : undefined
},
updateAssistant(assistant) {
const index = activeAssistantIndex()
if (index < 0) return
const current = state.messages[index]
if (current?.type !== "assistant") return
state.messages[index] = assistant
},
updateCompaction(compaction) {
const index = activeCompactionIndex()
if (index < 0) return
const current = state.messages[index]
if (current?.type !== "compaction") return
state.messages[index] = compaction
},
appendMessage(message) {
state.messages.push(message)
},
finish() {
return state
},
}
}
/**
 * Applies a single session event to the message list behind `adapter`.
 *
 * Events either append a whole new message (prompted / synthetic /
 * step.started / compaction.started) or patch the currently-open assistant or
 * compaction message. Patches go through immer `produce` so the adapter always
 * receives a fresh immutable replacement. Returns whatever `adapter.finish()`
 * yields.
 */
export function update<Result>(adapter: Adapter<Result>, event: SessionEvent.Event): Result {
  // Snapshot of the open assistant; undefined means no step is in progress and
  // part-level events (text/tool/reasoning) are silently dropped.
  const currentAssistant = adapter.getCurrentAssistant()
  type DraftAssistant = WritableDraft<SessionMessage.Assistant>
  type DraftTool = WritableDraft<SessionMessage.AssistantTool>
  type DraftText = WritableDraft<SessionMessage.AssistantText>
  type DraftReasoning = WritableDraft<SessionMessage.AssistantReasoning>
  // Newest tool part, optionally restricted to a specific callID.
  const latestTool = (assistant: DraftAssistant | undefined, callID?: string) =>
    assistant?.content.findLast(
      (item): item is DraftTool => item.type === "tool" && (callID === undefined || item.id === callID),
    )
  // Newest text part of the assistant's content.
  const latestText = (assistant: DraftAssistant | undefined) =>
    assistant?.content.findLast((item): item is DraftText => item.type === "text")
  // Newest reasoning part matching the given reasoningID.
  const latestReasoning = (assistant: DraftAssistant | undefined, reasoningID: string) =>
    assistant?.content.findLast(
      (item): item is DraftReasoning => item.type === "reasoning" && item.id === reasoningID,
    )
  SessionEvent.All.match(event, {
    "session.next.prompted": (event) => {
      adapter.appendMessage(SessionMessage.User.fromEvent(event))
    },
    "session.next.synthetic": (event) => {
      adapter.appendMessage(SessionMessage.Synthetic.fromEvent(event))
    },
    "session.next.step.started": (event) => {
      // A new step implicitly closes any assistant message left open.
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.time.completed = event.data.timestamp
          }),
        )
      }
      adapter.appendMessage(SessionMessage.Assistant.fromEvent(event))
    },
    "session.next.step.ended": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.time.completed = event.data.timestamp
            draft.finish = event.data.finish
            draft.cost = event.data.cost
            draft.tokens = event.data.tokens
            // Preserve the start snapshot (if any) while recording the end one.
            if (event.data.snapshot) draft.snapshot = { ...draft.snapshot, end: event.data.snapshot }
          }),
        )
      }
    },
    "session.next.text.started": () => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.content.push({
              type: "text",
              text: "",
            })
          }),
        )
      }
    },
    "session.next.text.delta": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestText(draft)
            if (match) match.text += event.data.delta
          }),
        )
      }
    },
    "session.next.text.ended": (event) => {
      // The ended event carries the full text; overwrite accumulated deltas.
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestText(draft)
            if (match) match.text = event.data.text
          }),
        )
      }
    },
    "session.next.tool.input.started": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            // New tool part starts in "pending" with raw (string) input accumulating.
            draft.content.push({
              type: "tool",
              id: event.data.callID,
              name: event.data.name,
              time: {
                created: event.data.timestamp,
              },
              state: {
                status: "pending",
                input: "",
              },
            })
          }),
        )
      }
    },
    "session.next.tool.input.delta": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.data.callID)
            // oxlint-disable-next-line no-base-to-string -- event.delta is a Schema.String (runtime string)
            if (match && match.state.status === "pending") match.state.input += event.data.delta
          }),
        )
      }
    },
    // Input text is superseded by the parsed input in tool.called; nothing to do.
    "session.next.tool.input.ended": () => {},
    "session.next.tool.called": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.data.callID)
            if (match) {
              // pending -> running: input becomes the parsed record.
              match.provider = event.data.provider
              match.time.ran = event.data.timestamp
              match.state = {
                status: "running",
                input: event.data.input,
                structured: {},
                content: [],
              }
            }
          }),
        )
      }
    },
    "session.next.tool.progress": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.data.callID)
            if (match && match.state.status === "running") {
              match.state.structured = event.data.structured
              match.state.content = [...event.data.content]
            }
          }),
        )
      }
    },
    "session.next.tool.success": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.data.callID)
            if (match && match.state.status === "running") {
              // running -> completed, keeping the parsed input.
              match.provider = event.data.provider
              match.time.completed = event.data.timestamp
              match.state = {
                status: "completed",
                input: match.state.input,
                structured: event.data.structured,
                content: [...event.data.content],
              }
            }
          }),
        )
      }
    },
    "session.next.tool.error": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestTool(draft, event.data.callID)
            if (match && match.state.status === "running") {
              // running -> error, preserving whatever progress had accumulated.
              match.provider = event.data.provider
              match.time.completed = event.data.timestamp
              match.state = {
                status: "error",
                error: event.data.error,
                input: match.state.input,
                structured: match.state.structured,
                content: match.state.content,
              }
            }
          }),
        )
      }
    },
    "session.next.reasoning.started": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            draft.content.push({
              type: "reasoning",
              id: event.data.reasoningID,
              text: "",
            })
          }),
        )
      }
    },
    "session.next.reasoning.delta": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestReasoning(draft, event.data.reasoningID)
            if (match) match.text += event.data.delta
          }),
        )
      }
    },
    "session.next.reasoning.ended": (event) => {
      if (currentAssistant) {
        adapter.updateAssistant(
          produce(currentAssistant, (draft) => {
            const match = latestReasoning(draft, event.data.reasoningID)
            if (match) match.text = event.data.text
          }),
        )
      }
    },
    // Retries don't change the message list; the retried step replays its events.
    "session.next.retried": () => {},
    "session.next.compaction.started": (event) => {
      adapter.appendMessage(SessionMessage.Compaction.fromEvent(event))
    },
    "session.next.compaction.delta": (event) => {
      const currentCompaction = adapter.getCurrentCompaction()
      if (currentCompaction) {
        adapter.updateCompaction(
          produce(currentCompaction, (draft) => {
            draft.summary += event.data.text
          }),
        )
      }
    },
    "session.next.compaction.ended": (event) => {
      const currentCompaction = adapter.getCurrentCompaction()
      if (currentCompaction) {
        adapter.updateCompaction(
          produce(currentCompaction, (draft) => {
            draft.summary = event.data.text
            draft.include = event.data.include
          }),
        )
      }
    },
  })
  return adapter.finish()
}
export * as SessionMessageUpdater from "./session-message-updater"

View File

@@ -0,0 +1,196 @@
import { Schema } from "effect"
import { Prompt } from "./session-prompt"
import { SessionEvent } from "./session-event"
import { Event } from "./event"
import { ToolOutput } from "./tool-output"
// Message IDs reuse the event ID scheme so a message keeps the ID of the event
// that created it.
export const ID = Event.ID
export type ID = Schema.Schema.Type<typeof ID>
// Fields shared by every message variant: id, optional free-form metadata,
// and the creation timestamp.
const Base = {
  id: ID,
  metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
  time: Schema.Struct({
    created: Schema.DateTimeUtcFromMillis,
  }),
}
/** Message created from a user prompt: text plus optional file/agent attachments. */
export class User extends Schema.Class<User>("Session.Message.User")({
  ...Base,
  text: Prompt.fields.text,
  files: Prompt.fields.files,
  agents: Prompt.fields.agents,
  type: Schema.Literal("user"),
  // Restated over Base to make the exact time shape of this variant explicit.
  time: Schema.Struct({
    created: Schema.DateTimeUtcFromMillis,
  }),
}) {
  /** Builds a User message from a `session.next.prompted` event. */
  static fromEvent(event: SessionEvent.Prompted) {
    return new User({
      id: event.data.id,
      type: "user",
      metadata: event.metadata,
      text: event.data.prompt.text,
      files: event.data.prompt.files,
      agents: event.data.prompt.agents,
      time: { created: event.data.timestamp },
    })
  }
}
/** System-injected message (not typed by the user), mirroring the synthetic event. */
export class Synthetic extends Schema.Class<Synthetic>("Session.Message.Synthetic")({
  ...Base,
  sessionID: SessionEvent.Synthetic.fields.data.fields.sessionID,
  text: SessionEvent.Synthetic.fields.data.fields.text,
  type: Schema.Literal("synthetic"),
}) {
  /** Builds a Synthetic message from a `session.next.synthetic` event. */
  static fromEvent(event: SessionEvent.Synthetic) {
    // NOTE(review): unlike User/Compaction this does not copy event.metadata —
    // confirm whether dropping it here is intentional.
    return new Synthetic({
      sessionID: event.data.sessionID,
      text: event.data.text,
      id: event.data.id,
      type: "synthetic",
      time: { created: event.data.timestamp },
    })
  }
}
/** Tool call announced; raw input text still streaming in as a string. */
export class ToolStatePending extends Schema.Class<ToolStatePending>("Session.Message.ToolState.Pending")({
  status: Schema.Literal("pending"),
  input: Schema.String,
}) {}
/** Tool executing; input has been parsed into a record, output accumulating. */
export class ToolStateRunning extends Schema.Class<ToolStateRunning>("Session.Message.ToolState.Running")({
  status: Schema.Literal("running"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  structured: ToolOutput.Structured,
  content: ToolOutput.Content.pipe(Schema.Array),
}) {}
/** Tool finished successfully with its final structured output and content. */
export class ToolStateCompleted extends Schema.Class<ToolStateCompleted>("Session.Message.ToolState.Completed")({
  status: Schema.Literal("completed"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  attachments: SessionEvent.FileAttachment.pipe(Schema.Array, Schema.optional),
  content: ToolOutput.Content.pipe(Schema.Array),
  structured: ToolOutput.Structured,
}) {}
/** Tool failed; keeps whatever output had accumulated plus the error details. */
export class ToolStateError extends Schema.Class<ToolStateError>("Session.Message.ToolState.Error")({
  status: Schema.Literal("error"),
  input: Schema.Record(Schema.String, Schema.Unknown),
  content: ToolOutput.Content.pipe(Schema.Array),
  structured: ToolOutput.Structured,
  error: Schema.Struct({
    type: Schema.String,
    message: Schema.String,
  }),
}) {}
// Lifecycle of a tool call, discriminated on `status`:
// pending -> running -> completed | error.
export const ToolState = Schema.Union([ToolStatePending, ToolStateRunning, ToolStateCompleted, ToolStateError]).pipe(
  Schema.toTaggedUnion("status"),
)
export type ToolState = Schema.Schema.Type<typeof ToolState>
/** Tool-call part of an assistant message; `id` is the provider callID. */
export class AssistantTool extends Schema.Class<AssistantTool>("Session.Message.Assistant.Tool")({
  type: Schema.Literal("tool"),
  id: Schema.String,
  name: Schema.String,
  // Provider execution details; absent until the tool.called event arrives.
  provider: Schema.Struct({
    executed: Schema.Boolean,
    metadata: Schema.Record(Schema.String, Schema.Unknown).pipe(Schema.optional),
  }).pipe(Schema.optional),
  state: ToolState,
  time: Schema.Struct({
    created: Schema.DateTimeUtcFromMillis,
    ran: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
    completed: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
    pruned: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
  }),
}) {}
/** Plain text part of an assistant message. */
export class AssistantText extends Schema.Class<AssistantText>("Session.Message.Assistant.Text")({
  type: Schema.Literal("text"),
  text: Schema.String,
}) {}
/** Reasoning part; `id` matches the event stream's reasoningID. */
export class AssistantReasoning extends Schema.Class<AssistantReasoning>("Session.Message.Assistant.Reasoning")({
  type: Schema.Literal("reasoning"),
  id: Schema.String,
  text: Schema.String,
}) {}
// Anything that can appear in an assistant message's content list,
// discriminated on `type`.
export const AssistantContent = Schema.Union([AssistantText, AssistantReasoning, AssistantTool]).pipe(
  Schema.toTaggedUnion("type"),
)
export type AssistantContent = Schema.Schema.Type<typeof AssistantContent>
/**
 * One assistant step: agent/model info plus an ordered content stream of
 * text, reasoning, and tool parts. Cost/tokens/finish are filled in by the
 * step.ended event; `time.completed` unset means the step is still open.
 */
export class Assistant extends Schema.Class<Assistant>("Session.Message.Assistant")({
  ...Base,
  type: Schema.Literal("assistant"),
  agent: Schema.String,
  // Model shape is shared with the step.started event schema.
  model: SessionEvent.Step.Started.fields.data.fields.model,
  content: AssistantContent.pipe(Schema.Array),
  // Workspace snapshots taken at step start/end, when available.
  snapshot: Schema.Struct({
    start: Schema.String.pipe(Schema.optional),
    end: Schema.String.pipe(Schema.optional),
  }).pipe(Schema.optional),
  finish: Schema.String.pipe(Schema.optional),
  cost: Schema.Number.pipe(Schema.optional),
  tokens: Schema.Struct({
    input: Schema.Number,
    output: Schema.Number,
    reasoning: Schema.Number,
    cache: Schema.Struct({
      read: Schema.Number,
      write: Schema.Number,
    }),
  }).pipe(Schema.optional),
  error: Schema.String.pipe(Schema.optional),
  time: Schema.Struct({
    created: Schema.DateTimeUtcFromMillis,
    completed: Schema.DateTimeUtcFromMillis.pipe(Schema.optional),
  }),
}) {
  /** Builds an open Assistant message from a `session.next.step.started` event. */
  static fromEvent(event: SessionEvent.Step.Started) {
    // NOTE(review): event.metadata is not copied here (User/Compaction do copy
    // it) — confirm intentional.
    return new Assistant({
      id: event.data.id,
      type: "assistant",
      agent: event.data.agent,
      model: event.data.model,
      time: {
        created: event.data.timestamp,
      },
      content: [],
      snapshot: event.data.snapshot ? { start: event.data.snapshot } : undefined,
    })
  }
}
/**
 * Compaction marker: summarizes the conversation up to this point.
 * `summary` streams in via compaction.delta/ended events; starts empty.
 */
export class Compaction extends Schema.Class<Compaction>("Session.Message.Compaction")({
  type: Schema.Literal("compaction"),
  // Reason shape is shared with the compaction.started event schema ("auto" | "manual").
  reason: SessionEvent.Compaction.Started.fields.data.fields.reason,
  summary: Schema.String,
  include: Schema.String.pipe(Schema.optional),
  ...Base,
}) {
  /** Builds a Compaction message (empty summary) from a compaction.started event. */
  static fromEvent(event: SessionEvent.Compaction.Started) {
    return new Compaction({
      id: event.data.id,
      type: "compaction",
      metadata: event.metadata,
      reason: event.data.reason,
      summary: "",
      time: { created: event.data.timestamp },
    })
  }
}
// All message variants, discriminated on `type`.
export const Message = Schema.Union([User, Synthetic, Assistant, Compaction])
  .pipe(Schema.toTaggedUnion("type"))
  .annotate({ identifier: "Session.Message" })
export type Message = Schema.Schema.Type<typeof Message>
export type Type = Message["type"]
export * as SessionMessage from "./session-message"

View File

@@ -0,0 +1,36 @@
import * as Schema from "effect/Schema"
/** Location of an attachment reference inside the prompt text ([start, end) offsets). */
export class Source extends Schema.Class<Source>("Prompt.Source")({
  start: Schema.Number,
  end: Schema.Number,
  text: Schema.String,
}) {}
/** File referenced by a prompt, identified by URI and MIME type. */
export class FileAttachment extends Schema.Class<FileAttachment>("Prompt.FileAttachment")({
  uri: Schema.String,
  mime: Schema.String,
  name: Schema.String.pipe(Schema.optional),
  description: Schema.String.pipe(Schema.optional),
  // Where in the prompt text this file was mentioned, if known.
  source: Source.pipe(Schema.optional),
}) {
  /**
   * Field-by-field copy into a fresh instance.
   * NOTE(review): input is typed as FileAttachment itself and every field is
   * just copied — confirm this wrapper is needed over `new FileAttachment(input)`.
   */
  static create(input: FileAttachment) {
    return new FileAttachment({
      uri: input.uri,
      mime: input.mime,
      name: input.name,
      description: input.description,
      source: input.source,
    })
  }
}
/** Agent mentioned in a prompt, with its optional location in the text. */
export class AgentAttachment extends Schema.Class<AgentAttachment>("Prompt.AgentAttachment")({
  name: Schema.String,
  source: Source.pipe(Schema.optional),
}) {}
/** A user prompt: the text plus any file and agent attachments it references. */
export class Prompt extends Schema.Class<Prompt>("Prompt")({
  text: Schema.String,
  files: Schema.Array(FileAttachment).pipe(Schema.optional),
  agents: Schema.Array(AgentAttachment).pipe(Schema.optional),
}) {}

View File

@@ -1,69 +1,163 @@
import { Context, Layer, Schema, Effect } from "effect"
import { SessionEntry } from "./session-entry"
import { Struct } from "effect"
import { SessionMessageTable, SessionTable } from "@/session/session.sql"
import type { SessionID } from "@/session/schema"
import type { WorkspaceID } from "@/control-plane/schema"
import { and, asc, desc, eq, gt, gte, isNull, like, lt, or, type SQL } from "@/storage/db"
import * as Database from "@/storage/db"
import { Context, Effect, Layer, Schema } from "effect"
import { SessionMessage } from "./session-message"
import type { Prompt } from "./session-prompt"
import { Session } from "@/session/session"
import { SessionID } from "@/session/schema"
import { SessionPrompt } from "@/session/prompt"
import type { Event } from "./event"
export const ID = SessionID
export const Delivery = Schema.Union([Schema.Literal("immediate"), Schema.Literal("deferred")]).annotate({
identifier: "Session.Delivery",
})
export type Delivery = Schema.Schema.Type<typeof Delivery>
export type ID = Schema.Schema.Type<typeof ID>
export const DefaultDelivery = "immediate" satisfies Delivery
export class PromptInput extends Schema.Class<PromptInput>("Session.PromptInput")({
...Struct.omit(SessionEntry.User.fields, ["time", "type"]),
id: Schema.optionalKey(SessionEntry.ID),
sessionID: ID,
}) {}
export class CreateInput extends Schema.Class<CreateInput>("Session.CreateInput")({
id: Schema.optionalKey(ID),
}) {}
export class Info extends Schema.Class<Info>("Session.Info")({
id: ID,
model: Schema.Struct({
id: Schema.String,
providerID: Schema.String,
modelID: Schema.String,
}).pipe(Schema.optional),
}) {}
export const Info = Schema.Struct({}).annotate({ identifier: "Session" })
export interface Interface {
fromID: (id: ID) => Effect.Effect<Info>
create: (input: CreateInput) => Effect.Effect<Info>
prompt: (input: PromptInput) => Effect.Effect<SessionEntry.User>
readonly list: (input: {
limit?: number
order?: "asc" | "desc"
directory?: string
path?: string
workspaceID?: WorkspaceID
roots?: boolean
start?: number
search?: string
cursor?: {
id: SessionID
time: number
direction: "previous" | "next"
}
}) => Effect.Effect<Session.Info[], never>
readonly messages: (input: {
sessionID: SessionID
limit?: number
order?: "asc" | "desc"
cursor?: {
id: SessionMessage.ID
time: number
direction: "previous" | "next"
}
}) => Effect.Effect<SessionMessage.Message[], never>
readonly prompt: (input: {
id?: Event.ID
sessionID: SessionID
prompt: Prompt
delivery?: Delivery
}) => Effect.Effect<SessionMessage.User, never>
readonly compact: (sessionID: SessionID) => Effect.Effect<void, never>
readonly wait: (sessionID: SessionID) => Effect.Effect<void, never>
}
export class Service extends Context.Service<Service, Interface>()("Session.Service") {}
export class Service extends Context.Service<Service, Interface>()("@opencode/v2/Session") {}
export const layer = Layer.effect(Service)(
export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const session = yield* Session.Service
const prompt = yield* SessionPrompt.Service
const decodeMessage = Schema.decodeUnknownSync(SessionMessage.Message)
const decode = (row: typeof SessionMessageTable.$inferSelect) =>
decodeMessage({ ...row.data, id: row.id, type: row.type })
const create: Interface["create"] = Effect.fn("Session.create")(function* (_input) {
throw new Error("Not implemented")
})
const result: Interface = {
list: Effect.fn("V2Session.list")(function* (input) {
const direction = input.cursor?.direction ?? "next"
let order = input.order ?? "desc"
// Query the adjacent rows in reverse, then flip them back into the requested order below.
if (direction === "previous" && order === "asc") order = "desc"
if (direction === "previous" && order === "desc") order = "asc"
const conditions: SQL[] = []
if (input.directory) conditions.push(eq(SessionTable.directory, input.directory))
if (input.path)
conditions.push(or(eq(SessionTable.path, input.path), like(SessionTable.path, `${input.path}/%`))!)
if (input.workspaceID) conditions.push(eq(SessionTable.workspace_id, input.workspaceID))
if (input.roots) conditions.push(isNull(SessionTable.parent_id))
if (input.start) conditions.push(gte(SessionTable.time_created, input.start))
if (input.search) conditions.push(like(SessionTable.title, `%${input.search}%`))
if (input.cursor) {
conditions.push(
order === "asc"
? or(
gt(SessionTable.time_created, input.cursor.time),
and(eq(SessionTable.time_created, input.cursor.time), gt(SessionTable.id, input.cursor.id)),
)!
: or(
lt(SessionTable.time_created, input.cursor.time),
and(eq(SessionTable.time_created, input.cursor.time), lt(SessionTable.id, input.cursor.id)),
)!,
)
}
const query = Database.Client()
.select()
.from(SessionTable)
.where(conditions.length > 0 ? and(...conditions) : undefined)
.orderBy(
order === "asc" ? asc(SessionTable.time_created) : desc(SessionTable.time_created),
order === "asc" ? asc(SessionTable.id) : desc(SessionTable.id),
)
const prompt: Interface["prompt"] = Effect.fn("Session.prompt")(function* (_input) {
throw new Error("Not implemented")
})
const rows = input.limit === undefined ? query.all() : query.limit(input.limit).all()
return (direction === "previous" ? rows.toReversed() : rows).map((row) => Session.fromRow(row))
}),
messages: Effect.fn("V2Session.messages")(function* (input) {
const direction = input.cursor?.direction ?? "next"
let order = input.order ?? "desc"
// Query the adjacent rows in reverse, then flip them back into the requested order below.
if (direction === "previous" && order === "asc") order = "desc"
if (direction === "previous" && order === "desc") order = "asc"
const boundary = input.cursor
? order === "asc"
? or(
gt(SessionMessageTable.time_created, input.cursor.time),
and(
eq(SessionMessageTable.time_created, input.cursor.time),
gt(SessionMessageTable.id, input.cursor.id),
),
)
: or(
lt(SessionMessageTable.time_created, input.cursor.time),
and(
eq(SessionMessageTable.time_created, input.cursor.time),
lt(SessionMessageTable.id, input.cursor.id),
),
)
: undefined
const where = boundary
? and(eq(SessionMessageTable.session_id, input.sessionID), boundary)
: eq(SessionMessageTable.session_id, input.sessionID)
const fromID: Interface["fromID"] = Effect.fn("Session.fromID")(function* (id) {
const match = yield* session.get(id)
return fromV1(match)
})
const rows = Database.use((db) => {
const query = db
.select()
.from(SessionMessageTable)
.where(where)
.orderBy(
order === "asc" ? asc(SessionMessageTable.time_created) : desc(SessionMessageTable.time_created),
order === "asc" ? asc(SessionMessageTable.id) : desc(SessionMessageTable.id),
)
const rows = input.limit === undefined ? query.all() : query.limit(input.limit).all()
return direction === "previous" ? rows.toReversed() : rows
})
return rows.map((row) => decode(row))
}),
prompt: Effect.fn("V2Session.prompt")(function* (input) {
const delivery = input.delivery ?? DefaultDelivery
return {} as any
}),
compact: Effect.fn("V2Session.compact")(function* (sessionID) {}),
wait: Effect.fn("V2Session.wait")(function* (sessionID) {}),
}
return Service.of({
create,
prompt,
fromID,
})
return Service.of(result)
}),
)
function fromV1(input: Session.Info): Info {
return new Info({
id: ID.make(input.id),
})
}
export const defaultLayer = layer.pipe(Layer.provide(SessionPrompt.defaultLayer))
export * as SessionV2 from "./session"

View File

@@ -0,0 +1,18 @@
export * as ToolOutput from "./tool-output"
import { Schema } from "effect"
export class TextContent extends Schema.Class<TextContent>("Tool.TextContent")({
type: Schema.Literal("text"),
text: Schema.String,
}) {}
export class FileContent extends Schema.Class<FileContent>("Tool.FileContent")({
type: Schema.Literal("file"),
uri: Schema.String,
mime: Schema.String,
name: Schema.String.pipe(Schema.optional),
}) {}
export const Content = Schema.Union([TextContent, FileContent]).pipe(Schema.toTaggedUnion("type"))
export const Structured = Schema.Record(Schema.String, Schema.Any)

View File

@@ -4,6 +4,7 @@ import path from "path"
import { provideInstance, tmpdir } from "../fixture/fixture"
import { Instance } from "../../src/project/instance"
import { Agent } from "../../src/agent/agent"
import { Global } from "@opencode-ai/core/global"
import { Permission } from "../../src/permission"
// Helper to evaluate permission for a tool with wildcard pattern
@@ -31,6 +32,7 @@ test("returns default native agents when no config", async () => {
expect(names).toContain("plan")
expect(names).toContain("general")
expect(names).toContain("explore")
expect(names).toContain("scout")
expect(names).toContain("compaction")
expect(names).toContain("title")
expect(names).toContain("summary")
@@ -49,6 +51,8 @@ test("build agent has correct default properties", async () => {
expect(build?.native).toBe(true)
expect(evalPerm(build, "edit")).toBe("allow")
expect(evalPerm(build, "bash")).toBe("allow")
expect(evalPerm(build, "repo_clone")).toBe("deny")
expect(evalPerm(build, "repo_overview")).toBe("deny")
},
})
})
@@ -97,6 +101,28 @@ test("explore agent asks for external directories and allows Truncate.GLOB", asy
})
})
test("scout agent allows repo cloning and repo cache reads", async () => {
await using tmp = await tmpdir()
await Instance.provide({
directory: tmp.path,
fn: async () => {
const scout = await load(tmp.path, (svc) => svc.get("scout"))
expect(scout).toBeDefined()
expect(scout?.mode).toBe("subagent")
expect(evalPerm(scout, "repo_clone")).toBe("allow")
expect(evalPerm(scout, "repo_overview")).toBe("allow")
expect(evalPerm(scout, "edit")).toBe("deny")
expect(
Permission.evaluate(
"external_directory",
path.join(Global.Path.repos, "github.com", "owner", "repo", "README.md"),
scout!.permission,
).action,
).toBe("allow")
},
})
})
test("general agent denies todo tools", async () => {
await using tmp = await tmpdir()
await Instance.provide({
@@ -224,8 +250,8 @@ test("agent permission config merges with defaults", async () => {
expect(build).toBeDefined()
// Specific pattern is denied
expect(Permission.evaluate("bash", "rm -rf *", build!.permission).action).toBe("deny")
// Edit still allowed
expect(evalPerm(build, "edit")).toBe("allow")
// Edit still asks (default behavior)
expect(evalPerm(build, "edit")).toBe("ask")
},
})
})

View File

@@ -0,0 +1,49 @@
import { describe, expect } from "bun:test"
import { Deferred, Effect, Layer } from "effect"
import { BackgroundJob } from "@/background/job"
import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner"
import { provideTmpdirInstance } from "../fixture/fixture"
import { testEffect } from "../lib/effect"
const it = testEffect(Layer.mergeAll(BackgroundJob.defaultLayer, CrossSpawnSpawner.defaultLayer))
describe("background.job", () => {
it.live("tracks started jobs through completion", () =>
provideTmpdirInstance(() =>
Effect.gen(function* () {
const jobs = yield* BackgroundJob.Service
const latch = yield* Deferred.make<void>()
const job = yield* jobs.start({
type: "test",
title: "test job",
run: Deferred.await(latch).pipe(Effect.as("done")),
})
expect(job.status).toBe("running")
yield* Deferred.succeed(latch, undefined)
const done = yield* jobs.wait({ id: job.id })
expect(done.info?.status).toBe("completed")
expect(done.info?.output).toBe("done")
expect((yield* jobs.list()).map((item) => item.id)).toEqual([job.id])
}),
),
)
it.live("can cancel running jobs", () =>
provideTmpdirInstance(() =>
Effect.gen(function* () {
const jobs = yield* BackgroundJob.Service
const latch = yield* Deferred.make<void>()
const job = yield* jobs.start({
type: "test",
run: Deferred.await(latch).pipe(Effect.as("done")),
})
const cancelled = yield* jobs.cancel(job.id)
expect(cancelled?.status).toBe("cancelled")
}),
),
)
})

View File

@@ -25,6 +25,16 @@ test("parses ssh:// URL without .git suffix", () => {
expect(parseGitHubRemote("ssh://git@github.com/sst/opencode")).toEqual({ owner: "sst", repo: "opencode" })
})
test("parses git protocol URLs from package metadata", () => {
expect(parseGitHubRemote("git://github.com/facebook/react.git")).toEqual({ owner: "facebook", repo: "react" })
expect(parseGitHubRemote("git+https://github.com/facebook/react.git")).toEqual({ owner: "facebook", repo: "react" })
expect(parseGitHubRemote("git+ssh://git@github.com/facebook/react.git")).toEqual({ owner: "facebook", repo: "react" })
})
test("parses npm-style github shorthand", () => {
expect(parseGitHubRemote("github:facebook/react")).toBeNull()
})
test("parses http URL", () => {
expect(parseGitHubRemote("http://github.com/owner/repo")).toEqual({ owner: "owner", repo: "repo" })
})

View File

@@ -54,6 +54,12 @@ function expectRgba(color: unknown) {
return color
}
function expectIndexed(color: unknown) {
const value = expectRgba(color)
expect(value.intent).toBe("indexed")
expect(value.slot).toBeLessThan(256)
}
function spread(color: RGBA) {
const [r, g, b] = color.toInts()
return Math.max(r, g, b) - Math.min(r, g, b)
@@ -71,10 +77,10 @@ test("returns syntax styles and indexed splash colors", async () => {
expect(theme.block.subtleSyntax).toBeDefined()
expect([...theme.block.syntax!.getAllStyles()].length).toBeGreaterThan(0)
expect([...theme.block.subtleSyntax!.getAllStyles()].length).toBeGreaterThan(0)
expect(RGBA.getIntentTag(expectRgba(theme.splash.left))).toBeLessThan(256)
expect(RGBA.getIntentTag(expectRgba(theme.splash.right))).toBeLessThan(256)
expect(RGBA.getIntentTag(expectRgba(theme.splash.leftShadow))).toBeLessThan(256)
expect(RGBA.getIntentTag(expectRgba(theme.splash.rightShadow))).toBeLessThan(256)
expectIndexed(theme.splash.left)
expectIndexed(theme.splash.right)
expectIndexed(theme.splash.leftShadow)
expectIndexed(theme.splash.rightShadow)
expectRgba(theme.footer.highlight)
expectRgba(theme.footer.surface)
} finally {

Some files were not shown because too many files have changed in this diff Show More